VIDIOC_QBUF: Device or resource busy V4L2 MEMORY USERPTR

Everything below uses the following includes:

#include <linux/videodev2.h>
#include <vector>

Basically I have to stream from my camera to my computer. Using the YUV format, roughly speaking, each new uint8_t[IMAGE_HEIGHT*IMAGE_WIDTH*2] allocation should be filled (enqueued).

The idea is that I have to make 5 frames, each one held as a uint8_t*:

std::vector<uint8_t*> v4l2_buffers;

Another class called CameraStream will allocate the buffers and return a pointer to the frame containing the picture.

To enqueue a buffer, applications set the type field of a struct v4l2_buffer to the same buffer type as was previously used with struct v4l2_format type and struct v4l2_requestbuffers type. Applications must also set the index field. Valid index numbers range from zero to the number of buffers allocated with ioctl VIDIOC_REQBUFS (struct v4l2_requestbuffers count) minus one. The contents of the struct v4l2_buffer returned by an ioctl VIDIOC_QUERYBUF call will do as well. When the buffer is intended for output (type is V4L2_BUF_TYPE_VIDEO_OUTPUT, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, or V4L2_BUF_TYPE_VBI_OUTPUT), applications must also initialize the bytesused, field and timestamp fields; see Buffers for details. Applications must also set flags to 0. The reserved2 and reserved fields must be set to 0. When using the multi-planar API, the m.planes field must contain a userspace pointer to a filled-in array of struct v4l2_plane, and the length field must be set to the number of elements in that array.

To enqueue a user-pointer buffer, applications set the memory field to V4L2_MEMORY_USERPTR, the m.userptr field to the address of the buffer and length to its size. When the multi-planar API is used, the m.userptr and length members of the passed array of struct v4l2_plane have to be used instead. When VIDIOC_QBUF is called with a pointer to this structure, the driver sets the V4L2_BUF_FLAG_QUEUED flag and clears the V4L2_BUF_FLAG_MAPPED and V4L2_BUF_FLAG_DONE flags in the flags field, or it returns an error code. This ioctl locks the memory pages of the buffer in physical memory; they cannot be swapped out to disk. Buffers remain locked until dequeued, until VIDIOC_STREAMOFF or ioctl VIDIOC_REQBUFS is called, or until the device is closed.
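As a minimal sketch of what that description boils down to for a single user-pointer capture buffer (fd, buffer and buffer_size are placeholders for the open device descriptor and the user allocation, not names from the code below):

struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));                 /* flags, reserved, reserved2 -> 0 */
buf.type      = V4L2_BUF_TYPE_VIDEO_CAPTURE;  /* same type as used with S_FMT / REQBUFS */
buf.memory    = V4L2_MEMORY_USERPTR;
buf.index     = 0;                            /* 0 .. REQBUFS count - 1 */
buf.m.userptr = (unsigned long)buffer;        /* start address of the user buffer */
buf.length    = buffer_size;                  /* size of that buffer in bytes */
if (ioctl(fd, VIDIOC_QBUF, &buf) == -1)
    perror("VIDIOC_QBUF");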

/*
 Allocate 5 buffers and form an abstraction over them with a continuous loop of buffers.
 CameraChannel must request a buffer from the CameraStream class.
 Pass that buffer to V4L2 to fill with frame data.
*/
#include <cstdint>
#include <linux/videodev2.h>
#include <fcntl.h>
#include <iostream>
#include <sys/mman.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <vector>

#define FRAME_NUM 5

class CameraStream{  
    public:
        CameraStream(int fd);
        void allocateBuffer();  
        uint8_t *returnBufferAddress(); 
    private:
        int sfd;
        unsigned int n_buffers;
        v4l2_requestbuffers requestBuffers{0};
        std::vector<uint8_t*> v4l2_buffers;
};

CameraStream.cpp

#include "CameraStream.h"
#include "Camera.h"

CameraStream::CameraStream(int fd):sfd(fd){

}
void CameraStream::allocateBuffer(){

    /* This has to be the number of buffers I want to allocate in the device*/
    /* Don't forget to change BUF_NUM or FRAME_NUM */
    requestBuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    requestBuffers.memory = V4L2_MEMORY_USERPTR;
    if(-1 == xioctl(sfd,VIDIOC_REQBUFS,&requestBuffers)){
        if(EINVAL == errno) {
            perror("Device does not support user pointer\n");
        } else {
            perror("VIDIOC_REQBUFS");
        }
    }
    /*
     Applications call the VIDIOC_QBUF ioctl to enqueue an empty (capturing)
     or filled (output) buffer in the driver’s incoming queue. 
     The semantics depend on the selected I/O method.
     To enqueue a buffer applications set the type field of a struct v4l2_buffer 
     to the same buffer type as was previously used with struct v4l2_format 
     type and struct v4l2_requestbuffers type. Applications must also set 
     the index field. Valid index numbers range from zero to the number of 
     buffers allocated with ioctl VIDIOC_REQBUFS (struct v4l2_requestbuffers 
     count) minus one. The contents of the struct v4l2_buffer returned by a 
     ioctl VIDIOC_QUERYBUF ioctl will do as well. When the buffer is intended
     for output (type is V4L2_BUF_TYPE_VIDEO_OUTPUT, 
     V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, or V4L2_BUF_TYPE_VBI_OUTPUT) 
     applications must also initialize the bytesused, field and 
     timestamp fields, see Buffers for details.
     Applications must also set flags to 0. 
     The reserved2 and reserved fields must be set to 0. 
     When using the multi-planar API, the m.planes field 
     must contain a userspace pointer to a filled-in array
     of struct v4l2_plane and the
     length field must be set to the number of elements in that array.
    */
    for(int i = 0;i < FRAME_NUM ; i++){
       // v4l2_buffers.push_back(uint8_t[IMAGE_HEIGHT*IMAGE_WIDTH*2]);
       v4l2_buffers.push_back(new uint8_t[IMAGE_HEIGHT*IMAGE_WIDTH*2]);
    }

    struct v4l2_buffer buf;
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_USERPTR;
    buf.m.userptr = (unsigned long)&v4l2_buffers[0];
    buf.index = 0;
    buf.length = 1;
    if(xioctl(sfd,VIDIOC_QBUF,&buf) == -1){
                  perror("VIDIOC_QBUF");
    }


    /*
    This ioctl is part of the streaming I/O method. 
    It can be used to query the status of a buffer at any time 
    after buffers have been allocated with the ioctl VIDIOC_REQBUFS ioctl.
    */
    //struct v4l2_buffer buf;
    //for(int j = 0;j < IMAGE_HEIGHT*IMAGE_WIDTH*2;j++){
        //buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        //buf.index = j;
        //if(xioctl(sfd,VIDIOC_QUERYBUF,&buf) == -1){
        //           perror("VIDIOC_QUERYBUF");
       // }
    //}
    /*
    v4l2_buffers.resize(BUF_NUM,std::vector<uint8_t*>(IMAGE_WIDTH*IMAGE_HEIGHT*2));
    for(auto &frame:v4l2_buffers){
        int c = 0;
          for(auto& buffer:frame){ 
                struct v4l2_buffer buf;
                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_USERPTR;
                buf.index = c++;
                buf.m.userptr = (unsigned long)&buffer;
                buf.length = sizeof(buffer);
                if(-1 == xioctl(sfd,VIDIOC_QBUF,&buf))
                        perror("VIDIOC_QBUF");
           }
    }
    */

    /*
    memset(&(requestBuffers),0,sizeof(requestBuffers));
    requestBuffers.count = BUF_NUM;
    requestBuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    requestBuffers.memory = V4L2_MEMORY_USERPTR;
    if(-1 == xioctl(sfd,VIDIOC_REQBUFS,&requestBuffers)){
        if(EINVAL == errno){
            perror("Device does not support user pointer\n");
        }else{
            perror("VIDIOC_REQBUFS");
        }
    }
    struct v4l2_buffer buf;
    for(n_buffers = 0;n_buffers < BUF_NUM;++n_buffers){
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.index = n_buffers;
        if(xioctl(sfd, VIDIOC_QUERYBUF, &buf) == -1){
                perror("VIDIOC_QUERYBUF");
        }
        //*Create the buffer 
    }
    */
}

main.cpp includes a class Camera, which is just an init class:

#include <iostream>
#include "Camera.h"
#include "CameraStream.h"

int main(){
    char *device = "/dev/video0";
    Camera c(device);
    c.open();
    c.showCapabilities();
    c.config(V4L2_PIX_FMT_YUYV);
    CameraStream cam(c.getFd());
    cam.allocateBuffer();

    return 0;
}

The following error shows up in my terminal output:

open
This device has capabilities
Device does  support this format, VIDIOC_S_FMT: Success
VIDIOC_QBUF: Device or resource busy

Note: It is not allowed to mix queuing requests with queuing buffers directly. EBUSY will be returned if the first buffer was directly queued and you then try to queue a request, or vice versa. The check for this is reset after closing the file descriptor, calling VIDIOC_STREAMOFF, or calling ioctl VIDIOC_REQBUFS. For memory-to-memory devices you can specify the request_fd only for output buffers, not for capture buffers; attempting to specify it for a capture buffer results in an EBADR error.
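As a hedged sketch, the reset paths that note mentions look roughly like this (fd is assumed to be the open V4L2 device):

enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1)   /* stop streaming; removes all queued buffers */
    perror("VIDIOC_STREAMOFF");

struct v4l2_requestbuffers req;
memset(&req, 0, sizeof(req));
req.count  = 0;                                 /* count = 0 frees the previously requested buffers */
req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_USERPTR;
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1)
    perror("VIDIOC_REQBUFS");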

First of all, don't just go by the documentation; it is only a few misleading words.

To use a V4L2_MEMORY_USERPTR pointer with VIDIOC_QBUF, you first have to do the following.

Query the camera's capabilities with:

    v4l2_capability capability;  // presumably a struct v4l2_capability local or member in the original class
    if (xioctl(mFd, VIDIOC_QUERYCAP, &capability) < 0) {
        perror("Failed to get device capabilities");
    }
    if (!(capability.capabilities & V4L2_CAP_VIDEO_CAPTURE)
        || !(capability.capabilities & V4L2_CAP_STREAMING))
    {
        perror("This device cannot stream video");
        exit(1);
    }
    printf("%s\n", "This device has capabilities");

Next, set the format:

    v4l2_format format;
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.pixelformat = pfmt;
    format.fmt.pix.width = 640;
    format.fmt.pix.height = 480;
    if(ioctl(mFd,VIDIOC_S_FMT,&format) == -1){
        perror("Unable to set format");
    }
    sizeImage = format.fmt.pix.sizeimage;
    std::cout<<"imgsize :\n"<<sizeImage<<std::endl;

In order to be able to use any buffer you must set sizeImage (it usually comes with the format).
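A minimal sketch of that, assuming sizeImage was read back from format.fmt.pix.sizeimage above:

// Allocate the user-pointer frame from the size reported by the driver,
// instead of hand-computing IMAGE_WIDTH*IMAGE_HEIGHT*2.
uint8_t *frame = new uint8_t[sizeImage];
memset(frame, 0, sizeImage);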

Next, set up the request buffers:

    v4l2_requestbuffers bufrequest;
    CLEAR(bufrequest);
    bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bufrequest.memory = V4L2_MEMORY_USERPTR;
    bufrequest.count = 1;
    if(-1 == xioctl(mFd,VIDIOC_REQBUFS,&bufrequest)){
        if(EINVAL == errno) {
            perror("Device does not support user pointer\n");
        } else {
            perror("VIDIOC_REQBUFS");
        }
    }

Query the buffer at index 0:

    CLEAR(mBuffferInfo);
    mBuffferInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mBuffferInfo.memory = V4L2_MEMORY_USERPTR;
    mBuffferInfo.index = 0;
    if(-1 == xioctl(mFd,VIDIOC_QUERYBUF,&mBuffferInfo)){
        perror("VIDIOC_QUERYBUF");
    }

Activate StreamOn:

    type = mBuffferInfo.type;
    if(-1 == xioctl(mFd,VIDIOC_STREAMON,&type)){
        perror("STREAMON");
    }

Capture a frame using that size; example here:

void Camera::captureFrame(uint8_t* frame){
     memset(frame,0,sizeImage);
     CLEAR(mBuffferInfo);
     mBuffferInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     mBuffferInfo.memory = V4L2_MEMORY_USERPTR;
     mBuffferInfo.index = 0;
     mBuffferInfo.m.userptr = (unsigned long)frame;
     mBuffferInfo.length = sizeImage;
     if(-1 == xioctl(mFd,VIDIOC_QBUF,&mBuffferInfo)){
                perror("VIDIOC_QBUF");
     }
    CLEAR(mBuffferInfo);
    mBuffferInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mBuffferInfo.memory = V4L2_MEMORY_USERPTR;
     if(-1 == xioctl(mFd,VIDIOC_DQBUF,&mBuffferInfo)){
                perror("VIDIOC_DQBUF");
     }

}
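Putting the pieces together, a usage sketch could look like the following. It is not the original code: getSizeImage() is a hypothetical accessor for the sizeImage value stored above, and the comments only indicate where the steps from this answer are assumed to run.

int main() {
    Camera c("/dev/video0");               // names follow the question's main.cpp
    c.open();                              // open the device and query capabilities
    c.config(V4L2_PIX_FMT_YUYV);           // set format, request buffers, query buffer, StreamOn

    uint8_t *frame = new uint8_t[c.getSizeImage()];  // hypothetical accessor for sizeImage
    for (int i = 0; i < 5; ++i) {
        c.captureFrame(frame);             // QBUF + DQBUF on index 0, as shown above
        // ... consume one YUYV frame here ...
    }
    delete[] frame;
    return 0;
}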
