
Capturing YUYV in C++ using v4l2

I have a webcam connected to a BeagleBone via USB. I am coding in C++ and my goal is to capture raw, uncompressed pictures from the webcam. First I checked which formats are supported with the command v4l2-ctl --list-formats, and the result was:

        Index       : 0
        Type        : Video Capture
        Pixel Format: 'MJPG' (compressed)
        Name        : Motion-JPEG

        Index       : 1
        Type        : Video Capture
        Pixel Format: 'YUYV'
        Name        : YUYV 4:2:2

So from this I assume it must be possible to get an uncompressed picture if I use the YUYV format.
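For reference, the same list can be queried from code with the VIDIOC_ENUM_FMT ioctl; here is a minimal, self-contained sketch (it assumes the camera is /dev/video0):

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main() {
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) { perror("open"); return 1; }

    struct v4l2_fmtdesc desc;
    memset(&desc, 0, sizeof(desc));
    desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // keep increasing the index until the driver runs out of formats (EINVAL)
    while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
        printf("Index %u: %.4s (%s)\n", desc.index,
               (const char*)&desc.pixelformat, (const char*)desc.description);
        desc.index++;
    }
    close(fd);
    return 0;
}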

Knowing this, I started writing a program in C++. I successfully wrote a program to capture a compressed picture, but when I try to capture using the YUYV format it does not work, and I really need some help to get this done.

Here is my code:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <libv4l2.h>


template <typename typeXX>
void clear_memmory(typeXX* x) {
    memset(x, 0, sizeof(*x));
    }
    
void xioctl(int cd, int request, void *arg){
    int response;
    do{
        // retry while the call is interrupted or the device is not ready yet (EINTR/EAGAIN)
        response = v4l2_ioctl(cd, request, arg);
        }
    while (response == -1 && ((errno == EINTR) || (errno == EAGAIN)));

    if (response == -1) {
        fprintf(stderr, "error %d, %s\n", errno, strerror(errno));
        exit(EXIT_FAILURE);
        }
    }
    
struct LMSBBB_buffer{
    void*  start;
    size_t length;
    };

int main(){
    
    const char* dev_name = "/dev/video0";
    int width=1920;
    int height=1080;
    
    int fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        perror("Cannot open device");
        exit(EXIT_FAILURE);
        }
    
    struct v4l2_format format = {0};
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.width = width;
    format.fmt.pix.height = height;
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;//V4L2_PIX_FMT_YUYV //V4L2_PIX_FMT_RGB24
    format.fmt.pix.field = V4L2_FIELD_NONE; //V4L2_FIELD_NONE
    xioctl(fd, VIDIOC_S_FMT, &format);

        
    printf("Device initialized.\n");
    
    
    ///request buffers  
    struct v4l2_requestbuffers req = {0};
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    printf("Buffers requested.\n");
    

    ///mapping buffers  
    struct v4l2_buffer buf;
    LMSBBB_buffer* buffers;
    unsigned int i;
    buffers = (LMSBBB_buffer*) calloc(req.count, sizeof(*buffers));
    for (i = 0; i < req.count; i++) {
        clear_memmory(&(buf));

        (buf).type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        (buf).memory      = V4L2_MEMORY_MMAP;
        (buf).index       = i;

        xioctl(fd, VIDIOC_QUERYBUF, &buf);

        buffers[i].length = (buf).length;
        printf("A buff has a len of: %zu\n", buffers[i].length);
        buffers[i].start = v4l2_mmap(NULL, (buf).length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, (buf).m.offset);

        if (MAP_FAILED == buffers[i].start) {
            perror("Can not map the buffers.");
            exit(EXIT_FAILURE);
            }
        }
    printf("Buffers mapped.\n");    
    
    // queue the buffers and start streaming; use the xioctl/v4l2_ioctl wrapper
    // consistently, since the device was opened with v4l2_open
    for (i = 0; i < req.count; i++) {
        clear_memmory(&(buf));
        (buf).type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        (buf).memory = V4L2_MEMORY_MMAP;
        (buf).index = i;
        xioctl(fd, VIDIOC_QBUF, &(buf));
        }
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);
    
    
    printf("buffers queued and streaming.\n");
    
    
    
    int pic_count=0;
    ///CAPTURE
    fd_set fds;
    struct timeval tv;
    int r;
    char out_name[256];
    FILE* fout;
        

        
    do {
        FD_ZERO(&fds);
        FD_SET(fd, &fds);

        // Timeout: wait up to 2 seconds for a frame.
        tv.tv_sec = 2;
        tv.tv_usec = 0;

        r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while (r == -1 && errno == EINTR);   // retry only if interrupted by a signal
    if (r == -1) {
        perror("select");
        exit(EXIT_FAILURE);
        }

    clear_memmory(&(buf));
    (buf).type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    (buf).memory = V4L2_MEMORY_MMAP;
    xioctl(fd,VIDIOC_DQBUF, &(buf));
    
    printf("Buff index: %i\n",(buf).index);
    sprintf(out_name, "image%03d.ppm",pic_count);
    fout = fopen(out_name, "w");
    if (!fout) {
        perror("Cannot open image");
        exit(EXIT_FAILURE);
        }
    fprintf(fout, "P6\n%d %d 255\n",width, height);
    fwrite(buffers[(buf).index].start, (buf).bytesused, 1, fout);
    fclose(fout);
    pic_count++;
    
    clear_memmory(&(buf));
    (buf).type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    (buf).memory = V4L2_MEMORY_MMAP;
    xioctl(fd,VIDIOC_DQBUF, &(buf));
    printf("Buff index: %i\n",(buf).index);
    sprintf(out_name, "image%03d.ppm",pic_count);
    fout = fopen(out_name, "w");
    if (!fout) {
        perror("Cannot open image");
        exit(EXIT_FAILURE);
        }
    fprintf(fout, "P6\n%d %d 255\n",width, height);
    fwrite(buffers[(buf).index].start, (buf).bytesused, 1, fout);
    fclose(fout);
    pic_count++;
    
    
    ///xioctl(fd,VIDIOC_QBUF, &(buf));
    
    
    return 0;
    }

In the code above, on the line that sets format.fmt.pix.pixelformat, I can choose between V4L2_PIX_FMT_YUYV and V4L2_PIX_FMT_RGB24. For V4L2_PIX_FMT_RGB24 I get the picture, but when using V4L2_PIX_FMT_YUYV I get this error:

libv4l2: error dequeuing buf: Resource temporarily unavailable
libv4l2: error dequeuing buf: Resource temporarily unavailable
libv4l2: error dequeuing buf: Resource temporarily unavailable
libv4l2: error dequeuing buf: Resource temporarily unavailable
libv4l2: error dequeuing buf: Resource temporarily unavailable

The error lines go on forever until I end the program manually.

Does anyone have an idea what to do? I have spent over two weeks on this and I can't move anywhere from here. I would really appreciate any advice.

From what I see you are requesting a Full HD (1920x1080) buffer in YUYV format from the camera. You did not mention the camera type/model/specs, but if it is generic USB-attached hardware, most likely you will not get a raw Full HD YUYV buffer as output, only the MJPEG one (which you can decode to YUV if you hack around with libjpeg) or the decoded RGB buffer (which is pretty much the decoded MJPEG with a YUV->RGB conversion), and that one is not mmapped.
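One quick way to confirm this from code: VIDIOC_S_FMT is a negotiation, and the driver writes back the width/height/pixelformat it actually configured, so you can inspect the struct right after the call. A small sketch, reusing the fd and the xioctl wrapper from your code:

struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = 1920;
format.fmt.pix.height = 1080;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
format.fmt.pix.field = V4L2_FIELD_NONE;
xioctl(fd, VIDIOC_S_FMT, &format);

// the driver may have substituted another pixel format or a smaller resolution
if (format.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
    fprintf(stderr, "driver did not accept YUYV\n");
printf("negotiated %ux%u, sizeimage=%u, bytesperline=%u\n",
       format.fmt.pix.width, format.fmt.pix.height,
       format.fmt.pix.sizeimage, format.fmt.pix.bytesperline);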

The exact list of formats, with resolutions and frame rates, can be requested with the command below; it will probably tell you the camera does not provide 1920x1080 in YUYV, only something smaller, like 640x480:

v4l2-ctl --list-formats-ext
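The same information is available from code through the VIDIOC_ENUM_FRAMESIZES ioctl; a rough sketch, again reusing your fd (v4l2_ioctl is used directly here because the loop is expected to end with EINVAL, which your xioctl treats as fatal):

struct v4l2_frmsizeenum frmsize;
memset(&frmsize, 0, sizeof(frmsize));
frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

while (v4l2_ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
    if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE)
        printf("YUYV %ux%u\n", frmsize.discrete.width, frmsize.discrete.height);
    else  // stepwise/continuous drivers report a min..max range instead
        printf("YUYV %ux%u .. %ux%u\n",
               frmsize.stepwise.min_width, frmsize.stepwise.min_height,
               frmsize.stepwise.max_width, frmsize.stepwise.max_height);
    frmsize.index++;
}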

If you need video processing with "true" zero-copy access to raw YUYV camera frames, you need direct access to the hardware, and that specific hardware in the first place. Once you have a USB interface between your software and the camera, you get an extra indirection, and that means the throughput goes down. Think about it for a moment: a YUYV frame at 1920x1080 takes up approximately 4 megabytes of memory. At 30 FPS this is 120 megabytes (or 960 megabits) per second of bus throughput. If you have a USB 2.0 camera, there is simply no bandwidth to support this (hence the need for MJPEG). Even at 15 FPS this is 480 megabits, not counting USB latency and protocol overhead.
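A quick back-of-the-envelope check of those numbers (YUYV packs 2 bytes per pixel):

#include <stdio.h>

int main() {
    const double bytes_per_pixel = 2.0;                            // YUYV 4:2:2
    const double frame_bytes = 1920.0 * 1080.0 * bytes_per_pixel;  // ~4.15 MB per frame
    const int fps_list[] = {30, 15};
    for (int i = 0; i < 2; i++) {
        double mbit_per_s = frame_bytes * fps_list[i] * 8.0 / 1e6;
        printf("%d FPS -> ~%.0f Mbit/s of raw payload (USB 2.0 tops out at 480 Mbit/s)\n",
               fps_list[i], mbit_per_s);
    }
    return 0;
}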

To provide some actionable feedback, I would advise first concentrating on the algorithms you want to apply to the image (you probably just don't want to lose processing speed at the very first step). Don't hesitate to use OpenCV for camera input and basic image processing; later you can switch to a hardware-specific interface and hand-written algorithms.
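For example, a minimal OpenCV sketch along those lines (assuming OpenCV 3 or newer; the camera index 0 and the 640x480 size are just placeholders):

#include <opencv2/opencv.hpp>

int main() {
    cv::VideoCapture cap(0);               // device index 0; uses the V4L2 backend on Linux
    cap.set(cv::CAP_PROP_FRAME_WIDTH, 640);
    cap.set(cv::CAP_PROP_FRAME_HEIGHT, 480);
    if (!cap.isOpened()) return 1;

    cv::Mat frame;
    if (cap.read(frame))                   // decoded BGR frame, whatever the camera actually sends
        cv::imwrite("frame.png", frame);   // prototype your processing on frames like this
    return 0;
}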

An easier way of getting raw frames would be to use Android's camera interface and process the incoming frames with GLSL shaders using the GL_TEXTURE_EXTERNAL_OES extension, for which information and code samples are available. There you can connect GL textures to AHardwareBuffer instances and then use the AHardwareBuffer_lock function to get raw pointers. The exact supported formats also vary across hardware, so do not expect this to be super easy.
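As a rough illustration of just the AHardwareBuffer_lock part of that pipeline (Android NDK, API 26+; the GL texture binding via EGLImage is omitted, and the size/format below are only placeholders):

#include <android/hardware_buffer.h>
#include <stdio.h>

void dump_first_byte() {
    AHardwareBuffer_Desc desc = {};
    desc.width  = 640;
    desc.height = 480;
    desc.layers = 1;
    desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    desc.usage  = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
                  AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

    AHardwareBuffer* buffer = nullptr;
    if (AHardwareBuffer_allocate(&desc, &buffer) != 0) return;

    void* ptr = nullptr;
    // lock gives a raw CPU pointer into the same buffer the GPU can sample
    if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
                             -1 /*no fence*/, nullptr /*whole buffer*/, &ptr) == 0) {
        printf("first byte: %d\n", ((unsigned char*)ptr)[0]);
        AHardwareBuffer_unlock(buffer, nullptr);
    }
    AHardwareBuffer_release(buffer);
}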

I've recently had a similar issue. In my case the camera driver needed the VIDIOC_S_PARM ioctl in order to set the frame rate and initialize the camera for the selected capture mode.

You can try adding this code after the VIDIOC_S_FMT call and see if it works for you as well:

struct v4l2_streamparm streamparam;
memset(&streamparam, 0, sizeof(streamparam));
streamparam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

// read the current streaming parameters, then request 5 FPS (1/5 s per frame)
xioctl(fd, VIDIOC_G_PARM, &streamparam);
streamparam.parm.capture.timeperframe.numerator = 1;
streamparam.parm.capture.timeperframe.denominator = 5;
xioctl(fd, VIDIOC_S_PARM, &streamparam);
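If you want to double-check what the driver actually applied (it may round the requested interval to a rate it supports), you can read the parameters back; a small sketch reusing the same streamparam and xioctl:

// re-read the streaming parameters to see which frame interval was really set
memset(&streamparam, 0, sizeof(streamparam));
streamparam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
xioctl(fd, VIDIOC_G_PARM, &streamparam);
printf("frame interval: %u/%u s\n",
       streamparam.parm.capture.timeperframe.numerator,
       streamparam.parm.capture.timeperframe.denominator);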
