
iOS: Getting real-time (ish) video data

I'm attempting to write a program that, as part of its functionality, continuously captures video and computes the average luminance of a given frame of video data in real time, or as close to real time as possible. This is my first attempt at any video work / iOS camera work, so beyond my own code I've pieced together a lot of what I researched around the internet. Right now the following code in my ViewController.m file compiles and runs on my device, but doesn't seem to actually do anything:

- (void)viewDidLoad{
    [super viewDidLoad];
    _val = 0;

    //Set up the video capture session.
    NSLog(@"Setting up the capture session...\n");
    captureSession = [[AVCaptureSession alloc] init];

    //Add input.
    NSLog(@"Adding video input...\n");
    AVCaptureDevice *captureDevice = [self frontFacingCameraIfAvailable];
    if(captureDevice){
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
        if(!error){
            if([captureSession canAddInput:videoInputDevice])
                [captureSession addInput:videoInputDevice];
            else
                NSLog(@"Couldn't add video input.\n");

        }else{
            NSLog(@"Couldn't create video input.\n");
        }
    }else{
        NSLog(@"Couldn't create capture device.\n");
    }

    //Add output.
    NSLog(@"Adding video data output...\n");
    vidOutput = [[AVCaptureVideoDataOutput alloc] init];
    vidOutput.alwaysDiscardsLateVideoFrames = YES;
    if([captureSession canAddOutput:vidOutput])
        [captureSession addOutput:vidOutput];
    else
        NSLog(@"Couldn't add video output.\n");
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [vidOutput setVideoSettings:videoSettings];
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [vidOutput setSampleBufferDelegate:self queue:queue];

}

- (void)viewDidUnload{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
}

-(AVCaptureDevice *)frontFacingCameraIfAvailable{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in videoDevices){
        if (device.position == AVCaptureDevicePositionFront){
            captureDevice = device;
            break;
        }
    }

    //couldn't find one on the front, so just get the default video device.
    if (!captureDevice){
        captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    return captureDevice;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection: (AVCaptureConnection *)connection{
    // Create autorelease pool because we are not in the main_queue
    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        // Lock the imagebuffer
        CVPixelBufferLockBaseAddress(imageBuffer,0);
        // Get information about the image
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        // size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        CVPlanarPixelBufferInfo_YCbCrBiPlanar *bufferInfo = (CVPlanarPixelBufferInfo_YCbCrBiPlanar *)baseAddress;
        // This just moved the pointer past the offset
        baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        // convert the image
        UIImage *image = [self makeImage:baseAddress bufferInfo:bufferInfo width:width height:height bytesPerRow:bytesPerRow];
        // Update the display with the captured image for DEBUG purposes
        //dispatch_async(dispatch_get_main_queue(), ^{
            //[self.vImage setImage:image];
        //});
        CGImageRef cgImage = [image CGImage];
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);
        const UInt8* data = CFDataGetBytePtr(bitmapData);
        int cols = width - 1;
        int rows = height - 1;
        float avgLuminance = 0.0;
        for(int i = 0; i < cols; i++){
            for(int j = 0; j < rows; j++){
                const UInt8* pixel = data + j*bytesPerRow + i*4;
                avgLuminance += pixel[0]*0.299 + pixel[1]*0.587 + pixel[2]*0.114;
            }
        }
        avgLuminance /= (cols*rows);
        NSLog(@"Average Luminance: %f\n", avgLuminance);

    }
}

-(UIImage *)makeImage:(uint8_t *)inBaseAddress bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo width: (size_t)Width height:(size_t)Height bytesPerRow:(size_t)BytesPerRow{
    NSUInteger yPitch = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);
    uint8_t *rgbBuffer = (uint8_t *)malloc(Width * Height * 4);
    uint8_t *yBuffer = (uint8_t *)inBaseAddress;
    uint8_t val;
    int bytesPerPixel = 4;
    // for each byte in the input buffer, fill in the output buffer with four bytes
    // the first byte is the Alpha channel, then the next three contain the same
    // value of the input buffer
    for(int y = 0; y < Height*Width; y++){
        val = yBuffer[y];
        // Alpha channel
        rgbBuffer[(y*bytesPerPixel)] = 0xff;
        // next three bytes same as input
        rgbBuffer[(y*bytesPerPixel)+1] = rgbBuffer[(y*bytesPerPixel)+2] = rgbBuffer[y*bytesPerPixel+3] = val;
    }

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, yPitch, Height, 8,yPitch*bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    //UIImage *image = [[UIImage alloc] initWithCGImage:quartzImage scale:(CGFloat)0.5 orientation:UIImageOrientationRight];
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    free(rgbBuffer);
    return image;
}

I have my .h file set up as an AVCaptureVideoDataOutputSampleBufferDelegate, but I get the feeling I don't understand what I need to do in my code to constantly get data from the camera, because the captureOutput method never gets called anywhere. How and where should I call it to get a constant flow of data?

I'm an idiot. All I needed was:

[captureSession startRunning];

Imagine that. Sheesh.
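
For completeness, a minimal sketch of where that call fits, assuming the viewDidLoad shown above. -startRunning blocks until the session is live, so dispatching it off the main queue keeps the UI responsive, and -stopRunning is the matching teardown:

//At the end of viewDidLoad, once the sample buffer delegate is set:
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    //Blocking call; keep it off the main thread.
    [captureSession startRunning];
});

//Matching teardown, e.g. when the view goes away:
[captureSession stopRunning];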
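
As an aside: because the output above asks for kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, plane 0 of each pixel buffer already holds one luma (Y) byte per pixel, so the average luminance can be read straight off that plane without the UIImage round-trip. A rough, untested sketch of an alternative captureOutput body:

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
//Plane 0 of a bi-planar 4:2:0 buffer is the luma plane: one byte per pixel.
uint8_t *yPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
size_t width  = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
size_t height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
size_t stride = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
double sum = 0.0;
for (size_t row = 0; row < height; row++) {
    uint8_t *line = yPlane + row * stride; //stride includes any row padding
    for (size_t col = 0; col < width; col++) {
        sum += line[col];
    }
}
CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
NSLog(@"Average Luminance: %f\n", sum / (width * height));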
