
iOS / Objective-C: Display live camera preview on load without image picker controller

Rather than showing a static photo image when asking for a profile picture, I want the screen to open straight into a live camera view. Afterwards I don't mind using a UIImagePickerController to capture the photo, but I want the user to immediately see something live rather than a static image.

Do I need to use AVFoundation, as in the Swift answer below, or is there a simpler way to do this in Objective-C?

Here is some code from an SO question that uses AVFoundation in Swift; however, I'm not comfortable with Swift and would like to do this in Objective-C:

extension SelfieViewController:  AVCaptureVideoDataOutputSampleBufferDelegate{
    func setupAVCapture(){
        session.sessionPreset = AVCaptureSessionPreset640x480

        let devices = AVCaptureDevice.devices();
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if(device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    func beginSession(){
        var err : NSError? = nil
        var deviceInput:AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput){
            self.session.addInput(deviceInput)
        }

        self.videoDataOutput = AVCaptureVideoDataOutput()
        // Ask for BGRA frames; the pixel-format key maps to the format value
        let rgbOutputSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(integer: kCMPixelFormat_32BGRA)]
        self.videoDataOutput.videoSettings = rgbOutputSettings
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput){
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        var rootLayer :CALayer = self.cameraView.layer
        rootLayer.masksToBounds=true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()

    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    // clean up AVCapture
    func stopCamera(){
        session.stopRunning()
    }

}

Thanks in advance for any suggestions.

I just translated the sample code into Objective-C. If you want to learn more, you could take a look at my project FaceDetectionDemo. Hope it helps.

- (void)setupAVCapture {
    NSError *error = nil;

    // Select device
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    } else {
        [session setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    AVCaptureDevice *device = [self findFrontCamera];
    if (nil == device) {
        self.isUsingFrontFacingCamera = NO;
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    // Get the input device
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (error) {
        session = nil;
        [self teardownAVCapture];
        if ([_delegate respondsToSelector:@selector(FaceDetectionComponentError:error:)]) {
            __weak typeof(self) weakSelf = self;
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf.delegate FaceDetectionComponentError:weakSelf error:error];
            });
        }
        return;
    }

    // Add the input to the session
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    // Make a video data output
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // We want BGRA; both Core Graphics and OpenGL work well with it
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [self.videoDataOutput setVideoSettings:rgbOutputSettings];
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked

    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];

    if ([session canAddOutput:self.videoDataOutput]) {
        [session addOutput:self.videoDataOutput];
    }

    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    self.previewLayer.backgroundColor = [[UIColor blackColor] CGColor];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;

    CALayer *rootLayer = [self.previewView layer];
    [rootLayer setMasksToBounds:YES];
    [self.previewLayer setFrame:[rootLayer bounds]];
    [rootLayer addSublayer:self.previewLayer];
    [session startRunning];
}

- (AVCaptureDevice *)findFrontCamera {
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePositionFront;
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            self.isUsingFrontFacingCamera = YES;
            return d;
        }
    }
    return nil;
}

// AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
    fromConnection:(AVCaptureConnection *)connection {

}
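
For completeness, the snippet above references several properties and a delegate callback that are declared elsewhere in the class. A minimal sketch of the interface it assumes could look like the following (the protocol name and anything not already referenced above are my assumptions, not part of the original answer):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@class FaceDetectionComponent;

// Hypothetical delegate protocol matching the error callback invoked above.
@protocol FaceDetectionComponentDelegate <NSObject>
@optional
- (void)FaceDetectionComponentError:(FaceDetectionComponent *)component error:(NSError *)error;
@end

// Properties referenced by setupAVCapture / findFrontCamera above.
@interface FaceDetectionComponent : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, weak) id<FaceDetectionComponentDelegate> delegate;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) UIView *previewView; // view that hosts the live preview layer
@property (nonatomic, assign) BOOL isUsingFrontFacingCamera;

- (void)setupAVCapture;
- (void)teardownAVCapture;
@end

A view controller would then create the component (or host the same code directly), set previewView to one of its subviews, and call setupAVCapture from viewDidLoad. Note that on iOS 10 and later the app's Info.plist must contain an NSCameraUsageDescription entry, otherwise the app is not allowed to access the camera.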

