
Why am I not getting buffered video frames?

I'm trying to monitor the frame buffer of a video feed, but my delegate method is never called. Here is the relevant code:

using System;
using UIKit;
using Cirrious.MvvmCross.Binding.BindingContext;
using Cirrious.MvvmCross.Binding;
using Cirrious.MvvmCross.ViewModels;
using Cirrious.MvvmCross.Touch.Views;
using ACS.FundRaising.ViewModels;
using CoreFoundation;
using Foundation;
using AVFoundation;
using CoreAnimation;
using CoreGraphics;
using CoreMedia;
using CoreVideo;
using System.Threading.Tasks;
using System.Drawing;

namespace ACS.FundRaising.Controllers
{
    [MvxViewFor(typeof(CheckScannerViewModel))]
    public partial class CaptureCheckViewController : BaseViewController, IMvxModalTouchView, IAVCaptureVideoDataOutputSampleBufferDelegate
    {
        private AVCaptureSession _session  = new AVCaptureSession();
        private AVCaptureDevice _captureDevice;
        private AVCaptureStillImageOutput _stillImageOutput;
        private AVCaptureVideoDataOutput _videoDataOutput;
        private UIView _previewView;
        private AVCaptureVideoPreviewLayer _previewLayer;
        private DispatchQueue _videoDataOutputQueue = new DispatchQueue("VideoDataOutputQueue");
        private bool _processingImage = true;
        private bool _doneFocusing = false;

        public CaptureCheckViewController (IntPtr handle) : base (handle)
        {
        }

        public override UIInterfaceOrientationMask GetSupportedInterfaceOrientations ()
        {
            return UIInterfaceOrientationMask.LandscapeRight;
        }

        override public void ViewWillAppear(bool animated)
        {
            base.ViewWillAppear(animated);
            StartCamera();
        }

        public void StartCamera()
        {
            // set _captureDevice to proper video device on phone
        }

        public void BeginSession() {
            NSError err = null;

            // Add input to session
            AVCaptureDeviceInput deviceInput = new AVCaptureDeviceInput(_captureDevice, out err);
            if (err != null)
            {
                System.Diagnostics.Debug.WriteLine(err.Description);
                return;
            }

            if (_session.CanAddInput(deviceInput))
            {
                _session.AddInput(deviceInput);
            }
            else
            {
                System.Diagnostics.Debug.WriteLine("unable to add input");
                return;
            }


            // Add preview output to session
            _stillImageOutput = new AVCaptureStillImageOutput();
            _videoDataOutput = new AVCaptureVideoDataOutput();

            if (_stillImageOutput == null || _videoDataOutput == null)
            {
                System.Diagnostics.Debug.WriteLine("error creating output connections");
                return;
            }

            _videoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            _videoDataOutput.WeakVideoSettings = new CVPixelBufferAttributes() { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
            _videoDataOutput.SetSampleBufferDelegateQueue((IAVCaptureVideoDataOutputSampleBufferDelegate)this, _videoDataOutputQueue);

            if (_session.CanAddOutput(_videoDataOutput))
            {
                _session.AddOutput(_videoDataOutput);
            }
            else
            {
                System.Diagnostics.Debug.WriteLine("cannot add output");
                return;
            }

            _stillImageOutput.OutputSettings= new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);

            if (_session.CanAddOutput(_stillImageOutput))
            {
                _session.AddOutput(_stillImageOutput);
            }
            else
            {
                System.Diagnostics.Debug.WriteLine("cannot add output");
                return;
            }

            _videoDataOutput.ConnectionFromMediaType(AVMediaType.Video).Enabled = true;
            _previewLayer = new AVCaptureVideoPreviewLayer(_session);
            if (_previewLayer == null) 
            {
                System.Diagnostics.Debug.WriteLine("cannot create preview layer");
                return;
            }

            _previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
            _previewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.LandscapeRight;

            CALayer rootLayer = _previewView.Layer;
            rootLayer.MasksToBounds = true;
            _previewLayer.Frame = rootLayer.Bounds;
            rootLayer.AddSublayer(_previewLayer);

            _session.StartRunning();
            _processingImage = false;
        }

        public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            System.Diagnostics.Debug.WriteLine("frame captured...");
            if (!_doneFocusing || _processingImage)
            {
                System.Diagnostics.Debug.WriteLine("not focused or already processing image");
                return;
            }

            _processingImage = true;

            // custom library provides this extension
            var image = new UIImage().ImageFromSampleBuffer(sampleBuffer);
            processImage(image);
        }

    }
}

What am I missing? Why is DidOutputSampleBuffer never called?

You have to assign the delegate by calling _videoDataOutput.SetSampleBufferDelegate(this, DispatchQueue.CurrentQueue).
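
For illustration, a minimal sketch of that wiring; the dedicated _videoDataOutputQueue is carried over from the question's code as the delegate queue, and SetSampleBufferDelegate is the non-deprecated counterpart of the SetSampleBufferDelegateQueue call used above:

// Sketch only: assumes `this` implements
// IAVCaptureVideoDataOutputSampleBufferDelegate, as in the question's controller.
_videoDataOutput = new AVCaptureVideoDataOutput
{
    AlwaysDiscardsLateVideoFrames = true
};
// Hand the delegate and a dispatch queue to the video output.
_videoDataOutput.SetSampleBufferDelegate(this, _videoDataOutputQueue);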

I needed to export the method so that AVFoundation could call it:

[Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // do work here
}
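
As an alternative to exporting the selector by hand, Xamarin.iOS also provides the AVCaptureVideoDataOutputSampleBufferDelegate base class, whose virtual DidOutputSampleBuffer already carries that export. A sketch under that assumption (FrameHandler is a hypothetical name, not from the original post):

using AVFoundation;
using CoreMedia;

class FrameHandler : AVCaptureVideoDataOutputSampleBufferDelegate
{
    // The override inherits the captureOutput:didOutputSampleBuffer:fromConnection:
    // export from the base class, so no [Export] attribute is needed.
    public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        try
        {
            // do work here
        }
        finally
        {
            // Dispose the buffer when done; AVFoundation stops delivering
            // frames once its small internal buffer pool is exhausted.
            sampleBuffer.Dispose();
        }
    }
}

// Usage (keep a reference to the handler so it is not garbage-collected):
// _videoDataOutput.SetSampleBufferDelegate(new FrameHandler(), _videoDataOutputQueue);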
