
Video Buffer Output with Swift

My goal is to grab the video buffer and ultimately convert it to NSData, but I don't understand how to access the buffer properly. I have the captureOutput function, but I haven't succeeded in converting the buffer, and I'm not sure anything is actually being collected in it. This is all in Swift; I've found some examples in Objective-C, but I can't understand the Obj-C code well enough to work it out.

import UIKit
import AVFoundation

var captureDevice : AVCaptureDevice?
var videoCaptureOutput = AVCaptureVideoDataOutput()
var bounds: CGRect = UIScreen.mainScreen().bounds
let captureSession = AVCaptureSession()
var captureConnection: AVCaptureMovieFileOutput?


override func viewDidLoad() {
    super.viewDidLoad()
    captureSession.sessionPreset = AVCaptureSessionPreset640x480
    let devices = AVCaptureDevice.devices()

    // Pick the back-facing camera and configure the session.
    for device in devices {
        if (device.hasMediaType(AVMediaTypeVideo)) {
            if device.position == AVCaptureDevicePosition.Back {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                }
            }
        }
    }
}

func beginSession() {
    let screenWidth: CGFloat = bounds.size.width
    let screenHeight: CGFloat = bounds.size.height
    var err : NSError? = nil
    captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err)!)

    if err != nil {
        println("Error: \(err?.localizedDescription)")
    }

    // Ask for BGRA pixel buffers and drop frames that arrive late.
    videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
    videoCaptureOutput.alwaysDiscardsLateVideoFrames = true

    // Deliver sample buffers to this object on a serial background queue,
    // and add the output only once, after checking that it can be added.
    videoCaptureOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL))
    if captureSession.canAddOutput(self.videoCaptureOutput) {
        captureSession.addOutput(self.videoCaptureOutput)
    }

    // The session must be running for frames to reach the delegate.
    captureSession.startRunning()
}

// AVCaptureVideoDataOutputSampleBufferDelegate callback; this must be a method of the
// class (which declares conformance to the protocol), not nested inside beginSession().
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // I think this is where I can get the buffer info.
}

In the AVCaptureVideoDataOutputSampleBufferDelegate method captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) you can get the buffer information:

let formatDescription: CMFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)

// Lock the pixel buffer before reading its memory, and unlock it when done.
CVPixelBufferLockBaseAddress(imageBuffer, 0)
let imagePointer: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddress(imageBuffer)

// Width comes from CVPixelBufferGetWidth, height from CVPixelBufferGetHeight.
let bufferSize: (width: Int, height: Int) = (CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer))

println("Buffer Size: \(bufferSize.width):\(bufferSize.height)")

CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

