簡體   English   中英

func captureOutput永遠不會被調用

[英]func captureOutput is never called

添加喜歡為我實時記錄的每個幀添加過濾器,並在UIImageView顯示過濾后的圖像,如果有人可以幫助的話,那就太好了。 但是從不調用captureoutput,這是我的代碼。

class Measurement: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var cameraPreview: UIView!
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
            super.viewDidLoad()

            setupCameraSession()
                toggleTorch(on: true)

        }

        override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)

            view.layer.addSublayer(previewLayer)

            cameraSession.startRunning()
        }

        lazy var cameraSession: AVCaptureSession = {
            let s = AVCaptureSession()

            s.sessionPreset = AVCaptureSession.Preset.low
            return s
        }()

        lazy var previewLayer: AVCaptureVideoPreviewLayer = {
            let preview =  AVCaptureVideoPreviewLayer(session: self.cameraSession)
            preview.position = CGPoint(x:182,y: 485)
            preview.videoGravity = AVLayerVideoGravity.resizeAspectFill
            preview.connection?.videoOrientation = AVCaptureVideoOrientation.portrait

            preview.bounds = imageView.bounds
            //preview.position = CGPoint(x:self.view.bounds.midX,y: self.view.bounds.midY)
            imageView.layer.addSublayer(preview)

            return preview
        }()

        func toggleTorch(on: Bool) {
            guard let device = AVCaptureDevice.default(for: .video) else { return }

            if device.hasTorch {
                do {
                    try device.lockForConfiguration()

                    if on == true {
                        device.torchMode = .on
                    } else {
                        device.torchMode = .off
                    }

                    device.unlockForConfiguration()
                } catch {
                    print("Torch could not be used")
                }
            } else {
                print("Torch is not available")
            }
        }
        func setupCameraSession() {
            let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)

            do {
                let deviceInput = try AVCaptureDeviceInput(device: captureDevice!)

                cameraSession.beginConfiguration()

                if (cameraSession.canAddInput(deviceInput) == true) {
                    cameraSession.addInput(deviceInput)
                    print("Processing Data.")
                }

                let dataOutput = AVCaptureVideoDataOutput()
                dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)] as [String : AnyObject]
                dataOutput.alwaysDiscardsLateVideoFrames = true

                 print("Processing Data.")
                if (cameraSession.canAddOutput(dataOutput) == true) {
                    cameraSession.addOutput(dataOutput)
                    print("Processing Data.")
                }

                cameraSession.commitConfiguration()

                let queue = DispatchQueue(label: "com.invasivecode.videoQueue")
                dataOutput.setSampleBufferDelegate(self, queue: queue)

            }
            catch let error as NSError {
                print("\(error), \(error.localizedDescription)")
            }
        }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {


        print("Processing Data.")


        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return  }
        //let chromaKeyFilter = colorCubeFilterForChromaKey(hueAngle: 120)
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)

        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return  }

        let image = UIImage(cgImage: cgImage)
        if let chromaKeyFilter = CIFilter(name: "CISepiaTone") {
            let beginImage = CIImage(image: image)
            chromaKeyFilter.setValue(beginImage, forKey: kCIInputImageKey)
            chromaKeyFilter.setValue(0.5, forKey: kCIInputIntensityKey)

            if let output = chromaKeyFilter.outputImage {
                if let cgimg = context.createCGImage(output, from: output.extent) {
                    let processedImage = UIImage(cgImage: cgimg)
                    // do something interesting with the processed image
                    imageView.image = processedImage
                }
            }
        }
    }








func captureOutput(_ captureOutput: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Here you can count how many frames are dopped
    }
func startCapture() {
    print("\(self.classForCoder)/" + #function)
    if cameraSession.isRunning {

        print("already running")
        return
    }
    cameraSession.startRunning()
    toggleTorch(on: true)


}

您需要設置代表

dataOutput.sampleBufferDelegate = self

暫無
暫無

聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.

 
粵ICP備18138465號  © 2020-2024 STACKOOM.COM