
Black screen using screenshot and share in Swift4

I am trying to take a screenshot and share it in Swift 4, but all I get is a black picture, and I cannot figure out why. I have tried several approaches, but nothing has made it better. The code I am using is below. I would really appreciate any help. I think the problem involves layers, but I am just starting to learn, so any guidance would be welcome.

Code:

import UIKit
import AVFoundation
import Vision

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let session = AVCaptureSession()

    var previewLayer: AVCaptureVideoPreviewLayer!

    let captureQueue = DispatchQueue(label: "captureQueue")

    var gradientLayer: CAGradientLayer!

    var visionRequests = [VNRequest]()

    var recognitionThreshold: Float = 0.25

    @IBOutlet weak var thresholdStackView: UIStackView!
    @IBOutlet weak var threshholdLabel: UILabel!
    @IBOutlet weak var threshholdSlider: UISlider!
    @IBOutlet weak var share: UIButton!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var resultView: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()

        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            fatalError("No video camera available")
        }
        do {
            // Live camera preview layer hosted inside previewView
            previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            previewView.layer.addSublayer(previewLayer)

            // Dark-to-clear gradient overlaid on the top of the preview
            gradientLayer = CAGradientLayer()
            gradientLayer.colors = [
                UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.7).cgColor,
                UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.0).cgColor,
            ]
            gradientLayer.locations = [0.0, 0.3]
            self.previewView.layer.addSublayer(gradientLayer)

            let cameraInput = try AVCaptureDeviceInput(device: camera)

            // Deliver video frames to captureOutput(_:didOutput:from:) on a background queue
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: captureQueue)
            videoOutput.alwaysDiscardsLateVideoFrames = true
            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            session.sessionPreset = .high

            session.addInput(cameraInput)
            session.addOutput(videoOutput)

            let conn = videoOutput.connection(with: .video)
            conn?.videoOrientation = .portrait

            session.startRunning()

            guard let resNet50Model = try? VNCoreMLModel(for: food().model) else {
                fatalError("Could not load model")
            }

            let classificationRequest = VNCoreMLRequest(model: resNet50Model, completionHandler: handleClassifications)
            classificationRequest.imageCropAndScaleOption = .centerCrop
            visionRequests = [classificationRequest]
        } catch {
            fatalError(error.localizedDescription)
        }

        updateThreshholdLabel()
    }

    func updateThreshholdLabel() {
        self.threshholdLabel.text = "Threshold: " + String(format: "%.2f", recognitionThreshold)
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = self.previewView.bounds
        gradientLayer.frame = self.previewView.bounds

        let orientation: UIDeviceOrientation = UIDevice.current.orientation
        switch orientation {
        case .portrait:
            previewLayer?.connection?.videoOrientation = .portrait
        case .landscapeRight:
            previewLayer?.connection?.videoOrientation = .landscapeLeft
        case .landscapeLeft:
            previewLayer?.connection?.videoOrientation = .landscapeRight
        case .portraitUpsideDown:
            previewLayer?.connection?.videoOrientation = .portraitUpsideDown
        default:
            previewLayer?.connection?.videoOrientation = .portrait
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        connection.videoOrientation = .portrait

        var requestOptions: [VNImageOption: Any] = [:]

        if let cameraIntrinsicData = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) {
            requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
        }

        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .upMirrored, options: requestOptions)
        do {
            try imageRequestHandler.perform(self.visionRequests)
        } catch {
            print(error)
        }
    }

    @IBAction func userTapped(sender: Any) {
        self.thresholdStackView.isHidden = !self.thresholdStackView.isHidden
    }

    @IBAction func share2(_ sender: Any) {
        // Set the default sharing message.
        let message = "Hello!"
        let link = NSURL(string: "http://url.com/")
        // Screenshot:
        UIGraphicsBeginImageContextWithOptions(self.view.frame.size, true, 0.0)
        self.view.drawHierarchy(in: self.view.frame, afterScreenUpdates: false)
        let img = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // Set the link, message, image to share.
        if let link = link, let img = img {
            let objectsToShare = [message, link, img] as [Any]
            let activityVC = UIActivityViewController(activityItems: objectsToShare, applicationActivities: nil)
            activityVC.excludedActivityTypes = [UIActivityType.airDrop, UIActivityType.addToReadingList]
            self.present(activityVC, animated: true, completion: nil)
        }
    }

    @IBAction func sliderValueChanged(slider: UISlider) {
        self.recognitionThreshold = slider.value
        updateThreshholdLabel()
    }

    func handleClassifications(request: VNRequest, error: Error?) {
        if let theError = error {
            print("Error: \(theError.localizedDescription)")
            return
        }
        guard let observations = request.results else {
            print("No results")
            return
        }

        let classifications = observations[0...4] // top 5 results
            .flatMap({ $0 as? VNClassificationObservation })
            .flatMap({ $0.confidence > recognitionThreshold ? $0 : nil })
            .map({ "\($0.identifier) \(String(format: "%.2f", $0.confidence))" })
            .joined(separator: "\n")

        DispatchQueue.main.async {
            self.resultView.text = classifications
        }
    }
}

Try replacing:

UIGraphicsBeginImageContextWithOptions(self.view.frame.size, true, 0.0)

with:

UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, false, UIScreen.main.scale)
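Applied to the share2 action from the question, the whole capture block might look like the sketch below. It makes two extra assumptions beyond the answer's one-line replacement: drawHierarchy(in:) is given bounds rather than frame, since that method expects a rect in the view's own coordinate space, and afterScreenUpdates is set to true so UIKit performs a render pass before the snapshot is taken. This is a sketch of the suggested change, not a verified fix.

@IBAction func share2(_ sender: Any) {
    let message = "Hello!"
    let link = NSURL(string: "http://url.com/")

    // Non-opaque context at the view's bounds size, rendered at the device's screen scale.
    UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, false, UIScreen.main.scale)
    // bounds rather than frame: drawHierarchy(in:) works in the view's local coordinates.
    // afterScreenUpdates: true (an assumption, not in the answer) forces pending updates
    // to be rendered before the capture.
    self.view.drawHierarchy(in: self.view.bounds, afterScreenUpdates: true)
    let img = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    if let link = link, let img = img {
        let objectsToShare = [message, link, img] as [Any]
        let activityVC = UIActivityViewController(activityItems: objectsToShare, applicationActivities: nil)
        activityVC.excludedActivityTypes = [UIActivityType.airDrop, UIActivityType.addToReadingList]
        self.present(activityVC, animated: true, completion: nil)
    }
}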
