简体   繁体   中英

Crop CGRect from UIImage taken from camera

I have a view controller which takes a photo with a circular view in the center. After taking a photo, I need to crop the rectangular CGRect with which I created the circular view — the rectangle, not the circle. I tried https://stackoverflow.com/a/57258806/12411655 and many other solutions, but none of them crop the CGRect that I need. How do I convert a CGRect in the view's coordinate space to the UIImage's coordinate space?

class CircularCameraViewController: UIViewController {

    // MARK: - Capture state

    /// Session driving the camera pipeline. Created in `setupCamera()`.
    var captureSession: AVCaptureSession!
    /// Photo output attached to the session in `setupPhotoOutput()`.
    var capturePhotoOutput: AVCapturePhotoOutput!
    /// The square region (in this view's coordinate space) inside the circular
    /// cut-out; captured photos are cropped to this rect.
    var cropRect: CGRect!

    /// Side length of the circular cut-out / crop square, in points.
    private let cropSideLength: CGFloat = 150

    // MARK: - Subviews

    /// Round shutter control; triggers `capturePhoto()`.
    public lazy var shutterButton: ShutterButton = {
        let button = ShutterButton()
        button.translatesAutoresizingMaskIntoConstraints = false
        button.addTarget(self, action: #selector(capturePhoto), for: .touchUpInside)
        return button
    }()

    /// Dismisses the camera screen.
    private lazy var cancelButton: UIButton = {
        let button = UIButton()
        button.setTitle("Cancel", for: .normal)
        button.translatesAutoresizingMaskIntoConstraints = false
        button.addTarget(self, action: #selector(dismissCamera), for: .touchUpInside)
        return button
    }()

    /// Toggles the torch on and off.
    private lazy var flashButton: UIButton = {
        let image = UIImage(named: "flash", in: Bundle(for: ScannerViewController.self), compatibleWith: nil)?.withRenderingMode(.alwaysTemplate)
        let button = UIButton()
        button.setImage(image, for: .normal)
        button.translatesAutoresizingMaskIntoConstraints = false
        button.addTarget(self, action: #selector(toggleFlash), for: .touchUpInside)
        button.tintColor = .white
        return button
    }()

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCamera()
        setupPhotoOutput()
        setupViews()
        setupConstraints()
        captureSession.startRunning()
    }

    override func viewWillDisappear(_ animated: Bool) {
        // FIX: the original omitted the required call to super.
        super.viewWillDisappear(animated)
        captureSession.stopRunning()
    }

    // MARK: - Setup

    /// Configures the capture session, installs the live preview layer, and
    /// draws the dimmed overlay with a circular hole centered in the view.
    private func setupCamera() {
        // FIX: guard instead of force-unwrapping the default device.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            fatalError("No video capture device is available")
        }
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: captureDevice)
        } catch {
            fatalError("Error configuring capture device: \(error)")
        }
        captureSession = AVCaptureSession()
        // FIX: verify the session accepts the input before adding it.
        guard captureSession.canAddInput(input) else {
            fatalError("Capture session cannot accept camera input")
        }
        captureSession.addInput(input)

        // Live preview fills the whole view.
        let videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer.frame = view.layer.bounds
        view.layer.addSublayer(videoPreviewLayer)

        // Center a square crop region in the preview.
        let camPreviewBounds = view.bounds
        cropRect = CGRect(
            x: camPreviewBounds.minX + (camPreviewBounds.width - cropSideLength) * 0.5,
            y: camPreviewBounds.minY + (camPreviewBounds.height - cropSideLength) * 0.5,
            width: cropSideLength,
            height: cropSideLength
        )

        // Even-odd fill of (full bounds + inner oval) punches a circular hole
        // through the semi-transparent black overlay.
        let path = UIBezierPath(roundedRect: camPreviewBounds, cornerRadius: 0)
        path.append(UIBezierPath(ovalIn: cropRect))

        let layer = CAShapeLayer()
        layer.path = path.cgPath
        layer.fillRule = CAShapeLayerFillRule.evenOdd
        layer.fillColor = UIColor.black.cgColor
        layer.opacity = 0.5

        view.layer.addSublayer(layer)
    }

    /// Adds the control buttons above the preview/overlay layers.
    private func setupViews() {
        view.addSubview(shutterButton)
        view.addSubview(flashButton)
        view.addSubview(cancelButton)
    }

    /// Pins the shutter/cancel buttons to the bottom (safe-area aware on
    /// iOS 11+) and the flash button to the top-left corner.
    private func setupConstraints() {
        var cancelButtonConstraints = [NSLayoutConstraint]()
        var shutterButtonConstraints = [NSLayoutConstraint]()
        var flashConstraints = [NSLayoutConstraint]()

        shutterButtonConstraints = [
            shutterButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            shutterButton.widthAnchor.constraint(equalToConstant: 65.0),
            shutterButton.heightAnchor.constraint(equalToConstant: 65.0)
        ]

        flashConstraints = [
            flashButton.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 24.0),
            flashButton.topAnchor.constraint(equalTo: view.topAnchor, constant: 30)
        ]

        if #available(iOS 11.0, *) {
            cancelButtonConstraints = [
                cancelButton.leftAnchor.constraint(equalTo: view.safeAreaLayoutGuide.leftAnchor, constant: 24.0),
                view.safeAreaLayoutGuide.bottomAnchor.constraint(equalTo: cancelButton.bottomAnchor, constant: (65.0 / 2) - 10.0)
            ]
            let shutterButtonBottomConstraint = view.safeAreaLayoutGuide.bottomAnchor.constraint(equalTo: shutterButton.bottomAnchor, constant: 8.0)
            shutterButtonConstraints.append(shutterButtonBottomConstraint)
        } else {
            cancelButtonConstraints = [
                cancelButton.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 24.0),
                view.bottomAnchor.constraint(equalTo: cancelButton.bottomAnchor, constant: (65.0 / 2) - 10.0)
            ]
            let shutterButtonBottomConstraint = view.bottomAnchor.constraint(equalTo: shutterButton.bottomAnchor, constant: 8.0)
            shutterButtonConstraints.append(shutterButtonBottomConstraint)
        }
        NSLayoutConstraint.activate(cancelButtonConstraints + shutterButtonConstraints + flashConstraints)
    }

    /// Creates the photo output and attaches it to the session.
    private func setupPhotoOutput() {
        capturePhotoOutput = AVCapturePhotoOutput()
        capturePhotoOutput.isHighResolutionCaptureEnabled = true
        // FIX: verify the session accepts the output before adding it.
        guard captureSession.canAddOutput(capturePhotoOutput) else {
            fatalError("Capture session cannot accept photo output")
        }
        captureSession.addOutput(capturePhotoOutput)
    }

    // MARK: - Actions

    @objc func dismissCamera() {
        self.dismiss(animated: true, completion: nil)
    }

    /// Flips the torch state, holding the device configuration lock while
    /// mutating `torchMode`.
    @objc private func toggleFlash() {
        guard let avDevice = AVCaptureDevice.default(for: .video), avDevice.hasTorch else {
            return
        }
        do {
            try avDevice.lockForConfiguration()
        } catch {
            // FIX: the original logged a placeholder ("aaaa") and then mutated
            // torchMode (and called unlockForConfiguration) WITHOUT holding the
            // lock. Bail out instead.
            print("Failed to lock device for torch configuration: \(error)")
            return
        }
        avDevice.torchMode = avDevice.isTorchActive ? .off : .on
        avDevice.unlockForConfiguration()
    }

}


extension CircularCameraViewController : AVCapturePhotoCaptureDelegate {

    /// Requests a single photo capture; results arrive in the
    /// `photoOutput(...)` delegate callbacks below.
    @objc private func capturePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isAutoStillImageStabilizationEnabled = true
        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.flashMode = .auto
        // We are the delegate for this capture request.
        capturePhotoOutput?.capturePhoto(with: photoSettings, delegate: self)
    }

    /// iOS 11+ capture callback: decodes the photo, crops it to `cropRect`
    /// (mapped from view coordinates into image coordinates), and saves the
    /// result to the photo album.
    @available(iOS 11.0, *)
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        // FIX: capture failures are runtime conditions (permissions, session
        // interruption), not programmer errors — log and return instead of
        // crashing with fatalError.
        guard error == nil else {
            print("Failed to capture photo: \(String(describing: error))")
            return
        }
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else {
            print("Failed to convert photo data to UIImage")
            return
        }
        // FIX: pass the preview's size so the crop rect is scaled from view
        // coordinates to image coordinates (cropToRect(rect:) alone used the
        // screen scale and cropped the wrong region).
        guard let croppedImg = image.cropToRect(rect: cropRect, viewSize: view.frame.size) else {
            print("Failed to crop image")
            return
        }
        UIImageWriteToSavedPhotosAlbum(croppedImg, nil, nil, nil)
    }

    /// Pre-iOS 11 capture callback (sample-buffer API): same crop-and-save
    /// behavior as the modern path.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil, let photoSample = photoSampleBuffer else {
            print("Failed to capture photo: \(String(describing: error))")
            return
        }
        guard let imgData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSample, previewPhotoSampleBuffer: previewPhotoSampleBuffer),
              let image = UIImage(data: imgData) else {
            print("Failed to decode photo sample buffer")
            return
        }
        // FIX: the original decoded `image` and then silently dropped it;
        // crop and save for parity with the iOS 11 path.
        if let croppedImg = image.cropToRect(rect: cropRect, viewSize: view.frame.size) {
            UIImageWriteToSavedPhotosAlbum(croppedImg, nil, nil, nil)
        } else {
            print("Failed to crop image")
        }
    }
}

UIImage extension:

/// Crops `rect` out of the receiver's backing CGImage.
///
/// NOTE(review): this is the question's broken version. `self.scale` is the
/// UIImage's screen scale (1x/2x/3x), NOT the ratio between the preview view
/// and the captured photo, so a rect expressed in view coordinates lands on
/// the wrong region of a high-resolution camera image. It also ignores
/// `imageOrientation`, and `CGImage.cropping(to:)` operates on the raw,
/// un-rotated pixel buffer. See the corrected `cropToRect(rect:viewSize:)`
/// in the answer below.
func cropToRect(rect: CGRect!) -> UIImage? {

            // Scale the rect by the image's screen scale (the flawed step).
            let scaledRect = CGRect(x: rect.origin.x * self.scale, y: rect.origin.y * self.scale, width: rect.size.width * self.scale, height: rect.size.height * self.scale);


            // cropping(to:) returns nil when the rect misses the image bounds.
            guard let imageRef: CGImage = self.cgImage?.cropping(to:scaledRect)
            else {
                return nil
            }

            // Re-wrap, preserving the original scale and orientation metadata.
            let croppedImage: UIImage = UIImage(cgImage: imageRef, scale: self.scale, orientation: self.imageOrientation)
            return croppedImage
        }
    

When cropping an image, you need to scale the crop rect from the view's coordinate space into the image's coordinate space — the captured photo is usually much larger than the preview view.

Also, when capturing from the camera, you need to take .imageOrientation into account.

Try changing your UIImage extension to this:

extension UIImage {
    /// Crops the receiver to `rect`, where `rect` is expressed in the
    /// coordinate space of a view of size `viewSize` that displayed the image
    /// with aspect-fill scaling.
    ///
    /// - Parameters:
    ///   - rect: Crop region in view coordinates.
    ///   - viewSize: Size of the view the rect was measured in.
    /// - Returns: The cropped image, or `nil` if the region falls outside the
    ///   image.
    func cropToRect(rect: CGRect, viewSize: CGSize) -> UIImage? {
        // The backing CGImage of a camera photo is often stored rotated; when
        // the orientation says so, swap the rect's axes to match raw pixels.
        let orientedRect: CGRect
        switch self.imageOrientation {
        case .right, .rightMirrored, .left, .leftMirrored:
            orientedRect = CGRect(x: rect.origin.y,
                                  y: rect.origin.x,
                                  width: rect.size.height,
                                  height: rect.size.width)
        default:
            orientedRect = rect
        }

        // Aspect-fill scale factor between the displayed view and the image.
        let fillScale = max(self.size.width / viewSize.width,
                            self.size.height / viewSize.height)

        // Map the rect from view points into image pixels.
        let pixelRect = CGRect(x: orientedRect.origin.x * fillScale,
                               y: orientedRect.origin.y * fillScale,
                               width: orientedRect.size.width * fillScale,
                               height: orientedRect.size.height * fillScale)

        // Crop the raw bitmap; nil means the rect missed the image entirely.
        guard let croppedRef = self.cgImage?.cropping(to: pixelRect) else {
            return nil
        }

        // Re-wrap as UIImage, keeping the original scale/orientation metadata.
        return UIImage(cgImage: croppedRef, scale: self.scale, orientation: self.imageOrientation)
    }
}

and change your call in photoOutput() to:

    guard let croppedImg = image.cropToRect(rect: cropRect, viewSize: view.frame.size) else {
        fatalError("Failed to crop image")
    }

Since your code uses the full view as the preview, that should work fine. If you change it to use a differently sized view for your videoPreviewLayer, pass that view's size instead of `view.frame.size`.

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM