Tags: ios, swift, video, avfoundation, preview

Scale of AVCaptureVideoPreviewLayer


I am currently working on a QR scan view in my Swift application, and I want to center the video preview in the middle of my view.

The view looks like this:

(Image of the view I'm working on)

The view (white) is called ScanView, and I want to make the video preview the same size as the ScanView and center it inside it.

Code snippet:

(Image: setup of my VideoDevice)
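
Conceptually, what I am trying to get is something like the sketch below (just to illustrate the intent; `scanView`, `previewLayer`, and the helper name are placeholders, not working code):

    import AVFoundation
    import UIKit

    // Sketch of the intent only: make the preview layer exactly as large as
    // ScanView so it sits centered inside it, filling the whole view.
    func embedPreview(_ previewLayer: AVCaptureVideoPreviewLayer, in scanView: UIView) {
      previewLayer.frame = scanView.bounds
      previewLayer.videoGravity = .resizeAspectFill
      scanView.layer.addSublayer(previewLayer)
    }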

Thanks for any help!


Solution

  • Here is a working solution:

    import UIKit
    import AVFoundation
    
    class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate {
      @IBOutlet weak var innerView: UIView!
      var session: AVCaptureSession?
      var input: AVCaptureDeviceInput?
      var previewLayer: AVCaptureVideoPreviewLayer?
        
      override func viewDidLoad() {
        super.viewDidLoad()
        createSession()
      }
        
      override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
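        // innerView has its final Auto Layout size by the time viewDidAppear runs,
        // so the preview layer can be matched to it here.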
        self.previewLayer?.frame.size = self.innerView.frame.size
      }
        
      private func createSession() {
        do {
          self.session = AVCaptureSession()
          if let device = AVCaptureDevice.default(for: AVMediaType.video) {
            self.input = try AVCaptureDeviceInput(device: device)
            self.session?.addInput(self.input!)
            self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.previewLayer?.frame.size = self.innerView.frame.size
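            // resizeAspectFill fills innerView completely; the edges of the camera image
            // may be cropped, but the preview stays centered with no letterboxing.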
            self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.innerView.layer.addSublayer(self.previewLayer!)
    
            //______ 1. solution with Video camera ______//
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            if self.session?.canAddOutput(videoOutput) == true {
              self.session?.addOutput(videoOutput)
            }
    
            //______ 2. solution with QR code ______//
            let metadataOutput = AVCaptureMetadataOutput()
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            if self.session?.canAddOutput(metadataOutput) == true {
              self.session?.addOutput(metadataOutput)
              // metadataObjectTypes must be set after the output has been added to the session,
              // explanation here: https://stackoverflow.com/a/35642852/2450755
              metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
            }
    
            self.session?.startRunning()
          }
        } catch let error {
          // surface setup failures instead of silently swallowing them
          print("Could not create the capture session: \(error)")
        }
      }
    
      //MARK: AVCaptureVideoDataOutputSampleBufferDelegate
      public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
            // awesome stuff here
        }
      }
    
      //MARK: AVCaptureMetadataOutputObjectsDelegate
      func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // read the decoded QR payload, if any
        if let code = metadataObjects.first as? AVMetadataMachineReadableCodeObject, let value = code.stringValue {
          print(value)
        }
      }
    }
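
    One refinement worth noting: setting the size in viewDidAppear only covers the initial layout. If innerView can change size later (device rotation, Auto Layout updates), a common approach is to re-apply the frame in viewDidLayoutSubviews, roughly like this sketch (to be added to the ViewController above):

    override func viewDidLayoutSubviews() {
      super.viewDidLayoutSubviews()
      // Re-apply the size whenever layout changes innerView, e.g. on rotation.
      CATransaction.begin()
      CATransaction.setDisableActions(true) // avoid an implicit animation while resizing
      self.previewLayer?.frame = self.innerView.bounds
      CATransaction.commit()
    }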
    

    Requirements:

    • add the Privacy - Camera Usage Description key (NSCameraUsageDescription) to the app's Info.plist
    • innerView must be initialized; I did this via Storyboard with the constraints shown below (a programmatic sketch of equivalent constraints follows the screenshot):

    (Screenshot of the Storyboard constraints for innerView)
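
    In case the screenshot does not come through, here is a rough programmatic sketch of constraints with the same effect (the exact values are assumptions; the essential point is that innerView is centered in its superview with a fixed size):

    // Sketch only: the actual constraint values may differ from the screenshot.
    innerView.translatesAutoresizingMaskIntoConstraints = false
    NSLayoutConstraint.activate([
      innerView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
      innerView.centerYAnchor.constraint(equalTo: view.centerYAnchor),
      innerView.widthAnchor.constraint(equalToConstant: 250),  // placeholder size
      innerView.heightAnchor.constraint(equalToConstant: 250)  // placeholder size
    ])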

    Here is the result:

    (Screenshot of the resulting view with the centered camera preview)