ios, swift, avfoundation, avcapturesession, avcapturedevice

How do I implement switching between the front and back camera?


Below is the code for the camera section. I tried adding a boolean to detect when the front camera is active, but I receive an error.

import UIKit
import AVFoundation 
class MainCameraCollectionViewCell: UICollectionViewCell {
    @IBOutlet weak var myView: UIView!
    var captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentCamera: AVCaptureDevice?
    var photoOutPut: AVCapturePhotoOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    var image: UIImage?
    var usingFrontCamera = false   
    override func awakeFromNib() {
        super.awakeFromNib()
        setupCaptureSession()
        setupDevice()
        setupInput()
        setupPreviewLayer()
        startRunningCaptureSession()
    }
    func setupCaptureSession(){
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }
    func setupDevice(usingFrontCamera:Bool = false){
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
        let devices = deviceDiscoverySession.devices   
        for device in devices {
            if device.position == AVCaptureDevice.Position.back {
                backCamera = device
            } else if device.position == AVCaptureDevice.Position.front {
                frontCamera = device
            }
        }
        if usingFrontCamera {
            currentCamera = frontCamera
        } else {
            currentCamera = backCamera
        }
    }
    func setupInput() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            captureSession.addInput(captureDeviceInput)
            photoOutPut = AVCapturePhotoOutput()
            photoOutPut?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format:[AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
            captureSession.addOutput(photoOutPut!)
        } catch {
            print(error)
        }
    }
    func setupPreviewLayer(){
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        cameraPreviewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
        self.layer.insertSublayer(cameraPreviewLayer!, at: 0)
    }
    func startRunningCaptureSession(){
        captureSession.startRunning()
    }
    @IBAction func cameraButton_TouchUpInside(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        photoOutPut?.capturePhoto(with: settings, delegate: self as! AVCapturePhotoCaptureDelegate)
    }
    //Flip to front and back camera
    @IBAction func FlipThe_camera(_ sender: UIButton) {
        print("Flip touched")
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                captureSession.removeInput(input)
            }
        }
        usingFrontCamera = !usingFrontCamera
        setupCaptureSession()
        setupDevice(usingFrontCamera:usingFrontCamera)
        setupInput()
        setupPreviewLayer()
        startRunningCaptureSession()
    }
}

collectionViewAttemptOfPagingAttempt002[2908:834004] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureSession addOutput:] Cannot add output to capture session because more than one output of the same type is unsupported'


Solution

  • Your problem is that you are not checking whether you can add the input and/or output before adding them to your capture session. Every time you flip the camera, setupInput() creates and adds another AVCapturePhotoOutput while the previous one is still attached, which is exactly the exception you see. By the way, you don't need to set up the preview layer again when switching cameras, and you only need one captureDevice:

    if captureSession.canAddInput(captureDeviceInput) {
        captureSession.addInput(captureDeviceInput)
    } 
    

    and

    if captureSession.canAddOutput(photoOutPut) {
        captureSession.addOutput(photoOutPut)
    }
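
    As a side note (this is not part of the original answer): if you would rather keep a single AVCapturePhotoOutput attached for the whole lifetime of the session, a flip can swap only the device input inside a configuration block. A minimal sketch, where newCamera is a hypothetical AVCaptureDevice you have already looked up:

    captureSession.beginConfiguration()
    // Remove only the current video input; the photo output stays attached
    if let currentInput = captureSession.inputs.first as? AVCaptureDeviceInput {
        captureSession.removeInput(currentInput)
    }
    // Add the input for the newly selected camera, guarded by canAddInput
    if let newInput = try? AVCaptureDeviceInput(device: newCamera),
       captureSession.canAddInput(newInput) {
        captureSession.addInput(newInput)
    }
    captureSession.commitConfiguration()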
    

    Your CollectionViewCell code should look something like this:

    class MainCameraCollectionViewCell: UICollectionViewCell, AVCapturePhotoCaptureDelegate {
        private var captureSession = AVCaptureSession()
        private var sessionQueue: DispatchQueue!
        private var captureDevice: AVCaptureDevice!
        private var photoOutPut: AVCapturePhotoOutput!
        private var cameraPreviewLayer: AVCaptureVideoPreviewLayer!
        var image: UIImage?
        var usingFrontCamera = false
        override func awakeFromNib() {
            super.awakeFromNib()
            setupCaptureSession()
            setupDevice()
            setupInput()
            setupPreviewLayer()
            startRunningCaptureSession()
        }
        func setupCaptureSession(){
            captureSession.sessionPreset = .photo
            sessionQueue = DispatchQueue(label: "session queue")
        }
        func setupDevice(usingFrontCamera: Bool = false){
            sessionQueue.async {
                let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
                let devices = deviceDiscoverySession.devices
                for device in devices {
                    if usingFrontCamera && device.position == .front {
                        self.captureDevice = device
                    } else if !usingFrontCamera && device.position == .back {
                        self.captureDevice = device
                    }
                }
            }
        }
        func setupInput() {
            sessionQueue.async {
                do {
                    let captureDeviceInput = try AVCaptureDeviceInput(device: self.captureDevice)
                    if self.captureSession.canAddInput(captureDeviceInput) {
                        self.captureSession.addInput(captureDeviceInput)
                    }
                    // Create and attach the photo output only once and reuse it after a camera flip,
                    // so photoOutPut always points at the output that is actually in the session
                    if self.photoOutPut == nil {
                        self.photoOutPut = AVCapturePhotoOutput()
                        self.photoOutPut.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
                        if self.captureSession.canAddOutput(self.photoOutPut) {
                            self.captureSession.addOutput(self.photoOutPut)
                        }
                    }
                } catch {
                    print(error)
                }
            }
        }
        func setupPreviewLayer() {
            cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            cameraPreviewLayer.videoGravity = .resizeAspectFill
            cameraPreviewLayer.connection?.videoOrientation = .portrait
            cameraPreviewLayer.frame = UIScreen.main.bounds
            layer.insertSublayer(cameraPreviewLayer, at: 0)
        }
        func startRunningCaptureSession() {
            captureSession.startRunning()
        }
        @IBAction func cameraButton_TouchUpInside(_ sender: Any) {
            let settings = AVCapturePhotoSettings()
            photoOutPut.capturePhoto(with: settings, delegate: self)
        }
        //Flip to front and back camera
        @IBAction func FlipThe_camera(_ sender: UIButton) {
            captureSession.beginConfiguration()
            if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
                for input in inputs {
                    captureSession.removeInput(input)
                }
            }
            usingFrontCamera = !usingFrontCamera
            setupCaptureSession()
            setupDevice(usingFrontCamera: usingFrontCamera)
            setupInput()
            captureSession.commitConfiguration()
            startRunningCaptureSession()
        }
    }
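
    To actually receive the captured photo and fill the image property, you would also implement the AVCapturePhotoCaptureDelegate callback. A minimal sketch, assuming the same class and property names as above (iOS 11+):

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // Bail out if the capture failed
        if let error = error {
            print(error)
            return
        }
        // Convert the captured photo into a UIImage and keep it
        if let data = photo.fileDataRepresentation() {
            image = UIImage(data: data)
        }
    }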
    

    Sample project