Tags: ios, swift, uiimage, face-detection

Delete white background from UIImage in Swift 5


I am trying to save two copies of a photo taken with the camera: one is the photo itself, and the other is the photo plus emojis hiding the faces of the people who appear in it.

Right now I only get the original photo, plus an image with a white background and the emoji faces, but without the photo behind them.

This is the code I use to put the emojis over the faces:

private func detectFace(in image: CVPixelBuffer) {
    let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
        DispatchQueue.main.async {
            if let results = request.results as? [VNFaceObservation] {
                self.handleFaceDetectionResults(results)
            } else {
                self.clearDrawings()
            }
        }
    })
    let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
    try? imageRequestHandler.perform([faceDetectionRequest])
}

private func handleFaceDetectionResults(_ observedFaces: [VNFaceObservation]) {
    self.clearDrawings()

    let facesBoundingBoxes: [CAShapeLayer] = observedFaces.flatMap({ (observedFace: VNFaceObservation) -> [CAShapeLayer] in
        let faceBoundingBoxOnScreen = self.previewLayer.layerRectConverted(fromMetadataOutputRect: observedFace.boundingBox)

        let image = UIImage(named: "happy_emoji.png")
        let imageView = UIImageView(image: image!)
        imageView.frame = faceBoundingBoxOnScreen
        showCamera.addSubview(imageView)
        let newDrawings = [CAShapeLayer]()

        return newDrawings
    })

    self.drawings = facesBoundingBoxes
}

private func clearDrawings() {
    showCamera.subviews.forEach({ $0.removeFromSuperview() })
}

And this is the code I use to save the images:

@IBAction func onPhotoTaken(_ sender: Any) {
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
    self.photoOutput.capturePhoto(with: settings, delegate: self)

    UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, false, 0.0)
    if let context = UIGraphicsGetCurrentContext() {
        showCamera.layer.render(in: context)
    }
    let outputImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    let topImage = outputImage
    let bottomImage = imageReciber

    let size = CGSize(width: topImage!.size.width, height: topImage!.size.height + bottomImage.size.height)
    UIGraphicsBeginImageContextWithOptions(size, false, 0.0)

    topImage!.draw(in: CGRect(x: 0, y: 0, width: size.width, height: topImage!.size.height))
    bottomImage.draw(in: CGRect(x: 0, y: topImage!.size.height, width: size.width, height: bottomImage.size.height))

    let newImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()

    UIImageWriteToSavedPhotosAlbum(newImage, nil, nil, nil)
}

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard let imageData = photo.fileDataRepresentation()
        else { return }

    let image = UIImage(data: imageData)

    showCamera.image = image
    imageReciber = image!
    UIImageWriteToSavedPhotosAlbum(showCamera.image!, nil, nil, nil)
}

I tried different solutions to delete the white background (or black, depending on whether I pass false or true as the opaque argument when creating the image context), but I always get the emoji image on a white background.

Please help me get the emoji image without the white/black background, drawn over the photo that was taken.
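
For context, the second argument of UIGraphicsBeginImageContextWithOptions is the opaque flag: passing false keeps an alpha channel, while passing true flattens any undrawn pixels to a solid color. As an illustration only (this composite helper is a sketch, not part of my project), drawing the overlay onto the photo in a single transparent context looks like this:

import UIKit

// Draw an overlay that has transparency on top of a photo in one pass.
func composite(photo: UIImage, overlay: UIImage) -> UIImage? {
    // opaque = false, so pixels the overlay leaves undrawn stay transparent
    // instead of being flattened to white or black.
    UIGraphicsBeginImageContextWithOptions(photo.size, false, photo.scale)
    defer { UIGraphicsEndImageContext() }

    let rect = CGRect(origin: .zero, size: photo.size)
    photo.draw(in: rect)   // the photo fills the background
    overlay.draw(in: rect) // transparent overlay pixels let the photo show through
    return UIGraphicsGetImageFromCurrentImageContext()
}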

My full code is:

import UIKit
import AVFoundation
import Vision


class cameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {
    
    private let captureSession = AVCaptureSession()
    private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private var drawings: [CAShapeLayer] = []
    private let photoOutput = AVCapturePhotoOutput()
    
    var imageReciber = UIImage()
    
    @IBOutlet weak var showCamera: UIImageView!
    
    override func viewDidLoad() {
        super.viewDidLoad()
        self.addCameraInput()
        self.showCameraFeed()
        self.getCameraFrames()
        self.captureSession.startRunning()
    }
    
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        self.previewLayer.frame = self.showCamera.frame
    }
    
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection) {
        
        guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            debugPrint("unable to get image from sample buffer")
            return
        }
        self.detectFace(in: frame)
    }
    
    private func addCameraInput() {
        guard let device = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
            mediaType: .video,
            position: .back).devices.first else {
                fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
        }
        let cameraInput = try! AVCaptureDeviceInput(device: device)
        self.captureSession.addInput(cameraInput)
        captureSession.addOutput(photoOutput)
    }
    
    private func showCameraFeed() {
        self.previewLayer.videoGravity = .resizeAspectFill
        self.showCamera.layer.addSublayer(self.previewLayer)
        self.previewLayer.frame = self.showCamera.frame
    }
    
    private func getCameraFrames() {
        self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
        self.captureSession.addOutput(self.videoDataOutput)
        guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
            connection.isVideoOrientationSupported else { return }
        connection.videoOrientation = .portrait
    }
    
    private func detectFace(in image: CVPixelBuffer) {
        let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
            DispatchQueue.main.async {
                if let results = request.results as? [VNFaceObservation] {
                    self.handleFaceDetectionResults(results)
                } else {
                    self.clearDrawings()
                }
            }
        })
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
        try? imageRequestHandler.perform([faceDetectionRequest])
    }
    
    private func handleFaceDetectionResults(_ observedFaces: [VNFaceObservation]) {
        
        self.clearDrawings()
        
        let facesBoundingBoxes: [CAShapeLayer] = observedFaces.flatMap({ (observedFace: VNFaceObservation) -> [CAShapeLayer] in
            let faceBoundingBoxOnScreen = self.previewLayer.layerRectConverted(fromMetadataOutputRect: observedFace.boundingBox)
            
            let image = UIImage(named: "happy_emoji.png")
            let imageView = UIImageView(image: image!)
            imageView.frame = faceBoundingBoxOnScreen
            showCamera.addSubview(imageView)
            let newDrawings = [CAShapeLayer]()
            
            return newDrawings
        })
        
        self.drawings = facesBoundingBoxes
    }
    
    private func clearDrawings() {
        showCamera.subviews.forEach({ $0.removeFromSuperview() })
    }
    
    
    @IBAction func onPhotoTaken(_ sender: Any) {
        
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
        self.photoOutput.capturePhoto(with: settings, delegate: self)
        
        
        UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, false, 0.0)
        if let context = UIGraphicsGetCurrentContext() {
            showCamera.layer.render(in: context)
        }
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        
        
        let topImage = outputImage
        let bottomImage = imageReciber

        let size = CGSize(width: topImage!.size.width, height: topImage!.size.height + bottomImage.size.height)
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)

        topImage!.draw(in: CGRect(x: 0, y: 0, width: size.width, height: topImage!.size.height))
        bottomImage.draw(in: CGRect(x: 0, y: topImage!.size.height, width: size.width, height: bottomImage.size.height))

        let newImage:UIImage = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        
        
        UIImageWriteToSavedPhotosAlbum(newImage, nil, nil, nil)
    }
    
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        
        guard let imageData = photo.fileDataRepresentation()
            else { return }
        
        let image = UIImage(data: imageData)
        
        // Add the image captured from the buffer to the image view and give it a slightly rounded border so it looks good.
        showCamera.image = image
        imageReciber = image!
        UIImageWriteToSavedPhotosAlbum(showCamera.image!, nil, nil, nil)
    }
   
    

}

Thank you in advance.


Solution

  • After looking more calmly into the problem, I discovered how to solve it.

    The problem was that I was rendering the snapshot to an image before the photo had been delivered from the capture pipeline: capturePhoto(with:delegate:) returns immediately, and the captured photo only arrives later in the photoOutput(_:didFinishProcessingPhoto:) callback. To solve this I created a new function that runs after the image has been delivered, and now everything works flawlessly.

        func saveEmoji() {
            showCamera.backgroundColor = UIColor.clear
            // By this point showCamera.image already holds the captured photo,
            // so rendering the layer captures the photo and the emoji subviews together.
            UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, true, 0.0)
            if let context = UIGraphicsGetCurrentContext() {
                showCamera.layer.render(in: context)
            }
            let outputImage = UIGraphicsGetImageFromCurrentImageContext()
            UIGraphicsEndImageContext()

            var topImage = outputImage

            UIImageWriteToSavedPhotosAlbum(topImage!, nil, nil, nil)
            topImage = nil
        }
    

    The function is called after the first image is saved:

        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
            
            guard let imageData = photo.fileDataRepresentation()
                else { return }
            
            let image = UIImage(data: imageData)
            
            showCamera.image = image
            imageReciber = image!
            UIImageWriteToSavedPhotosAlbum(showCamera.image!, nil, nil, nil)
            saveEmoji()
        }