I'm trying to extract some code out of my view controller to clean it up and keep the code as DRY as possible. It works fine as follows:
class InitialRegistrationViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate, NVActivityIndicatorViewable {

    var session: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if Platform.isPhone {
            session = AVCaptureSession()
            session!.sessionPreset = AVCaptureSessionPresetPhoto
            var frontCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let availableCameraDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
            for device in availableCameraDevices as! [AVCaptureDevice] {
                if device.position == .front {
                    frontCamera = device
                }
            }
            var error: NSError?
            var input: AVCaptureDeviceInput!
            do {
                input = try AVCaptureDeviceInput(device: frontCamera)
            } catch let error1 as NSError {
                error = error1
                input = nil
                print(error!.localizedDescription)
            }
            if error == nil && session!.canAddInput(input) {
                session!.addInput(input)
                stillImageOutput = AVCaptureStillImageOutput()
                stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                if session!.canAddOutput(stillImageOutput) {
                    session!.addOutput(stillImageOutput)
                    session!.startRunning()
                }
            }
        }
    }

    func capturePhoto() {
        if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
            stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
                if sampleBuffer != nil {
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                    let dataProvider = CGDataProvider(data: imageData as! CFData)
                    let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent)
                    let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
                    self.profileImage.image = image
                }
            })
        }
    }
}
but when I extract it to a helper class like below:
import UIKit
import AVFoundation

class ProfilePhoto {

    var session: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?

    func startSession() {
        if Platform.isPhone {
            session = AVCaptureSession()
            session!.sessionPreset = AVCaptureSessionPresetPhoto
            var frontCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let availableCameraDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
            for device in availableCameraDevices as! [AVCaptureDevice] {
                if device.position == .front {
                    frontCamera = device
                }
            }
            var error: NSError?
            var input: AVCaptureDeviceInput!
            do {
                input = try AVCaptureDeviceInput(device: frontCamera)
            } catch let error1 as NSError {
                error = error1
                input = nil
                print(error!.localizedDescription)
            }
            if error == nil && session!.canAddInput(input) {
                session!.addInput(input)
                stillImageOutput = AVCaptureStillImageOutput()
                stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                if session!.canAddOutput(stillImageOutput) {
                    session!.addOutput(stillImageOutput)
                    session!.startRunning()
                }
            }
        }
    }

    func capture() -> UIImage {
        var image: UIImage!
        if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
            stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
                if sampleBuffer != nil {
                    let cgImageRef = self.setBufferData(sampleBuffer: sampleBuffer!)
                    image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                }
            })
        }
        return image
    }

    func setBufferData(sampleBuffer: CMSampleBuffer) -> CGImage {
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
        let dataProvider = CGDataProvider(data: imageData as! CFData)
        let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent)
        return cgImageRef!
    }
}
where in InitialRegistrationViewController I call:
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    profilePhoto.startSession()
}

func capturePhoto() {
    profileImage.image = profilePhoto.capture()
}
I get fatal error: unexpectedly found nil while unwrapping an Optional value when returning the image in profilePhoto.capture().
I don't understand how the session works, as I'm new to iOS, but I think it's because the session is ending(?) when I try to capture the image? Any insight would be great. Thanks.
UPDATE: I upvoted the answer given as it's close enough; below is what worked for me.
func capture(completion: @escaping (UIImage?) -> Void) {
    if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
        stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
            if sampleBuffer != nil {
                let cgImageRef = self.setBufferData(sampleBuffer: sampleBuffer!)
                let image: UIImage! = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                completion(image)
            } else {
                completion(nil)
            }
        })
    } else {
        completion(nil)
    }
}
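And the call site in InitialRegistrationViewController becomes asynchronous as well. Roughly like this (a sketch; the DispatchQueue.main.async hop is a precaution I added, since the completion handler isn't guaranteed to come back on the main thread):

func capturePhoto() {
    profilePhoto.capture { image in
        // Hop back to the main queue before touching UIKit.
        DispatchQueue.main.async {
            self.profileImage.image = image
        }
    }
}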
Your capture() method makes an asynchronous call to get the UIImage, so when it returns [immediately] the value returned is always nil.

The article @dan suggested shows a callback pattern that can be used to return the image to the caller; make sure you understand this mechanism before proceeding.
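To see the timing issue in isolation, here is a minimal sketch (plain GCD instead of AVFoundation, with a made-up loadValue example) of why the return-based version can only ever hand back nil, and how a completion closure fixes it:

// Returning a value that is only assigned inside an asynchronous block never works:
// the function has already returned before the block runs, so `value` is still nil.
func brokenLoadValue() -> String {
    var value: String!
    DispatchQueue.global().async {
        value = "ready"   // runs later, after brokenLoadValue() has returned
    }
    return value          // nil here -> "unexpectedly found nil" on the implicit unwrap
}

// The fix: deliver the value through a completion closure once it actually exists.
func loadValue(completion: @escaping (String) -> Void) {
    DispatchQueue.global().async {
        completion("ready")
    }
}

Applying the same pattern to your helper: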
func capture(result: @escaping (UIImage?) -> Void)
{
    if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo)
    {
        stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler:
            { (sampleBuffer, error) -> Void in
                if sampleBuffer != nil
                {
                    let cgImageRef = self.setBufferData(sampleBuffer: sampleBuffer!)
                    let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                    result(image)
                }
                else
                {
                    result(nil)
                }
        })
    }
    else
    {
        result(nil)
    }
}
And to call it, you could use
capture { (image: UIImage?) -> Void in
    // use the image that was just retrieved
}
You've now made your capture() method asynchronous, and it reports its result through a callback instead of a return value.