I made a live translation app that identifies an object through the user's camera and translates its name. It works fine on my iPhone 6s and doesn't crash in any of the simulators, but when I run it on an iPhone 6, it crashes as soon as I try to segue to the camera feed. Apple says it crashes on iPads as well.
Do certain devices just not support the Vision API, or is something wrong with my code?
import UIKit
import AVKit
import Vision

var lang = ""
var lang2 = ""

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {

    @IBAction func screenshotB(_ sender: Any) {
        //screenshot camera screen view
    }

    @IBOutlet weak var screenshotBOutlet: UIButton!
    @IBOutlet weak var swirlyGuy: UIActivityIndicatorView!
    @IBOutlet weak var title1: UILabel!
    @IBOutlet weak var settingsButtonOutlet: UIButton!
    @IBOutlet weak var launchScreen: UIViewX!
    @IBOutlet weak var launchScreenLogo: UIImageView!

    func stopSwirlyGuy() {
        swirlyGuy.stopAnimating()
    }

    let identifierLabel: UILabel = {
        let label = UILabel()
        label.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
        label.textColor = .white
        label.textAlignment = .center
        label.translatesAutoresizingMaskIntoConstraints = false
        return label
    }()

    @IBAction func prepareForUnwind(segue: UIStoryboardSegue) {
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        launchScreen.alpha = 1
        launchScreenLogo.alpha = 1
        swirlyGuy.startAnimating()

        // start up the camera
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = .hd4K3840x2160

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)
        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)

        setupIdentifierConfidenceLabel()
        setupSettingsButton()
        setupTitle()
        setupSwirlyGuy()
        setupScreenshot()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        UIView.animate(withDuration: 1.5) {
            self.launchScreen.alpha = 0
            self.launchScreenLogo.alpha = 0
        }
    }

    fileprivate func setupSettingsButton() {
        view.addSubview(settingsButtonOutlet)
    }

    fileprivate func setupScreenshot() {
        view.addSubview(screenshotBOutlet)
    }

    fileprivate func setupSwirlyGuy() {
        view.addSubview(swirlyGuy)
    }

    fileprivate func setupTitle() {
        view.addSubview(title1)
    }

    fileprivate func setupIdentifierConfidenceLabel() {
        view.addSubview(identifierLabel)
        identifierLabel.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
        identifierLabel.leftAnchor.constraint(equalTo: view.leftAnchor).isActive = true
        identifierLabel.rightAnchor.constraint(equalTo: view.rightAnchor).isActive = true
        identifierLabel.heightAnchor.constraint(equalToConstant: 100).isActive = true
        identifierLabel.numberOfLines = 0
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // print("Camera was able to capture a frame:", Date())
        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // model
        guard let model = try? VNCoreMLModel(for: Resnet50().model) else { return }
        let request = VNCoreMLRequest(model: model) { (finishedReq, err) in
            //perhaps check the err
            // print(finishedReq.results)
            guard let results = finishedReq.results as? [VNClassificationObservation] else { return }
            guard let firstObservation = results.first else { return }

            print(firstObservation.identifier, firstObservation.confidence)

            let x = firstObservation.confidence
            let y = (x * 10000).rounded() / 10000
            let z = firstObservation.identifier
            let s = self.translateSpanish(object1: firstObservation.identifier)
            let f = self.translateFrench(object1: firstObservation.identifier)

            // var lang = ""
            // var lang2 = ""
            if language == "English" {
                lang = z
            } else if language == "Spanish" {
                lang = s
            } else {
                lang = f
            }

            if language2 == "Spanish" {
                lang2 = s
            } else if language2 == "English" {
                lang2 = z
            } else {
                lang2 = f
            }

            DispatchQueue.main.async {
                self.identifierLabel.text = "\(lang)" + " = " + "\(lang2) \n \(y * 100)% accuracy"
                self.stopSwirlyGuy()
            }
        }

        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
    //Translation functions omitted for brevity
}
This is the code for the view controller that segues into the main screen where the camera feed and Vision processing take place.
import UIKit

class FirstLaunchViewController: UIViewController {

    @IBOutlet weak var title1: UILabelX!
    @IBOutlet weak var logo1: UIImageView!
    @IBOutlet weak var description1: UILabel!
    @IBOutlet weak var buttonOutlet: UIButtonX!
    @IBOutlet weak var initialBackground: UIViewX!
    @IBOutlet weak var initialLogo: UIImageView!

    @IBAction func toVC(_ sender: Any) {
        UserDefaults.standard.set(false, forKey: "name")
        performSegue(withIdentifier: "toMain", sender: self)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        initialLogo.alpha = 1
        initialBackground.alpha = 1
        title1.alpha = 0
        logo1.alpha = 0
        description1.alpha = 0
        buttonOutlet.alpha = 0
        // Do any additional setup after loading the view.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        UIView.animate(withDuration: 1.5, animations: {
            self.initialLogo.alpha = 0
            self.initialBackground.alpha = 0
        }) { _ in
            self.initialBackgroundGone()
        }
    }

    func initialBackgroundGone() {
        UIView.animate(withDuration: 1.5, animations: {
            self.title1.alpha = 1
        }) { _ in
            self.showBackgroundAgain()
        }
    }

    func showBackgroundAgain() {
        UIView.animate(withDuration: 1.3, animations: {
            self.logo1.alpha = 1
        }) { _ in
            self.showTitle()
        }
    }

    func showTitle() {
        UIView.animate(withDuration: 1.5, animations: {
            self.description1.alpha = 1
        }) { _ in
            self.showEverythingElse()
        }
    }

    func showEverythingElse() {
        UIView.animate(withDuration: 3.5) {
            self.buttonOutlet.alpha = 1
        }
    }
}
This is a lot of code, but I think your issue comes from the video preset you are using: the iPhone 6 doesn't support 4K video recording, and assigning a session preset the device can't provide raises an exception at runtime. When setting the session preset, you should check that it is supported by all of your targeted devices:
if captureSession.canSetSessionPreset(.hd4K3840x2160) {
    captureSession.sessionPreset = .hd4K3840x2160
} else {
    captureSession.sessionPreset = .high // or any other preset that suits your needs
}
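If you'd rather keep the best quality each device can deliver instead of hard-coding a single fallback, you can walk an ordered list of presets and take the first one the session accepts. Here is a minimal sketch; the helper name and the preset ordering are my own choices, so adjust them to your needs:

import AVFoundation

// Returns the first preset this session supports, in descending
// order of preference, falling back to .high if none match.
func bestSupportedPreset(for session: AVCaptureSession) -> AVCaptureSession.Preset {
    let preferred: [AVCaptureSession.Preset] = [.hd4K3840x2160, .hd1920x1080, .hd1280x720]
    return preferred.first(where: { session.canSetSessionPreset($0) }) ?? .high
}

// Usage in viewDidLoad, before startRunning():
// captureSession.sessionPreset = bestSupportedPreset(for: captureSession)

That way the app picks 4K on a 6s or newer, 1080p on an iPhone 6, and so on, without crashing on older hardware.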