I'm building a barcode scanner app. I'm new to Swift and Xcode, and with help from other Stack Overflow posts I've managed to create a screen where I can scan a barcode. My problem is that I don't want the camera preview to be full screen: I want a label at the top, a button at the bottom, and the QR code preview in the center. I've attached a screenshot of the layout I want, but what I'm actually getting is a full-screen preview. How can I achieve this? Here's my code:
@IBAction func scanButtonTapped(_ sender: UIButton) {
    scanQRCodeTapped()
}

func scanQRCodeTapped() {
    DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
        self.view.backgroundColor = UIColor.black
        self.captureSession = AVCaptureSession()
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
            print("No Device Found")
            return
        }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }
        if (self.captureSession.canAddInput(videoInput)) {
            self.captureSession.addInput(videoInput)
        } else {
            self.failed()
            return
        }
        let metadataOutput = AVCaptureMetadataOutput()
        if (self.captureSession.canAddOutput(metadataOutput)) {
            self.captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            self.failed()
            return
        }
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        self.previewLayer.frame = self.view.layer.bounds
        self.previewLayer.videoGravity = .resizeAspectFill
        self.view.layer.addSublayer(self.previewLayer)
        self.captureSession.startRunning()
    }
}

func failed() {
    let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
    ac.addAction(UIAlertAction(title: "OK", style: .default))
    present(ac, animated: true)
    captureSession = nil
}

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()
    if let metadataObject = metadataObjects.first {
        guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
        guard let stringValue = readableObject.stringValue else { return }
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
        login(emailID: stringValue)
    }
    dismiss(animated: true)
}

func found(code: String) {
    print(code)
}

override var prefersStatusBarHidden: Bool {
    return true
}

override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
    return .portrait
}

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    if (captureSession?.isRunning == true) {
        captureSession.stopRunning()
    }
}
You can lay out the screen in the storyboard to match your design: a label at the top, a button at the bottom, and a UIView in the center, like this:

The gray area is a UIView; that is the container we will add the scanner's previewLayer to, so the camera preview fills only that view instead of the whole screen. If you prefer to build the same layout in code rather than in the storyboard, a rough sketch follows.
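A minimal programmatic-layout sketch, assuming you create the three views in code instead of connecting storyboard outlets. The class name ScannerLayoutViewController and the spacing constants are illustrative only; the actual answer below sticks with storyboard outlets.

import UIKit

class ScannerLayoutViewController: UIViewController {

    // Same three pieces as the storyboard version: label on top,
    // a container for the camera preview in the middle, button at the bottom.
    let titleLabel = UILabel()
    let scannerView = UIView()          // the previewLayer will be added to this view
    let scanButton = UIButton(type: .system)

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white

        titleLabel.text = "Scan QR Code"
        titleLabel.textAlignment = .center
        scannerView.backgroundColor = .gray
        scanButton.setTitle("Scan", for: .normal)

        for subview in [titleLabel, scannerView, scanButton] {
            subview.translatesAutoresizingMaskIntoConstraints = false
            view.addSubview(subview)
        }

        // Pin the label to the top, the button to the bottom, and let the
        // scanner container take the space in between.
        NSLayoutConstraint.activate([
            titleLabel.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor, constant: 16),
            titleLabel.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 16),
            titleLabel.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -16),

            scannerView.topAnchor.constraint(equalTo: titleLabel.bottomAnchor, constant: 16),
            scannerView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 16),
            scannerView.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -16),

            scanButton.topAnchor.constraint(equalTo: scannerView.bottomAnchor, constant: 16),
            scanButton.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -16),
            scanButton.centerXAnchor.constraint(equalTo: view.centerXAnchor)
        ])
    }
}

Either way, the important point is that there is a dedicated container view for the camera preview.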
With the outlets connected, the view controller code should look like this:
import UIKit
import AVFoundation   // capture session, device, and preview layer
import AudioToolbox   // AudioServicesPlaySystemSound / kSystemSoundID_Vibrate

class QRCodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var titleLabel: UILabel!
    @IBOutlet weak var scannerView: UIView!   // the gray container view from the storyboard
    @IBOutlet weak var scanButton: UIButton!

    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    @IBAction func scanButtonAction(_ sender: UIButton) {
        scanQRCodeTapped()
    }

    func scanQRCodeTapped() {
        self.scannerView.backgroundColor = UIColor.black
        DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
            self.captureSession = AVCaptureSession()
            guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
                print("No Device Found")
                return
            }
            let videoInput: AVCaptureDeviceInput
            do {
                videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
            } catch {
                return
            }
            if (self.captureSession.canAddInput(videoInput)) {
                self.captureSession.addInput(videoInput)
            } else {
                self.failed()
                return
            }
            let metadataOutput = AVCaptureMetadataOutput()
            if (self.captureSession.canAddOutput(metadataOutput)) {
                self.captureSession.addOutput(metadataOutput)
                metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
                metadataOutput.metadataObjectTypes = [.qr]
            } else {
                self.failed()
                return
            }
            // Add the preview layer to the center container view instead of
            // self.view, so the camera preview no longer covers the whole screen.
            self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            self.previewLayer.frame = self.scannerView.layer.bounds
            self.previewLayer.videoGravity = .resizeAspectFill
            self.scannerView.layer.addSublayer(self.previewLayer)
            self.captureSession.startRunning()
        }
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
            login(emailID: stringValue)   // your existing login call
        }
        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }
}
When you tap the scan button, the output will look like the attached screenshot: the camera preview appears only inside the center view, with the label above it and the button below it.
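One caveat: the preview layer's frame is set only once, inside the asyncAfter block, so it won't follow the container if scannerView is later resized by Auto Layout. An optional addition (my suggestion, not part of the original answer) is to keep the layer in sync from viewDidLayoutSubviews in QRCodeViewController:

// Optional: keep the preview layer sized to the gray container
// whenever Auto Layout changes its bounds.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    previewLayer?.frame = scannerView.bounds
}

It may also be worth calling captureSession.startRunning() on a background queue, since Apple documents it as a blocking call.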