Search code examples
iosiphoneswiftavaudioplayer

Adding audio to iphone app xcode 7.3


I am very new to Swift and Xcode.

I am trying to design a simple camera App. I have successfully integrated camera, except I want to play a sound when the camera takes a picture. Im unsure about how to go about this.

Below is my code and I keep getting an error when invoking

Error : (EXC_BAD_ACCESS(code=1, address=0x38)

in the line audioPlayer.play()

import UIKit
import AVFoundation

class ViewController: UIViewController {

var captureSession = AVCaptureSession()
var sessionOutput = AVCaptureStillImageOutput()
var previewLayer = AVCaptureVideoPreviewLayer()


    var kranz = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("Din Daa Daa ; George Kranz", ofType: "mp3")!)
    var audioPlayer = AVAudioPlayer()


    override func viewDidLoad() {
        super.viewDidLoad()
        do {let audioPlayer = try AVAudioPlayer(contentsOfURL: kranz, fileTypeHint: nil)
        audioPlayer.prepareToPlay()

        }
       catch{
           print("error")
        }
    }



@IBOutlet weak var CameraView: UIView!

override func viewWillAppear(animated: Bool) {

    let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
    for device in devices {
        if device.position == AVCaptureDevicePosition.Back {

            do {
                let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                if captureSession.canAddInput(input){
                    captureSession.addInput(input)
                    sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

                    if captureSession.canAddOutput(sessionOutput){
                        captureSession.addOutput(sessionOutput)
                        captureSession.startRunning()
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                        previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                        CameraView.layer.addSublayer(previewLayer)

                        previewLayer.position = CGPoint(x: self.CameraView.frame.width / 2, y: self.CameraView.frame.height / 2)
                        previewLayer.bounds = CameraView.frame

                    }
                }

            }
            catch{
                print("ERror")
            }
        }
    }
}


@IBAction func TakePhoto(sender: UIButton) {
    audioPlayer.play()
    if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){

        sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
            buffer, error in

            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
            UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)

            })
    }
}


}

Solution

  • import UIKit import AVFoundation
    
     class ViewController: UIViewController {
    
       var captureSession = AVCaptureSession()
       var sessionOutput = AVCaptureStillImageOutput()
       var previewLayer = AVCaptureVideoPreviewLayer()
       var audioPlayer = AVAudioPlayer()
    
       override viewDidLoad(){
          super.viewDidLoad()
    
              prepareMusic("LazerNoise",type : "mp3" )      
             } 
    
       func prepareMusic(name :String! , type :String!){
    
            let path = NSBundle.mainBundle().pathForResource(name, ofType: type)
            let soundURL = NSURL(fileURLWithPath: path!)
        do{
           try audioPlayer = AVAudioPlayer(contentsOfURL: soundURL)
           audioPlayer.prepareToPlay()
        }
      catch let err as NSError
       {
        print(err.debugDescription)
      }
    
     }
    
     func playNstop(){
    
        if audioPlayer.playing{
    
          audioPlayer.stop()
       }else{
    
        audioPlayer.play()
       }
     }
    
    
    
    @IBOutlet weak var CameraView: UIView!
    
     override func viewWillAppear(animated: Bool) {
    
      let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
       for device in devices {
        if device.position == AVCaptureDevicePosition.Back {
    
            do {
                let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                if captureSession.canAddInput(input){
                    captureSession.addInput(input)
                    sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    
                    if captureSession.canAddOutput(sessionOutput){
                        captureSession.addOutput(sessionOutput)
                        captureSession.startRunning()
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                        previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                        CameraView.layer.addSublayer(previewLayer)
    
                        previewLayer.position = CGPoint(x: self.CameraView.frame.width / 2, y: self.CameraView.frame.height / 2)
                        previewLayer.bounds = CameraView.frame
    
                    }
                }
    
            }
            catch{
                print("ERror")
            }
          }
        }
      }
    
    
    @IBAction func TakePhoto(sender: UIButton) {
    
     audioPlayer.play()
     if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
            buffer, error in
    
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
            UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
    
            })
         }
       }
     }