I'm using AVCaptureSession to build a custom camera and I'm trying to take a picture with it. Here is the code that loads the camera...
func reloadCamera() {
    cameraView.backgroundColor = UIColor.clearColor()
    captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPresetHigh

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    if (camera == false) {
        let videoDevices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in videoDevices {
            let device = device as! AVCaptureDevice
            if device.position == AVCaptureDevicePosition.Front {
                captureDevice = device
                break
            } else {
                captureDevice = backCamera
            }
        }
    } else {
        captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    }

    do {
        let input = try AVCaptureDeviceInput(device: captureDevice)

        if captureSession!.canAddInput(input) {
            captureSession!.addInput(input)

            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)

                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
                previewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.Portrait
                previewLayer?.frame = cameraView.bounds
                cameraView.layer.addSublayer(previewLayer!)

                captureSession!.startRunning()
            }
        }
    } catch let error as NSError {
        // Handle any errors
        print(error)
    }
}
Here is how I take a photo...
func didPressTakePhoto() {
    toggleFlash()

    if let videoConnection = stillImageOutput?.connectionWithMediaType(AVMediaTypeVideo) {
        videoConnection.videoOrientation = (previewLayer?.connection.videoOrientation)!

        stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
            if sampleBuffer != nil {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                self.capturedImage = UIImage(data: imageData)

                if self.camera == true {
                    self.capturedImage = UIImage(CGImage: self.capturedImage.CGImage!, scale: 1.0, orientation: UIImageOrientation.Right)
                } else {
                    self.capturedImage = UIImage(CGImage: self.capturedImage.CGImage!, scale: 1.0, orientation: UIImageOrientation.LeftMirrored)
                }

                self.tempImageView.image = self.capturedImage
                UIImageWriteToSavedPhotosAlbum(self.capturedImage, nil, nil, nil)

                self.tempImageView.hidden = false
                self.goButton.hidden = false
                self.cameraView.hidden = true
                self.removeImageButton.hidden = false
                self.captureButton.hidden = true
                self.flashChanger.hidden = true
                self.switchCameraButton.hidden = true
            }
        })
    }
}
The problem is that the captured picture is as large as the entire screen (like Snapchat), but I only want it to be as big as the UIView I'm taking it from. Tell me if you need any more information. Thank you!
First of all, you are the one setting the capture session's preset to AVCaptureSessionPresetHigh. If you don't need that, don't do it; use a smaller preset, such as AVCaptureSessionPreset640x480.
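As a minimal sketch (reusing the implicitly unwrapped captureSession property from your question), you can check that the smaller preset is supported before applying it:

// Sketch: request a smaller preset instead of AVCaptureSessionPresetHigh.
if captureSession!.canSetSessionPreset(AVCaptureSessionPreset640x480) {
    captureSession!.sessionPreset = AVCaptureSessionPreset640x480
}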
Second, no matter what the size of the resulting photo may be, reducing it to the size you need, and displaying it at that size, is entirely up to you. Ultimately, it's just a matter of the size and content mode of your image view, though it is always better to reduce the image size as well, in order to avoid wasting memory (as you would be doing by displaying an image that is significantly larger than what the user is shown).
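As a rough sketch of that second point (assuming the capturedImage, cameraView, and tempImageView properties from your question; the helper name scaledImage(_:toFill:) is made up for illustration), you could redraw the photo at the camera view's size using aspect-fill, so it matches what the preview layer showed:

// Sketch only: scale the captured photo down to the camera view's size,
// using aspect-fill so the result matches what the preview layer displayed.
func scaledImage(image: UIImage, toFill targetSize: CGSize) -> UIImage {
    // Aspect-fill: scale so the image covers the target rect, then center it.
    let scale = max(targetSize.width / image.size.width,
                    targetSize.height / image.size.height)
    let scaledSize = CGSize(width: image.size.width * scale,
                            height: image.size.height * scale)
    let origin = CGPoint(x: (targetSize.width - scaledSize.width) / 2.0,
                         y: (targetSize.height - scaledSize.height) / 2.0)

    UIGraphicsBeginImageContextWithOptions(targetSize, false, 0.0)
    image.drawInRect(CGRect(origin: origin, size: scaledSize))
    let result = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return result
}

Then, in the capture completion handler, you would do something along these lines instead of assigning the full-size photo directly:

self.tempImageView.contentMode = UIViewContentMode.ScaleAspectFill
self.tempImageView.image = self.scaledImage(self.capturedImage, toFill: self.cameraView.bounds.size)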