Tags: ios, swift, delegates, buffer, avfoundation

didOutputSampleBuffer delegate not called


The didOutputSampleBuffer delegate method in my code is never called, and I can't figure out why. Here's the code:

import UIKit
import AVFoundation
import Accelerate

class ViewController: UIViewController {

var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
var customPreviewLayer: AVCaptureVideoPreviewLayer?

@IBOutlet weak var camView: UIView!

override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    captureSession?.startRunning()
    //setupCameraSession()
}

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    //captureSession?.startRunning()
    setupCameraSession()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func setupCameraSession() {
    // Session
    self.captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
    // Capture device
    let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    var deviceInput = AVCaptureDeviceInput()

    do {
        deviceInput = try AVCaptureDeviceInput(device: inputDevice)
    } catch let error as NSError {
        print(error)
    }
    if captureSession!.canAddInput(deviceInput) {
        captureSession!.addInput(deviceInput)
    }
    // Preview

    self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.customPreviewLayer!.frame = camView.bounds
    self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    camView.layer.addSublayer(self.customPreviewLayer!)
    print("Cam layer added")

    self.dataOutput = AVCaptureVideoDataOutput()
    self.dataOutput!.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    dataOutput!.alwaysDiscardsLateVideoFrames = true
    if captureSession!.canAddOutput(dataOutput) {
        captureSession!.addOutput(dataOutput)
    }
    captureSession!.commitConfiguration()
    let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
    let delegate = VideoDelegate()
    dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
}




 func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    // On iOS the luma is contained in its own full-resolution plane (8-bit)
    let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let lumaBuffer: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
    let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)! // a gray context takes no alpha channel
    let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0) // balance the lock above
    dispatch_sync(dispatch_get_main_queue(), {() -> Void in
        self.customPreviewLayer!.contents = dstImageFilter as AnyObject
    })

}


}

And here is my VideoDelegate code:

import Foundation
import AVFoundation
import UIKit

// Video Delegate
class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{

    func captureOutput(captureOutput: AVCaptureOutput!,
        didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){
            print("hihi")

    }


    func captureOutput(captureOutput: AVCaptureOutput!,
        didDropSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){

            print("LOL")
    }


}

Why does"t my delegate get called and how to fix it? I've checked similar question on stack overflow but but i can't find a method to solve this. Please help.


Solution

  • I found the problem! The delegate was a local variable inside setupCameraSession(), so it was deallocated as soon as the method returned and there was nothing left for the session to call; evidently the data output does not keep its sample-buffer delegate alive. The fix is to make an object that stays alive the delegate, in this case the view controller itself.
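    A minimal sketch of the failure mode (identifiers taken from the code above; the lifetime comments are my reading, on the assumption that the output holds its delegate weakly):

        func setupCameraSession() {
            // ... session, input, and output setup as above ...
            let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
            let delegate = VideoDelegate()  // the only strong reference is this local
            dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
        }   // delegate is deallocated here, so no callback can ever fire

    Here is the modified code: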

    import UIKit
    import AVFoundation
    import Accelerate
    
    var customPreviewLayer: AVCaptureVideoPreviewLayer?
    
    class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    //var customPreviewLayer: AVCaptureVideoPreviewLayer?
    
    @IBOutlet weak var camView: UIView!
    
    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        //setupCameraSession()
    }
    
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        //captureSession?.startRunning()
        setupCameraSession()
        self.captureSession?.startRunning()
    }
    
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
    
    func setupCameraSession() {
        // Session
        self.captureSession = AVCaptureSession()
        self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
        // Capture device
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        var deviceInput = AVCaptureDeviceInput()
        // Device input
        //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error)
        do {
            deviceInput = try AVCaptureDeviceInput(device: inputDevice)
    
        } catch let error as NSError {
            // Handle errors
            print(error)
        }
        if self.captureSession!.canAddInput(deviceInput) {
            self.captureSession!.addInput(deviceInput)
        }
        // Preview
        customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        customPreviewLayer!.frame = camView.bounds
        customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        self.camView.layer.addSublayer(customPreviewLayer!)
        print("Cam layer added")
    
        self.dataOutput = AVCaptureVideoDataOutput()
        self.dataOutput!.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
    
        self.dataOutput!.alwaysDiscardsLateVideoFrames = true
        if self.captureSession!.canAddOutput(dataOutput) {
            self.captureSession!.addOutput(dataOutput)
        }
        self.captureSession!.commitConfiguration()
        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        //let delegate = VideoDelegate()
        self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
    }
    
    
    
    
     func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("buffered")
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
        let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)! // a gray context takes no alpha; PremultipliedLast returns nil here

        let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0) // balance the lock above
        dispatch_sync(dispatch_get_main_queue(), {() -> Void in
            customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }
    
    
    
    }
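    Making the view controller the delegate is one fix, but not the only one: keeping the original VideoDelegate object alive works just as well. A minimal sketch under the same assumption (AltViewController is a hypothetical name, and the input/preview setup is elided for brevity):

        import UIKit
        import AVFoundation

        class AltViewController: UIViewController {

            // Stored property: the controller now owns the delegate,
            // so it outlives setupCameraSession().
            let videoDelegate = VideoDelegate()

            var captureSession: AVCaptureSession?
            var dataOutput: AVCaptureVideoDataOutput?

            override func viewDidLoad() {
                super.viewDidLoad()
                captureSession = AVCaptureSession()
                dataOutput = AVCaptureVideoDataOutput()
                // ... add the device input and the output exactly as above ...
                let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
                // Same call as before; only the delegate's lifetime changed.
                dataOutput!.setSampleBufferDelegate(videoDelegate, queue: queue)
                captureSession!.startRunning()
            }
        }

    Either way, the point is the same: whoever calls setSampleBufferDelegate must also keep a strong reference to the delegate object.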