Tags: ios, swift, avassetwriter

Black frames while converting an array of UIImages to video


I'm trying to convert an array of UIImages to a video, but I get a lot of black frames in the resulting file (for example, 4 black frames at the beginning, then 3 good frames, then 3 black frames, then 2 good frames, and this pattern repeats until the end of the video).

My code is based on this solution, but I believe the main source of the problem is in this part of the code:

func build(progress: (NSProgress -> Void), success: (NSURL -> Void), failure: (NSError -> Void)) {
    // video sizes and path to the temporary output file
    let inputSize = CGSize(width: 568, height: 320)
    let outputSize = CGSize(width: 568, height: 320)
    var error: NSError?
    let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! NSString
    let videoOutputURL = NSURL(fileURLWithPath: documentsPath.stringByAppendingPathComponent("TempVideo.mov"))!
    NSFileManager.defaultManager().removeItemAtURL(videoOutputURL, error: nil)

    videoWriter = AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeMPEG4, error: &error)

    if let videoWriter = videoWriter {
        let videoSettings: [NSObject : AnyObject] = [
            AVVideoCodecKey  : AVVideoCodecH264,
            AVVideoWidthKey  : outputSize.width,
            AVVideoHeightKey : outputSize.height,
        ]

        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: videoWriterInput,
            sourcePixelBufferAttributes: [
                kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32ARGB,
                kCVPixelBufferWidthKey : inputSize.width,
                kCVPixelBufferHeightKey : inputSize.height,
            ]
        )

        assert(videoWriter.canAddInput(videoWriterInput))
        videoWriter.addInput(videoWriterInput)

        if videoWriter.startWriting() {
            videoWriter.startSessionAtSourceTime(kCMTimeZero)
            assert(pixelBufferAdaptor.pixelBufferPool != nil)

            let media_queue = dispatch_queue_create("mediaInputQueue", nil)

            videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
                let fps: Int32 = 30
                let frameDuration = CMTimeMake(1, fps)
                let currentProgress = NSProgress(totalUnitCount: Int64(self.photoURLs.count))

                var frameCount: Int64 = 0

                for var i = 0; i < self.photoURLs.count - 1; i++ {

                    var currentFrame = self.photoURLs[i]
                    var lastFrameTime = CMTimeMake(Int64(i), fps)
                    var presentationTime = CMTimeAdd(lastFrameTime, frameDuration)

                    //this one is needed because sometimes videoWriter is not ready, and we have to wait for a while
                    while videoWriterInput.readyForMoreMediaData == false {
                        var maxDate = NSDate(timeIntervalSinceNow: 0.5)
                        var currentRunLoop = NSRunLoop()
                        currentRunLoop.runUntilDate(maxDate)

                    }

                    self.appendPixelBufferForImageAtURL(currentFrame, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime)

                    frameCount++
                    currentProgress.completedUnitCount = frameCount
                    progress(currentProgress)

                }

                videoWriterInput.markAsFinished()
                videoWriter.finishWritingWithCompletionHandler { () -> Void in
                    if error == nil {
                        success(videoOutputURL)
                    }
                }
            })
        } else {
            error = NSError(
                domain: kErrorDomain,
                code: kFailedToStartAssetWriterError,
                userInfo: ["description": "AVAssetWriter failed to start writing"]
            )
        }
    }

    if let error = error {
        failure(error)
    }
}

Obviously I'm doing something wrong, but what? I think the problem is in the code above, since some of the images convert without any issues, but there are also two more functions that handle the pixel buffer:

func appendPixelBufferForImageAtURL(image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = true

    autoreleasepool {
        var pixelBuffer: Unmanaged<CVPixelBuffer>?
        let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            pixelBufferAdaptor.pixelBufferPool,
            &pixelBuffer
        )

        if let pixelBuffer = pixelBuffer where status == 0 {
            let managedPixelBuffer = pixelBuffer.takeRetainedValue()

            fillPixelBufferFromImage(image, pixelBuffer: managedPixelBuffer)

            appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(
                managedPixelBuffer,
                withPresentationTime: presentationTime
            )
        } else {
            NSLog("error: Failed to allocate pixel buffer from pool")
        }
    }

    return appendSucceeded
}

func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBufferRef) {

    let imageData = CGDataProviderCopyData(CGImageGetDataProvider(image.CGImage))
    let lockStatus:UInt8 = UInt8(CVPixelBufferLockBaseAddress(pixelBuffer, 0))

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    let context = CGBitmapContextCreate(
        pixelData,
        Int(568),
        Int(320),
        8,
        Int(8 * 320),
        rgbColorSpace,
        bitmapInfo
    )

    var imageDataProvider = CGDataProviderCreateWithCFData(imageData)
    var imageRef = CGImageCreateWithJPEGDataProvider(imageDataProvider, nil, true, kCGRenderingIntentDefault)

    CGContextDrawImage(context, CGRectMake(0, 0, 568, 320), imageRef)

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)
}

Solution

  • So I was able to solve this by rewriting the fillPixelBufferFromImage function, using an example I found here: CVPixelBufferPool Error (kCVReturnInvalidArgument / -6661)

    Here's the Swift 2 - Xcode 7 GM solution that's working for me:

     public func build(progress: (NSProgress -> Void), success: (NSURL -> Void), failure: (NSError -> Void)) {
        let inputSize = CGSize(width: 600, height: 600)
        let outputSize = CGSize(width: 600, height: 600)
        var error: NSError?
    
        let fileManager = NSFileManager.defaultManager()
        let urls = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
        guard let documentDirectory: NSURL = urls.first else {
          fatalError("documentDir Error")
        }
    
        let videoOutputURL = documentDirectory.URLByAppendingPathComponent("AssembledVideo.mov")
    
        if NSFileManager.defaultManager().fileExistsAtPath(videoOutputURL.path!) {
          do {
            try NSFileManager.defaultManager().removeItemAtPath(videoOutputURL.path!)
          }catch{
            fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
          }
        }
    
        guard let videoWriter = try? AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie) else{
          fatalError("AVAssetWriter error")
        }
    
        let outputSettings = [
          AVVideoCodecKey  : AVVideoCodecH264,
          AVVideoWidthKey  : NSNumber(float: Float(outputSize.width)),
          AVVideoHeightKey : NSNumber(float: Float(outputSize.height)),
        ]
    
        guard videoWriter.canApplyOutputSettings(outputSettings, forMediaType: AVMediaTypeVideo) else {
          fatalError("Negative : Can't apply the Output settings...")
        }
    
        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    
        let sourcePixelBufferAttributesDictionary = [
          kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB),
          kCVPixelBufferWidthKey as String: NSNumber(float: Float(inputSize.width)),
          kCVPixelBufferHeightKey as String: NSNumber(float: Float(inputSize.height)),
        ]
    
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
          assetWriterInput: videoWriterInput,
          sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary
        )
    
        assert(videoWriter.canAddInput(videoWriterInput))
        videoWriter.addInput(videoWriterInput)
    
        if videoWriter.startWriting() {
          videoWriter.startSessionAtSourceTime(kCMTimeZero)
          assert(pixelBufferAdaptor.pixelBufferPool != nil)
    
          let media_queue = dispatch_queue_create("mediaInputQueue", nil)
    
          videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
            let fps: Int32 = 1
            let frameDuration = CMTimeMake(1, fps)
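            // CMTimeMake(1, fps) with fps = 1 gives a one-second frame duration, i.e. roughly one photo per second of output video.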
            let currentProgress = NSProgress(totalUnitCount: Int64(self.photoURLs.count))
    
            var frameCount: Int64 = 0
            var remainingPhotoURLs = [String](self.photoURLs)
    
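            // Keep pulling photos and appending frames while the writer input can accept more data.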
            while (videoWriterInput.readyForMoreMediaData && !remainingPhotoURLs.isEmpty) {
              let nextPhotoURL = remainingPhotoURLs.removeAtIndex(0)
              let lastFrameTime = CMTimeMake(frameCount, fps)
              let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
    
    
              if !self.appendPixelBufferForImageAtURL(nextPhotoURL, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                error = NSError(domain: kErrorDomain, code: kFailedToAppendPixelBufferError,
                  userInfo: [
                    "description": "AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer",
                    "rawError": videoWriter.error ?? "(none)"
                  ])
    
                break
              }
    
              frameCount++
    
              currentProgress.completedUnitCount = frameCount
              progress(currentProgress)
            }
    
            videoWriterInput.markAsFinished()
            videoWriter.finishWritingWithCompletionHandler { () -> Void in
              if error == nil {
                success(videoOutputURL)
              }
            }
          })
        } else {
          error = NSError(domain: kErrorDomain, code: kFailedToStartAssetWriterError,
            userInfo: ["description": "AVAssetWriter failed to start writing"]
          )
        }
    
        if let error = error {
          failure(error)
        }
      }
    
      public func appendPixelBufferForImageAtURL(urlString: String, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
        var appendSucceeded = true
    
        autoreleasepool {
          if let image = UIImage(contentsOfFile: urlString) {
    
              var pixelBuffer: CVPixelBuffer? = nil
              let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
    
              if let pixelBuffer = pixelBuffer where status == 0 {
                let managedPixelBuffer = pixelBuffer
    
                fillPixelBufferFromImage(image.CGImage!, pixelBuffer: managedPixelBuffer)
    
                appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
    
              } else {
                NSLog("error: Failed to allocate pixel buffer from pool")
              }
           }
        }
    
        return appendSucceeded
      }
    
    
      func fillPixelBufferFromImage(image: CGImage, pixelBuffer: CVPixelBuffer){
        let frameSize = CGSizeMake(CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image)))
        CVPixelBufferLockBaseAddress(pixelBuffer, 0)
        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
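        // Use the pixel buffer's own bytes-per-row below; buffers vended by the pool
        // may be row-padded, so a hard-coded stride would not match the buffer's layout.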
        let context = CGBitmapContextCreate(data, Int(frameSize.width), Int(frameSize.height), 8, CVPixelBufferGetBytesPerRow(pixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)
        CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))), image)
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)
      }
    

    Working project files here: https://github.com/justinlevi/imagesToVideo
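
    For context, here is a minimal sketch of how the functions above might be wrapped in a class and invoked. The class name ImagesToVideoBuilder, the error constant values, and the assembleVideo helper are assumptions added for illustration; they are not part of the original answer (see the linked project for the author's actual setup).

     // NOTE: the class name ImagesToVideoBuilder, the error constant values and the
     // assembleVideo(_:) helper are hypothetical; the original answer does not show
     // where photoURLs, videoWriter or kErrorDomain are declared.
     import AVFoundation
     import UIKit

     let kErrorDomain = "ImagesToVideoBuilder"
     let kFailedToStartAssetWriterError = 0
     let kFailedToAppendPixelBufferError = 1

     class ImagesToVideoBuilder {
       let photoURLs: [String]
       var videoWriter: AVAssetWriter?

       init(photoURLs: [String]) {
         self.photoURLs = photoURLs
       }

       // build(progress:success:failure:), appendPixelBufferForImageAtURL(...) and
       // fillPixelBufferFromImage(...) from the answer above go here.
     }

     // Example call site, e.g. from a view controller:
     func assembleVideo(photoPaths: [String]) {
       let builder = ImagesToVideoBuilder(photoURLs: photoPaths)
       builder.build(
         { progress in
           print("\(progress.completedUnitCount) of \(progress.totalUnitCount) frames appended")
         },
         success: { videoURL in
           print("Video written to \(videoURL)")
         },
         failure: { error in
           print("Video assembly failed: \(error.localizedDescription)")
         })
     }

    Note that build reports progress and completion from its background media_queue, so any UI updates performed in those closures should be dispatched back to the main queue.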