Search code examples
iosobjective-cxcodecodecmovie

iOS MOV rendering producing odd results (obj-c)


I'm creating an extension to an AS3 AIR application that will take in pixel data as ARGB32 and I want to render it to a .mov container using the H264 codec and then send it to the users camera roll. I'm currently running a test harness which takes an image from a local path (on my desktop) as a .jpeg, gets the pixel data out and renders it 50 times over into a short clip. The problem I'm having is when I render it on 400x200, it works fine however when I use an image of any other size, the frames come out very weirdly (images splayed diagonally, sometimes with a little black bar at the bottom right as if there are missing pixels).

I have a feeling its todo with the height and width as those are the only things that change from what I can see. Here is my code below:

   - (void)initFunction:(NSString *)context width:(int)widthData height:(int)heightData fps:(int)fpsData
     {
        NSLog(@"Initializing...");

        error = nil;
        frameCount = 0;                                     //Used for itterating
        fps = fpsData;                                      //FPS from AS3
        width = widthData;                                  //Width from AS3
        height = heightData;                                //Height from AS3
        numberOfSecondsPerFrame = 1.0f/(float)fps;          //Seconds per frame
        frameDuration = fps * numberOfSecondsPerFrame;      //Frame showing time
        imageSize = CGSizeMake(width, height);              //imageSize from AS3

        // Setup and remove pre-existing "render.mov"
        fileMgr = [NSFileManager defaultManager];
        documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
        videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"render.mov"];

        if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        {
            NSLog(@"This is the first VS render on this device: %@", [error localizedDescription]);
        } 
        else
        {
            NSLog(@"Removed previous render file in save path.");
        }

        NSLog(@"Starting render.");

        videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
        NSParameterAssert(videoWriter);

        videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                         AVVideoCodecH264, AVVideoCodecKey,
                         [NSNumber numberWithInt:width], AVVideoWidthKey,
                         [NSNumber numberWithInt:height], AVVideoHeightKey,
                         nil];

        videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

        NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys
                                          [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

        adaptor = [AVAssetWriterInputPixelBufferAdaptor
                   assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                   sourcePixelBufferAttributes:nil];

        NSParameterAssert(videoWriterInput);
        NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
        videoWriterInput.expectsMediaDataInRealTime = YES;
        [videoWriter addInput:videoWriterInput];

        //Start a session:
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        buffer = NULL;

        NSLog(@"**************************************************");
    }

    -(void) sendToCameraRoll:(NSString *)videoPath {
        UISaveVideoAtPathToSavedPhotosAlbum(videoPath, nil, NULL, nil);
    }

    //Take in URL from AIR
    - (void)addFrameFromBytes:(CGImageRef *)FrameBytes {

        UIImage *image = [UIImage imageWithCGImage:*FrameBytes];
        CGImageRef cgImage = [image CGImage];

        buffer = [self pixelBufferFromCGImage:cgImage];

        append_ok = NO;
        int j = 0;
        while (!append_ok) {
            if (adaptor.assetWriterInput.readyForMoreMediaData)  {

                frameCount = frameCount+1;
                NSLog(@"Processing video frame %d",frameCount);

                frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                [NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
                if(!append_ok){
                    error = videoWriter.error;
                    if(error!=nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            } else {
                printf("Adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
            }
            j++;
        }

        if (!append_ok) {
            printf("Error appending image %d times %d\n, with error.", frameCount, j);
        }
    }

    //Take in Path from AIR
    - (void)addFrameFromURL:(NSString *)FramePath {
        UIImage *image = [UIImage imageWithContentsOfFile:FramePath];
        CGImageRef cgImage = [image CGImage];

        buffer = [self pixelBufferFromCGImage:cgImage];

        append_ok = NO;
        int j = 0;
        while (!append_ok) {
            if (adaptor.assetWriterInput.readyForMoreMediaData)  {

                frameCount = frameCount+1;
                NSLog(@"Processing video frame %d",frameCount);

                frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                [NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
                if(!append_ok){
                    error = videoWriter.error;
                    if(error!=nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            } else {
                printf("Adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
            }
            j++;
        }

        if (!append_ok) {
            printf("Error appending image %d times %d\n, with error.", frameCount, j);
        }
    }

    - (void)saveVideoWithTrack:(NSString *)AudioTrackPath {

        NSLog(@"**************************************************");

        //Finish the session:
        [videoWriterInput markAsFinished];

        [videoWriter finishWritingWithCompletionHandler:^{
            NSLog(@"Render complete."); ///Used to satisfy 64-bit devices
        }];

        sleep(1); //Required to avoid crash due to overload

        NSLog(@"Render complete.");

        //Audio File Addition
        NSLog(@"Singing Tracks...");
        AVMutableComposition *mixComposition = [AVMutableComposition composition];
        //NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
        //NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
        NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:AudioTrackPath];

        //Get the final video path
        NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

        //Create the final video amd export as MOV
        NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_output.mov"];
        NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

        if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
            [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
        }

        CMTime nextClipStartTime = kCMTimeZero;

        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
        CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

        AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

        //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
        AVURLAsset *audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

        AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];

        //Supported file types include...
        //NSLog(@"Supported file types are %@", [_assetExport supportedFileTypes]);

        _assetExport.outputFileType = @"com.apple.quicktime-movie";
        _assetExport.outputURL = outputFileUrl;

        [_assetExport exportAsynchronouslyWithCompletionHandler:^(){
            NSString *exported = [[NSString alloc] initWithString:[outputFileUrl path]];
            [self sendToCameraRoll:exported];
        }];

        NSLog(@"Saved to camera roll. %@", outputFilePath);

    }

    - (void)saveVideo {

        NSLog(@"**************************************************");

        //Finish the session:
        [videoWriterInput markAsFinished];

        [videoWriter finishWritingWithCompletionHandler:^{
            NSLog(@"Finished"); //Used to satisfy 64-bit systems.
        }];

        sleep(1); //Required to avoid system crash

        NSLog(@"Render complete.");

        [self sendToCameraRoll:videoOutputPath];

        NSLog(@"Saved to camera roll. %@", videoOutputPath);

    }

    - (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;

        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                              width,
                                              height,
                                              kCVPixelFormatType_32ARGB,
                                              ( CFDictionaryRef) options,
                                              &pxbuffer);
        if (status != kCVReturnSuccess){
            NSLog(@"Failed to create pixel buffer");
        }

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        size_t bytesPerRow = 4*width; //BytesPerRow 4 bytes each pixel.
        CGContextRef imageContext = CGBitmapContextCreate(pxdata, width,
                                                          height, 8, bytesPerRow, rgbColorSpace,
                                                          kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
        CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(imageContext);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        NSLog(@"bPR: %zd", pxbuffer);

        return pxbuffer;

    }

    - (void)viewDidLoad {
        [super viewDidLoad];

        //[self initFunction:nil width:1138 height:640 fps:25];
        //[self initFunction:nil width:910 height:512 fps:25];
        //[self initFunction:nil width:400 height:200 fps:25];
        [self initFunction:nil width:50 height:50 fps:25];

        //find url...
        //get pixel data...

        //loop through writing it from the pixel data
        for (int i = 1; i <= 50; i++) {
            //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/sequence/%06d.jpg", i];
            //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image1.jpg"];
            //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image2.jpg"];
            //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/mini.jpg"];
            NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/50x50.jpg"];
            NSLog(@"%@", string);
            [self addFrameFromURL:string];
        }

        [self saveVideo];

    }

Edit: All images work if they are 400 x 200. All also work at 800 x 400, 1600 x 800. 600 x 300 does not work. Possibly aspect ratios?


Solution

  • Solved this issue using specific dimensions. AVAssetWriter only supports specific "valid" aspect ratios.