Search code examples
iosobjective-ciphonecameravideo-recording

Video recording in iOS programmatically


I am trying to implement functionality like below

Final Recorded Video = "Capture a video from front camera + Record an audio from video (which I am playing through video player)".

For more understanding please see the attach screen shot.

enter image description here

Using the blocks of code given below, what I get at the end is a video without audio.

But what I am trying to implement is a final recorded video that combines the video captured from my front camera with only the audio from the video file that I am playing.

How can I achieve above functionality?

This is my code.

"Recording" Button Click Method is as following:

//"Recording" button action: sets up the capture session, starts playing the
//bundled movie, then begins recording from the camera.
-(void) startRecording
{
    [self initCaptureSession];

    //FIX: guard against a missing bundle resource -- fileURLWithPath: raises
    //an exception when handed nil, crashing the app if video.mp4 is absent.
    NSString *moviePath = [[NSBundle mainBundle] pathForResource:@"video"
                                                          ofType:@"mp4"];
    if (moviePath)
    {
        [self playMovieAtURL:[NSURL fileURLWithPath:moviePath]];
    }
    else
    {
        NSLog(@"Couldn't find video.mp4 in the main bundle");
    }

    [self startVideoRecording];
}

"initCaptureSession": Using this method I record video from the front camera via an "AVCaptureSession".

//Creates the AVCaptureSession: front-camera video input, microphone audio
//input, and a movie-file output, then starts the session running.
-(void) initCaptureSession
{
    NSLog(@"Setting up capture session");
    captureSession = [[AVCaptureSession alloc] init];

    NSLog(@"Adding video input");

    AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable];

    if (VideoDevice)
    {
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
        if (!error)
        {
            if ([captureSession canAddInput:videoInputDevice])
            {
                [captureSession addInput:videoInputDevice];
            }
            else
            {
                NSLog(@"Couldn't add video input");
            }
        }
        else
        {
            NSLog(@"Couldn't create video input");
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }


    NSLog(@"Adding audio input");
    //FIX: use AVMediaTypeAudio, not AVMediaTypeMuxed. defaultDeviceWithMediaType:
    //returns nil for AVMediaTypeMuxed on iPhone hardware, so no microphone input
    //was ever added to the session -- which is why the recorded movie had no
    //audio track at all.
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    //FIX: also verify the session accepts the input, mirroring the video path.
    if (audioInput && [captureSession canAddInput:audioInput])
    {
        [captureSession addInput:audioInput];
    }
    else
    {
        NSLog(@"Couldn't add audio input");
    }


    NSLog(@"Adding movie file output");
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;    //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

    if ([captureSession canAddOutput:movieFileOutput])
        [captureSession addOutput:movieFileOutput];

    [self CameraSetOutputProperties];           //(We call a method as it also has to be done after changing camera)

    NSLog(@"Setting image quality");
    [captureSession setSessionPreset:AVCaptureSessionPresetMedium];
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])     //Check size based configs are supported before setting them
        [captureSession setSessionPreset:AVCaptureSessionPreset640x480];

    [captureSession startRunning];
}

//Configures the movie-file output's video connection (orientation).
//Called again after switching cameras, since the connection changes.
- (void) CameraSetOutputProperties
{
    AVCaptureConnection *videoConnection = nil;

    //connectionWithMediaType: only exists on iOS 5.0 and later; fall back to a
    //manual search through the output's connections on older systems.
    NSComparisonResult versionOrder =
        [[UIDevice currentDevice].systemVersion compare:@"5.0.0"
                                                options:NSNumericSearch];
    BOOL runningIOS5OrLater = (versionOrder != NSOrderedAscending);

    if (runningIOS5OrLater)
    {
        videoConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    else
    {
        videoConnection = [self connectionWithMediaType:AVMediaTypeVideo
                                        fromConnections:[movieFileOutput connections]];
    }

    //Record portrait; switch to AVCaptureVideoOrientationLandscapeRight here
    //if a landscape UI is required.
    if ([videoConnection isVideoOrientationSupported])
    {
        [videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    }
}

"-(void) playMovieAtURL: (NSURL*) theURL": Using this method I play the video.

//Plays the movie at theURL in an MPMoviePlayerController overlaid on this
//view, with the shared audio session set up for simultaneous play + record.
-(void) playMovieAtURL: (NSURL*) theURL
{
    player = [[MPMoviePlayerController alloc] initWithContentURL:theURL];

    //Allow movie audio and microphone capture at the same time.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];

    player.scalingMode = MPMovieScalingModeAspectFill;
    player.controlStyle = MPMovieControlStyleNone;
    [player prepareToPlay];

    //Observe playback completion so the overlay can be torn down later.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(myMovieFinishedCallback:)
                                                 name:MPMoviePlayerPlaybackDidFinishNotification
                                               object:player];

    player.view.frame = CGRectMake(10, 30, 300, 200);
    [self.view addSubview:player.view];

    [player play];
}

"startVideoRecording": Using this method I start recording the final video.

//Begins recording camera output to a temporary movie file, first deleting
//any leftover file from a previous run (the output will not overwrite).
- (void) startVideoRecording
{
    //Build the temp destination: <tmp>/output.mov
    NSString *outputPath =
        [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath])
    {
        NSError *removeError = nil;
        if (![fileManager removeItemAtPath:outputPath error:&removeError])
        {
            NSLog(@"file remove error");
        }
    }

    //Start recording; completion is reported via the recording delegate.
    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}

//AVCaptureFileOutputRecordingDelegate callback: fires once the movie file at
//outputFileURL has been finalized. Saves the clip to the photo library when
//compatible, otherwise copies it into the Documents directory.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");

    BOOL RecordedSuccessfully = YES;
    //FIX: test the error object itself rather than comparing [error code] to
    //noErr. Messaging nil returns 0 (== noErr) so the nil case happened to
    //work, but a non-nil error whose code is 0 was previously misclassified.
    //A non-nil error can still leave a usable file; the
    //AVErrorRecordingSuccessfullyFinishedKey entry is authoritative.
    if (error)
    {
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            RecordedSuccessfully = [value boolValue];
        }
    }
    if (RecordedSuccessfully)
    {
        //----- RECORDED SUCCESSFULLY -----
        NSLog(@"didFinishRecordingToOutputFileAtURL - success");
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                        completionBlock:^(NSURL *assetURL, NSError *saveError)
             {
                 //FIX: reference 'library' inside the block so ARC keeps the
                 //ALAssetsLibrary alive until this asynchronous save finishes;
                 //otherwise it can be deallocated as this method returns and
                 //the completion block never runs. (Renamed the block's error
                 //parameter so it no longer shadows the delegate's 'error'.)
                 (void)library;
                 if (saveError)
                 {
                     NSLog(@"File save error");
                 }
                 else
                 {
                     recordedVideoURL = assetURL;
                 }
             }];
        }
        else
        {
            NSString *assetURL = [self copyFileToDocuments:outputFileURL];
            if (assetURL != nil)
            {
                recordedVideoURL = [NSURL URLWithString:assetURL];
            }
        }
    }
}

Solution

  • // Add some extra code for following methods "1st Method"

      //Solution version of playMovieAtURL:. Starts movie playback, configures
      //the shared audio session for play-and-record, and sets up an
      //AVAudioRecorder capturing the microphone to MySound.caf so the audio
      //can later be merged with the camera video.
      -(void) playMovieAtURL: (NSURL*) theURL
      {
          [player play];

          AVAudioSession *audioSession = [AVAudioSession sharedInstance];
          NSError *err = nil;
          //FIX: check the method's BOOL return value instead of inspecting the
          //error pointer -- the pointer is only meaningful on failure. Also use
          //%ld with a (long) cast for the NSInteger error code; %d is wrong on
          //64-bit and triggers a format-specifier warning.
          if (![audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&err])
          {
              NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
              return;
          }

          //FIX: the original reset err to nil AFTER calling setActive:error:,
          //which made the following if(err) check dead code. Reset first.
          err = nil;
          if (![audioSession setActive:YES error:&err])
          {
              NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
              return;
          }

          //IMA4, mono, 16 kHz keeps the temporary audio file small.
          recordSetting = [[NSMutableDictionary alloc] init];
          [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatAppleIMA4] forKey:AVFormatIDKey];
          [recordSetting setValue:[NSNumber numberWithFloat:16000.0] forKey:AVSampleRateKey];
          [recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];

          recorderFilePath = [NSString stringWithFormat:@"%@/MySound.caf", DOCUMENTS_FOLDER];
          NSLog(@"recorderFilePath: %@", recorderFilePath);
          audio_url = [NSURL fileURLWithPath:recorderFilePath];

          //Delete any stale recording left over from a previous run.
          err = nil;
          NSData *audioData = [NSData dataWithContentsOfFile:[audio_url path] options:0 error:&err];
          if (audioData)
          {
              NSFileManager *fm = [NSFileManager defaultManager];
              [fm removeItemAtPath:[audio_url path] error:&err];
          }

          err = nil;
          recorder = [[AVAudioRecorder alloc] initWithURL:audio_url settings:recordSetting error:&err];
          if (!recorder)
          {
              NSLog(@"recorder: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
              UIAlertView *alert =
              [[UIAlertView alloc] initWithTitle:@"Warning"
                                         message:[err localizedDescription]
                                        delegate:nil
                               cancelButtonTitle:@"OK"
                               otherButtonTitles:nil];
              [alert show];
              return;
          }

          //prepare to record
          [recorder setDelegate:self];
          [recorder prepareToRecord];
          recorder.meteringEnabled = YES;

          BOOL audioHWAvailable = audioSession.inputAvailable;
          if (!audioHWAvailable)
          {
              UIAlertView *cantRecordAlert =
              [[UIAlertView alloc] initWithTitle:@"Warning"
                                         message:@"Audio input hardware not available"
                                        delegate:nil
                               cancelButtonTitle:@"OK"
                               otherButtonTitles:nil];
              [cantRecordAlert show];
              return;
          }

          //FIX: the recorder was prepared but -record was never called, so
          //MySound.caf stayed empty and the later audio/video merge in
          //stopVideoRecording had no audio to insert.
          [recorder record];
      }
    

    // 2nd method

    //Stops playback and camera recording, then builds an AVMutableComposition
    //that lays the recorded microphone audio over the recorded camera video.
    -(void) stopVideoRecording
    
        {
        [player.view removeFromSuperview];
        [player stop];
        //NOTE(review): -stopRecording is asynchronous -- the movie file is only
        //finalized when the delegate's didFinishRecordingToOutputFileAtURL:
        //fires. Reading the file at outputURL immediately below may therefore
        //operate on an incomplete recording; consider moving the merge into
        //that delegate callback. TODO confirm.
        [movieFileOutput stopRecording];
    
        //audio_url: mic recording produced by AVAudioRecorder (see the
        //solution playMovieAtURL:). outputURL: presumably an ivar holding the
        //camera recording's destination -- not visible here, verify it is set.
        AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_url options:nil];
        AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:outputURL options:nil];
    
        mixComposition = [AVMutableComposition composition];
    
        //Insert the full mic audio track at time zero.
        AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                            ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                             atTime:kCMTimeZero error:nil];
    
        //Insert the full camera video track at time zero.
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                       ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                        atTime:kCMTimeZero error:nil];
    
        //NOTE(review): this export session is created but never started -- no
        //outputURL/outputFileType is assigned and
        //exportAsynchronouslyWithCompletionHandler: is never called, so
        //nothing is written to disk here. The composition is only used for
        //in-memory playback (see the AVPlayer snippet below).
        AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                              presetName:AVAssetExportPresetPassthrough];
    
        //Carry the source video's orientation transform into the composition.
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        [compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
    }
    

    // Final Play Video

    //Plays the merged composition back through an AVPlayerLayer on this view.
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
    AVPlayer *player1 = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player1];
    //NOTE(review): hard-coded 320x480 frame -- use the view's bounds instead
    //on larger screens.
    [playerLayer setFrame:CGRectMake(0, 0, 320, 480)];
    [[[self view] layer] addSublayer:playerLayer];
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [player1 play];
    //Keep the last frame on screen when playback reaches the end.
    player1.actionAtItemEnd = AVPlayerActionAtItemEndNone;