Search code examples
iosswiftavaudioplayeraudio-recordingtrim

iOS: trimming audio files with Swift?


I have to merge an audio file with a recorded voice. For example, the recorded voice is 47 seconds long, so I have to cut or trim the 4-minute audio song down to 47 seconds, and then merge it with the audio file.

var url:NSURL?
    // Prefer the live recorder's file; fall back to the previously saved
    // sound file when no recording session exists.
    if self.audioRecorder != nil
    {
        url = self.audioRecorder!.url
    }

    else
    {
        url = self.soundFileURL!

    }
    print("playing \(url)")
    // NOTE(review): this `do` has no `catch`, so the snippet must live inside
    // a throwing function (the enclosing declaration is not shown here).
    do
    {
        self.newplayer = try AVPlayer(URL: url!)
        let avAsset = AVURLAsset(URL: url!, options: nil)
        print("\(avAsset)")
        // Break the asset's duration (a CMTime) down into h/m/s for logging.
        // `%` here is Swift 2's floating-point remainder operator on Double.
        let audioDuration = avAsset.duration
        let totalSeconds = CMTimeGetSeconds(audioDuration)
        let hours = floor(totalSeconds / 3600)
        var minutes = floor(totalSeconds % 3600 / 60)
        var seconds = floor(totalSeconds % 3600 % 60)
        print("hours = \(hours),minutes = \(minutes),seconds = \(seconds)")}

This is the output: // hours = 0.0, minutes = 0.0, seconds = 42.0

For the trim method I tried the following. How do I set the exact duration, the start time, the end time, and the new output URL?

 /// Exports the first 5 seconds of `asset` as an AppleM4A file named
 /// `fileName` in the app's Documents directory.
 ///
 /// - Parameters:
 ///   - asset: source audio asset; must be at least 5 seconds long.
 ///   - fileName: output file name (should carry an .m4a extension).
 func exportAsset(asset:AVAsset, fileName:String)
{
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
    print("saving to \(trimmedSoundFileURL!.absoluteString)")

    // Validate the source length before doing any setup work.
    let duration = CMTimeGetSeconds(asset.duration)
    if (duration < 5.0) {
        print("sound is not long enough")
        return
    }

    // BUG FIX: fileExistsAtPath expects a filesystem path, not a file:// URL
    // string, so the old `absoluteString` check never matched. Use `.path`,
    // and actually delete any stale file — AVAssetExportSession fails when
    // the output URL already exists.
    let filemanager = NSFileManager.defaultManager()
    if let outputPath = trimmedSoundFileURL!.path where filemanager.fileExistsAtPath(outputPath) {
        print("sound exists, removing it before export")
        do {
            try filemanager.removeItemAtURL(trimmedSoundFileURL!)
        }
        catch let error as NSError {
            print("could not remove existing file: \(error.localizedDescription)")
        }
    }

    // The initializer is failable; bail out instead of force-unwrapping.
    guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        print("could not create export session")
        return
    }
    exporter.outputFileType = AVFileTypeAppleM4A
    exporter.outputURL = trimmedSoundFileURL

    // e.g. the first 5 seconds
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(5, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    exporter.timeRange = exportTimeRange

    // do it — note the completion handler is not guaranteed to run on the
    // main thread; dispatch to main before touching UI from here.
    exporter.exportAsynchronouslyWithCompletionHandler({
        switch exporter.status {
        case  AVAssetExportSessionStatus.Failed:
            print("export failed \(exporter.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("export cancelled \(exporter.error)")
        default:
            print("export complete")
        }
    })
}

Solution

  • Finally, I found the answer to my question, and it's working fine. I attached the code below, with the audio-trimming code added in. It should be useful for anyone trying to merge and trim audio (Swift 2.3):

    /// Merges the audio at `soundFileURL` and `soundFileURL1` into one
    /// composition (both tracks start at time zero, so they play layered, not
    /// back-to-back), trims the result to the first asset's whole-second
    /// length, and exports it as Library/Fav.m4a. On completion the mixed
    /// file is base64-encoded, uploaded, and played back.
    func mixAudio()
    {
        // Wall-clock start, used to log how long the export took.
        let currentTime = CFAbsoluteTimeGetCurrent()
        let composition = AVMutableComposition()
        let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        compositionAudioTrack.preferredVolume = 0.8
        // First source asset (the backing audio file).
        let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
        print("\(avAsset)")
        var tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
        let clipAudioTrack = tracks[0]
        do {
            // Lay the full first asset into the composition at time zero.
            try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
        }
        catch _ {
            // NOTE(review): insertion failure is silently swallowed here.
        }
        let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        // NOTE(review): this re-sets the FIRST track's volume; it probably
        // was meant to be `compositionAudioTrack1.preferredVolume = 0.8`.
        compositionAudioTrack.preferredVolume = 0.8
    
        // Second source asset (the recorded voice).
        let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
        print(avAsset1)
    
    
        var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
        let clipAudioTrack1 = tracks1[0]
        do {
            // Second track also starts at time zero, overlapping the first.
            try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
        }
        catch _ {
            // NOTE(review): insertion failure is silently swallowed here.
        }
        // Build the output path Library/Fav.m4a.
        // NOTE(review): despite the variable names, this searches
        // .LibraryDirectory, not the Caches directory.
        var paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
        let CachesDirectory = paths[0]
        // `strOutputFilePath` is only printed; the real output is Fav.m4a.
        let strOutputFilePath = CachesDirectory.stringByAppendingString("/Fav")
        print(" strOutputFilePath is \n \(strOutputFilePath)")
    
        let requiredOutputPath = CachesDirectory.stringByAppendingString("/Fav.m4a")
        print(" requiredOutputPath is \n \(requiredOutputPath)")
    
        soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
        print(" OUtput path is \n \(soundFile1)")
        // Whole-second length of the FIRST asset; `seconds` below becomes the
        // trim end point for the export.
        var audioDuration = avAsset.duration
        var totalSeconds = CMTimeGetSeconds(audioDuration)
        var hours = floor(totalSeconds / 3600)
        var minutes = floor(totalSeconds % 3600 / 60)
        var seconds = Int64(totalSeconds % 3600 % 60)
        print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")
    
        // AAC mono, low quality — settings for the AVAudioRecorder below.
        let recordSettings:[String : AnyObject] = [
    
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue
        ]
        do {
            // NOTE(review): the recorder is pointed at the same URL the
            // exporter writes to; prepareToRecord() creates the file, which
            // is then deleted just below so the exporter can write there.
            audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
            audioRecorder!.delegate = self
            audioRecorder!.meteringEnabled = true
            audioRecorder!.prepareToRecord()
        }
    
        catch let error as NSError
        {
            audioRecorder = nil
            print(error.localizedDescription)
        }
    
        do {
    
            // Remove any existing file at the output URL —
            // AVAssetExportSession fails if the destination already exists.
            try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
        }
        catch _ {
        }
        let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
        exporter!.outputURL = soundFile1
        exporter!.outputFileType = AVFileTypeAppleM4A
        // Guard on the SECOND asset's length (the recorded voice).
        let duration = CMTimeGetSeconds(avAsset1.duration)
        print(duration)
        if (duration < 5.0) {
            print("sound is not long enough")
            return
        }
        // Trim the mixed composition to [0, seconds] — i.e. the first
        // asset's whole-second duration.
        let startTime = CMTimeMake(0, 1)
        let stopTime = CMTimeMake(seconds,1)
        let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
        print(exportTimeRange)
        exporter!.timeRange = exportTimeRange
        print(exporter!.timeRange)
    
    
        // NOTE(review): this handler may run on a background thread, and the
        // exporter's .status is never checked — failures go unnoticed.
        exporter!.exportAsynchronouslyWithCompletionHandler
            {() -> Void in
                print(" OUtput path is \n \(requiredOutputPath)")
                print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")
                var url:NSURL?
                if self.audioRecorder != nil
                {
                    url = self.audioRecorder!.url
                }
    
                else
                {
                    url = self.soundFile1!
                    print(url)
    
                }
    
                print("playing \(url)")
    
        do
        {
            print(self.soundFile1)
            print(" OUtput path is \n \(requiredOutputPath)")
            self.setSessionPlayback()
            do {
                                            // Read the exported file, base64-encode it, and upload.
                                            self.optData = try NSData(contentsOfURL: self.soundFile1!, options: NSDataReadingOptions.DataReadingMappedIfSafe)
                                            print(self.optData)
                                            self.recordencryption = self.optData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions())
    
                                            //  print(self.recordencryption)
                                              self.myImageUploadRequest()
    
    
                                        }
    
    
    
            // Play the mixed result back once.
            self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
            self.wasteplayer.numberOfLoops = 0
            self.wasteplayer.play()
    
    
        }
    
        catch _
        {
            // NOTE(review): read/playback errors are silently swallowed.
        }
    
        }
    }