I have some code below that merges video clips into one video. I don't have much experience with AVFoundation, so I'm not sure how to replace the deprecated code with the updated methods without causing the error "Type 'CameraViewModel' does not conform to protocol 'AVCaptureFileOutputRecordingDelegate'". This code works fine on iOS 15, but on iOS 16 the APIs it uses have been deprecated. The first code snippet below is the original code (the deprecated calls are in the mergeVideos func). The second snippet is my attempt to update the deprecated code, but in doing so I get the error above.
In my code I've changed the deprecated asset.duration into the async asset.load(.duration).
I've also changed asset.tracks(withMediaType: .audio)[0] into the async asset.loadTracks(withMediaType: .audio).
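For reference, this is the kind of replacement I mean, roughly (a minimal sketch, not my real code; loadVideoInfo is just a made-up helper so I can show the old calls next to the new async ones):

import AVFoundation

func loadVideoInfo(from asset: AVURLAsset) async throws -> (CMTime, AVAssetTrack?) {
    // Old, deprecated as of iOS 16:
    //   let duration = asset.duration
    //   let videoTrack = asset.tracks(withMediaType: .video).first
    // New async replacements:
    let duration = try await asset.load(.duration)
    let videoTrack = try await asset.loadTracks(withMediaType: .video).first
    return (duration, videoTrack)
}

Here is the first snippet, the original code that works fine on iOS 15: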
import SwiftUI
import AVFoundation
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate {
    @Published var session = AVCaptureSession()
    @Published var alert = false
    @Published var output = AVCaptureMovieFileOutput()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var isRecording: Bool = false
    @Published var recordedURLs: [URL] = []
    @Published var previewURL: URL?
    @Published var showPreview: Bool = false

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print(error.localizedDescription)
            return
        }
        self.recordedURLs.append(outputFileURL)
        if self.recordedURLs.count == 1 {
            self.previewURL = outputFileURL
            return
        }
        let assets = recordedURLs.compactMap { url -> AVURLAsset in
            return AVURLAsset(url: url)
        }
        self.previewURL = nil
        mergeVideos(assets: assets) { exporter in // ADD AWAIT HERE
            exporter.exportAsynchronously {
                if exporter.status == .failed {
                    print(exporter.error!)
                } else {
                    if let finalURL = exporter.outputURL {
                        print(finalURL)
                        DispatchQueue.main.async {
                            self.previewURL = finalURL
                        }
                    }
                }
            }
        }
    }

    func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) {
        let compostion = AVMutableComposition()
        var lastTime: CMTime = .zero
        guard let videoTrack = compostion.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = compostion.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        for asset in assets {
            // Linking Audio and Video
            do {
                try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .video)[0], at: lastTime)
                // Safe Check if Video has Audio
                if !asset.tracks(withMediaType: .audio).isEmpty {
                    try audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .audio)[0], at: lastTime)
                }
            }
            catch {
                // HANDLE ERROR
                print(error.localizedDescription)
            }
            // Updating Last Time
            lastTime = CMTimeAdd(lastTime, asset.duration)
        }
        // more code
    }
}

And here is the second snippet, my attempt at updating it, which produces the conformance error:
import SwiftUI
import AVFoundation
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate {
    @Published var session = AVCaptureSession()
    @Published var alert = false
    @Published var output = AVCaptureMovieFileOutput()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var isRecording: Bool = false
    @Published var recordedURLs: [URL] = []
    @Published var previewURL: URL?
    @Published var showPreview: Bool = false

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) async { // made func async
        if let error = error {
            print(error.localizedDescription)
            return
        }
        self.recordedURLs.append(outputFileURL)
        if self.recordedURLs.count == 1 {
            self.previewURL = outputFileURL
            return
        }
        let assets = recordedURLs.compactMap { url -> AVURLAsset in
            return AVURLAsset(url: url)
        }
        self.previewURL = nil
        await mergeVideos(assets: assets) { exporter in // ADD AWAIT HERE
            exporter.exportAsynchronously {
                if exporter.status == .failed {
                    print(exporter.error!)
                } else {
                    if let finalURL = exporter.outputURL {
                        print(finalURL)
                        DispatchQueue.main.async {
                            self.previewURL = finalURL
                        }
                    }
                }
            }
        }
    }

    func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) async {
        let compostion = AVMutableComposition()
        var lastTime: CMTime = .zero
        guard let videoTrack = compostion.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = compostion.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        for asset in assets {
            do {
                // Changes
                try await videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .video)[0], at: lastTime)
                if try await !asset.loadTracks(withMediaType: .audio).isEmpty {
                    try await audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .audio)[0], at: lastTime)
                }
            }
            catch {
                print(error.localizedDescription)
            }
            do {
                lastTime = try await CMTimeAdd(lastTime, asset.load(.duration))
            } catch {
                print(error.localizedDescription)
            }
        }
    }
}
Instead of making fileOutput async, wrap the mergeVideos call in a Task. Adding async changes the method's signature so it no longer matches the requirement in AVCaptureFileOutputRecordingDelegate; leaving the signature alone and moving the async work into a Task keeps the protocol conformance.
Task {
    await mergeVideos(assets: assets) { exporter in
        exporter.exportAsynchronously {
            if exporter.status == .failed {
                print(exporter.error!)
            } else {
                if let finalURL = exporter.outputURL {
                    print(finalURL)
                    DispatchQueue.main.async {
                        self.previewURL = finalURL
                    }
                }
            }
        }
    }
}
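With that change, fileOutput keeps exactly the signature the protocol expects, so the conformance error goes away. Roughly (a sketch reusing your existing properties; the export handling inside the closure stays the same as above):

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print(error.localizedDescription)
        return
    }
    self.recordedURLs.append(outputFileURL)
    if self.recordedURLs.count == 1 {
        self.previewURL = outputFileURL
        return
    }
    let assets = recordedURLs.compactMap { AVURLAsset(url: $0) }
    self.previewURL = nil
    // The delegate method stays synchronous; only the work inside the Task is async.
    Task {
        await mergeVideos(assets: assets) { exporter in
            // ... same exportAsynchronously handling as above ...
        }
    }
}

You could also go further and have mergeVideos return the AVAssetExportSession directly instead of taking a completion handler, since it is already async, but the Task wrapper alone is enough to fix the conformance error.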