Swift: merge an audio and a video file into a single video

I wrote a program in Swift. I want to merge a video with an audio file, but I get this error:

failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}

    func mergeAudio(audioURL: NSURL, moviePathUrl: NSURL, savePathUrl: NSURL) {
        var composition = AVMutableComposition()
        let trackVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        let trackAudio: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let option = NSDictionary(object: true, forKey: "AVURLAssetPreferPreciseDurationAndTimingKey")
        let sourceAsset = AVURLAsset(URL: moviePathUrl, options: option as [NSObject : AnyObject])
        let audioAsset = AVURLAsset(URL: audioURL, options: option as [NSObject : AnyObject])

        let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
        let audios = audioAsset.tracksWithMediaType(AVMediaTypeAudio)

        if tracks.count > 0 {
            let assetTrack: AVAssetTrack = tracks[0] as! AVAssetTrack
            let assetTrackAudio: AVAssetTrack = audios[0] as! AVAssetTrack
            let audioDuration: CMTime = assetTrackAudio.timeRange.duration
            let audioSeconds: Float64 = CMTimeGetSeconds(assetTrackAudio.timeRange.duration)

            trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), ofTrack: assetTrack, atTime: kCMTimeZero, error: nil)
            trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), ofTrack: assetTrackAudio, atTime: kCMTimeZero, error: nil)
        }

        var assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
        assetExport.outputFileType = AVFileTypeMPEG4
        assetExport.outputURL = savePathUrl
        self.tmpMovieURL = savePathUrl
        assetExport.shouldOptimizeForNetworkUse = true
        assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.Completed:
                let assetsLib = ALAssetsLibrary()
                assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
                println("success")
            case AVAssetExportSessionStatus.Failed:
                println("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.Cancelled:
                println("cancelled \(assetExport.error)")
            default:
                println("complete")
            }
        }
    }

My guess is that the MPEG-4 media type is the problem. Where is the issue? What am I missing?

I hit the same error as in the question above and found it was caused by a wrong savePathUrl: the destination URL must look like the one in the code below, i.e. it has to include the new video's file name.
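For example, something along these lines (just a sketch in the pre-Swift 3 syntax used in this answer; "newVideo.mp4" is only an example name, and any file already at that path should be removed first, because AVAssetExportSession will not overwrite an existing file):

    // Sketch: point savePathUrl at an actual file (with extension), not a directory,
    // and delete any previous output before exporting. "newVideo.mp4" is just an example.
    let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
    let savePathUrl = NSURL(fileURLWithPath: documentsPath + "/newVideo.mp4")
    if NSFileManager.defaultManager().fileExistsAtPath(savePathUrl.path!) {
        _ = try? NSFileManager.defaultManager().removeItemAtURL(savePathUrl)
    }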

I was looking for code that merges an audio and a video file into one video but couldn't find it anywhere, so after spending a few hours reading the Apple documentation I wrote this code.

NOTE: This code is tested and works 100% for me.

Step 1: Import these modules into your view controller.

    import UIKit
    import AVFoundation
    import AVKit
    import AssetsLibrary

Step 2: Add this function to your code.

    func mergeFilesWithUrl(videoUrl: NSURL, audioUrl: NSURL) {
        let mixComposition: AVMutableComposition = AVMutableComposition()
        var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
        var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
        let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

        // start merge
        let aVideoAsset: AVAsset = AVAsset(URL: videoUrl)
        let aAudioAsset: AVAsset = AVAsset(URL: audioUrl)

        mutableCompositionVideoTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
        mutableCompositionAudioTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

        let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
        let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

        do {
            try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aVideoAssetTrack, atTime: kCMTimeZero)

            // In my case the audio file is longer than the video file, so I used the videoAsset duration
            // instead of the audioAsset duration
            try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)

            // Use this instead of the line above if your audio file's and video file's playing durations are the same
            // try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
        } catch {
        }

        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

        let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
        mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
        mutableVideoComposition.renderSize = CGSizeMake(1280, 720)

        // playerItem = AVPlayerItem(asset: mixComposition)
        // player = AVPlayer(playerItem: playerItem!)
        // AVPlayerVC.player = player

        // you will find your video at this URL
        let savePathUrl: NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

        let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
        assetExport.outputFileType = AVFileTypeMPEG4
        assetExport.outputURL = savePathUrl
        assetExport.shouldOptimizeForNetworkUse = true

        assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.Completed:
                // Uncomment this if you want to store your video in the asset library
                // let assetsLib = ALAssetsLibrary()
                // assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
                print("success")
            case AVAssetExportSessionStatus.Failed:
                print("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.Cancelled:
                print("cancelled \(assetExport.error)")
            default:
                print("complete")
            }
        }
    }
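If you want to preview the merged file right away (the commented-out player lines above hint at that), a minimal sketch could look like the following. It assumes you call it from the .Completed case of the switch above, from a view controller that can present another one; the export completion handler runs on a background queue, so switch back to the main queue first:

    // Sketch (same Swift 2 syntax): present the merged file in an AVPlayerViewController.
    // savePathUrl is the export destination defined above.
    dispatch_async(dispatch_get_main_queue()) {
        let player = AVPlayer(URL: savePathUrl)
        let playerVC = AVPlayerViewController()
        playerVC.player = player
        self.presentViewController(playerVC, animated: true) {
            player.play()
        }
    }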

Step 3: Call the function where you need it, like this:

    let videoUrl: NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4")!)
    let audioUrl: NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3")!)
    mergeFilesWithUrl(videoUrl, audioUrl: audioUrl)
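Keep in mind that SampleVideo.mp4 and SampleAudio.mp3 must actually be present in the app bundle, otherwise the force unwraps above will crash. A safer variant (just a sketch with the same example file names, placed e.g. inside viewDidLoad) could be:

    // Sketch: guard against missing bundle resources instead of force-unwrapping.
    guard let videoPath = NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4"),
          audioPath = NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3") else {
        print("Sample files are missing from the app bundle")
        return
    }
    mergeFilesWithUrl(NSURL(fileURLWithPath: videoPath), audioUrl: NSURL(fileURLWithPath: audioPath))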

I hope this helps you and saves you some time.

Swift 3 version with URL and the new syntax:

    func mergeFilesWithUrl(videoUrl: URL, audioUrl: URL) {
        let mixComposition: AVMutableComposition = AVMutableComposition()
        var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
        var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
        let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

        // start merge
        let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
        let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

        mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
        mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

        let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

        do {
            try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)

            // In my case the audio file is longer than the video file, so I used the videoAsset duration
            // instead of the audioAsset duration
            try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)

            // Use this instead of the line above if your audio file's and video file's playing durations are the same
            // try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
        } catch {
        }

        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

        let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
        mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
        mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

        // playerItem = AVPlayerItem(asset: mixComposition)
        // player = AVPlayer(playerItem: playerItem!)
        // AVPlayerVC.player = player

        // you will find your video at this URL
        let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

        let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
        assetExport.outputFileType = AVFileTypeMPEG4
        assetExport.outputURL = savePathUrl
        assetExport.shouldOptimizeForNetworkUse = true

        assetExport.exportAsynchronously { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.completed:
                // Uncomment this if you want to store your video in the asset library
                // let assetsLib = ALAssetsLibrary()
                // assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
                print("success")
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport.error)")
            default:
                print("complete")
            }
        }
    }
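ALAssetsLibrary, used in the commented-out lines above, is deprecated. If you want to save the merged file to the user's photo library from the Swift 3 version, a sketch using the Photos framework could look like the following (the helper name saveToPhotoLibrary is just an example; it assumes you have requested photo library authorization and added the NSPhotoLibraryUsageDescription key to Info.plist):

    import Photos

    // Sketch: save the exported file to the Photos library.
    // Call it from the .completed case above, e.g. saveToPhotoLibrary(savePathUrl).
    func saveToPhotoLibrary(_ fileUrl: URL) {
        PHPhotoLibrary.shared().performChanges({
            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileUrl)
        }, completionHandler: { success, error in
            print("saved to Photos: \(success), error: \(String(describing: error))")
        })
    }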