Swift: merge an array of AVAsset videos into one

I want to merge an AVAsset array (arrayVideos) into a single video and save it to the camera roll. Raywenderlich.com has a great tutorial in which two videos are merged into one. I wrote the following code, but the video I get after exporting to the camera roll contains only the first and the last videos of the array (the other videos in the middle of arrayVideos are missing). Am I missing something here?

    var arrayVideos = [AVAsset]() // Videos array
    var atTimeM: CMTime = CMTimeMake(0, 0)
    var lastAsset: AVAsset!
    var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
    var completeTrackDuration: CMTime = CMTimeMake(0, 1)
    var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)

    func mergeVideoArray() {
        let mixComposition = AVMutableComposition()
        for videoAsset in arrayVideos {
            let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                            preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                if videoAsset == arrayVideos.first {
                    atTimeM = kCMTimeZero
                } else {
                    atTimeM = lastAsset!.duration
                }
                try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                               of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                               at: atTimeM)
                videoSize = videoTrack.naturalSize
            } catch let error as NSError {
                print("error: \(error)")
            }
            completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
            let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
            if videoAsset != arrayVideos.last {
                videoInstruction.setOpacity(0.0, at: videoAsset.duration)
            }
            layerInstructionsArray.append(videoInstruction)
            lastAsset = videoAsset
        }

        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)
        mainInstruction.layerInstructions = layerInstructionsArray

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: videoSize.width, height: videoSize.height)

        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: NSDate() as Date)
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
        let url = NSURL(fileURLWithPath: savePath)

        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter!.outputURL = url as URL
        exporter!.outputFileType = AVFileTypeQuickTimeMovie
        exporter!.shouldOptimizeForNetworkUse = true
        exporter!.videoComposition = mainComposition
        exporter!.exportAsynchronously {
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exporter!.outputURL!)
            }) { saved, error in
                if saved {
                    let alertController = UIAlertController(title: "Your video was successfully saved",
                                                            message: nil,
                                                            preferredStyle: .alert)
                    let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                    alertController.addAction(defaultAction)
                    self.present(alertController, animated: true, completion: nil)
                } else {
                    print("video error: \(error)")
                }
            }
        }
    }

You need to keep track of the total running time of all the assets and update it for every video.

The code in your question overwrites atTimeM with the duration of the current video instead of the running total, which is why only the first and the last videos end up in the result.

It would look like this:

    ...
    var totalTime: CMTime = CMTimeMake(0, 0)

    func mergeVideoArray() {
        let mixComposition = AVMutableComposition()
        for videoAsset in arrayVideos {
            let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                            preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                if videoAsset == arrayVideos.first {
                    atTimeM = kCMTimeZero
                } else {
                    atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
                }
                try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                               of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                               at: atTimeM)
                videoSize = videoTrack.naturalSize
            } catch let error as NSError {
                print("error: \(error)")
            }
            totalTime += videoAsset.duration // <-- Update the total time for all videos.
            ...

You can also drop the use of lastAsset entirely.

You don't need atTimeM at all, because completeTrackDuration already marks the point at which the next segment should be appended. So replace

    if videoAsset == arrayVideos.first {
        atTimeM = kCMTimeZero
    } else {
        atTimeM = lastAsset!.duration
    }
    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                   of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                   at: atTimeM)

with this:

    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                   of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                   at: completeTrackDuration)
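
Putting that together, the per-asset loop from the question reduces to something like the following minimal sketch (it reuses the question's own variables and leaves out the layer-instruction handling):

    var completeTrackDuration: CMTime = CMTimeMake(0, 1)

    for videoAsset in arrayVideos {
        let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                        preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            // Each clip is appended right after everything inserted so far,
            // so no separate atTimeM or lastAsset bookkeeping is needed.
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                           of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                           at: completeTrackDuration)
        } catch let error as NSError {
            print("error: \(error)")
        }
        // Advance the running total after the insert so the next clip lands right after this one.
        completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
    }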

Swift 4

Use it like this:

    MeargeVide.mergeVideoArray(arrayVideos: arrayAsset) { (urlMeargeVide, error) in
        debugPrint("url", urlMeargeVide ?? "")
        debugPrint("error", error ?? "")
    }
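
The arrayAsset argument is just an [AVAsset]. As a minimal sketch (clipURLs is a hypothetical array of local video file URLs, not something from the answer), it could be built like this:

    // Hypothetical: the local video files you want to merge.
    let clipURLs: [URL] = []
    let arrayAsset: [AVAsset] = clipURLs.map { AVAsset(url: $0) }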

Here is the complete class, which handles orientation and merges multiple clips into a single video.

    class MeargeVide {

        static func orientationFromTransform(_ transform: CGAffineTransform)
            -> (orientation: UIImageOrientation, isPortrait: Bool) {
            var assetOrientation = UIImageOrientation.up
            var isPortrait = false
            if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                assetOrientation = .right
                isPortrait = true
            } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                assetOrientation = .left
                isPortrait = true
            } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                assetOrientation = .up
            } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                assetOrientation = .down
            }
            return (assetOrientation, isPortrait)
        }

        static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset)
            -> AVMutableVideoCompositionLayerInstruction {
            let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
            let assetTrack = asset.tracks(withMediaType: .video)[0]

            let transform = assetTrack.preferredTransform
            let assetInfo = orientationFromTransform(transform)

            var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
            if assetInfo.isPortrait {
                scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
                let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
            } else {
                let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                var concat = assetTrack.preferredTransform
                    .concatenating(scaleFactor)
                    .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
                if assetInfo.orientation == .down {
                    let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                    let windowBounds = UIScreen.main.bounds
                    let yFix = assetTrack.naturalSize.height + windowBounds.height
                    let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                    concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                }
                instruction.setTransform(concat, at: kCMTimeZero)
            }

            return instruction
        }

        class func mergeVideoArray(arrayVideos: [AVAsset],
                                   callBack: @escaping (_ urlGet: URL?, _ errorGet: Error?) -> Void) {
            var atTimeM: CMTime = CMTimeMake(0, 0)
            var lastAsset: AVAsset!
            var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
            var completeTrackDuration: CMTime = CMTimeMake(0, 1)
            var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)
            var totalTime: CMTime = CMTimeMake(0, 0)

            let mixComposition = AVMutableComposition.init()
            for videoAsset in arrayVideos {
                let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                do {
                    if videoAsset == arrayVideos.first {
                        atTimeM = kCMTimeZero
                    } else {
                        atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
                    }
                    try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                                    of: videoAsset.tracks(withMediaType: AVMediaType.video)[0],
                                                    at: completeTrackDuration)
                    videoSize = (videoTrack?.naturalSize)!
                } catch let error as NSError {
                    print("error: \(error)")
                }
                totalTime = CMTimeAdd(totalTime, videoAsset.duration)
                completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)

                let firstInstruction = self.videoCompositionInstruction(videoTrack!, asset: videoAsset)
                firstInstruction.setOpacity(0.0, at: videoAsset.duration)
                layerInstructionsArray.append(firstInstruction)
                lastAsset = videoAsset
            }

            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.layerInstructions = layerInstructionsArray
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)

            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let dateFormatter = DateFormatter()
            dateFormatter.dateStyle = .long
            dateFormatter.timeStyle = .short
            let date = dateFormatter.string(from: NSDate() as Date)
            let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
            let url = NSURL(fileURLWithPath: savePath)

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter!.outputURL = url as URL
            exporter!.outputFileType = AVFileType.mp4
            exporter!.shouldOptimizeForNetworkUse = true
            exporter!.videoComposition = mainComposition
            exporter!.exportAsynchronously {
                DispatchQueue.main.async {
                    callBack(exporter?.outputURL, exporter?.error)
                }
            }
        }
    }
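
This class hands back the exported file URL instead of saving it, so if the goal is still the camera roll (as in the original question), the URL from the callback can be passed to the Photos framework. A rough sketch, with the photo-library authorization request and error handling omitted:

    import Photos

    MeargeVide.mergeVideoArray(arrayVideos: arrayAsset) { (urlMeargeVide, error) in
        guard let outputURL = urlMeargeVide, error == nil else {
            debugPrint("merge failed:", error ?? "unknown error")
            return
        }
        // Save the merged movie file to the photo library (camera roll).
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
        }) { saved, saveError in
            debugPrint("saved to camera roll:", saved, saveError ?? "")
        }
    }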