Video orientation after merging audio and video

I am trying to merge an audio file with a video file. The merged video does get saved to my photo library. I was following this tutorial, but I could not find a suitable answer.

The problem is that the saved video is not in portrait mode.

So how can I get the video into my library in portrait orientation? Here is what I tried:

func mergeFilesWithUrl(videoUrl: NSURL, audioUrl: NSURL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    // Start the merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl as URL)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)

        // In my case the audio file is longer than the video file, so I use the
        // video asset's duration instead of the audio asset's duration.
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

    // The exported video ends up at this URL
    let savePathUrl: NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl as URL
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            // Here I store the result in the asset library
            let assetsLib = ALAssetsLibrary()
            assetsLib.writeVideoAtPath(toSavedPhotosAlbum: savePathUrl as URL, completionBlock: nil)
            print("success")
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
        }
    }
}

There are two ways to solve this problem.

First method: without AVMutableVideoComposition

let mainComposition = AVMutableComposition()

let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
videoTrack.preferredTransform = videoAssetTrack.preferredTransform // THIS LINE IS IMPORTANT

let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)

let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.exportAsynchronously {
}
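Both snippets in this answer stop at the export. If you also want the merged file in the photo library, as in the question, a minimal sketch using the Photos framework could look like the following. The helper name and `outputURL` parameter are hypothetical (use the URL you handed to the export session), and note that the ALAssetsLibrary API from the question is deprecated. This also requires photo library authorization.

import Photos

// Sketch: call this from the export session's completion handler once the
// status is .completed. `outputURL` is the file URL the session exported to.
func saveToPhotoLibrary(outputURL: URL) {
    PHPhotoLibrary.shared().performChanges({
        // Create a new video asset in the library from the exported file.
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }, completionHandler: { saved, error in
        if saved {
            print("saved to photo library")
        } else {
            print("save failed: \(String(describing: error))")
        }
    })
}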

Second method: with AVMutableVideoComposition

let mainComposition = AVMutableComposition()

let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)

let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)

let videoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
videoCompositionLayerInstruction.setTransform(videoAssetTrack.preferredTransform, at: kCMTimeZero)

let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainComposition.duration)
videoCompositionInstruction.layerInstructions = [videoCompositionLayerInstruction]

var renderSize = videoAssetTrack.naturalSize
renderSize = renderSize.applying(videoAssetTrack.preferredTransform)
renderSize = CGSize(width: fabs(renderSize.width), height: fabs(renderSize.height))

let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = renderSize
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.instructions = [videoCompositionInstruction]

let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.videoComposition = videoComposition
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = false
exportSession?.exportAsynchronously {
}
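A note on the renderSize computation in the second method: a clip recorded in portrait usually reports a landscape naturalSize together with a 90° preferredTransform, and applying that transform to the size produces a negative component, which is why the code wraps the values in fabs(). A small sketch with hypothetical numbers:

import CoreGraphics

// Hypothetical values: a portrait recording often has naturalSize 1920x1080
// and a preferredTransform that rotates the frames by 90 degrees.
let naturalSize = CGSize(width: 1920, height: 1080)
let preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)

// Applying the rotation gives roughly (-1080, 1920): the width is negative,
// so take the absolute values to get a usable portrait render size.
let transformed = naturalSize.applying(preferredTransform)
let renderSize = CGSize(width: abs(transformed.width), height: abs(transformed.height))
// renderSize is approximately 1080 x 1920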