AVMutableVideoComposition rotates a video captured in portrait mode

I have used the code below to add an image overlay to a video and then export the newly generated video to the documents directory. But strangely, the video comes out rotated by 90 degrees.

    - (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
    {
        CMTime nextClipStartTime = kCMTimeZero;
        NSInteger i;

        // Make transitionDuration no greater than half the shortest clip duration.
        CMTime transitionDuration = self.transitionDuration;
        for (i = 0; i < [_clips count]; i++ ) {
            NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
            if (clipTimeRange) {
                CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
                halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
                transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
            }
        }

        // Add two video tracks and two audio tracks.
        AVMutableCompositionTrack *compositionVideoTracks[2];
        AVMutableCompositionTrack *compositionAudioTracks[2];
        compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
        CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);

        // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
        for (i = 0; i < [_clips count]; i++ ) {
            NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
            AVURLAsset *asset = [_clips objectAtIndex:i];
            NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
            CMTimeRange timeRangeInAsset;
            if (clipTimeRange)
                timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
            else
                timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

            AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];

            /*
            CGAffineTransform t = clipVideoTrack.preferredTransform;
            NSLog(@"Transform1 : %@",t);
            */

            AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

            // Remember the time range in which this clip should pass through.
            // Every clip after the first begins with a transition.
            // Every clip before the last ends with a transition.
            // Exclude those transitions from the pass through time ranges.
            passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
            if (i > 0) {
                passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
                passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
            }
            if (i+1 < [_clips count]) {
                passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
            }

            // The end of this clip will overlap the start of the next by transitionDuration.
            // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
            nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
            nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

            // Remember the time range for the transition to the next item.
            transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
        }

        // Set up the video composition if we are to perform crossfade or push transitions between clips.
        NSMutableArray *instructions = [NSMutableArray array];

        // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
        for (i = 0; i < [_clips count]; i++ ) {
            NSInteger alternatingIndex = i % 2; // alternating targets

            // Pass through clip i.
            AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            passThroughInstruction.timeRange = passThroughTimeRanges[i];
            AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];

            /*
            CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
            CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
            [passThroughLayer setTransform:rotateTranslate atTime:kCMTimeZero];
            */

            passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
            [instructions addObject:passThroughInstruction];

            if (i+1 < [_clips count]) {
                // Add transition from clip i to clip i+1.
                AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
                transitionInstruction.timeRange = transitionTimeRanges[i];
                AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
                AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];

                if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
                    // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
                    [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
                }
                else if (self.transitionType == SimpleEditorTransitionTypePush) {
                    // Set a transform ramp on fromLayer from identity to all the way left of the screen.
                    [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
                    // Set a transform ramp on toLayer from all the way right of the screen to identity.
                    [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
                }

                transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
                [instructions addObject:transitionInstruction];
            }
        }

        videoComposition.instructions = instructions;
    }
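
For context, the image overlay itself is not part of the method above; with an AVMutableVideoComposition it is typically attached through AVVideoCompositionCoreAnimationTool. A minimal sketch of that step, where overlayImage and the layer names are placeholders rather than code from this project:

    // Attach a still-image overlay via Core Animation layers (overlayImage is a placeholder).
    CALayer *overlayLayer = [CALayer layer];
    overlayLayer.contents = (id)overlayImage.CGImage;
    overlayLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);

    CALayer *videoLayer  = [CALayer layer];
    CALayer *parentLayer = [CALayer layer];
    videoLayer.frame  = overlayLayer.frame;
    parentLayer.frame = overlayLayer.frame;
    [parentLayer addSublayer:videoLayer];   // video frames are rendered into this layer
    [parentLayer addSublayer:overlayLayer]; // the image sits on top of the video

    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool
        videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                 inLayer:parentLayer];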

Please help, as I am unable to export the portrait video in the proper orientation. Any help is appreciated. Thanks.

By default, when you export a video with AVAssetExportSession, the video comes out rotated relative to its original orientation. You have to apply the track's transform to set the exact orientation. You can try the code below to do that.

    - (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset forTrack:(AVMutableCompositionTrack *)inTrack atTime:(CMTime)inTime
    {
        //FIXING ORIENTATION//
        AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack];
        AVAssetTrack *videoAssetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
        BOOL isVideoAssetPortrait_ = NO;
        CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;

        if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
            videoAssetOrientation_ = UIImageOrientationRight;
            isVideoAssetPortrait_ = YES;
        }
        if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
            videoAssetOrientation_ = UIImageOrientationLeft;
            isVideoAssetPortrait_ = YES;
        }
        if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
            videoAssetOrientation_ = UIImageOrientationUp;
        }
        if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
            videoAssetOrientation_ = UIImageOrientationDown;
        }

        CGFloat FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.width;
        if (isVideoAssetPortrait_) {
            FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [videolayerInstruction setTransform:CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        } else {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
        }
        [videolayerInstruction setOpacity:0.0 atTime:inTime];

        return videolayerInstruction;
    }

I hope this helps.

    AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack];
    AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset
                                                                                                              forTrack:myLocalVideoTrack
                                                                                                                atTime:videoTotalDuration];

Above is the code that calls the method, where inAsset is your video asset, videoTotalDuration is your video's total duration as a CMTime, and mergeComposition is an object of the AVMutableComposition class.

Hope this helps.

Edit: This is not a callback method or event; you have to call it explicitly with the required parameters, as described above.
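
For completeness, the layer instruction returned above still has to be wrapped in an AVMutableVideoCompositionInstruction and attached to an AVMutableVideoComposition before export. A minimal sketch under that assumption (mainInstruction, mainComposition, the 30 fps frame duration and the 320x480 render size are illustrative, not part of the original answer):

    // Wrap the orientation-fixed layer instruction into a video composition.
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoTotalDuration);
    mainInstruction.layerInstructions = @[assetInstruction];

    AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
    mainComposition.instructions = @[mainInstruction];
    mainComposition.frameDuration = CMTimeMake(1, 30);        // 30 fps
    mainComposition.renderSize = CGSizeMake(320.0, 480.0);    // matches the 320-point scaling used above

    // Assign mainComposition to the export session's videoComposition property before exporting.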

If you just want to maintain the original rotation, here is a slightly simpler approach.

    // Grab the source track from AVURLAsset for example.
    AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject;
    // Grab the composition video track from AVMutableComposition you already made.
    AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject;
    // Apply the original transform.
    if (assetVideoTrack && compositionVideoTrack) {
        [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
    }
    // Export...
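
The "// Export..." line above is where the actual export would go; a minimal sketch, assuming composition is the AVMutableComposition from the snippet and outputURL is a file URL you have prepared yourself:

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                            presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL; // e.g. a path in the documents directory
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Export completed");
        } else {
            NSLog(@"Export failed: %@", exportSession.error);
        }
    }];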

Use the methods below to set the correct orientation on the AVMutableVideoComposition according to the video asset's orientation:

    - (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
    {
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVMutableComposition *composition = [AVMutableComposition composition];
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];

        CGSize videoSize = videoTrack.naturalSize;
        BOOL isPortrait_ = [self isVideoPortrait:asset];
        if (isPortrait_) {
            NSLog(@"video is portrait ");
            videoSize = CGSizeMake(videoSize.height, videoSize.width);
        }
        composition.naturalSize = videoSize;
        videoComposition.renderSize = videoSize;
        // videoComposition.renderSize = videoTrack.naturalSize;
        // videoComposition.frameDuration = CMTimeMakeWithSeconds( 1 / videoTrack.nominalFrameRate, 600);

        AVMutableCompositionTrack *compositionVideoTrack;
        compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

        AVMutableVideoCompositionLayerInstruction *layerInst;
        layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

        AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        inst.layerInstructions = [NSArray arrayWithObject:layerInst];
        videoComposition.instructions = [NSArray arrayWithObject:inst];

        return videoComposition;
    }

    - (BOOL)isVideoPortrait:(AVAsset *)asset
    {
        BOOL isPortrait = FALSE;
        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        if ([tracks count] > 0) {
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            CGAffineTransform t = videoTrack.preferredTransform;
            // Portrait
            if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
                isPortrait = YES;
            }
            // PortraitUpsideDown
            if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
                isPortrait = YES;
            }
            // LandscapeRight
            if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
                isPortrait = NO;
            }
            // LandscapeLeft
            if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
                isPortrait = NO;
            }
        }
        return isPortrait;
    }
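
As a usage note (asset and exportURL below are placeholders, not names from the answer): assign the returned video composition to the export session's videoComposition property so the orientation transform is actually applied during export.

    AVMutableVideoComposition *videoComposition = [self getVideoComposition:asset];

    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                      presetName:AVAssetExportPresetMediumQuality];
    session.videoComposition = videoComposition; // the orientation transform takes effect here
    session.outputURL = exportURL;
    session.outputFileType = AVFileTypeQuickTimeMovie;
    [session exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Export status: %ld, error: %@", (long)session.status, session.error);
    }];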

A Swift version of dizy's answer.. this is what works for me:

    var assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack
    var compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack

    if (assetVideoTrack.playable && compositionVideoTrack.playable) {
        compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform
    }

Swift 2:


    do {
        let paths = NSSearchPathForDirectoriesInDomains(
            NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
        let documentsDirectory: AnyObject = paths[0]
        // this will be changed to accommodate dynamic videos
        let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName + ".MOV")
        let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil)
        let imgGenerator = AVAssetImageGenerator(asset: videoAsset)
        imgGenerator.appliesPreferredTrackTransform = true
        let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
        let uiImage = UIImage(CGImage: cgImage)

        videoThumb.image = uiImage
    } catch let error as NSError {
        print("Error generating thumbnail: \(error)")
    }