Cropping video and fixing its orientation in iOS

I'm capturing video with UIImagePickerController, and I can crop it with the code below:

    AVAsset *asset = [AVAsset assetWithURL:url];

    //create an AVAssetTrack with our asset
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    //create a video composition and preset some settings
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);

    //here we are setting its render size to its height x height (square)
    videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);

    //create a video instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));

    AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];

    //here we shift the viewing square up to the TOP of the video so we only see the top
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -20);

    //use this instead if you want the viewing square to be in the middle of the video
    //CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) / 2);

    //make sure the square is portrait
    CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);

    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];

    //add the transformer layer instruction, then add it to the video composition
    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    //create an export path to store the cropped video
    NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
    NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];

    //remove any previous video at that path
    [[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];

    //export
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
    exporter.videoComposition = videoComposition;
    exporter.outputURL = exportUrl;
    exporter.outputFileType = AVFileTypeMPEG4;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            //called when finished
            [self exportDidFinish:exporter];
        });
    }];

But I don't know how to fix the orientation problem. Like the Instagram and Vine apps, even if I capture the video in landscape mode, it should end up in portrait and be cropped to a square. Please give me a solution… I'm struggling with this problem…

I think the source code comes from this link (project code included):

http://www.one-dreamer.com/cropping-video-square-like-vine-instagram-xcode/

You first need to determine the REAL video orientation:

    - (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
    {
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize size = [videoTrack naturalSize];
        CGAffineTransform txf = [videoTrack preferredTransform];

        if (size.width == txf.tx && size.height == txf.ty)
            return UIImageOrientationLeft;  //return UIInterfaceOrientationLandscapeLeft;
        else if (txf.tx == 0 && txf.ty == 0)
            return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
        else if (txf.tx == 0 && txf.ty == size.width)
            return UIImageOrientationDown;  //return UIInterfaceOrientationPortraitUpsideDown;
        else
            return UIImageOrientationUp;    //return UIInterfaceOrientationPortrait;
    }

I wrote this function so that it returns the correct orientation, just as it would for an image.
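
As a quick illustration, calling it could look like this; `videoUrl` is just a placeholder for wherever the recorded movie lives, and the mapping back to "portrait" follows the comments inside the function above:

    // hypothetical usage sketch (videoUrl is a placeholder, not part of the original answer)
    AVAsset *asset = [AVAsset assetWithURL:videoUrl];
    UIImageOrientation orientation = [self getVideoOrientationFromAsset:asset];
    if (orientation == UIImageOrientationUp) {
        // per the mapping above, UIImageOrientationUp corresponds to UIInterfaceOrientationPortrait
        NSLog(@"video was recorded in portrait");
    }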

Then I modified the function to fix the orientation correctly and to support any crop area, not just a square, like this:

    // apply the crop to the passed video asset (pass a nil outputUrl to avoid saving to disk). Returns the exporter session object
    - (AVAssetExportSession *)applyCropToVideoWithAsset:(AVAsset *)asset
                                                 AtRect:(CGRect)cropRect
                                            OnTimeRange:(CMTimeRange)cropTimeRange
                                            ExportToUrl:(NSURL *)outputUrl
                                  ExistingExportSession:(AVAssetExportSession *)exporter
                                         WithCompletion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion
    {
        //create an AVAssetTrack with our asset
        AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        //create a video composition and preset some settings
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.frameDuration = CMTimeMake(1, 30);

        CGFloat cropOffX = cropRect.origin.x;
        CGFloat cropOffY = cropRect.origin.y;
        CGFloat cropWidth = cropRect.size.width;
        CGFloat cropHeight = cropRect.size.height;

        videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);

        //create a video instruction
        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = cropTimeRange;

        AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];

        UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];

        CGAffineTransform t1 = CGAffineTransformIdentity;
        CGAffineTransform t2 = CGAffineTransformIdentity;

        switch (videoOrientation) {
            case UIImageOrientationUp:
                t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
                t2 = CGAffineTransformRotate(t1, M_PI_2);
                break;
            case UIImageOrientationDown:
                t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: naturalSize.width is the real height when the video is upside down
                t2 = CGAffineTransformRotate(t1, -M_PI_2);
                break;
            case UIImageOrientationRight:
                t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
                t2 = CGAffineTransformRotate(t1, 0);
                break;
            case UIImageOrientationLeft:
                t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
                t2 = CGAffineTransformRotate(t1, M_PI);
                break;
            default:
                NSLog(@"no supported orientation has been found in this video");
                break;
        }

        CGAffineTransform finalTransform = t2;
        [transformer setTransform:finalTransform atTime:kCMTimeZero];

        //add the transformer layer instruction, then add it to the video composition
        instruction.layerInstructions = [NSArray arrayWithObject:transformer];
        videoComposition.instructions = [NSArray arrayWithObject:instruction];

        //remove any previous video at that path
        [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];

        if (!exporter) {
            exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
        }

        // assign all instructions for the video processing (in this case the transform that crops the video)
        exporter.videoComposition = videoComposition;
        //exporter.outputFileType = AVFileTypeQuickTimeMovie;

        if (outputUrl) {
            exporter.outputURL = outputUrl;
            [exporter exportAsynchronouslyWithCompletionHandler:^{
                switch ([exporter status]) {
                    case AVAssetExportSessionStatusFailed:
                        NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                        if (completion) {
                            dispatch_async(dispatch_get_main_queue(), ^{
                                completion(NO, [exporter error], nil);
                            });
                            return;
                        }
                        break;
                    case AVAssetExportSessionStatusCancelled:
                        NSLog(@"crop Export canceled");
                        if (completion) {
                            dispatch_async(dispatch_get_main_queue(), ^{
                                completion(NO, nil, nil);
                            });
                            return;
                        }
                        break;
                    default:
                        break;
                }

                if (completion) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        completion(YES, nil, outputUrl);
                    });
                }
            }];
        }

        return exporter;
    }
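
For reference, here is a rough sketch of how this method might be called; `videoUrl` is a placeholder, and passing a pre-configured export session is just one way to make sure the output file type is set before exporting:

    // hypothetical usage sketch: crop a 300x300 square from the top-left corner of the video
    AVAsset *asset = [AVAsset assetWithURL:videoUrl]; // videoUrl is a placeholder
    NSURL *outputUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mov"]];

    // pre-configure the export session so its output file type is already set
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [self applyCropToVideoWithAsset:asset
                             AtRect:CGRectMake(0, 0, 300, 300)
                        OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                        ExportToUrl:outputUrl
              ExistingExportSession:exporter
                     WithCompletion:^(BOOL success, NSError *error, NSURL *croppedUrl) {
                         NSLog(@"crop finished: %d, %@", success, croppedUrl);
                     }];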

Tested with both the normal and the front-facing camera, in all recorded video orientations (Up, Down, Landscape R, Landscape L). I tested on an iPhone 5S (iOS 8.1) and an iPhone 6 Plus (iOS 8.1).

Hope this helps.

I know this question is old, but some people may still be wondering why some camera-roll videos are zoomed in after cropping. I ran into this problem and realized that the cropRect I was using as a frame was not scaled for the video's different aspect ratio. To fix it, I simply added the code below, which crops the very top of the video into a square. If you want to change the position, just change the y value, but be sure to scale it according to the video. Luca Iaco provided some great code to start from. I appreciate it!

    CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];

    float scaleFactor;
    if (videoSize.width > videoSize.height) {
        scaleFactor = videoSize.height / 320;
    }
    else if (videoSize.width == videoSize.height) {
        scaleFactor = videoSize.height / 320;
    }
    else {
        scaleFactor = videoSize.width / 320;
    }

    CGFloat cropOffX = 0;
    CGFloat cropOffY = 0;
    CGFloat cropWidth = 320 * scaleFactor;
    CGFloat cropHeight = 320 * scaleFactor;
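
If you combine this with the `applyCropToVideoWithAsset:` method from the earlier answer, these values simply become its cropRect parameter; a small sketch, assuming `asset` and `outputUrl` are set up as in the examples above:

    // hypothetical follow-up: feed the scaled square into the crop method shown earlier
    CGRect cropRect = CGRectMake(cropOffX, cropOffY, cropWidth, cropHeight);
    [self applyCropToVideoWithAsset:asset
                             AtRect:cropRect
                        OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                        ExportToUrl:outputUrl
              ExistingExportSession:nil
                     WithCompletion:nil];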

Here is my code for creating a Vine-like video from a video on disk. It's written in Swift:

    static let MaxDuration: CMTimeValue = 12

    class func compressVideoAsset(_ asset: AVAsset, output: URL, completion: @escaping (_ data: Data?) -> Void) {
        let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)!
        session.videoComposition = self.squareVideoCompositionForAsset(asset)
        session.outputURL = output
        session.outputFileType = AVFileTypeMPEG4
        session.shouldOptimizeForNetworkUse = true
        session.canPerformMultiplePassesOverSourceMediaData = true

        let duration = CMTimeValue(CGFloat(asset.duration.value) / CGFloat(asset.duration.timescale) * 30)
        session.timeRange = CMTimeRange(start: kCMTimeZero,
                                        duration: CMTime(value: min(duration, VideoCompressor.MaxDuration * 30), timescale: 30))

        session.exportAsynchronously(completionHandler: { () -> Void in
            let data = try? Data(contentsOf: output)
            DispatchQueue.main.async(execute: { () -> Void in
                completion(data)
            })
        })
    }

    private class func squareVideoCompositionForAsset(_ asset: AVAsset) -> AVVideoComposition {
        let track = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
        let length = min(track.naturalSize.width, track.naturalSize.height)

        var transform = track.preferredTransform
        let size = track.naturalSize
        let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1 // check for inversion
        transform = transform.translatedBy(x: scale * -(size.width - length) / 2, y: scale * -(size.height - length) / 2)

        let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        transformer.setTransform(transform, at: kCMTimeZero)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimePositiveInfinity)
        instruction.layerInstructions = [transformer]

        let composition = AVMutableVideoComposition()
        composition.frameDuration = CMTime(value: 1, timescale: 30)
        composition.renderSize = CGSize(width: length, height: length)
        composition.instructions = [instruction]

        return composition
    }