How do I compress a video in iOS using a bit rate?

I tried the code below to compress the video, but it doesn't work; it crashes with the following error:

    *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetReader startReading] cannot be called again after reading has already started'

    - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
    {
        // Handle movie capture
        NSURL *movieURL = [info objectForKey:UIImagePickerControllerMediaURL];
        NSData *data = [NSData dataWithContentsOfURL:movieURL];

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *tempPath = [documentsDirectory stringByAppendingFormat:@"/vid1.mp4"];

        BOOL success = [data writeToFile:tempPath atomically:NO];
        if (success) {
            NSLog(@"Video successfully written");
        } else {
            NSLog(@"Video writing failed");
        }

        NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:@"1234"] stringByAppendingString:@".mp4"]];

        // Compress movie first
        [self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL];
    }

    - (void)convertVideoToLowQuailtyWithInputURL:(NSURL *)inputURL outputURL:(NSURL *)outputURL
    {
        // setup video writer
        AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize videoSize = videoTrack.naturalSize;
        NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                        [NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
        NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             AVVideoCodecH264, AVVideoCodecKey,
                                             videoWriterCompressionSettings, AVVideoCompressionPropertiesKey,
                                             [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey,
                                             [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoWriterSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;
        videoWriterInput.transform = videoTrack.preferredTransform;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
        [videoWriter addInput:videoWriterInput];

        // setup video reader
        NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
        AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
        [videoReader addOutput:videoReaderOutput];

        // setup audio writer
        AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [videoWriter addInput:audioWriterInput];

        // setup audio reader
        AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
        [audioReader addOutput:audioReaderOutput];

        [videoWriter startWriting];

        // start writing from video reader
        [videoReader startReading];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
        [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
            while ([videoWriterInput isReadyForMoreMediaData]) {
                CMSampleBufferRef sampleBuffer;
                if ([videoReader status] == AVAssetReaderStatusReading &&
                    (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                    [videoWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    [videoWriterInput markAsFinished];
                    if ([videoReader status] == AVAssetReaderStatusCompleted) {
                        // This branch can execute more than once, so -startReading may be
                        // sent to a reader that has already started -- the crash above.
                        [audioReader startReading];
                        [videoWriter startSessionAtSourceTime:kCMTimeZero];
                        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                            while (audioWriterInput.readyForMoreMediaData) {
                                CMSampleBufferRef sampleBuffer;
                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                    [audioWriterInput appendSampleBuffer:sampleBuffer];
                                    CFRelease(sampleBuffer);
                                } else {
                                    [audioWriterInput markAsFinished];
                                    if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                        [videoWriter finishWritingWithCompletionHandler:^{
                                            NSLog(@"Output URL : %@", outputURL);
                                        }];
                                    }
                                }
                            }
                        }];
                    }
                }
            }
        }];
    }

You can use the following presets to control the quality of the compressed video (a minimal export sketch follows the list):

  • AVAssetExportPresetLowQuality
  • AVAssetExportPresetMediumQuality
  • AVAssetExportPresetHighestQuality
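
If you don't need any composition work, the preset-based export itself is only a few lines. Here is a minimal sketch of that core step (inputURL and outputURL are placeholder file URLs, not part of the original answer); the full answer code follows below:

    // Minimal sketch: compress by re-exporting the asset with a quality preset.
    // `inputURL` and `outputURL` are assumed to be valid file URLs.
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                      presetName:AVAssetExportPresetMediumQuality];
    exporter.outputURL = outputURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Compressed video written to %@", outputURL);
        } else {
            NSLog(@"Export failed: %@", exporter.error);
        }
    }];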

Code:

    - (void)CompressVideo
    {
        if (firstAsset != nil) {
            // Create an AVMutableComposition object. It will hold our multiple AVMutableCompositionTracks.
            AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
            // http://stackoverflow.com/questions/22715881/merge-video-files-with-their-original-audio-in-ios

            // VIDEO TRACK
            AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
            [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                 atTime:kCMTimeZero
                                  error:nil];

            // AUDIO TRACK
            NSArray *arr = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                ofTrack:[arr lastObject]
                                 atTime:kCMTimeZero
                                  error:nil];

            AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

            // FIXING ORIENTATION
            AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
            AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
            BOOL isFirstAssetPortrait_ = NO;
            CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
            if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
                FirstAssetOrientation_ = UIImageOrientationRight;
                isFirstAssetPortrait_ = YES;
            }
            if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
                FirstAssetOrientation_ = UIImageOrientationLeft;
                isFirstAssetPortrait_ = YES;
            }
            if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
                FirstAssetOrientation_ = UIImageOrientationUp;
            }
            if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
                FirstAssetOrientation_ = UIImageOrientationDown;
            }
            CGFloat FirstAssetScaleToFitRatio = VideoWidth / FirstAssetTrack.naturalSize.width;
            if (isFirstAssetPortrait_) {
                FirstAssetScaleToFitRatio = VideoWidth / FirstAssetTrack.naturalSize.height;
                CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
                [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor)
                                             atTime:kCMTimeZero];
            } else {
                CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
                [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160))
                                             atTime:kCMTimeZero];
            }
            [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
            MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];

            AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
            MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
            MainCompositionInst.frameDuration = CMTimeMake(1, 30);
            // MainCompositionInst.renderSize = CGSizeMake(VideoWidth, 900);
            MainCompositionInst.renderSize = CGSizeMake(VideoWidth, [UIScreen mainScreen].bounds.size.height);

            NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            NSString *documentsDirectory = [paths objectAtIndex:0];
            NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"CompressedVideo.mov"];
            NSLog(@"myPath Docs : %@", myPathDocs);

            NSURL *url = [NSURL fileURLWithPath:myPathDocs];
            if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
                NSError *error;
                [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:&error];
            }

            // Movie quality
            AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                              presetName:AVAssetExportPresetMediumQuality];
            exporter.outputURL = url;
            // Movie type
            exporter.outputFileType = AVFileTypeQuickTimeMovie;
            exporter.videoComposition = MainCompositionInst;
            exporter.shouldOptimizeForNetworkUse = YES;
            [exporter exportAsynchronouslyWithCompletionHandler:^{
                dispatch_async(dispatch_get_main_queue(), ^{
                    videoUrToUload = url;
                    [self exportDidFinish:exporter];
                });
            }];
        }
    }

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        if (session.status == AVAssetExportSessionStatusCompleted) {
            // Store the URL somewhere using session.outputURL
        }
    }
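
Note that exportDidFinish: above only handles the completed case. As a sketch (not part of the original answer), a fuller handler would also report failures through the session's error property:

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        switch (session.status) {
            case AVAssetExportSessionStatusCompleted:
                // Store the URL somewhere using session.outputURL
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", session.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export cancelled");
                break;
            default:
                break;
        }
    }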

I ran into the same crash, but after a few changes this method worked for me. Just replace your method above with this one. The key change is guarding the call to [audioReader startReading]: the else branch of the video-writing block can execute more than once, so the original code could call startReading on a reader that had already started reading, which is exactly what the exception reports. Checking the audio reader's status first, and breaking out of the loop once writing finishes, avoids the repeated call.

    - (void)convertVideoToLowQuailtyWithInputURL:(NSURL *)inputURL outputURL:(NSURL *)outputURL
    {
        // setup video writer
        AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize videoSize = videoTrack.naturalSize;
        NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                        [NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
        NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             AVVideoCodecH264, AVVideoCodecKey,
                                             videoWriterCompressionSettings, AVVideoCompressionPropertiesKey,
                                             [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey,
                                             [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoWriterSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;
        videoWriterInput.transform = videoTrack.preferredTransform;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
        [videoWriter addInput:videoWriterInput];

        // setup video reader
        NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
        AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
        [videoReader addOutput:videoReaderOutput];

        // setup audio writer
        AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [videoWriter addInput:audioWriterInput];

        // setup audio reader
        AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
        [audioReader addOutput:audioReaderOutput];

        [videoWriter startWriting];

        // start writing from video reader
        [videoReader startReading];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
        [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
            while ([videoWriterInput isReadyForMoreMediaData]) {
                CMSampleBufferRef sampleBuffer;
                if ([videoReader status] == AVAssetReaderStatusReading &&
                    (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                    [videoWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    [videoWriterInput markAsFinished];
                    if ([videoReader status] == AVAssetReaderStatusCompleted) {
                        if ([audioReader status] == AVAssetReaderStatusReading ||
                            [audioReader status] == AVAssetReaderStatusCompleted) {
                            // the audio reader has already been started -- don't start it again
                        } else {
                            // start writing from audio reader
                            [audioReader startReading];
                            [videoWriter startSessionAtSourceTime:kCMTimeZero];
                            dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                            [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                                while (audioWriterInput.readyForMoreMediaData) {
                                    CMSampleBufferRef sampleBuffer;
                                    if ([audioReader status] == AVAssetReaderStatusReading &&
                                        (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                        [audioWriterInput appendSampleBuffer:sampleBuffer];
                                        CFRelease(sampleBuffer);
                                    } else {
                                        [audioWriterInput markAsFinished];
                                        if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                            [videoWriter finishWritingWithCompletionHandler:^{
                                                // [self sendMovieFileAtURL:outputURL];
                                                NSString *moviePath = [outputURL path];
                                                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(moviePath)) {
                                                    UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self,
                                                        @selector(video:didFinishSavingWithError:contextInfo:), nil);
                                                }
                                            }];
                                            break;
                                        }
                                    }
                                }
                            }];
                        }
                    }
                }
            }
        }];
    }
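
Since the completion handler above saves the file with UISaveVideoAtPathToSavedPhotosAlbum and passes @selector(video:didFinishSavingWithError:contextInfo:), the same class must implement that callback. The original snippet omits it; a minimal implementation would be:

    // Callback invoked by UISaveVideoAtPathToSavedPhotosAlbum when saving finishes.
    - (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
    {
        if (error) {
            NSLog(@"Saving to the photos album failed: %@", error);
        } else {
            NSLog(@"Compressed video saved to the photos album: %@", videoPath);
        }
    }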