How do I send video from an iOS device to a server?

I have to send video from an iPhone to a server in real time. I create a capture session and use AVCaptureMovieFileOutput.

    NSError *error = nil;
    captureSession = [[AVCaptureSession alloc] init];

    // find and attach devices
    AVCaptureDevice *muxedDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed];
    if (muxedDevice) {
        NSLog(@"got muxedDevice");
        AVCaptureDeviceInput *muxedInput = [AVCaptureDeviceInput deviceInputWithDevice:muxedDevice error:&error];
        if (muxedInput) {
            [captureSession addInput:muxedInput];
        }
    } else {
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        if (videoDevice) {
            NSLog(@"got videoDevice");
            AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
            if (videoInput) {
                [captureSession addInput:videoInput];
            }
        }
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        if (audioDevice) {
            NSLog(@"got audioDevice");
            AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
            if (audioInput) {
                [captureSession addInput:audioInput];
            }
        }
    }

    // create a preview layer from the session and add it to the UI
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
    previewLayer.frame = view.layer.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    previewLayer.orientation = AVCaptureVideoOrientationPortrait;
    [view.layer addSublayer:previewLayer];

    // create the capture file output
    captureMovieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if (!captureMovieURL) {
        captureMoviePath = [[self getMoviePathWithName:MOVIE_FILE_NAME] retain];
        captureMovieURL = [[NSURL alloc] initFileURLWithPath:captureMoviePath];
    }
    NSLog(@"recording to %@", captureMovieURL);
    [captureSession addOutput:captureMovieOutput];

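Note that the snippet above only wires the session together; nothing actually starts the recording. A minimal sketch of that missing step, assuming the class adopts AVCaptureFileOutputRecordingDelegate, could look like this:

    // Start the session, then begin writing to the movie URL.
    [captureSession startRunning];
    [captureMovieOutput startRecordingToOutputFileURL:captureMovieURL
                                    recordingDelegate:self];

    // AVCaptureFileOutputRecordingDelegate callback, invoked when
    // recording stops or fails.
    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput
    didFinishRecordingToOutputFileURL:(NSURL *)outputFileURL
          fromConnections:(NSArray *)connections
                    error:(NSError *)error
    {
        if (error) {
            NSLog(@"recording failed: %@", [error localizedDescription]);
        }
    }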

I use AVAssetExportSession to cut out segments with a duration of 10 seconds.

    // AVURLAssetPreferPreciseDurationAndTimingKey expects an NSNumber,
    // not the string @"YES"
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:captureMovieURL
                                            options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                                                 forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
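Since asking for precise duration and timing makes loading the asset more expensive, the duration key can be loaded asynchronously before trimming so the main thread is not blocked. A minimal sketch:

    // Load the (precise) duration off the main thread before trimming.
    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"duration"]
                         completionHandler:^{
        NSError *loadError = nil;
        AVKeyValueStatus status = [asset statusOfValueForKey:@"duration" error:&loadError];
        if (status == AVKeyValueStatusLoaded) {
            // safe to read asset.duration and build the composition here
        } else {
            NSLog(@"duration not loaded: %@", [loadError localizedDescription]);
        }
    }];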

    AVMutableComposition *composition = [AVMutableComposition composition];
    CMTime endTime;
    // 6000/600 = 10 seconds; this assumes the movie uses a 600 timescale
    if (asset.duration.value - startFragment.value < 6000) {
        // less than 10 seconds left: take everything up to the end
        endTime = asset.duration;
    } else {
        endTime = CMTimeMake(startFragment.value + 6000, 600);
    }
    // use the actual distance to endTime rather than a fixed 10 s duration,
    // so the final, shorter segment does not run past the end of the asset
    CMTimeRange editRange = CMTimeRangeMake(startFragment, CMTimeSubtract(endTime, startFragment));
    startFragment = endTime;
    NSError *editError = nil;

    // and add it into the composition
    [composition insertTimeRange:editRange ofAsset:asset atTime:composition.duration error:&editError];

    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition
                                                                            presetName:AVAssetExportPresetPassthrough];
    exportSession.shouldOptimizeForNetworkUse = YES;
    NSString *name = [NSString stringWithFormat:MOVUE_SEGMENT_NAME, countMovies];
    // build the file URL directly instead of string-concatenating
    // "file://localhost" onto the path
    NSURL *url = [NSURL fileURLWithPath:[self getMoviePathWithName:name]];
    NSLog(@"urlsegment = %@", url);
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.outputURL = url;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            countMovies++;
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusFailed: %@", [exportSession.error localizedDescription]);
        } else {
            NSLog(@"Export Session Status: %d", exportSession.status);
        }
    }];

If the export session status is completed, I send the video to the server. But it is slow: producing a 10-second movie and sending it to the server takes about 15 seconds, and it is no better when the movie is shorter than 10 seconds. How can I fix this? What is the best way to do it, and what is the best approach for streaming video to a server?
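For reference, the upload step I have in mind is roughly the following sketch, using NSURLConnection; the URL http://example.com/upload is a placeholder for the real endpoint:

    // Minimal upload sketch: POST the exported segment as the request body.
    NSData *movieData = [NSData dataWithContentsOfURL:url];
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:
                                    [NSURL URLWithString:@"http://example.com/upload"]];
    [request setHTTPMethod:@"POST"];
    [request setValue:@"video/mp4" forHTTPHeaderField:@"Content-Type"];
    [request setHTTPBody:movieData];
    [NSURLConnection sendAsynchronousRequest:request
                                       queue:[NSOperationQueue mainQueue]
                           completionHandler:^(NSURLResponse *response, NSData *data, NSError *connectionError) {
        if (connectionError) {
            NSLog(@"upload failed: %@", [connectionError localizedDescription]);
        }
    }];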

Encoding the media with ffmpeg may well perform better than AVAssetExportSession, but ffmpeg encoding is much harder to set up than AVAssetExportSession.