Recording video with a real-time filter in Swift

I'm new to Swift and am trying to build a camera app that can apply a filter in real time and save the video with the filter applied.

So far I can preview the feed with the filter applied live, but when I save the video it comes out completely black.

    import UIKit
    import AVFoundation
    import AssetsLibrary
    import CoreMedia
    import Photos

    class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

        var captureSession: AVCaptureSession!

        @IBOutlet weak var previewView: UIView!
        @IBOutlet weak var recordButtton: UIButton!
        @IBOutlet weak var imageView: UIImageView!

        var assetWriter: AVAssetWriter?
        var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
        var isWriting = false
        var currentSampleTime: CMTime?
        var currentVideoDimensions: CMVideoDimensions?

        override func viewDidLoad() {
            super.viewDidLoad()
            FilterVendor.register()
            setupCaptureSession()
        }

        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
        }

        func setupCaptureSession() {
            let captureSession = AVCaptureSession()
            captureSession.sessionPreset = AVCaptureSessionPresetPhoto

            guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
                let input = try? AVCaptureDeviceInput(device: captureDevice) else {
                print("Can't access the camera")
                return
            }

            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
            }

            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            if captureSession.canAddOutput(videoOutput) {
                captureSession.addOutput(videoOutput)
            }

            let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            if((previewLayer) != nil) {
                view.layer.addSublayer(previewLayer!)
            }

            captureSession.startRunning()
        }

        @IBAction func record(_ sender: Any) {
            if isWriting {
                print("stop record")
                self.isWriting = false
                assetWriterPixelBufferInput = nil
                assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                    self.saveMovieToCameraRoll()
                })
            } else {
                print("start record")
                createWriter()
                assetWriter?.startWriting()
                assetWriter?.startSession(atSourceTime: currentSampleTime!)
                isWriting = true
            }
        }

        func saveMovieToCameraRoll() {
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
            }) { saved, error in
                if saved {
                    print("saved")
                }
            }
        }

        func movieURL() -> NSURL {
            let tempDir = NSTemporaryDirectory()
            let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
            return url! as NSURL
        }

        func checkForAndDeleteFile() {
            let fm = FileManager.default
            let url = movieURL()
            let exist = fm.fileExists(atPath: url.path!)

            if exist {
                do {
                    try fm.removeItem(at: url as URL)
                } catch let error as NSError {
                    print(error.localizedDescription)
                }
            }
        }

        func createWriter() {
            self.checkForAndDeleteFile()

            do {
                assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
            } catch let error as NSError {
                print(error.localizedDescription)
                return
            }

            let outputSettings = [
                AVVideoCodecKey : AVVideoCodecH264,
                AVVideoWidthKey : Int(currentVideoDimensions!.width),
                AVVideoHeightKey : Int(currentVideoDimensions!.height)
            ] as [String : Any]

            let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
            assetWriterVideoInput.expectsMediaDataInRealTime = true
            assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

            let sourcePixelBufferAttributesDictionary = [
                String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
                String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
                String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
                String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
            ] as [String : Any]

            assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

            if assetWriter!.canAdd(assetWriterVideoInput) {
                assetWriter!.add(assetWriterVideoInput)
            } else {
                print("no way\(assetWriterVideoInput)")
            }
        }

        func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
            autoreleasepool {
                connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

                guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

                let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

                let filter = CIFilter(name: "Fİlter")!
                filter.setValue(cameraImage, forKey: kCIInputImageKey)

                let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
                self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
                self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

                if self.isWriting {
                    if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                        var newPixelBuffer: CVPixelBuffer? = nil
                        CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                        let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)
                        if success == false {
                            print("Pixel Buffer failed")
                        }
                    }
                }

                DispatchQueue.main.async {
                    if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                        let filteredImage = UIImage(ciImage: outputValue)
                        self.imageView.image = filteredImage
                    }
                }
            }
        }
    }

I've added some comments to the key section below:

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {
            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            // COMMENT: This line makes sense - this is your pixelBuffer from the camera.
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

            // COMMENT: OK, so you turn pixelBuffer into a CIImage...
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            // COMMENT: And now you've created a CIImage with a filter instruction...
            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)

            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                    var newPixelBuffer: CVPixelBuffer? = nil

                    // COMMENT: And you grabbed memory from the pool.
                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame.
                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)
                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            // COMMENT: And now you're sending the filtered image back to the screen.
            DispatchQueue.main.async {
                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }

It looks like you're basically getting the source image from the camera, creating a filtered copy, and then making a brand-new pixel buffer that is empty and writing that out.

If you write out the pixelBuffer you grabbed instead of the new one you're creating, you'll write the image successfully.
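As a minimal sketch of that change (using only names from your code above), the writing block would append pixelBuffer directly. Note that this records the unfiltered frames, but it proves the writer path works:

    if self.isWriting {
        if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
            // Append the camera's own pixelBuffer instead of a freshly
            // allocated, never-filled one - this is what stops the black frames.
            let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)
            if success == false {
                print("Pixel Buffer failed")
            }
        }
    }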

What you need in order to successfully write out the filtered video is to create a new CVPixelBuffer from a CIImage; that solution already exists here on StackOverflow, and I know because I needed that step myself!
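In case it helps, here is a rough sketch of that step. It assumes you add a reusable context as a property on the view controller, for example let ciContext = CIContext() (creating a context per frame is expensive). It renders the filtered CIImage into the pooled buffer before appending, so the buffer is no longer empty:

    if self.isWriting {
        if let adaptor = self.assetWriterPixelBufferInput,
            adaptor.assetWriterInput.isReadyForMoreMediaData,
            let pool = adaptor.pixelBufferPool,
            let filteredImage = filter.value(forKey: kCIOutputImageKey) as? CIImage {

            var newPixelBuffer: CVPixelBuffer? = nil
            CVPixelBufferPoolCreatePixelBuffer(nil, pool, &newPixelBuffer)

            if let outputBuffer = newPixelBuffer {
                // ciContext is the assumed CIContext property created once, not
                // per frame. render(_:to:) draws the filtered image into the
                // pooled buffer so it no longer writes out black.
                self.ciContext.render(filteredImage, to: outputBuffer)

                if adaptor.append(outputBuffer, withPresentationTime: self.currentSampleTime!) == false {
                    print("Pixel Buffer failed")
                }
            }
        }
    }

The render(_:to:) call is the missing piece: it copies the filtered pixels into the buffer before it is appended, and everything else matches the code you already have.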