使用AVFoundation捕获当前相机图像

当我按“myButton”时,我正在尝试捕获图像并将其保存到变量中。 我该怎么办?

我的代码如下:

import UIKit
import AVFoundation
import MobileCoreServices

/// Shows a live camera preview (back camera) and captures a still JPEG
/// into `capturedImage` when the button is pressed.
class ViewController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?
    // Stored property so shotPress can reach the output.
    // (In the original this was declared after a stray `})` at class scope,
    // where executable configuration statements are illegal.)
    let stillImageOutput = AVCaptureStillImageOutput()
    // Most recent captured frame — "save image to variable".
    var capturedImage: UIImage?

    @IBOutlet var myTap: UITapGestureRecognizer!
    @IBOutlet weak var myButton: UIButton!

    @IBAction func shotPress(sender: UIButton) {
        // Grab a still frame from the video connection and keep it in `capturedImage`.
        if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) { sampleBuffer, error in
                if sampleBuffer == nil {
                    println("capture error: \(error?.localizedDescription)")
                    return
                }
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                self.capturedImage = UIImage(data: imageData)
            }
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Configure and attach the still-image output exactly once;
        // adding the same output twice raises an exception.
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }

        // Pick the back-facing video camera.
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.Back {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break // found the device; no need to keep scanning
                }
            }
        }
    }

    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        if let device = captureDevice {
            // lockForConfiguration is required before mutating device settings.
            if device.lockForConfiguration(nil) {
                device.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
                device.unlockForConfiguration()
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)
        // Preview layer would otherwise cover the button.
        self.view.bringSubviewToFront(myButton)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }
}

下面是我的一个应用程序中实际使用的代码,应该对你的问题有所帮助。

 func capturePicture(){ println("Capturing image") stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG] captureSession.addOutput(stillImageOutput) if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo){ stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) var dataProvider = CGDataProviderCreateWithCFData(imageData) var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right) var imageView = UIImageView(image: image) imageView.frame = CGRect(x:0, y:0, width:self.screenSize.width, height:self.screenSize.height) //Show the captured image to self.view.addSubview(imageView) //Save the captured preview to image UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil) }) } } 

针对Swift 3进行了更新:

  var stillImageOutput = AVCaptureStillImageOutput.init() stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG] self.cameraSession.addOutput(stillImageOutput) if let videoConnection = stillImageOutput.connection(withMediaType:AVMediaTypeVideo){ stillImageOutput.captureStillImageAsynchronously(from:videoConnection, completionHandler: { (sampleBuffer, error) in var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) var dataProvider = CGDataProvider.init(data: imageData as! CFData) var cgImageRef = CGImage.init(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent) var image = UIImage.init(cgImage: cgImageRef!, scale: 1.0, orientation: .right) // do something with image }) }