AVCapture Session To Capture Image SWIFT
I created an AVCaptureSession to capture video output and display it to the user through a UIView. Now I want to be able to tap a button (the takePhoto method) and display an image from the session in a UIImageView. I tried iterating through each device connection and saving the output, but that didn't work. My code is below:
    import UIKit
    import AVFoundation

    class ViewController: UIViewController {

        let captureSession = AVCaptureSession()
        var stillImageOutput: AVCaptureStillImageOutput!

        @IBOutlet var imageView: UIImageView!
        @IBOutlet var cameraView: UIView!

        // If we find a device we'll store it here for later use
        var captureDevice: AVCaptureDevice?

        override func viewDidLoad() {
            // Do any additional setup after loading the view, typically from a nib.
            super.viewDidLoad()
            println("I AM AT THE CAMERA")
            captureSession.sessionPreset = AVCaptureSessionPresetLow
            self.captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
            if captureDevice != nil {
                beginSession()
            }
        }

        func beginSession() {
            self.stillImageOutput = AVCaptureStillImageOutput()
            self.captureSession.addOutput(self.stillImageOutput)
            var err: NSError? = nil
            self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &err))
            if err != nil {
                println("error: \(err?.localizedDescription)")
            }
            var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            self.cameraView.layer.addSublayer(previewLayer)
            previewLayer?.frame = self.cameraView.layer.frame
            captureSession.startRunning()
        }

        @IBAction func takePhoto(sender: UIButton) {
            self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)) { (buffer: CMSampleBuffer!, error: NSError!) -> Void in
                let image = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                let data_image = UIImage(data: image)
                self.imageView.image = data_image
            }
        }
    }
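One detail worth flagging in takePhoto: the documentation for captureStillImageAsynchronouslyFromConnection does not promise that the completion handler runs on the main queue, so the UIImageView update is safer wrapped in a dispatch back to the main queue. A minimal sketch of that handler, reusing the property names from the code above:

    self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)) { (buffer: CMSampleBuffer!, error: NSError!) -> Void in
        if buffer == nil {
            // Capture failed; inspect the error instead of decoding a nil buffer
            println("capture failed: \(error?.localizedDescription)")
            return
        }
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
        dispatch_async(dispatch_get_main_queue(), {
            // UIKit views should only be touched on the main queue
            self.imageView.image = UIImage(data: imageData)
        })
    }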
You should try adding the inputs and outputs to the session on a new thread, before starting the session. In Apple's documentation they state:
Important: The startRunning method is a blocking call which can take some time, therefore you should perform session setup on a serial queue so that the main queue isn't blocked (which keeps the UI responsive). See AVCam for iOS for the canonical implementation example.
Try using dispatch in your session-creation method, like this:
    var err: NSError? = nil   // collects any error from AVCaptureDeviceInput

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), { // 1: configure the session off the main thread
        self.captureSession.addOutput(self.stillImageOutput)
        self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &err))
        self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        previewLayer?.frame = self.cameraView.layer.bounds
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        dispatch_async(dispatch_get_main_queue(), { // 2: hop back to the main queue for UI work
            // 3: attach the preview layer and start the session
            self.cameraView.layer.addSublayer(previewLayer)
            self.captureSession.startRunning()
        })
    })
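Note that the snippet above configures the session on a high-priority global (concurrent) queue, while the passage quoted from Apple specifically recommends a serial queue, which also keeps any later reconfiguration from racing with startRunning. A minimal sketch of the same setup on a dedicated serial queue (the queue label com.example.sessionQueue is arbitrary, and the properties are the ones from the code above):

    // Passing nil attributes to dispatch_queue_create yields a serial queue
    let sessionQueue = dispatch_queue_create("com.example.sessionQueue", nil)

    dispatch_async(sessionQueue, {
        // Configure inputs/outputs and make the blocking startRunning call off the main thread
        var err: NSError? = nil
        self.captureSession.addOutput(self.stillImageOutput)
        self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &err))
        self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        self.captureSession.startRunning()
        dispatch_async(dispatch_get_main_queue(), {
            // Creating and attaching the preview layer is UI work, so it goes back to the main queue
            let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            previewLayer?.frame = self.cameraView.layer.bounds
            previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
            self.cameraView.layer.addSublayer(previewLayer)
        })
    })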