Screen capture during video preview fails
I'm trying to capture the screen while a video preview is running with AVFoundation (AVCaptureDeviceInput and AVCaptureVideoDataOutput).
Starting the preview:
func startCamera() {
    var screenSize = UIScreen.mainScreen().bounds.size
    self.previewView = UIView(frame: CGRectMake(0, 0, UIScreen.mainScreen().bounds.size.width, UIScreen.mainScreen().bounds.size.height))
    self.previewView.contentMode = UIViewContentMode.ScaleAspectFit
    self.view.addSubview(previewView)
    session.sessionPreset = AVCaptureSessionPresetHigh
    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the back camera
            if (device.position == AVCaptureDevicePosition.Back) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break
                }
            }
        }
    }
}
func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err)
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }
    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput {
        var rgbOutputSettings = [NSNumber(integer: kCMPixelFormat_32BGRA): kCVPixelBufferPixelFormatTypeKey]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true
        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
            var rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)
            session.startRunning()
            delay(8, closure: { () -> () in
                self.processImage()
            })
        }
    }
}
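The delay helper used above is not shown in the question; presumably it is just a small dispatch_after wrapper along these lines (a sketch only, in the same Swift 1.x-era GCD style as the rest of the code):

func delay(delay: Double, closure: () -> ()) {
    // Run the closure on the main queue after the given number of seconds
    dispatch_after(
        dispatch_time(DISPATCH_TIME_NOW, Int64(delay * Double(NSEC_PER_SEC))),
        dispatch_get_main_queue(),
        closure)
}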
The screen-capture code:
func processImage() {
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, false, 0)
    previewLayer!.renderInContext(UIGraphicsGetCurrentContext())
    // tried previewView!.layer.render... to no avail
    let previewImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    savePDFImage(previewImage, name: "front.pdf")
}
The returned image is completely white. How can I capture what is on screen while the video preview is running?
Don't take a screenshot. The preview layer's video content isn't drawn through Core Graphics, so renderInContext has nothing to render and you get a blank image. Instead, grab a frame from the sample buffer and use that.
Implement AVCaptureVideoDataOutputSampleBufferDelegate.
On the AVCaptureVideoDataOutput, call setSampleBufferDelegate.
Implement the captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) method.
Play the shutter sound yourself when you store the image to the device.
In the end, your code will look more like this:
var videoDataOutput: AVCaptureVideoDataOutput?
var videoDataOutputQueue: dispatch_queue_t = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
var stillImageOutput: AVCaptureStillImageOutput?
var previewLayer: AVCaptureVideoPreviewLayer?
var captureDevice: AVCaptureDevice?
let session = AVCaptureSession()
func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err)
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }
    stillImageOutput = AVCaptureStillImageOutput()
    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput, stillImageOutput = stillImageOutput {
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(videoDataOutput) {
            session.addOutput(videoDataOutput)
        }
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }
        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true
        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
            var rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)
            session.startRunning()
        }
    }
}
// This gets called periodically with an image
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if let image = CheckResponse.imageFromSampleBuffer(sampleBuffer) {
        if keepImage(image) {
            AudioServicesPlaySystemSound(1108)
            session.stopRunning()
        }
    }
}
// This is in the Objective-C CheckResponse class to get an image from the buffer:
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef pb = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciimg = [CIImage imageWithCVPixelBuffer:pb];
    // show result
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef ref = [context createCGImage:ciimg fromRect:ciimg.extent];
    UIImage *image = [UIImage imageWithCGImage:ref scale:1.0 orientation:UIImageOrientationUp];
    CFRelease(ref);
    return image;
}
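If you would rather stay in Swift instead of calling into the Objective-C CheckResponse class, the same buffer-to-image conversion can be sketched like this (written in current Swift syntax, so it needs minor adjustments to sit next to the older Swift code above; in real code the CIContext should be created once and reused rather than per frame):

import UIKit
import CoreMedia
import CoreImage

func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
    // Pull the pixel buffer that backs this video frame
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    // Render the CIImage into a CGImage, then wrap it as a UIImage
    let context = CIContext()
    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}

Also note that captureOutput(_:didOutputSampleBuffer:fromConnection:) is delivered on the queue you pass to setSampleBufferDelegate (videoDataOutputQueue here), not the main queue, so dispatch back to the main queue before touching any UI with the captured image.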