GPUImage - Focusing and Exposure on tap does not work properly - Missing something?
I have an initial project that uses AVFoundation to set up the camera, and it works great. Now I need to convert the camera mechanism to GPUImage. I use the same focus and exposure method in both projects (it works great in the AVFoundation project), but in the GPUImage project it does not focus properly and is always wrong.
Don't mind the filter that's applied; it's the same with every filter.
Sample: in the top-right corner of the screen you can see the little sheep. That's how it focuses + exposes.
Setting up the GPU:
stillCamera = GPUImageStillCamera(sessionPreset: AVCaptureSessionPreset640x480, cameraPosition: .Front)
CorrectPosition = AVCaptureDevicePosition.Front
stillCamera!.outputImageOrientation = .Portrait;
stillCamera?.horizontallyMirrorFrontFacingCamera = true
filter = GPUImageFilter()
stillCamera?.addTarget(filter)
filter?.addTarget(self.view as! GPUImageView)
(self.view as! GPUImageView).fillMode = GPUImageFillModeType.init(2) // 2 == kGPUImageFillModePreserveAspectRatioAndFill
The touchesBegan method:
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    var tap: CGPoint!
    if let touch = touches.first as UITouch! {
        tap = touch.locationInView(self.view)
    }
    let device: AVCaptureDevice! = self.stillCamera?.inputCamera!
    do {
        try device.lockForConfiguration()
        if device.focusPointOfInterestSupported && device.isFocusModeSupported(AVCaptureFocusMode.AutoFocus) {
            device.focusMode = AVCaptureFocusMode.AutoFocus
            device.focusPointOfInterest = tap
        }
        if device.exposurePointOfInterestSupported && device.isExposureModeSupported(AVCaptureExposureMode.AutoExpose) {
            device.exposurePointOfInterest = tap
            device.exposureMode = AVCaptureExposureMode.AutoExpose
        }
        device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
        device.unlockForConfiguration()
    } catch let error as NSError {
        print(error)
    } catch {
        fatalError()
    }
}
Any ideas?
The problem you're probably having is that the x and y of device.focusPointOfInterest need to be in the [0;1] range, where the point (0,0) is the camera's bottom-left corner and (1,1) is the top-right corner, while you are passing the tap coordinates in your view's frame coordinate system.
The only thing you need to do is convert the tap coordinates to the camera's points. Note, however, that the camera can have different fill modes.
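For intuition, here is a minimal Swift sketch of that mapping, assuming the simplest case: portrait orientation, stretch fill mode, and a mirrored front-camera preview (the question's setup). The helper name is hypothetical; the full conversion below also handles the aspect-ratio fill modes.
// Minimal sketch: map a tap in view coordinates to the camera's
// [0;1] point-of-interest space. Assumes portrait orientation,
// stretch fill mode, and a mirrored front-camera preview.
func cameraPointOfInterest(forTap tap: CGPoint, inBounds bounds: CGRect, mirrored: Bool) -> CGPoint {
    var tap = tap
    if mirrored {
        // Undo the horizontal mirroring of the front-camera preview.
        tap.x = bounds.width - tap.x
    }
    // The point-of-interest space is rotated 90° relative to a portrait
    // view: the view's y axis maps to the camera's x axis, and the
    // view's x axis maps (flipped) to the camera's y axis.
    return CGPoint(x: tap.y / bounds.height,
                   y: 1.0 - tap.x / bounds.width)
}
For example, on a 375×667 portrait view, a tap at (300, 100) mirrors to x = 75 and maps to roughly (0.15, 0.8).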
Here is how I do the full conversion, handling the different fill modes (sorry for the Objective-C code, but it's mostly simple math):
CGPoint tapPoint = [gestureRecognizer locationInView:cameraView];
CGPoint pointOfInterest = [HBFocusUtils convertToPointOfInterestFromViewCoordinates:tapPoint inFrame:cameraView.bounds withOrientation:self.currentOrientation andFillMode:cameraView.fillMode mirrored:currentVideoCamera == frontVideoCamera];
[HBFocusUtils setFocus:pointOfInterest forDevice:currentVideoCamera.inputCamera];
And the implementation of the methods:
@implementation HBFocusUtils

+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored
{
    CGSize frameSize = frame.size;
    CGPoint pointOfInterest = CGPointMake(0.5, 0.5);

    if (mirrored)
    {
        // Undo the horizontal mirroring of a front-camera preview.
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if (fillMode == kGPUImageFillModeStretch) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGSize apertureSize = CGSizeMake(CGRectGetHeight(frame), CGRectGetWidth(frame));
        if (!CGSizeEqualToSize(apertureSize, CGSizeZero)) {
            CGPoint point = viewCoordinates;
            CGFloat apertureRatio = apertureSize.height / apertureSize.width;
            CGFloat viewRatio = frameSize.width / frameSize.height;
            CGFloat xc = .5f;
            CGFloat yc = .5f;

            if (fillMode == kGPUImageFillModePreserveAspectRatio) {
                // Letterboxed preview: ignore taps that land on the black bars.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = frameSize.height;
                    CGFloat x2 = frameSize.height * apertureRatio;
                    CGFloat x1 = frameSize.width;
                    CGFloat blackBar = (x1 - x2) / 2;
                    if (point.x >= blackBar && point.x <= blackBar + x2) {
                        xc = point.y / y2;
                        yc = 1.f - ((point.x - blackBar) / x2);
                    }
                } else {
                    CGFloat y2 = frameSize.width / apertureRatio;
                    CGFloat y1 = frameSize.height;
                    CGFloat x2 = frameSize.width;
                    CGFloat blackBar = (y1 - y2) / 2;
                    if (point.y >= blackBar && point.y <= blackBar + y2) {
                        xc = ((point.y - blackBar) / y2);
                        yc = 1.f - (point.x / x2);
                    }
                }
            } else if (fillMode == kGPUImageFillModePreserveAspectRatioAndFill) {
                // Cropped preview: account for the part of the frame that is cut off.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                    xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                    yc = (frameSize.width - point.x) / frameSize.width;
                } else {
                    CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                    yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                    xc = point.y / frameSize.height;
                }
            }

            pointOfInterest = CGPointMake(xc, yc);
        }
    }

    return pointOfInterest;
}

+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device
{
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setFocusPointOfInterest:focus];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            [device unlockForConfiguration];
        }
    }

    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setExposurePointOfInterest:focus];
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            [device unlockForConfiguration];
        }
    }
}

@end
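Note that in the listing above the orientation parameter is accepted but never actually used; the math assumes a portrait preview, so if your interface rotates you would need to extend the method yourself.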
Swift
1) First create the HBFocusUtils class in Objective-C.
2) Add #import "HBFocusUtils.h" to your bridging header.
// Focus on tap
// ============
let tap = UITapGestureRecognizer(target: self, action: Selector("tapOnFocus:"))
tap.delegate = self
filterView.addGestureRecognizer(tap)

func tapOnFocus(gestureRecognizer: UITapGestureRecognizer? = nil)
{
    let tapPoint = (gestureRecognizer?.locationInView(filterView))! as CGPoint
    let pointOfInterest = HBFocusUtils.convertToPointOfInterestFromViewCoordinates(tapPoint,
        inFrame: filterView.bounds,
        withOrientation: .Portrait,
        andFillMode: GPUImageFillModeType.init(1), // 1 == kGPUImageFillModePreserveAspectRatio
        mirrored: true)
    HBFocusUtils.setFocus(pointOfInterest, forDevice: stillCamera.inputCamera)
}
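One caveat: the fill mode you pass here should match the fill mode actually set on your GPUImageView. The question's setup used GPUImageFillModeType.init(2) (preserve aspect ratio and fill), while this call passes .init(1) (preserve aspect ratio), so adjust the argument to whatever your view uses.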
HBFocusUtils.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageView.h"
@interface HBFocusUtils : NSObject
+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored;
+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device;
@end
HBFocusUtils.m
#import "HBFocusUtils.h"
@implementation HBFocusUtils

+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored
{
    CGSize frameSize = frame.size;
    CGPoint pointOfInterest = CGPointMake(0.5, 0.5);

    if (mirrored)
    {
        // Undo the horizontal mirroring of a front-camera preview.
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if (fillMode == kGPUImageFillModeStretch) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGSize apertureSize = CGSizeMake(CGRectGetHeight(frame), CGRectGetWidth(frame));
        if (!CGSizeEqualToSize(apertureSize, CGSizeZero)) {
            CGPoint point = viewCoordinates;
            CGFloat apertureRatio = apertureSize.height / apertureSize.width;
            CGFloat viewRatio = frameSize.width / frameSize.height;
            CGFloat xc = .5f;
            CGFloat yc = .5f;

            if (fillMode == kGPUImageFillModePreserveAspectRatio) {
                // Letterboxed preview: ignore taps that land on the black bars.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = frameSize.height;
                    CGFloat x2 = frameSize.height * apertureRatio;
                    CGFloat x1 = frameSize.width;
                    CGFloat blackBar = (x1 - x2) / 2;
                    if (point.x >= blackBar && point.x <= blackBar + x2) {
                        xc = point.y / y2;
                        yc = 1.f - ((point.x - blackBar) / x2);
                    }
                } else {
                    CGFloat y2 = frameSize.width / apertureRatio;
                    CGFloat y1 = frameSize.height;
                    CGFloat x2 = frameSize.width;
                    CGFloat blackBar = (y1 - y2) / 2;
                    if (point.y >= blackBar && point.y <= blackBar + y2) {
                        xc = ((point.y - blackBar) / y2);
                        yc = 1.f - (point.x / x2);
                    }
                }
            } else if (fillMode == kGPUImageFillModePreserveAspectRatioAndFill) {
                // Cropped preview: account for the part of the frame that is cut off.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                    xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                    yc = (frameSize.width - point.x) / frameSize.width;
                } else {
                    CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                    yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                    xc = point.y / frameSize.height;
                }
            }

            pointOfInterest = CGPointMake(xc, yc);
        }
    }

    return pointOfInterest;
}

+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device
{
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setFocusPointOfInterest:focus];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            [device unlockForConfiguration];
        }
    }

    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setExposurePointOfInterest:focus];
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            [device unlockForConfiguration];
        }
    }
}

@end