Custom camera view Swift iOS 8 iPhone Xcode 6.1

I want to use the camera in a view in my iPhone app. I don't want the typical full-screen camera view, but my own.

For example, I want a 200x200 square in the middle of the screen showing a camera preview, and below that square a button that takes the photo. How can I do that? I'm a Swift beginner.

You need to use the AVFoundation framework to create your own AVCaptureSession inside a view you create in the storyboard. Here is a good tutorial that shows you how to find the camera and create a capture session:

http://jamesonquave.com/blog/taking-control-of-the-iphone-camera-in-ios-8-with-swift-part-1/

That tutorial uses the entire view as the capture view, so if you model your code after his, that is how big the camera preview will be. To make a 200x200 square in the middle of the screen, you have to draw one on your view controller in the storyboard, link it to a variable in the Swift file where all of your code goes, and then change the part near the bottom from

previewLayer?.frame = self.view.layer.frame

to something like

previewLayer?.frame = your200by200View.layer.frame
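
For reference, here is a minimal sketch of that approach, assuming a 200x200 UIView drawn in the storyboard and connected to an outlet named squareCameraView (the class name and outlet name are illustrative, not from the tutorial):

import UIKit
import AVFoundation

class SquareCameraViewController: UIViewController {

    // Hypothetical 200x200 view drawn in the storyboard and linked here
    @IBOutlet var squareCameraView: UIView!

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        // Use the default video camera (usually the back camera) as the input
        if let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
            input = try? AVCaptureDeviceInput(device: device)
            where captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        // Put the preview layer inside the small square view instead of the whole screen
        let preview = AVCaptureVideoPreviewLayer(session: captureSession)
        preview.videoGravity = AVLayerVideoGravityResizeAspectFill
        preview.frame = squareCameraView.layer.bounds
        squareCameraView.layer.addSublayer(preview)
        previewLayer = preview

        captureSession.startRunning()
    }
}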

I hope this helps. If not, I can try to help more, or someone can correct me.

Good luck!

Here is an example of how to add a cameraOverlayView to create a 200x200 square viewing window in the center of the screen:

@IBAction func takePhoto(sender: AnyObject) {

    if !UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.Camera){
        return
    }

    let imagePicker = UIImagePickerController()
    imagePicker.delegate = self
    imagePicker.sourceType = UIImagePickerControllerSourceType.Camera

    //Create camera overlay
    let pickerFrame = CGRectMake(0, UIApplication.sharedApplication().statusBarFrame.size.height, imagePicker.view.bounds.width, imagePicker.view.bounds.height - imagePicker.navigationBar.bounds.size.height - imagePicker.toolbar.bounds.size.height)
    let squareFrame = CGRectMake(pickerFrame.width/2 - 200/2, pickerFrame.height/2 - 200/2, 200, 200)
    // Draw the overlay image: everything black except a transparent 200x200 square
    UIGraphicsBeginImageContext(pickerFrame.size)

    let context = UIGraphicsGetCurrentContext()
    CGContextSaveGState(context)
    // Add the full picker rect plus the square, then clip with the even-odd rule
    // so the square is excluded from the fill
    CGContextAddRect(context, CGContextGetClipBoundingBox(context))
    CGContextMoveToPoint(context, squareFrame.origin.x, squareFrame.origin.y)
    CGContextAddLineToPoint(context, squareFrame.origin.x + squareFrame.width, squareFrame.origin.y)
    CGContextAddLineToPoint(context, squareFrame.origin.x + squareFrame.width, squareFrame.origin.y + squareFrame.size.height)
    CGContextAddLineToPoint(context, squareFrame.origin.x, squareFrame.origin.y + squareFrame.size.height)
    CGContextAddLineToPoint(context, squareFrame.origin.x, squareFrame.origin.y)
    CGContextEOClip(context)
    // Fill the clipped area with black, leaving the square see-through
    CGContextSetRGBFillColor(context, 0, 0, 0, 1)
    CGContextFillRect(context, pickerFrame)
    CGContextRestoreGState(context)

    let overlayImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    let overlayView = UIImageView(frame: pickerFrame)
    overlayView.image = overlayImage
    imagePicker.cameraOverlayView = overlayView
    self.presentViewController(imagePicker, animated: true, completion: nil)

}
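
Because the code above sets imagePicker.delegate = self, the presenting view controller also has to adopt UIImagePickerControllerDelegate and UINavigationControllerDelegate to actually receive the photo. A hedged sketch of those callbacks (the class name PhotoViewController is illustrative; the delegate method signatures are the standard UIKit ones for this Swift 2-era code):

import UIKit

class PhotoViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
        // The full-resolution photo the camera captured
        if let photo = info[UIImagePickerControllerOriginalImage] as? UIImage {
            // Crop or display the photo here, e.g. the 200x200 region shown through the overlay
            print("Captured image of size \(photo.size)")
        }
        picker.dismissViewControllerAnimated(true, completion: nil)
    }

    func imagePickerControllerDidCancel(picker: UIImagePickerController) {
        picker.dismissViewControllerAnimated(true, completion: nil)
    }
}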

Another code block, showing how to do manual focus on the iPhone:
import UIKit

class ViewController: UIViewController {

    @IBOutlet var cameraView: CameraView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // lensPosition runs from 0.0 (closest focus) to 1.0 (furthest),
    // so configure the slider's range accordingly in the storyboard
    @IBAction func sliderChanged(sender: UISlider) {
        cameraView.setFocusWithLensPosition(sender.value)
    }
}


import UIKit
import AVFoundation

class CameraView: UIView {

    // AVFoundation properties
    let captureSession = AVCaptureSession()
    var captureDevice: AVCaptureDevice!
    var captureDeviceFormat: AVCaptureDeviceFormat?
    let stillImageOutput = AVCaptureStillImageOutput()
    var cameraLayer: AVCaptureVideoPreviewLayer?

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        initCamera()
    }

    func initCamera() {
        captureSession.beginConfiguration()

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        // get the back camera
        if let device = cameraDeviceForPosition(AVCaptureDevicePosition.Back) {

            captureDevice = device
            captureDeviceFormat = device.activeFormat

            // lock the device and start with a locked focus mode
            do {
                try captureDevice.lockForConfiguration()
                captureDevice.focusMode = AVCaptureFocusMode.Locked
                captureDevice.unlockForConfiguration()
            } catch let error as NSError {
                print("Could not lock the camera for configuration: \(error)")
            }

            // add the camera input
            do {
                let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
                if captureSession.canAddInput(deviceInput) {
                    captureSession.addInput(deviceInput)
                }
            } catch let error as NSError {
                print("Could not create the camera input: \(error)")
            }

            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }

            // use the high resolution photo preset
            captureSession.sessionPreset = AVCaptureSessionPresetPhoto

            // setup camera preview
            cameraLayer = AVCaptureVideoPreviewLayer(session: captureSession)

            if let player = cameraLayer {
                player.videoGravity = AVLayerVideoGravityResizeAspectFill
                self.layer.addSublayer(player)
                player.frame = self.layer.bounds
                player.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
            }
        }

        // commit the configuration once, then start capturing
        captureSession.commitConfiguration()

        if captureDevice != nil {
            captureSession.startRunning()
        }
    }

    func setFocusWithLensPosition(pos: Float) {
        do {
            try captureDevice.lockForConfiguration()
            // pos must be between 0.0 (closest) and 1.0 (furthest)
            captureDevice.setFocusModeLockedWithLensPosition(pos, completionHandler: nil)
            captureDevice.unlockForConfiguration()
        } catch let error as NSError {
            print("Could not lock the camera for configuration: \(error)")
        }
    }

    // return the camera device for a position
    func cameraDeviceForPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
        for device in AVCaptureDevice.devices() {
            if let captureDevice = device as? AVCaptureDevice where captureDevice.position == position {
                return captureDevice
            }
        }

        return nil
    }

}
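
The CameraView above configures stillImageOutput but never captures from it. A hedged sketch of a capture helper that could be added to the class (captureStillImage(_:) is a hypothetical name; the AVCaptureStillImageOutput calls are the standard ones for this iOS 8/9-era API):

import UIKit
import AVFoundation

extension CameraView {

    // Hypothetical helper: grab a JPEG still from the running session
    func captureStillImage(completion: (UIImage?) -> Void) {
        guard let connection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) else {
            completion(nil)
            return
        }

        stillImageOutput.captureStillImageAsynchronouslyFromConnection(connection) { sampleBuffer, error in
            if sampleBuffer != nil && error == nil {
                let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                completion(UIImage(data: data))
            } else {
                completion(nil)
            }
        }
    }
}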