在 swift 5 中从 UIImage 中删除白色背景

Delete white background from UIImage in swift 5

我正在尝试保存从相机拍摄的照片的 2 个副本,一个是照片本身,另一个是照片 + 表情符号隐藏出现在那里的人的脸。

现在我只有原图+一张白底和表情符号的图片,但没有后面的照片。

这是我用来将表情符号放在脸上的代码:

/// Runs Vision face-landmark detection on one camera frame and forwards the
/// observations to the drawing code on the main thread.
/// - Parameter image: the pixel buffer delivered by the video data output.
private func detectFace(in image: CVPixelBuffer) {
        let request = VNDetectFaceLandmarksRequest { request, _ in
            // UI work (adding/removing subviews) must happen on the main queue.
            DispatchQueue.main.async {
                guard let faces = request.results as? [VNFaceObservation] else {
                    self.clearDrawings()
                    return
                }
                self.handleFaceDetectionResults(faces)
            }
        }
        // .leftMirrored matches the sensor orientation of the portrait camera feed.
        let handler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
        try? handler.perform([request])
    }
    
    /// Places one emoji image view over each detected face rectangle.
    /// - Parameter observedFaces: face observations in Vision's normalized coordinates.
    private func handleFaceDetectionResults(_ observedFaces: [VNFaceObservation]) {

        // Remove the overlays from the previous frame before drawing new ones.
        self.clearDrawings()

        // Fixed: the original used flatMap purely for its side effects and always
        // returned empty arrays; a plain loop states the intent directly.
        for observedFace in observedFaces {
            // Convert the normalized bounding box into preview-layer coordinates.
            let faceBoundingBoxOnScreen = self.previewLayer.layerRectConverted(fromMetadataOutputRect: observedFace.boundingBox)

            // Bundled asset; skip the face rather than crash if it is missing.
            guard let emoji = UIImage(named: "happy_emoji.png") else { continue }
            let imageView = UIImageView(image: emoji)
            imageView.frame = faceBoundingBoxOnScreen
            showCamera.addSubview(imageView)
        }

        // No CAShapeLayers are produced any more; keep the property consistent.
        self.drawings = []
    }
    
    /// Removes every emoji overlay previously added to the camera view.
    private func clearDrawings() {
        // Fixed: "[=11=]" was a copy/paste artifact for the shorthand closure
        // argument "$0"; the original did not compile.
        showCamera.subviews.forEach { $0.removeFromSuperview() }
    }

这是我用来保存图像的代码:

/// Captures a photo and saves a composite image (view snapshot stacked above
/// the last received photo) to the photo library.
@IBAction func onPhotoTaken(_ sender: Any) {

        // Request a still image; the result arrives asynchronously in
        // photoOutput(_:didFinishProcessingPhoto:error:).
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
        self.photoOutput.capturePhoto(with: settings, delegate: self)

        // NOTE(review): this snapshot is rendered *before* the capture delegate
        // has delivered the photo to showCamera, so it can only show the emoji
        // overlays on an empty background — the white/black image the post
        // describes. The snapshot should be taken from the delegate callback.
        UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, false, 0.0)
        if let context = UIGraphicsGetCurrentContext() {
            showCamera.layer.render(in: context)
        }
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // Fixed: guard replaces the original force-unwraps of outputImage.
        guard let topImage = outputImage else { return }
        let bottomImage = imageReciber

        // Stack the snapshot on top of the previously captured photo.
        let size = CGSize(width: topImage.size.width,
                          height: topImage.size.height + bottomImage.size.height)
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)

        topImage.draw(in: CGRect(x: 0, y: 0, width: size.width, height: topImage.size.height))
        bottomImage.draw(in: CGRect(x: 0, y: topImage.size.height, width: size.width, height: bottomImage.size.height))

        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // nil completion parameters mean save failures are silently ignored.
        if let newImage = newImage {
            UIImageWriteToSavedPhotosAlbum(newImage, nil, nil, nil)
        }
    }
    
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        
        guard let imageData = photo.fileDataRepresentation()
            else { return }
        
        let image = UIImage(data: imageData)
        
        showCamera.image = image
        imageReciber = image!
        UIImageWriteToSavedPhotosAlbum(showCamera.image!, nil, nil, nil)
    }

我尝试了不同的解决方案来删除白色背景（或黑色背景，取决于我在“渲染”部分设置的是 false 还是 true），但我总是得到白色背景的表情符号图像。

请帮助我获取没有白色/黑色背景、并且覆盖在拍摄照片之上的表情符号图像。

我的完整代码是:

import UIKit
import AVFoundation
import Vision


/// Camera screen: previews the back-camera feed, overlays an emoji on every
/// face Vision detects, and saves captured photos to the photo library.
class cameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {

    private let captureSession = AVCaptureSession()
    private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
    private let videoDataOutput = AVCaptureVideoDataOutput()
    // Kept for API compatibility; no shape layers are created any more.
    private var drawings: [CAShapeLayer] = []
    private let photoOutput = AVCapturePhotoOutput()

    // Last photo delivered by the capture delegate.
    var imageReciber = UIImage()

    @IBOutlet weak var showCamera: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.addCameraInput()
        self.showCameraFeed()
        self.getCameraFrames()
        self.captureSession.startRunning()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the preview layer in sync with the image view after layout.
        self.previewLayer.frame = self.showCamera.frame
    }

    /// Video-data delegate: runs face detection on every frame.
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection) {

        guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            debugPrint("unable to get image from sample buffer")
            return
        }
        self.detectFace(in: frame)
    }

    /// Attaches the back camera and the photo output to the session.
    private func addCameraInput() {
        guard let device = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
            mediaType: .video,
            position: .back).devices.first else {
                fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
        }
        let cameraInput = try! AVCaptureDeviceInput(device: device)
        self.captureSession.addInput(cameraInput)
        captureSession.addOutput(photoOutput)
    }

    /// Adds the live preview layer on top of the image view.
    private func showCameraFeed() {
        self.previewLayer.videoGravity = .resizeAspectFill
        self.showCamera.layer.addSublayer(self.previewLayer)
        self.previewLayer.frame = self.showCamera.frame
    }

    /// Configures the video data output that feeds face detection.
    private func getCameraFrames() {
        self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
        self.captureSession.addOutput(self.videoDataOutput)
        guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
            connection.isVideoOrientationSupported else { return }
        connection.videoOrientation = .portrait
    }

    /// Runs Vision face-landmark detection on one frame and hands the results
    /// to the drawing code on the main thread.
    private func detectFace(in image: CVPixelBuffer) {
        let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
            DispatchQueue.main.async {
                if let results = request.results as? [VNFaceObservation] {
                    self.handleFaceDetectionResults(results)
                } else {
                    self.clearDrawings()
                }
            }
        })
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
        try? imageRequestHandler.perform([faceDetectionRequest])
    }

    /// Places one emoji image view over each detected face rectangle.
    private func handleFaceDetectionResults(_ observedFaces: [VNFaceObservation]) {

        self.clearDrawings()

        // Fixed: the original flatMap ran purely for side effects and always
        // yielded empty arrays; a loop states the intent directly.
        for observedFace in observedFaces {
            let faceBoundingBoxOnScreen = self.previewLayer.layerRectConverted(fromMetadataOutputRect: observedFace.boundingBox)

            // Bundled asset; skip this face rather than crash if it is missing.
            guard let emoji = UIImage(named: "happy_emoji.png") else { continue }
            let imageView = UIImageView(image: emoji)
            imageView.frame = faceBoundingBoxOnScreen
            showCamera.addSubview(imageView)
        }

        self.drawings = []
    }

    /// Removes every emoji overlay previously added to the camera view.
    private func clearDrawings() {
        // Fixed: "[=13=]" was a paste artifact for the closure shorthand "$0".
        showCamera.subviews.forEach { $0.removeFromSuperview() }
    }


    /// Captures a photo and saves a composite (view snapshot above the last
    /// received photo) to the photo library.
    @IBAction func onPhotoTaken(_ sender: Any) {

        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
        self.photoOutput.capturePhoto(with: settings, delegate: self)

        // NOTE(review): this snapshot happens before the capture delegate has
        // delivered the photo, so it only contains the overlays on an empty
        // background — the white/black image described in the question.
        UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, false, 0.0)
        if let context = UIGraphicsGetCurrentContext() {
            showCamera.layer.render(in: context)
        }
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // Fixed: guard replaces the original force-unwraps of outputImage.
        guard let topImage = outputImage else { return }
        let bottomImage = imageReciber

        let size = CGSize(width: topImage.size.width,
                          height: topImage.size.height + bottomImage.size.height)
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)

        topImage.draw(in: CGRect(x: 0, y: 0, width: size.width, height: topImage.size.height))
        bottomImage.draw(in: CGRect(x: 0, y: topImage.size.height, width: size.width, height: bottomImage.size.height))

        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        if let newImage = newImage {
            UIImageWriteToSavedPhotosAlbum(newImage, nil, nil, nil)
        }
    }

    /// Capture delegate: shows the delivered photo and saves it to the library.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

        // Fixed: decode folded into the guard so `image!` cannot crash.
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else { return }

        // Show the captured buffer image in the image view.
        showCamera.image = image
        imageReciber = image
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
    }



}

提前谢谢你。

在更冷静地研究问题之后,我发现了解决问题的方法。

问题是我在从视频流中获取到照片之前，就试图把合成图像写入文件。为了解决这个问题，我创建了一个在照片拍摄完成之后才执行的新函数，现在一切正常。

    /// Snapshots the camera view (photo plus emoji overlays) and saves the
    /// result to the photo library. Called after the photo has been delivered.
    func saveEmoji() {
        // Clear the background so the snapshot shows the photo content rather
        // than a solid fill color.
        showCamera.backgroundColor = UIColor.clear
        UIGraphicsBeginImageContextWithOptions(showCamera.frame.size, true, 0.0)
        if let context = UIGraphicsGetCurrentContext() {
            showCamera.layer.render(in: context)
        }
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // Fixed: the original kept a mutable optional and nil-ed it at the end
        // of scope (a no-op under ARC) while force-unwrapping it; a guard
        // handles the nil case without crashing.
        guard let snapshot = outputImage else { return }
        UIImageWriteToSavedPhotosAlbum(snapshot, nil, nil, nil)
    }

保存第一张图片后调用函数:

    /// Capture delegate: shows the delivered photo, saves it, then saves the
    /// emoji-overlay snapshot via saveEmoji().
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

        // Fixed: decode is folded into the guard so the later force-unwrap of
        // `image!` cannot crash when UIImage(data:) fails.
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else { return }

        showCamera.image = image
        imageReciber = image
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        // Snapshot only after the photo is in place — this ordering is the fix
        // for the white-background problem.
        saveEmoji()
    }