Image/Text 在视频中叠加 swift

Image/Text overlay in video swift

我正在使用 Swift,想通过图像叠加在视频中实现水印效果,为此我使用了 AVFoundation,但不知何故没有成功。

以下是我叠加 image/text 的代码:

    // Load the bundled source movie (legacy Swift 1.x / iOS 8 API style,
    // kept as in the original question).
    let path = NSBundle.mainBundle().pathForResource("sample_movie", ofType:"mp4")
    let fileURL = NSURL(fileURLWithPath: path!)

    let composition = AVMutableComposition()
    var vidAsset = AVURLAsset(URL: fileURL, options: nil)

    // Get the first video track of the source asset.
    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0] as! AVAssetTrack
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    var error: NSError?
    let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero, error: &error)

    // Preserve the source orientation (rotation) metadata.
    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    // Watermark Effect
    let size = videoTrack.naturalSize

    // Semi-transparent logo image in the bottom-left corner.
    let imglogo = UIImage(named: "image.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.CGImage
    imglayer.frame = CGRectMake(5, 5, 100, 100)
    imglayer.opacity = 0.6

    // Text banner layer.
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.whiteColor().CGColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRectMake(0, 50, size.width, size.height / 6)

    // The layer the video frames are rendered into.
    let videolayer = CALayer()
    videolayer.frame = CGRectMake(0, 0, size.width, size.height)

    // Container: video at the bottom, then image, then text on top.
    let parentlayer = CALayer()
    parentlayer.frame = CGRectMake(0, 0, size.width, size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)

    // Instruction covering the whole composition; required for the
    // video composition to be applied at all.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject]
    layercomposition.instructions = NSArray(object: instruction) as [AnyObject]

    //  Create the destination path in the Documents directory.
    let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    let docsDir: AnyObject = dirPaths[0]
    let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

    // Export the composition.
    let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
    // FIX: this line was missing in the original question. Without
    // attaching the video composition, the export session renders the
    // plain composition and the CALayer overlays are ignored entirely.
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = movieDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case  AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("Movie complete")

            // Play back on the main queue (UIKit work).
            NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
                self.playVideo(movieDestinationUrl!)
            })
        }
    })

通过这段代码,我没有实现叠加效果……我不知道自己做错了什么……

问题:

对我来说(我在你的代码中看到的),你没有将 parentlayer 添加到屏幕上。

您创建了一个 CALayer(),把 videolayer、imglayer、titleLayer 都添加到了这个新图层中,但您没有把这个最外层的图层添加到屏幕上。

yourView.layer.addSublayer(parentlayer)

希望对您有所帮助

@El Captain 提供的代码可以工作。只缺:

    assetExport.videoComposition = layercomposition

您可以在实例化 AVAssetExportSession 之后立即添加这一行。

注意:原来提供的代码只能导出视频轨道,不能导出音频轨道。如果您需要音轨,可以在配置好 compositionvideoTrack 之后添加类似下面的内容:
// Add an audio track to the same mutable composition so the export keeps
// the sound (the originally posted code produced a video-only export).
let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    // Copy every source audio track into the composition, starting at t = 0.
    // NOTE(review): `audioTracks` is assumed to be
    // asset.tracksWithMediaType(AVMediaTypeAudio) — confirm against the caller.
    for audioTrack in audioTracks {
        try! compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: kCMTimeZero)
    }

@Rey Hernandez 这对我帮助很大!如果有人想进一步说明如何将音频资产添加到视频,这里是组合它们的代码

    // First video track of the source asset.
    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0]
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    // First audio track of the source asset.
    let atrack =  vidAsset.tracksWithMediaType(AVMediaTypeAudio)
    let audioTrack:AVAssetTrack = atrack[0]
    let audio_duration = audioTrack.timeRange.duration
    // Use the full asset duration for both ranges so audio and video stay in sync.
    let audio_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    do {
        // Video track of the mutable composition.
        let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)

        // Preserve the source orientation (rotation) metadata.
        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

        // Audio track of the mutable composition.
        let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        // FIX: use plain `try` so a failure reaches the catch below instead
        // of crashing via `try!`.
        try compositionAudioTrack.insertTimeRange(audio_timerange, ofTrack: audioTrack, atTime: kCMTimeZero)

        // FIX: the original re-assigned compositionvideoTrack.preferredTransform
        // from the AUDIO track here, clobbering the correct video transform set
        // above and breaking video orientation. That line has been removed.
    } catch {
        print(error)
    }

作为补充,这里有一个函数,可以根据传入的 UITextView(复制其旋转、缩放和字体)创建对应的 CATextLayer。只需将生成的图层添加到提供给 AVVideoCompositionCoreAnimationTool 的容器图层即可:

/// Builds a `CATextLayer` mirroring `textView`'s text, font, color,
/// alignment, scale and rotation, positioned inside a render area of
/// `totalSize` (CALayer coordinates, whose Y axis is flipped vs UIKit).
/// - Parameters:
///   - totalSize: pixel size of the video render area the layer lives in.
///   - textView: the on-screen text view to replicate.
/// - Returns: a configured, vertically-centering text layer.
private static func createTextLayer(totalSize: CGSize,
                                    textView: UITextView) -> CATextLayer {
    let textLayer: CACenteredTextLayer = CACenteredTextLayer()
    // FIX: `backgroundColor` is a CGColor property — the original assigned a
    // UIColor (`UIColor.clear`), which does not type-check.
    textLayer.backgroundColor = UIColor.clear.cgColor
    textLayer.foregroundColor = textView.textColor?.cgColor
    textLayer.masksToBounds = false
    textLayer.isWrapped = true

    let scale: CGFloat = UIScreen.main.scale

    // Upscale the font so the text stays sharp at video/pixel resolution.
    if let font: UIFont = textView.font {
        let upscaledFont: UIFont = font.withSize(font.pointSize * scale)
        let attributedString = NSAttributedString(
            string: textView.text,
            attributes: [NSAttributedString.Key.font: upscaledFont,
                         NSAttributedString.Key.foregroundColor: textView.textColor ?? UIColor.white])
        textLayer.string = attributedString
    }

    // Copy the text alignment.
    let alignment: CATextLayerAlignmentMode
    switch textView.textAlignment {
    case NSTextAlignment.left:
        alignment = CATextLayerAlignmentMode.left
    case NSTextAlignment.center:
        alignment = CATextLayerAlignmentMode.center
    default:
        alignment = CATextLayerAlignmentMode.right
    }
    textLayer.alignmentMode = alignment

    let originalFrame: CGRect = textView.frame

    // Scale the frame up to render resolution.
    let targetSize: CGSize = CGSize(width: originalFrame.width * scale,
                                    height: originalFrame.height * scale)

    // CALayer positioning is inverted on the Y axis, so flip the origin.
    let origin: CGPoint = CGPoint(x: originalFrame.origin.x * scale,
                                  y: (totalSize.height - (originalFrame.origin.y * scale)) - targetSize.height)

    textLayer.frame = CGRect(x: origin.x,
                             y: origin.y,
                             width: targetSize.width,
                             height: targetSize.height)

    // Rotate/scale around the layer's center.
    textLayer.anchorPoint = CGPoint(x: 0.5,
                                    y: 0.5)

    // FIX: use a z-scale of 1 — the original passed 0, which makes the
    // transform degenerate along the z axis.
    var newTransform: CATransform3D = CATransform3DMakeScale(textView.transform.xScale,
                                                             textView.transform.yScale,
                                                             1)

    // Apply the text view's rotation (sign-inverted for the flipped axis).
    newTransform = CATransform3DRotate(newTransform,
                                       textView.transform.radiansFor3DTransform,
                                       0,
                                       0,
                                       1)
    textLayer.transform = newTransform

    return textLayer
}

将它与 CATextLayer 的这个子类结合起来使文本垂直居中:

final class CACenteredTextLayer: CATextLayer {
    /// Draws the attributed string vertically centered in the layer's
    /// bounds instead of CATextLayer's default top alignment.
    override func draw(in ctx: CGContext) {
        guard let text = string as? NSAttributedString else { return }

        // Measure the text at the layer's width with unbounded height.
        let constraint = CGSize(width: bounds.width,
                                height: CGFloat.greatestFiniteMagnitude)
        let measured: CGRect = text.boundingRect(
            with: constraint,
            options: NSStringDrawingOptions.usesLineFragmentOrigin,
            context: nil)

        // Shift the drawing origin down by half the leftover vertical space.
        let verticalOffset: CGFloat = (bounds.size.height - measured.size.height) / 2

        ctx.saveGState()
        defer { ctx.restoreGState() }
        ctx.translateBy(x: 0.0, y: verticalOffset)
        super.draw(in: ctx)
    }
}

private extension CGAffineTransform {
    /// Horizontal scale factor encoded in the transform.
    var xScale: CGFloat {
        return sqrt((a*a) + (c*c))
    }

    /// Vertical scale factor encoded in the transform.
    var yScale: CGFloat {
        return sqrt((b*b) + (d*d))
    }

    /// Rotation angle in radians, sign-inverted for use with CALayer's
    /// flipped coordinate system.
    /// FIX: the original round-tripped radians → degrees → radians, which is
    /// algebraically just a negation; return it directly.
    var radiansFor3DTransform: CGFloat {
        return -atan2(b, a)
    }
}

这是 Swift 4 中有效的更新:

import UIKit
import AVFoundation
import AVKit
import Photos

class ViewController: UIViewController {

    /// URL of the last successfully exported movie; set by `saveVideoTapper`.
    var myurl: URL?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    /// Exports the bundled sample video with an image + text watermark,
    /// saves the result to the photo library, then plays it back.
    @IBAction func saveVideoTapper(_ sender: Any) {

        // FIX: guard instead of force-unwrapping the bundle path.
        guard let path = Bundle.main.path(forResource: "sample_video", ofType: "mp4") else {
            print("sample_video.mp4 not found in bundle")
            return
        }
        let fileURL = URL(fileURLWithPath: path)

        let composition = AVMutableComposition()
        let vidAsset = AVURLAsset(url: fileURL, options: nil)

        // Source video track.
        let vtrack = vidAsset.tracks(withMediaType: AVMediaType.video)
        guard let videoTrack = vtrack.first else {
            print("no video track in asset")
            return
        }
        let vid_timerange = CMTimeRangeMake(start: CMTime.zero, duration: vidAsset.duration)

        // Reserve an empty 10-second range before inserting the track,
        // as in the original answer.
        let tr: CMTimeRange = CMTimeRange(start: CMTime.zero, duration: CMTime(seconds: 10.0, preferredTimescale: 600))
        composition.insertEmptyTimeRange(tr)

        let trackID: CMPersistentTrackID = CMPersistentTrackID(kCMPersistentTrackID_Invalid)

        if let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: trackID) {
            do {
                try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: CMTime.zero)
            } catch {
                // FIX: surface the actual error (the original printed the
                // literal string "error") and abort — exporting without the
                // video track would produce garbage.
                print("insertTimeRange failed: \(error)")
                return
            }
            // Preserve the source orientation (rotation) metadata.
            compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
        } else {
            print("unable to add video track")
            return
        }

        // Watermark Effect
        let size = videoTrack.naturalSize

        // Semi-transparent logo in the bottom-left corner.
        let imglogo = UIImage(named: "image.png")
        let imglayer = CALayer()
        imglayer.contents = imglogo?.cgImage
        imglayer.frame = CGRect(x: 5, y: 5, width: 100, height: 100)
        imglayer.opacity = 0.6

        // Text banner layer.
        let titleLayer = CATextLayer()
        titleLayer.backgroundColor = UIColor.white.cgColor
        titleLayer.string = "Dummy text"
        titleLayer.font = UIFont(name: "Helvetica", size: 28)
        titleLayer.shadowOpacity = 0.5
        titleLayer.alignmentMode = CATextLayerAlignmentMode.center
        titleLayer.frame = CGRect(x: 0, y: 50, width: size.width, height: size.height / 6)

        // The layer the video frames are rendered into.
        let videolayer = CALayer()
        videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

        // Container: video at the bottom, then image, then text on top.
        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
        parentlayer.addSublayer(videolayer)
        parentlayer.addSublayer(imglayer)
        parentlayer.addSublayer(titleLayer)

        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        layercomposition.renderSize = size
        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

        // Instruction covering the whole composition; required for the
        // video composition to take effect.
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: composition.duration)
        let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
        let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
        // Use native Swift arrays instead of bridging through NSArray.
        instruction.layerInstructions = [layerinstruction]
        layercomposition.instructions = [instruction]

        // Destination file in the Documents directory.
        let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let docsDir = dirPaths[0] as NSString
        let movieFilePath = docsDir.appendingPathComponent("result.mov")
        let movieDestinationUrl = URL(fileURLWithPath: movieFilePath)

        // FIX: guard the export session instead of force-unwrapping it later.
        guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            print("unable to create export session")
            return
        }
        assetExport.outputFileType = AVFileType.mov
        // Attaching the video composition is what makes the overlay render.
        assetExport.videoComposition = layercomposition

        // Remove any stale output from a previous run.
        FileManager.default.removeItemIfExisted(movieDestinationUrl)

        assetExport.outputURL = movieDestinationUrl
        assetExport.exportAsynchronously(completionHandler: {
            switch assetExport.status {
            case AVAssetExportSession.Status.failed:
                print("failed")
                print(assetExport.error ?? "unknown error")
            case AVAssetExportSession.Status.cancelled:
                print("cancelled")
                print(assetExport.error ?? "unknown error")
            default:
                print("Movie complete")

                self.myurl = movieDestinationUrl

                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl)
                }) { saved, error in
                    if saved {
                        print("Saved")
                    } else {
                        // FIX: don't silently ignore a failed save.
                        print("save failed: \(error?.localizedDescription ?? "unknown error")")
                    }
                }

                // FIX: the export completion handler runs on an arbitrary
                // queue; UIKit/CALayer work must happen on the main queue.
                DispatchQueue.main.async {
                    self.playVideo()
                }
            }
        })
    }

    /// Plays the last exported movie full-screen via an AVPlayerLayer.
    func playVideo() {
        // FIX: guard instead of force-unwrapping `myurl`.
        guard let url = myurl else {
            print("no exported movie to play")
            return
        }
        let player = AVPlayer(url: url)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.view.bounds
        self.view.layer.addSublayer(playerLayer)
        player.play()
        print("playing...")
    }

}


extension FileManager {
    /// Deletes the file at `url` if one exists; a missing file is a no-op.
    /// Deletion failures are logged rather than thrown.
    func removeItemIfExisted(_ url:URL) -> Void {
        // Nothing to do when the file is absent.
        guard FileManager.default.fileExists(atPath: url.path) else { return }
        do {
            try FileManager.default.removeItem(atPath: url.path)
        } catch {
            print("Failed to delete file")
        }
    }
}