iOS: how to correctly handle orientation when capturing video using AVAssetWriter

I'm building a sample app that records video using AVFoundation. The point is to get finer control over how the video is recorded. In my sample project I have video capture working, but I'm struggling to handle orientation correctly.

I've searched around the web quite a bit and found others suggesting that, rather than letting the capture view or capture session rotate with the device, I should set a transform to rotate the video during playback. That works fine on iOS and Mac devices, but I'm wondering whether it will cause problems on other platforms such as Windows or Android.

Also, when I inspect the recorded video's metadata, I see that the width and height are not set correctly. That makes sense, since I'm only changing how the video is presented, not its actual resolution.
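
For reference, here is a quick way to inspect what actually lands in the file (a sketch, not part of my project code): naturalSize is the encoded pixel size, while preferredTransform is only a display-time hint.

import AVFoundation

func printVideoMetadata(at url: URL) {
    let asset = AVAsset(url: url)
    guard let track = asset.tracks(withMediaType: .video).first else { return }
    print("naturalSize: \(track.naturalSize)")                // encoded pixel dimensions
    print("preferredTransform: \(track.preferredTransform)")  // display rotation hint
}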

My question is: how do I correctly support both portrait and landscape orientations and have them reflected correctly in the video file output? I need these videos to play back correctly on all platforms, so I suspect the recorded resolution matters a great deal.

Below is the full source I've written so far. Thanks in advance for any advice.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }


            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }


            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()


        videoQueue.async {

            do {

                try self.configureCaptureSession()
                try self.configureAssetWriter()

                DispatchQueue.main.async {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure video output")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure the session
            if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
                captureSession.sessionPreset = AVCaptureSessionPreset640x480
            }

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let vidSize = videoSize
            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true
            videoInput?.transform = getVideoTransform()

            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    private func getVideoTransform() -> CGAffineTransform {

        switch UIDevice.current.orientation {

        case .portraitUpsideDown:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)

        case .landscapeLeft:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera

        case .landscapeRight:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera

        default:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {
            self.captureSession.startRunning()
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}

I figured out a solution to my problem. The solution is to export the video using AVAssetExportSession, handling the video size there and dealing with the rotation at export time rather than during recording. I still have an issue where the scale factor needs fixing to go from my original video size down to the smaller 640x480 resolution, but at least my rotation problem is solved. See the updated code below.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }

            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var exportUrl: URL {
        get {

            if let url = _exportUrl {
                return url
            }

            _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
            return _exportUrl!
        }
    }

    private var _exportUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }

            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)
    private var exportPreset = AVAssetExportPreset640x480

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        videoQueue.async {

            do {

                try self.configureCaptureSession()

                DispatchQueue.main.sync {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure capture session")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            if assetWriter != nil {
                assetWriter = nil
                videoInput = nil
                audioInput = nil
            }

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true

            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {

            do {
                try self.configureAssetWriter()
                self.captureSession.startRunning()

            } catch {
                print("Unable to start recording")
                DispatchQueue.main.async { self.showAlert("Unable to start recording") }
            }
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()

            do {
                try self.export()
            } catch {
                print("Export failed")
                DispatchQueue.main.async { self.showAlert("Unable to export video") }
            }
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: - Export

    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {

        guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
            print("Unable to get video tracks")
            return nil
        }

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize

        let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
        videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);

        let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

        var transforms = asset.preferredTransform

        var isPortrait = true

        if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0.0) ||
           (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0.0) {
            isPortrait = false
        }

        if isPortrait {
            transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians)))
            transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
        }

        layerInst.setTransform(transforms, at: kCMTimeZero)

        let inst = AVMutableVideoCompositionInstruction()
        inst.backgroundColor = UIColor.black.cgColor
        inst.layerInstructions = [layerInst]
        inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)

        videoComposition.instructions = [inst]

        return videoComposition

    }

    private func export() throws {

        let videoAsset = AVURLAsset(url: outputUrl)

        if FileManager.default.fileExists(atPath: exportUrl.path) {
            try FileManager.default.removeItem(at: exportUrl)
        }

        let videoSize = getVideoSize()

        guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
            print("Unable to create encoder")
            return
        }

        guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
            print("Unable to create video composition")
            return
        }

        encoder.videoComposition = vidcomp
        encoder.outputFileType = AVFileTypeMPEG4  // MP4 format
        encoder.outputURL = exportUrl
        encoder.shouldOptimizeForNetworkUse = true

        encoder.exportAsynchronously(completionHandler: {
            print("Video exported successfully")
        })
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}

Just swap the width and height in the writer settings, and don't forget about HEVC.

assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

https://developer.apple.com/videos/play/wwdc2017/503
https://developer.apple.com/videos/play/wwdc2017/511

- (BOOL)configureWriterInput {
    const BOOL isError = YES;

    AVFileType mov = AVFileTypeQuickTimeMovie;
    NSDictionary<NSString *, id> *settings;

    // Prefer HEVC when the output supports it (iOS 11+)
    if (@available(iOS 11.0, *)) {
        NSArray<AVVideoCodecType> *available =
            [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];

        if ([available containsObject:AVVideoCodecTypeHEVC]) {
            settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC
                                                           assetWriterOutputFileType:mov];
        } else {
            settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
        }
    } else {
        settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
    }

    if (![writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
        return isError;
    }

    // Swap width and height to fix orientation
    if (settings[AVVideoWidthKey] && settings[AVVideoHeightKey]) {
        NSMutableDictionary<NSString *, id> *rotated = [settings mutableCopy];
        rotated[AVVideoHeightKey] = settings[AVVideoWidthKey];
        rotated[AVVideoWidthKey] = settings[AVVideoHeightKey];

        if ([writer canApplyOutputSettings:rotated forMediaType:AVMediaTypeVideo]) {
            settings = rotated;
        }
    }

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:settings];

    // Alternatively, derive a transform from the connection's video orientation:
    // AVCaptureConnection *con = [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
    // writerInput.transform = [[self class] configureOrientationTransform:con.videoOrientation];

    if ([writer canAddInput:writerInput]) {
        [writer addInput:writerInput];
        return !isError;
    }

    return isError;
}
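
The same idea as a rough Swift sketch against the question's code (assumptions: the `videoOutput` from the question, iOS 11+, and that the output has already been added to the session, since the recommended settings are otherwise unavailable):

// Sketch: take the output's recommended writer settings and swap
// width/height so the file's pixel dimensions match portrait.
func portraitWriterSettings(from videoOutput: AVCaptureVideoDataOutput) -> [String: Any]? {
    guard var settings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mp4) else {
        return nil
    }

    if let width = settings[AVVideoWidthKey], let height = settings[AVVideoHeightKey] {
        settings[AVVideoWidthKey] = height
        settings[AVVideoHeightKey] = width
    }
    return settings
}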

Video orientation is handled by AVAssetWriterInput.transform, and it looks like the getVideoTransform() implementation is incorrect: CGAffineTransform expects the rotation angle in radians, so it needs to be changed to something like this:

private func getVideoTransform() -> CGAffineTransform {
    switch UIDevice.current.orientation {
    case .portrait:
        return .identity
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: .pi)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: .pi / 2)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: -.pi / 2)
    default:
        return .identity
    }
}

From Apple's Technical Q&A QA1744: https://developer.apple.com/library/archive/qa/qa1744/_index.html

If you are using an AVAssetWriter object to write a movie file, you can use the transform property of the associated AVAssetWriterInput to specify the output file orientation. This will write a display transform property into the output file as the preferred transformation of the visual media data for display purposes. See the AVAssetWriterInput.h interface file for the details.
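
A minimal sketch of what QA1744 describes, applied to the question's code (assumptions: the back camera, whose buffers arrive landscape, and the `videoOutput` name from the question; note the transform must be set before startWriting() is called):

// Sketch: set the writer input's transform from the capture connection
// instead of from UIDevice orientation.
func makeVideoInput(from videoOutput: AVCaptureVideoDataOutput,
                    settings: [String: Any]) -> AVAssetWriterInput {
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
    input.expectsMediaDataInRealTime = true

    if let connection = videoOutput.connection(with: .video) {
        switch connection.videoOrientation {
        case .portrait:           input.transform = CGAffineTransform(rotationAngle: .pi / 2)
        case .portraitUpsideDown: input.transform = CGAffineTransform(rotationAngle: -.pi / 2)
        case .landscapeLeft:      input.transform = CGAffineTransform(rotationAngle: .pi)
        case .landscapeRight:     input.transform = .identity
        @unknown default:         input.transform = .identity
        }
    }
    return input
}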

I found the simplest approach is to set the preferredTransform on the video composition track according to the required orientation.

Solution

// Device orientation
let orientation = UIDevice.current.orientation

// The composition
let audioVideoComposition = AVMutableComposition()

// The video track of the composition
let videoCompositionTrack = audioVideoComposition
    .addMutableTrack(withMediaType: .video, preferredTrackID: .init())!

// Set the preferred transform
videoCompositionTrack.preferredTransform = getVideoTransform()

Helper function and extensions

func getVideoTransform() -> CGAffineTransform {
    switch orientation {
    case .portrait:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: 0.degreesToRadians)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    default:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    }
}

extension BinaryInteger {
    var degreesToRadians: CGFloat { CGFloat(self) * .pi / 180 }
}

extension FloatingPoint {
    var degreesToRadians: Self { self * .pi / 180 }
    var radiansToDegrees: Self { self * 180 / .pi }
}