连接 Swift 中的两个音频文件并播放它们
Concatenate two audio files in Swift and play them
我尝试在 swift 中连接 .wav 音频文件。
这是我的代码:
// Concatenates two audio files into Documents/resultmerge.wav via an
// AVMutableComposition, then plays the result with AVAudioPlayer.
// NOTE(review): `paths` is not declared in this snippet — presumably the result
// of NSSearchPathForDirectoriesInDomains; confirm against the full file.
func merge(audio1: NSURL, audio2: NSURL) {
var error:NSError?
var ok1 = false
var ok2 = false
var documentsDirectory:String = paths[0] as! String
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
var composition = AVMutableComposition()
// One mutable composition track per input file; both go into the exported file.
var compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
var compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//create new file to receive data
var documentDirectoryURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first! as! NSURL
var fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("resultmerge.wav")
println(fileDestinationUrl)
var url1 = audio1
var url2 = audio2
var avAsset1 = AVURLAsset(URL: url1, options: nil)
var avAsset2 = AVURLAsset(URL: url2, options: nil)
// Use the first (and assumed only) audio track of each asset.
var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
var tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
var assetTrack1:AVAssetTrack = tracks1[0] as! AVAssetTrack
var assetTrack2:AVAssetTrack = tracks2[0] as! AVAssetTrack
var duration1: CMTime = assetTrack1.timeRange.duration
var duration2: CMTime = assetTrack2.timeRange.duration
var timeRange1 = CMTimeRangeMake(kCMTimeZero, duration1)
// NOTE(review): this range starts at duration1 *within asset2 itself*, which
// likely reads past asset2's content; the follow-up answers change it to
// CMTimeRangeMake(kCMTimeZero, duration2).
var timeRange2 = CMTimeRangeMake(duration1, duration2)
ok1 = compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero, error: nil)
if ok1 {
// The second clip is inserted at offset duration1 so playback is sequential.
ok2 = compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1, error: nil)
if ok2 {
println("success")
}
}
//AVAssetExportPresetPassthrough => concatenation
// NOTE(review): Passthrough + AVFileTypeWAVE is the combination that produces
// the -11838 "operation not supported" export error quoted below; the accepted
// fix switches to AVAssetExportPresetAppleM4A / AVFileTypeAppleM4A.
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport.outputFileType = AVFileTypeWAVE
assetExport.outputURL = fileDestinationUrl
assetExport.exportAsynchronouslyWithCompletionHandler({
switch assetExport.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
// Any other terminal status is treated as success: load and play the file.
println("complete")
var audioPlayer = AVAudioPlayer()
audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
audioPlayer.prepareToPlay()
audioPlayer.play()
}
})
}
并在终端中得到这个错误(在 iPhone 上运行):
file:///var/mobile/Containers/Data/Application/3F49D360-B363-4600-B3BB-EE0810501910/Documents/resultmerge.wav
成功
failed Error Domain=AVFoundationErrorDomain Code=-11838 "Opération interrompue" UserInfo=0x174269ac0 {NSLocalizedDescription=Opération interrompue, NSLocalizedFailureReason=L'opération n'est pas prise en charge pour ce contenu multimédia.}
但我不知道为什么会出现此错误。如果您能给我任何帮助,我将不胜感激:)
我通过改变两件事让你的代码工作:
预设名称:从 AVAssetExportPresetPassthrough 改为 AVAssetExportPresetAppleM4A
输出文件类型:从 AVFileTypeWAVE 改为 AVFileTypeAppleM4A
像这样修改您的 assetExport
声明:
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A
然后它将正确合并文件。
貌似AVAssetExportSession
只导出M4A格式,忽略其他预设。可能有一种方法可以让它导出其他格式(通过子类化?),尽管我还没有探索过这种可能性。
这是代码,放在 println(fileDestinationUrl) 之后:
// Remove any previous merge output before exporting, since the export session
// cannot overwrite an existing file.
var file = "resultmerge.m4a"
var dirs : [String] = (NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.AllDomainsMask, true) as? [String])!
var dir = dirs[0] //documents directory
var path = dir.stringByAppendingPathComponent(file)
// `path` is already the full filesystem path of the output file — the original
// rebuilt the same string character-by-character from NSURL.pathComponents,
// which was redundant and error-prone.
var checkValidation = NSFileManager.defaultManager()
if checkValidation.fileExistsAtPath(path) {
println("file exist")
// Delete the same file whose existence was just checked. (The original
// removed a different URL — fileDestinationUrl, the .wav — than the .m4a
// path it tested, so the stale output could survive.)
if checkValidation.removeItemAtPath(path, error: nil) {
println("delete")
}
} else {
println("no file")
}
有了这个和@Eric D. 回答,它正在工作。
为了帮助别人"concatenate"我也改了
var timeRange2 = CMTimeRangeMake(duration1, duration2)
进入
var timeRange2 = CMTimeRangeMake(kCMTimeZero, duration2)
(@Eric D. @Pierre Louis Bresson 代码除外)。
此外,如果你希望它被同步处理,只需添加一个 dispatch
// Synchronous export variant: a dispatch semaphore blocks the calling thread
// until the export's completion handler fires. Evaluates to true on failure,
// false on success.
// NOTE(review): this fragment ends with `return errorExport`, so it belongs
// inside a function returning Bool; `composition` and `fileDestinationUrl`
// come from the enclosing scope shown earlier in the thread.
var errorExport = true
//AVAssetExportPresetPassthrough => concatenation
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A
assetExport.outputURL = fileDestinationUrl
let sessionWaitSemaphore = dispatch_semaphore_create(0)
assetExport.exportAsynchronouslyWithCompletionHandler({
switch assetExport.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
// Export completed; clear the failure flag.
errorExport = false
}
// Wake the waiting thread regardless of outcome.
dispatch_semaphore_signal(sessionWaitSemaphore)
return Void()
})
// Block here until the completion handler has signalled.
dispatch_semaphore_wait(sessionWaitSemaphore, DISPATCH_TIME_FOREVER)
return errorExport
我需要合并多个音频文件,所以我重写了接受 NSURL 数组的函数。以为我会在这里分享。
我是 Swift 的新手,所以请留下反馈。
在信用到期时给予信用:@Eric D. @Pierre Louis Bresson
代码如下:
/// Concatenates the first audio track of each URL in `audioFileUrls`, in order,
/// into a timestamped .m4a file in the Documents directory.
/// - Parameter audioFileUrls: array of NSURLs; each asset must contain at least
///   one audio track (the first is used).
/// - Parameter callback: invoked exactly once — with the output URL on success,
///   or with an NSError (or nil URL) on failure.
func mergeAudioFiles(audioFileUrls: NSArray, callback: (url: NSURL?, error: NSError?)->()) {
    // Create the audio composition that will hold every source track.
    let composition = AVMutableComposition()
    // Append each file at the composition's current end so they play back-to-back.
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(URL: audioFileUrls[i] as! NSURL)
        let track = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            try compositionAudioTrack.insertTimeRange(timeRange, ofTrack: track, atTime: composition.duration)
        } catch let insertError as NSError {
            // The original used try! and crashed here; surface the error instead.
            callback(url: nil, error: insertError)
            return
        }
    }
    // Timestamped file name prevents clobbering earlier merges.
    let format = NSDateFormatter()
    format.dateFormat = "yyyy-MM-dd-HH-mm-ss"
    let currentFileName = "recording-\(format.stringFromDate(NSDate()))-merge.m4a"
    print(currentFileName)
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let outputUrl = documentsDirectory.URLByAppendingPathComponent(currentFileName)
    // Export as M4A (AVAssetExportSession does not support WAV output).
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        callback(url: nil, error: nil)
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = outputUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        switch assetExport.status {
        case .Failed, .Cancelled:
            // The original reported .Cancelled as success; treat it as an error.
            callback(url: nil, error: assetExport.error)
        default:
            callback(url: assetExport.outputURL, error: nil)
        }
    })
}
对于Swift 2.1:
// Returns the app's Documents directory path.
class func getDocumentsDirectory() -> NSString {
    // The user-domain search always yields the Documents directory first.
    let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    return searchPaths[0]
}
// Builds a file URL for a new .m4a file in the Documents directory, named
// after the current timestamp.
class func getFileURL() -> NSURL {
    let timestampedName = "\(NSDate()).m4a"
    let fullPath = getDocumentsDirectory().stringByAppendingPathComponent(timestampedName)
    return NSURL(fileURLWithPath: fullPath)
}
// Merges a trimmed span of `audio1` followed by a trimmed span of `audio2`
// into `finalURL` as M4A, then starts playback via initializeAudioPlayer().
// NOTE(review): trimmedLength1/2, startTime1/2 and finalURL are properties of
// the enclosing type — confirm their units (seconds) against the full class.
func merge(audio1: NSURL, audio2: NSURL) {
    finalURL = ProcessViewController.getFileURL()
    let preferredTimeScale : Int32 = 100
    //This object will be edited to include both audio files
    let composition = AVMutableComposition()
    //Song 1 setup
    let compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset1 = AVURLAsset(URL: audio1, options: nil)
    let tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack1:AVAssetTrack = tracks1[0]
    let duration1: CMTime = CMTimeMakeWithSeconds(trimmedLength1, preferredTimeScale)
    let startCMTime = CMTimeMakeWithSeconds(Double(startTime1), preferredTimeScale)
    let timeRange1 = CMTimeRangeMake(startCMTime, duration1)
    //Song 2 setup
    let compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset2 = AVURLAsset(URL: audio2, options: nil)
    let tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack2:AVAssetTrack = tracks2[0]
    let duration2: CMTime = CMTimeMakeWithSeconds(trimmedLength2, preferredTimeScale)
    let startCMTime2 = CMTimeMakeWithSeconds(Double(startTime2), preferredTimeScale)
    // BUG FIX: the original built this range from song 1's values
    // (startCMTime, duration1); song 2 must use its own start and length.
    let timeRange2 = CMTimeRangeMake(startCMTime2, duration2)
    //Insert the tracks into the composition
    do {
        try compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero)
        // The second clip starts where the first one ends.
        try compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1)
    } catch {
        print(error)
        // Don't export a half-built composition (the original fell through
        // and exported anyway after a caught insertion error).
        return
    }
    //Perform the merge
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("failed to create export session")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = finalURL
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case AVAssetExportSessionStatus.Failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
            self.initializeAudioPlayer()
        }
    })
}
对于 swift 3.0 -
归功于 @Peyman(经过轻微修改)
// Destination of the most recent merge; written by mergeAudioFiles below.
var mergeAudioURL = NSURL()

/// Concatenates the first audio track of each URL in `audioFileUrls`, in order,
/// into Documents/FinalAudio.m4a (the URL is stored in `mergeAudioURL`).
func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()
    // Append each file at the composition's current end so they play in order.
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            try compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        } catch {
            // The original used try! and crashed here; log and abort instead.
            print("insertTimeRange failed: \(error)")
            return
        }
    }
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("failed to create export session")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = mergeAudioURL as URL
    assetExport.exportAsynchronously {
        // The completion handler only fires for terminal states, so the
        // original's .unknown/.waiting/.exporting cases never matched here.
        switch assetExport.status {
        case .failed:
            print("failed \(assetExport.error)")
        case .cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("Audio Concatenation Complete")
        }
    }
}
我尝试在 swift 中连接 .wav 音频文件。
这是我的代码:
// Concatenates two audio files into Documents/resultmerge.wav via an
// AVMutableComposition, then plays the result with AVAudioPlayer.
// NOTE(review): `paths` is not declared in this snippet — presumably the result
// of NSSearchPathForDirectoriesInDomains; confirm against the full file.
func merge(audio1: NSURL, audio2: NSURL) {
var error:NSError?
var ok1 = false
var ok2 = false
var documentsDirectory:String = paths[0] as! String
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
var composition = AVMutableComposition()
// One mutable composition track per input file; both go into the exported file.
var compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
var compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//create new file to receive data
var documentDirectoryURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first! as! NSURL
var fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("resultmerge.wav")
println(fileDestinationUrl)
var url1 = audio1
var url2 = audio2
var avAsset1 = AVURLAsset(URL: url1, options: nil)
var avAsset2 = AVURLAsset(URL: url2, options: nil)
// Use the first (and assumed only) audio track of each asset.
var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
var tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
var assetTrack1:AVAssetTrack = tracks1[0] as! AVAssetTrack
var assetTrack2:AVAssetTrack = tracks2[0] as! AVAssetTrack
var duration1: CMTime = assetTrack1.timeRange.duration
var duration2: CMTime = assetTrack2.timeRange.duration
var timeRange1 = CMTimeRangeMake(kCMTimeZero, duration1)
// NOTE(review): this range starts at duration1 *within asset2 itself*, which
// likely reads past asset2's content; the follow-up answers change it to
// CMTimeRangeMake(kCMTimeZero, duration2).
var timeRange2 = CMTimeRangeMake(duration1, duration2)
ok1 = compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero, error: nil)
if ok1 {
// The second clip is inserted at offset duration1 so playback is sequential.
ok2 = compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1, error: nil)
if ok2 {
println("success")
}
}
//AVAssetExportPresetPassthrough => concatenation
// NOTE(review): Passthrough + AVFileTypeWAVE is the combination that produces
// the -11838 "operation not supported" export error quoted below; the accepted
// fix switches to AVAssetExportPresetAppleM4A / AVFileTypeAppleM4A.
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport.outputFileType = AVFileTypeWAVE
assetExport.outputURL = fileDestinationUrl
assetExport.exportAsynchronouslyWithCompletionHandler({
switch assetExport.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
// Any other terminal status is treated as success: load and play the file.
println("complete")
var audioPlayer = AVAudioPlayer()
audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
audioPlayer.prepareToPlay()
audioPlayer.play()
}
})
}
并在终端中得到这个错误(在 iPhone 上运行):
file:///var/mobile/Containers/Data/Application/3F49D360-B363-4600-B3BB-EE0810501910/Documents/resultmerge.wav
成功
failed Error Domain=AVFoundationErrorDomain Code=-11838 "Opération interrompue" UserInfo=0x174269ac0 {NSLocalizedDescription=Opération interrompue, NSLocalizedFailureReason=L'opération n'est pas prise en charge pour ce contenu multimédia.}
但我不知道为什么会出现此错误。如果您能给我任何帮助,我将不胜感激:)
我通过改变两件事让你的代码工作:
预设名称:从 AVAssetExportPresetPassthrough 改为 AVAssetExportPresetAppleM4A
输出文件类型:从 AVFileTypeWAVE 改为 AVFileTypeAppleM4A
像这样修改您的 assetExport
声明:
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A
然后它将正确合并文件。
貌似AVAssetExportSession
只导出M4A格式,忽略其他预设。可能有一种方法可以让它导出其他格式(通过子类化?),尽管我还没有探索过这种可能性。
这是代码,放在 println(fileDestinationUrl) 之后:
// Remove any previous merge output before exporting, since the export session
// cannot overwrite an existing file.
var file = "resultmerge.m4a"
var dirs : [String] = (NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.AllDomainsMask, true) as? [String])!
var dir = dirs[0] //documents directory
var path = dir.stringByAppendingPathComponent(file)
// `path` is already the full filesystem path of the output file — the original
// rebuilt the same string character-by-character from NSURL.pathComponents,
// which was redundant and error-prone.
var checkValidation = NSFileManager.defaultManager()
if checkValidation.fileExistsAtPath(path) {
println("file exist")
// Delete the same file whose existence was just checked. (The original
// removed a different URL — fileDestinationUrl, the .wav — than the .m4a
// path it tested, so the stale output could survive.)
if checkValidation.removeItemAtPath(path, error: nil) {
println("delete")
}
} else {
println("no file")
}
有了这个和@Eric D. 回答,它正在工作。
为了帮助别人"concatenate"我也改了
var timeRange2 = CMTimeRangeMake(duration1, duration2)
进入
var timeRange2 = CMTimeRangeMake(kCMTimeZero, duration2)
(@Eric D. @Pierre Louis Bresson 代码除外)。
此外,如果你希望它被同步处理,只需添加一个 dispatch
// Synchronous export variant: a dispatch semaphore blocks the calling thread
// until the export's completion handler fires. Evaluates to true on failure,
// false on success.
// NOTE(review): this fragment ends with `return errorExport`, so it belongs
// inside a function returning Bool; `composition` and `fileDestinationUrl`
// come from the enclosing scope shown earlier in the thread.
var errorExport = true
//AVAssetExportPresetPassthrough => concatenation
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A
assetExport.outputURL = fileDestinationUrl
let sessionWaitSemaphore = dispatch_semaphore_create(0)
assetExport.exportAsynchronouslyWithCompletionHandler({
switch assetExport.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
// Export completed; clear the failure flag.
errorExport = false
}
// Wake the waiting thread regardless of outcome.
dispatch_semaphore_signal(sessionWaitSemaphore)
return Void()
})
// Block here until the completion handler has signalled.
dispatch_semaphore_wait(sessionWaitSemaphore, DISPATCH_TIME_FOREVER)
return errorExport
我需要合并多个音频文件,所以我重写了接受 NSURL 数组的函数。以为我会在这里分享。
我是 Swift 的新手,所以请留下反馈。
在信用到期时给予信用:@Eric D. @Pierre Louis Bresson
代码如下:
/// Concatenates the first audio track of each URL in `audioFileUrls`, in order,
/// into a timestamped .m4a file in the Documents directory.
/// - Parameter audioFileUrls: array of NSURLs; each asset must contain at least
///   one audio track (the first is used).
/// - Parameter callback: invoked exactly once — with the output URL on success,
///   or with an NSError (or nil URL) on failure.
func mergeAudioFiles(audioFileUrls: NSArray, callback: (url: NSURL?, error: NSError?)->()) {
    // Create the audio composition that will hold every source track.
    let composition = AVMutableComposition()
    // Append each file at the composition's current end so they play back-to-back.
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(URL: audioFileUrls[i] as! NSURL)
        let track = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            try compositionAudioTrack.insertTimeRange(timeRange, ofTrack: track, atTime: composition.duration)
        } catch let insertError as NSError {
            // The original used try! and crashed here; surface the error instead.
            callback(url: nil, error: insertError)
            return
        }
    }
    // Timestamped file name prevents clobbering earlier merges.
    let format = NSDateFormatter()
    format.dateFormat = "yyyy-MM-dd-HH-mm-ss"
    let currentFileName = "recording-\(format.stringFromDate(NSDate()))-merge.m4a"
    print(currentFileName)
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let outputUrl = documentsDirectory.URLByAppendingPathComponent(currentFileName)
    // Export as M4A (AVAssetExportSession does not support WAV output).
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        callback(url: nil, error: nil)
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = outputUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        switch assetExport.status {
        case .Failed, .Cancelled:
            // The original reported .Cancelled as success; treat it as an error.
            callback(url: nil, error: assetExport.error)
        default:
            callback(url: assetExport.outputURL, error: nil)
        }
    })
}
对于Swift 2.1:
// Returns the app's Documents directory path.
class func getDocumentsDirectory() -> NSString {
    // The user-domain search always yields the Documents directory first.
    let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    return searchPaths[0]
}
// Builds a file URL for a new .m4a file in the Documents directory, named
// after the current timestamp.
class func getFileURL() -> NSURL {
    let timestampedName = "\(NSDate()).m4a"
    let fullPath = getDocumentsDirectory().stringByAppendingPathComponent(timestampedName)
    return NSURL(fileURLWithPath: fullPath)
}
// Merges a trimmed span of `audio1` followed by a trimmed span of `audio2`
// into `finalURL` as M4A, then starts playback via initializeAudioPlayer().
// NOTE(review): trimmedLength1/2, startTime1/2 and finalURL are properties of
// the enclosing type — confirm their units (seconds) against the full class.
func merge(audio1: NSURL, audio2: NSURL) {
    finalURL = ProcessViewController.getFileURL()
    let preferredTimeScale : Int32 = 100
    //This object will be edited to include both audio files
    let composition = AVMutableComposition()
    //Song 1 setup
    let compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset1 = AVURLAsset(URL: audio1, options: nil)
    let tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack1:AVAssetTrack = tracks1[0]
    let duration1: CMTime = CMTimeMakeWithSeconds(trimmedLength1, preferredTimeScale)
    let startCMTime = CMTimeMakeWithSeconds(Double(startTime1), preferredTimeScale)
    let timeRange1 = CMTimeRangeMake(startCMTime, duration1)
    //Song 2 setup
    let compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset2 = AVURLAsset(URL: audio2, options: nil)
    let tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack2:AVAssetTrack = tracks2[0]
    let duration2: CMTime = CMTimeMakeWithSeconds(trimmedLength2, preferredTimeScale)
    let startCMTime2 = CMTimeMakeWithSeconds(Double(startTime2), preferredTimeScale)
    // BUG FIX: the original built this range from song 1's values
    // (startCMTime, duration1); song 2 must use its own start and length.
    let timeRange2 = CMTimeRangeMake(startCMTime2, duration2)
    //Insert the tracks into the composition
    do {
        try compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero)
        // The second clip starts where the first one ends.
        try compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1)
    } catch {
        print(error)
        // Don't export a half-built composition (the original fell through
        // and exported anyway after a caught insertion error).
        return
    }
    //Perform the merge
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("failed to create export session")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = finalURL
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case AVAssetExportSessionStatus.Failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
            self.initializeAudioPlayer()
        }
    })
}
对于 swift 3.0 - 归功于 @Peyman(经过轻微修改)
// Destination of the most recent merge; written by mergeAudioFiles below.
var mergeAudioURL = NSURL()

/// Concatenates the first audio track of each URL in `audioFileUrls`, in order,
/// into Documents/FinalAudio.m4a (the URL is stored in `mergeAudioURL`).
func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()
    // Append each file at the composition's current end so they play in order.
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            try compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        } catch {
            // The original used try! and crashed here; log and abort instead.
            print("insertTimeRange failed: \(error)")
            return
        }
    }
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("failed to create export session")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = mergeAudioURL as URL
    assetExport.exportAsynchronously {
        // The completion handler only fires for terminal states, so the
        // original's .unknown/.waiting/.exporting cases never matched here.
        switch assetExport.status {
        case .failed:
            print("failed \(assetExport.error)")
        case .cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("Audio Concatenation Complete")
        }
    }
}