在 iOS 安排音频样本播放
Scheduling Audio Sample Playback on iOS
在 iOS 应用程序 (Swift) 中,我如何安排音频样本在准确的时间播放?
我想通过安排音频样本在准确的时间播放来制作音乐 - 一系列“曲目”。我有一些想法,我应该使用 AVFoundation,但我发现缺乏服务于这个特定用例的文档。
我知道 AudioKit 存在,但我希望最终将我的应用程序移动到 Apple Watch,目前 AudioKit 不支持它。
我想出了我需要做的事情。
创建一个 AVMutableComposition
,添加一个可变轨道,并在该轨道上添加片段(静音片段和声音资产的片段)
import AVFoundation
// URL of the bundled drum sample. Force-unwrap is intentional for an app-bundle
// resource: a missing file is a programmer/build error and should crash early.
// NOTE(review): confirm "bd_909dwsd.wav" is actually included in the target.
let drumUrl = Bundle.main.url(forResource: "bd_909dwsd", withExtension: "wav")!
/// Builds an `AVPlayer` whose item plays the bundled drum sample once per beat,
/// padding the remainder of each beat with silence on a single composition track.
///
/// - Parameters:
///   - bpm: Tempo in beats per minute. Must be > 0.
///   - beatCount: Number of beats to schedule. Must be > 0.
/// - Returns: A configured `AVPlayer`, or `nil` when the inputs are invalid,
///   the audio track cannot be created, the sample is longer than one beat,
///   or the assembled segments fail validation.
func makePlayer(bpm: Float, beatCount: Int) -> AVPlayer? {
    guard bpm > 0, beatCount > 0 else { return nil }

    // Divide in Double to avoid the Float round-off of the original `60 / bpm`.
    let beatDuration = CMTime(seconds: 60.0 / Double(bpm), preferredTimescale: .max)

    let composition = AVMutableComposition()
    guard let track = composition.addMutableTrack(
        withMediaType: .audio,
        preferredTrackID: kCMPersistentTrackID_Invalid
    ) else {
        return nil
    }

    let drumAsset = AVAsset(url: drumUrl)
    // NOTE(review): synchronous `duration` blocks and is deprecated on newer
    // iOS versions — consider `load(.duration)` if the deployment target allows.
    let drumDuration = drumAsset.duration
    let drumSourceRange = CMTimeRange(start: .zero, duration: drumDuration)

    // Gap between the end of one sample and the start of the next beat.
    let silenceDuration = beatDuration - drumDuration
    guard silenceDuration >= .zero else {
        // Sample is longer than one beat; the schedule below would overlap.
        return nil
    }

    var cursor = CMTime.zero
    for beatIndex in 0 ..< beatCount {
        // Bug fix vs. the original: the silence must be inserted BEFORE the next
        // drum segment is placed. The original appended the drum segment at
        // `prevBeatEnd` and then inserted the empty range starting at that same
        // time, overlapping the segment it had just appended.
        if beatIndex > 0 {
            track.insertEmptyTimeRange(CMTimeRange(start: cursor, duration: silenceDuration))
            cursor = cursor + silenceDuration
        }
        let drumTargetRange = CMTimeRange(start: cursor, duration: drumDuration)
        let drumSegment = AVCompositionTrackSegment(
            url: drumUrl,
            trackID: track.trackID,
            sourceTimeRange: drumSourceRange,
            targetTimeRange: drumTargetRange
        )
        track.segments.append(drumSegment)
        cursor = cursor + drumDuration
    }

    // Validate instead of crashing with `try!`; a malformed timeline yields nil,
    // consistent with this function's optional return type.
    do {
        try track.validateSegments(track.segments)
    } catch {
        return nil
    }

    return AVPlayer(playerItem: AVPlayerItem(asset: composition))
}
在 iOS 应用程序 (Swift) 中,我如何安排音频样本在准确的时间播放?
我想通过安排音频样本在准确的时间播放来制作音乐 - 一系列“曲目”。我有一些想法,我应该使用 AVFoundation,但我发现缺乏服务于这个特定用例的文档。
我知道 AudioKit 存在,但我希望最终将我的应用程序移动到 Apple Watch,目前 AudioKit 不支持它。
我想出了我需要做的事情。
创建一个 AVMutableComposition
,添加一个可变轨道,并在该轨道上添加片段(静音片段和声音资产的片段)
import AVFoundation
// URL of the bundled drum sample. Force-unwrap is intentional for an app-bundle
// resource: a missing file is a programmer/build error and should crash early.
// NOTE(review): confirm "bd_909dwsd.wav" is actually included in the target.
let drumUrl = Bundle.main.url(forResource: "bd_909dwsd", withExtension: "wav")!
/// Builds an `AVPlayer` whose item plays the bundled drum sample once per beat,
/// padding the remainder of each beat with silence on a single composition track.
///
/// - Parameters:
///   - bpm: Tempo in beats per minute. Must be > 0.
///   - beatCount: Number of beats to schedule. Must be > 0.
/// - Returns: A configured `AVPlayer`, or `nil` when the inputs are invalid,
///   the audio track cannot be created, the sample is longer than one beat,
///   or the assembled segments fail validation.
func makePlayer(bpm: Float, beatCount: Int) -> AVPlayer? {
    guard bpm > 0, beatCount > 0 else { return nil }

    // Divide in Double to avoid the Float round-off of the original `60 / bpm`.
    let beatDuration = CMTime(seconds: 60.0 / Double(bpm), preferredTimescale: .max)

    let composition = AVMutableComposition()
    guard let track = composition.addMutableTrack(
        withMediaType: .audio,
        preferredTrackID: kCMPersistentTrackID_Invalid
    ) else {
        return nil
    }

    let drumAsset = AVAsset(url: drumUrl)
    // NOTE(review): synchronous `duration` blocks and is deprecated on newer
    // iOS versions — consider `load(.duration)` if the deployment target allows.
    let drumDuration = drumAsset.duration
    let drumSourceRange = CMTimeRange(start: .zero, duration: drumDuration)

    // Gap between the end of one sample and the start of the next beat.
    let silenceDuration = beatDuration - drumDuration
    guard silenceDuration >= .zero else {
        // Sample is longer than one beat; the schedule below would overlap.
        return nil
    }

    var cursor = CMTime.zero
    for beatIndex in 0 ..< beatCount {
        // Bug fix vs. the original: the silence must be inserted BEFORE the next
        // drum segment is placed. The original appended the drum segment at
        // `prevBeatEnd` and then inserted the empty range starting at that same
        // time, overlapping the segment it had just appended.
        if beatIndex > 0 {
            track.insertEmptyTimeRange(CMTimeRange(start: cursor, duration: silenceDuration))
            cursor = cursor + silenceDuration
        }
        let drumTargetRange = CMTimeRange(start: cursor, duration: drumDuration)
        let drumSegment = AVCompositionTrackSegment(
            url: drumUrl,
            trackID: track.trackID,
            sourceTimeRange: drumSourceRange,
            targetTimeRange: drumTargetRange
        )
        track.segments.append(drumSegment)
        cursor = cursor + drumDuration
    }

    // Validate instead of crashing with `try!`; a malformed timeline yields nil,
    // consistent with this function's optional return type.
    do {
        try track.validateSegments(track.segments)
    } catch {
        return nil
    }

    return AVPlayer(playerItem: AVPlayerItem(asset: composition))
}