AudioKit: How to use AKPlayer playAt method in Objective-C?
I want to start multiple AKPlayers in sync after a given short delay, in an iOS app written in Objective-C.
I found the following Swift code in AudioKit's sources, in the file AKTiming.swift:
let bufferDuration = AKSettings.ioBufferDuration
let referenceTime = AudioKit.engine.outputNode.lastRenderTime ?? AVAudioTime.now()
let startTime = referenceTime + bufferDuration
for node in nodes {
    node.start(at: startTime)
}
How can I do something similar in Objective-C, with the buffer duration given as an NSTimeInterval parameter?
Unfortunately, an addition like referenceTime + bufferDuration with an AVAudioTime variable is not possible in Objective-C, and a now() method doesn't exist either.
Apple's documentation of the AVAudioTime class is very short and doesn't help me much.
May I use the static method hostTimeForSeconds to convert an NSTimeInterval to a hostTime and then timeWithHostTime to create an AVAudioTime instance?
Thanks for your help!
Matthias
May I use the static method hostTimeForSeconds to convert NSTimeInterval to a hostTime and then timeWithHostTime to create an AVAudioTime instance?
Yes!
If you want to handle the case where lastRenderTime is NULL, you will also need to #import <mach/mach_time.h> and use mach_absolute_time.
double bufferDuration = AKSettings.ioBufferDuration;
AVAudioTime *referenceTime = AudioKit.engine.outputNode.lastRenderTime ?: [[AVAudioTime alloc] initWithHostTime:mach_absolute_time()];
uint64_t startHostTime = referenceTime.hostTime + [AVAudioTime hostTimeForSeconds:bufferDuration];
AVAudioTime *startTime = [[AVAudioTime alloc] initWithHostTime:startHostTime];
for (AKPlayer *node in nodes) {
    [node startAt:startTime];
}
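If you'd rather use the two class methods you asked about directly, the same logic can be wrapped in a small helper that takes the delay as an NSTimeInterval. This is only a sketch (the helper name startPlayers:afterDelay: and the players array are my own, and I haven't compiled it):
// Sketch: schedule a set of AKPlayers to start together after a given delay.
// hostTimeForSeconds: converts the NSTimeInterval to host ticks, and
// timeWithHostTime: builds the AVAudioTime, as proposed in the question.
// Requires #import <mach/mach_time.h> for mach_absolute_time().
- (void)startPlayers:(NSArray<AKPlayer *> *)players afterDelay:(NSTimeInterval)delay {
    AVAudioTime *lastRender = AudioKit.engine.outputNode.lastRenderTime;
    // Fall back to "now" when the engine hasn't rendered yet.
    uint64_t referenceHostTime = lastRender ? lastRender.hostTime : mach_absolute_time();
    AVAudioTime *startTime = [AVAudioTime timeWithHostTime:referenceHostTime + [AVAudioTime hostTimeForSeconds:delay]];
    for (AKPlayer *player in players) {
        [player startAt:startTime];
    }
}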
Unfortunately an addition like referenceTime + bufferDuration with AVAudioTime variable is not possible in objective c, and a now() method also doesn't exist.
Those don't exist because they are not part of AVAudioTime itself; they are AudioKit extensions.
If you look at their source code, you will find:
/// An AVAudioTime with a valid hostTime representing now.
public static func now() -> AVAudioTime {
    return AVAudioTime(hostTime: mach_absolute_time())
}

/// Returns an AVAudioTime offset by seconds.
open func offset(seconds: Double) -> AVAudioTime {
    if isSampleTimeValid && isHostTimeValid {
        return AVAudioTime(hostTime: hostTime + seconds / ticksToSeconds,
                           sampleTime: sampleTime + AVAudioFramePosition(seconds * sampleRate),
                           atRate: sampleRate)
    } else if isHostTimeValid {
        return AVAudioTime(hostTime: hostTime + seconds / ticksToSeconds)
    } else if isSampleTimeValid {
        return AVAudioTime(sampleTime: sampleTime + AVAudioFramePosition(seconds * sampleRate),
                           atRate: sampleRate)
    }
    return self
}

public func + (left: AVAudioTime, right: Double) -> AVAudioTime {
    return left.offset(seconds: right)
}
You could also implement these extensions yourself. I don't think you can implement the + operator in Objective-C, so you'll just have to use the offset method.
(Note: I haven't checked the following.)
#import <AVFoundation/AVFoundation.h>
#import <mach/mach_time.h>

// Seconds per host-time tick; mach_timebase_info yields the
// nanoseconds-per-tick fraction numer/denom.
static double ticksToSeconds() {
    struct mach_timebase_info tinfo;
    mach_timebase_info(&tinfo);
    double nanosecondsPerTick = (double)tinfo.numer / (double)tinfo.denom;
    return nanosecondsPerTick * 0.000000001;
}
@interface AVAudioTime (Extensions)
+ (AVAudioTime *)now;
- (AVAudioTime *)offsetWithSeconds:(double)seconds;
@end
@implementation AVAudioTime (Extensions)

+ (AVAudioTime *)now {
    return [[AVAudioTime alloc] initWithHostTime:mach_absolute_time()];
}

- (AVAudioTime *)offsetWithSeconds:(double)seconds {
    if ([self isSampleTimeValid] && [self isHostTimeValid]) {
        // Offset both representations, converting seconds to host ticks
        // and to sample frames respectively.
        return [[AVAudioTime alloc] initWithHostTime:self.hostTime + (uint64_t)(seconds / ticksToSeconds())
                                          sampleTime:self.sampleTime + (AVAudioFramePosition)(seconds * self.sampleRate)
                                              atRate:self.sampleRate];
    } else if ([self isHostTimeValid]) {
        return [[AVAudioTime alloc] initWithHostTime:self.hostTime + (uint64_t)(seconds / ticksToSeconds())];
    } else if ([self isSampleTimeValid]) {
        return [[AVAudioTime alloc] initWithSampleTime:self.sampleTime + (AVAudioFramePosition)(seconds * self.sampleRate)
                                                atRate:self.sampleRate];
    }
    return self;
}
@end
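With the category in place, the Swift snippet from the question maps over almost one to one. Again a sketch (players stands in for your array of AKPlayer nodes):
AVAudioTime *referenceTime = AudioKit.engine.outputNode.lastRenderTime ?: [AVAudioTime now];
AVAudioTime *startTime = [referenceTime offsetWithSeconds:AKSettings.ioBufferDuration];

for (AKPlayer *player in players) {
    [player startAt:startTime];
}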