Initialising AVAudioSessionModeMeasurement
Hey, I'm definitely out of my depth here, but unfortunately it's too late to turn back now, as my lecturer has assigned me this project.
I'm trying to disable the system-supplied signal processing applied to my input by using AVAudioSessionModeMeasurement in my project. However, I'm struggling to find any resources on how to do this.
My hope is that by enabling this mode I'll be able to get more accurate readings in my app.
The code is as follows:
#import "ViewController.h"
@import AudioToolbox;
@import AVFoundation;
#define kOutputBus 0
#define kInputBus 1
@interface ViewController ()
@property (nonatomic, weak) IBOutlet UILabel *dBSPLView2;
@end
@implementation ViewController
static AudioComponentInstance audioUnit;
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    [self setupAudio];
}
- (void)setupAudio {
    // Describe and instantiate the RemoteIO audio unit.
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    OSStatus status = AudioComponentInstanceNew(comp, &audioUnit);
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    // 16-bit signed-integer mono PCM at 96 kHz.
    AudioStreamBasicDescription audioFormat = {0}; // zero-fill so mReserved is not left uninitialised
    audioFormat.mSampleRate = 96000.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(SInt16);
    audioFormat.mBytesPerPacket = audioFormat.mFramesPerPacket * audioFormat.mBytesPerFrame;
    // Enable input on bus 1 and disable output on bus 0 (capture-only unit).
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, sizeof(flag));
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    flag = 0;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, sizeof(flag));
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    // The format goes on the output scope of the input bus: the side on
    // which the unit hands captured samples to the app.
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, sizeof(audioFormat));
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    // Register the input callback that pulls the captured audio.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &callbackStruct, sizeof(callbackStruct));
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    status = AudioUnitInitialize(audioUnit);
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
}
static OSStatus recordingCallback(void                       *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp       *inTimeStamp,
                                  UInt32                      inBusNumber,
                                  UInt32                      inNumberFrames,
                                  AudioBufferList            *ioData) {
    // Allocate a buffer and pull this slice of captured frames from the unit.
    AudioBuffer buffer;
    buffer.mNumberChannels = 1;
    buffer.mDataByteSize = inNumberFrames * sizeof(SInt16);
    buffer.mData = malloc(buffer.mDataByteSize);
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0] = buffer;
    OSStatus status = AudioUnitRender(audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList);
    if (status != noErr) {
        printf("Error\n");
        free(buffer.mData); // don't leak the buffer on the error path
        return status;
    }
    // Compute the RMS amplitude of the buffer.
    SInt16 *frameBuffer = buffer.mData;
    double totalAmplitude = 0;
    for (UInt32 i = 0; i < inNumberFrames; i++) {
        totalAmplitude += (double)frameBuffer[i] * frameBuffer[i];
    }
    totalAmplitude /= inNumberFrames;
    totalAmplitude = sqrt(totalAmplitude);
    free(buffer.mData); // the buffer is no longer needed once the RMS is known
    //float SPLFloat = totalAmplitude / (float)SHRT_MAX * 2; // creates a negative number that goes no higher than zero
    // Convert to decibels; the +11 is an ad-hoc calibration offset.
    float dBFloat = (20 * log10(totalAmplitude)) + 11;
    dispatch_async(dispatch_get_main_queue(), ^{
        ViewController *viewController = (__bridge ViewController *)inRefCon;
        viewController.dBSPLView2.text = [NSString stringWithFormat:@"%.f", dBFloat];
    });
    return noErr;
}
- (IBAction)recordButtonPressed:(id)sender {
    // Configure the session's category before activating the session.
    NSError *error;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord error:&error];
    if (error != nil) {
        NSAssert(error == nil, @"Error");
    }
    [[AVAudioSession sharedInstance] setActive:YES error:&error];
    if (error != nil) {
        NSAssert(error == nil, @"Error");
    }
    [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
        if (granted) {
            OSStatus status = AudioOutputUnitStart(audioUnit);
            if (status != noErr) {
                NSAssert(status == noErr, @"Error");
            }
        } else {
            NSAssert(NO, @"Error");
        }
    }];
}
- (IBAction)stopButtonPressed:(id)sender {
    OSStatus status = AudioOutputUnitStop(audioUnit);
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
    NSError *error;
    [[AVAudioSession sharedInstance] setActive:NO error:&error];
    if (error != nil) {
        NSAssert(error == nil, @"Error");
    }
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
- (void)dealloc {
    OSStatus status = AudioComponentInstanceDispose(audioUnit);
    if (status != noErr) {
        NSAssert(status == noErr, @"Error");
    }
}
@end
After you configure the session's category:
[[AVAudioSession sharedInstance] setMode:AVAudioSessionModeMeasurement error:&error];
if (error != nil) {
    NSAssert(error == nil, @"Error");
}
Also, your error handling doesn't follow the documented pattern. You should be checking the return value of setMode:error:. The error in/out parameter is only guaranteed to be valid when the method's return value is NO. (In practice, checking whether the error is nil will probably work fine in most cases, but it isn't documented to work that way, so you shouldn't rely on it.)
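For illustration, here is a minimal sketch of that documented pattern applied to the whole session setup, checking each method's BOOL return value and only consulting the NSError when a call reports failure. It assumes it runs inside a method that can simply return on failure, and it orders the calls as category, then mode, then activation:

AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *error = nil;
// Configure the category first...
if (![session setCategory:AVAudioSessionCategoryRecord error:&error]) {
    NSLog(@"setCategory failed: %@", error);
    return;
}
// ...then the measurement mode, which minimises the system-supplied
// signal processing applied to the input...
if (![session setMode:AVAudioSessionModeMeasurement error:&error]) {
    NSLog(@"setMode failed: %@", error);
    return;
}
// ...and only then activate the session.
if (![session setActive:YES error:&error]) {
    NSLog(@"setActive failed: %@", error);
    return;
}

With this pattern the error object is only read on the path where the API guarantees it has been populated, which is exactly what the documentation promises.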