Display SPL level as a numeric value rather than varying colour intensity
I am trying to develop an SPL meter for a uni project in iOS. I have been following an online tutorial on how to capture audio input and display the input level, but the tutorial displays the input as a colour of varying intensity, and as an RMS value rather than peak. I found two previous posts related to this, but they didn't provide enough information to help, and as a newcomer to Stack Overflow I couldn't ask for further support on the answers provided.
Previous Post 2
The tutorial project I have been following is in Objective-C rather than Swift, as it is an older video series, but it does (almost) everything I need, so I haven't tried converting it to Swift until I understand what I am doing and why.
Here is the complete code for recording and input-level monitoring:
//Lynda.Com Tutorial Project
#import "ViewController.h"
@import AudioToolbox;
@import AVFoundation;

// RemoteIO bus numbers: bus 0 is output (speaker), bus 1 is input (microphone).
#define kOutputBus 0
#define kInputBus 1

@interface ViewController ()
@property (nonatomic, weak) IBOutlet UIView *indicatorView;
@end

@implementation ViewController

static AudioComponentInstance audioUnit;

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    [self setupAudio];
}

- (void)setupAudio {
    // Find and instantiate the RemoteIO audio unit.
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    OSStatus status = AudioComponentInstanceNew(comp, &audioUnit);
    NSAssert(status == noErr, @"Error creating audio unit");

    // Mono, packed, signed 16-bit integer PCM at 44.1 kHz.
    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate = 44100.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(SInt16);
    audioFormat.mBytesPerPacket = audioFormat.mFramesPerPacket * audioFormat.mBytesPerFrame;

    // Enable input on the input bus and disable output on the output bus.
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, sizeof(flag));
    NSAssert(status == noErr, @"Error enabling input");
    flag = 0;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, sizeof(flag));
    NSAssert(status == noErr, @"Error disabling output");

    // Apply the stream format to the output scope of the input bus
    // (the side on which the hardware delivers samples to us).
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, sizeof(audioFormat));
    NSAssert(status == noErr, @"Error setting stream format");

    // Register the callback that receives each buffer of microphone samples.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &callbackStruct, sizeof(callbackStruct));
    NSAssert(status == noErr, @"Error setting input callback");

    status = AudioUnitInitialize(audioUnit);
    NSAssert(status == noErr, @"Error initialising audio unit");
}

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    // Allocate a buffer for this render cycle and pull the samples into it.
    AudioBuffer buffer;
    buffer.mNumberChannels = 1;
    buffer.mDataByteSize = inNumberFrames * sizeof(SInt16);
    buffer.mData = malloc(buffer.mDataByteSize);

    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0] = buffer;

    OSStatus status = AudioUnitRender(audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList);
    if (status != noErr) {
        printf("Error\n");
        free(buffer.mData);
        return status;
    }

    // Compute the RMS amplitude of this buffer of samples.
    SInt16 *frameBuffer = buffer.mData;
    double totalAmplitude = 0;
    for (int i = 0; i < inNumberFrames; i++) {
        totalAmplitude += frameBuffer[i] * frameBuffer[i];
    }
    totalAmplitude /= inNumberFrames;
    totalAmplitude = sqrt(totalAmplitude);

    // Scale the RMS into an alpha value (the * 2 just makes quiet signals
    // more visible).
    float alphaFloat = totalAmplitude / (float)SHRT_MAX * 2;
    free(buffer.mData); // the tutorial leaked this allocation on every callback

    // UI updates must happen on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        ViewController *viewController = (__bridge ViewController *)inRefCon;
        viewController.indicatorView.backgroundColor = [UIColor colorWithRed:1.0 green:0 blue:0 alpha:alphaFloat];
    });
    return noErr;
}

- (IBAction)recordButtonPressed:(id)sender {
    // Set the session category before activating the session.
    NSError *error;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord error:&error];
    NSAssert(error == nil, @"Error setting category");
    [[AVAudioSession sharedInstance] setActive:YES error:&error];
    NSAssert(error == nil, @"Error activating session");

    [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
        if (granted) {
            OSStatus status = AudioOutputUnitStart(audioUnit);
            NSAssert(status == noErr, @"Error starting audio unit");
        } else {
            NSAssert(NO, @"Record permission denied");
        }
    }];
}

- (IBAction)stopButtonPressed:(id)sender {
    OSStatus status = AudioOutputUnitStop(audioUnit);
    NSAssert(status == noErr, @"Error stopping audio unit");

    NSError *error;
    [[AVAudioSession sharedInstance] setActive:NO error:&error];
    NSAssert(error == nil, @"Error deactivating session");

    self.indicatorView.backgroundColor = [UIColor clearColor];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)dealloc {
    OSStatus status = AudioComponentInstanceDispose(audioUnit);
    NSAssert(status == noErr, @"Error disposing audio unit");
}

@end
What I need help understanding is this, because I believe this piece of code here:
dispatch_async(dispatch_get_main_queue(), ^{
    ViewController *viewController = (__bridge ViewController *)inRefCon;
    viewController.indicatorView.backgroundColor = [UIColor colorWithRed:1.0 green:0 blue:0 alpha:alphaFloat];
});
is where the UIView is told to display red at varying intensity, and what I want is to change that into a varying number instead.
Converting the RMS value to dB SPL is not the problem; it's getting it to display as a continuously changing value where the red box is in the running app that I could really use some help with. Many thanks in advance for any help!
Thanks,
Chris
The value you want to display is in the variable alphaFloat. You need to add a UILabel property to your ViewController; you could call it dBSPLView. Then, at the point where viewController.indicatorView.backgroundColor is changed, you would do something like this:
viewController.dBSPLView.text = [NSString stringWithFormat:@"%f", alphaFloat];
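Bear in mind that alphaFloat is just the RMS scaled into an alpha range, not decibels. Since you say the RMS-to-dB-SPL conversion itself isn't the problem, here is a minimal sketch (not from the tutorial) of how the end of recordingCallback could compute a decibel figure and push it to the label. dBSPLView is the UILabel property suggested above, and calibrationOffset is a hypothetical constant you would have to determine against a reference meter, because a phone microphone on its own only gives you dB relative to digital full scale (dBFS), not absolute SPL:

// Sketch only: assumes a UILabel outlet has been added alongside indicatorView:
//   @property (nonatomic, weak) IBOutlet UILabel *dBSPLView;

// totalAmplitude already holds the RMS of the buffer at this point.
// Clamp to 1 to guard against log10(0) when the input is pure silence.
double rms = MAX(totalAmplitude, 1.0);

// dBFS: 0 dB at digital full scale, negative below it
// (roughly a -90 dB floor for 16-bit samples).
double dBFS = 20.0 * log10(rms / (double)SHRT_MAX);

// Hypothetical calibration offset (an assumption, not a known constant):
// it maps dBFS to approximate dB SPL for one specific device, and must be
// measured against a reference SPL meter.
const double calibrationOffset = 90.0;
double dBSPL = dBFS + calibrationOffset;

// UI updates go on the main thread, just like the colour change.
dispatch_async(dispatch_get_main_queue(), ^{
    ViewController *viewController = (__bridge ViewController *)inRefCon;
    viewController.dBSPLView.text = [NSString stringWithFormat:@"%.1f dB", dBSPL];
});

If the label changes too quickly to read, you could smooth the value (for example, average it over the last few buffers) or only update the label a few times per second.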