ExtAudioFile into a float buffer produces zeros
I have this code in Swift 3. Most of the time my output is all 0.0 (zeros), and only occasionally do I see very small numbers on the order of e-50.
The file URL is recording.caf, and it does contain sound.
Does anyone know what is going on?
func readBuff(_ fileURL: CFURL) {
    var fileRef: ExtAudioFileRef? = nil
    let openStatus = ExtAudioFileOpenURL(fileURL, &fileRef)
    guard openStatus == noErr else {
        print("Failed to open audio file '\(fileURL)' with error \(openStatus)")
        return
    }

    var audioFormat2 = AudioStreamBasicDescription()
    audioFormat2.mSampleRate = 44100 // GIVE YOUR SAMPLING RATE
    audioFormat2.mFormatID = kAudioFormatLinearPCM
    audioFormat2.mFormatFlags = kLinearPCMFormatFlagIsFloat
    audioFormat2.mBitsPerChannel = UInt32(MemoryLayout<Float32>.size) * 8
    audioFormat2.mChannelsPerFrame = 1 // Mono
    audioFormat2.mBytesPerFrame = audioFormat2.mChannelsPerFrame * UInt32(MemoryLayout<Float32>.size) // == sizeof(Float32)
    audioFormat2.mFramesPerPacket = 1
    audioFormat2.mBytesPerPacket = audioFormat2.mFramesPerPacket * audioFormat2.mBytesPerFrame // == sizeof(Float32)

    // apply audioFormat2 to the extended audio file
    ExtAudioFileSetProperty(fileRef!, kExtAudioFileProperty_ClientDataFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size), &audioFormat2)

    let numSamples = 1024 // How many samples to read in at a time
    let sizePerPacket: UInt32 = audioFormat2.mBytesPerPacket // sizeof(Float32) = 4 bytes
    let packetsPerBuffer: UInt32 = UInt32(numSamples)
    let outputBufferSize: UInt32 = packetsPerBuffer * sizePerPacket // 4096

    // outputbuffer is the memory location where we have reserved space
    let outputbuffer = UnsafeMutablePointer<UInt8>.allocate(capacity: MemoryLayout<UInt8>.size * Int(outputBufferSize))

    var convertedData = AudioBufferList()
    convertedData.mNumberBuffers = 1 // set this for Mono
    convertedData.mBuffers.mNumberChannels = audioFormat2.mChannelsPerFrame // also = 1
    convertedData.mBuffers.mDataByteSize = outputBufferSize
    convertedData.mBuffers.mData = UnsafeMutableRawPointer(outputbuffer)

    var frameCount: UInt32 = UInt32(numSamples)
    while frameCount > 0 {
        Utility.check(ExtAudioFileRead(fileRef!,
                                       &frameCount,
                                       &convertedData),
                      operation: "Couldn't read from input file")
        if frameCount == 0 {
            Swift.print("done reading from file")
            return
        }

        var arrayFloats: [Float] = []
        let ptr = convertedData.mBuffers.mData?.assumingMemoryBound(to: Float.self)
        var j = 0
        var floatDataArray: [Double] = [882000] // SPECIFY YOUR DATA LIMIT, MINE WAS 882000; SHOULD BE EQUAL TO OR MORE THAN THE DATA LIMIT

        if frameCount > 0 {
            var audioBuffer: AudioBuffer = convertedData.mBuffers
            let floatArr = UnsafeBufferPointer(start: audioBuffer.mData?.assumingMemoryBound(to: Float.self), count: 882000)
            for i in 0...1024 {
                // floatDataArray[j] = Double(floatArr[i]) // put your data into float array
                // print("\(floatDataArray[j])")
                floatDataArray.append(Double(floatArr[i]))
                print(Float((ptr?[i])!))
                j += 1
            }
            // print(floatDataArray)
        }
    }
}
The file I am reading from is created with:
guard let fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, "./output.caf" as CFString!, .cfurlposixPathStyle, false) else {
    // unable to create file
    exit(-1)
}
The steps after recording:
Swift.print("Recording, press <return> to stop:\n")
// wait for a key to be pressed
getchar()
// end recording
Swift.print("* recording done *\n")
recorder.running = false
// stop the Queue
Utility.check(AudioQueueStop(queue!, true),
              operation: "AudioQueueStop failed")
// a codec may update its magic cookie at the end of an encoding session
// so reapply it to the file now
Utility.applyEncoderCookie(fromQueue: queue!, toFile: recorder.recordFile!)
// cleanup
AudioQueueDispose(queue!, true)
AudioFileClose(recorder.recordFile!)
readBuff(fileURL)
You are setting up the ExtAudioFile and its client format, but you are not actually reading from it (with ExtAudioFileRead), so your "output" is effectively uninitialized, and in your case, very small.
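For reference, here is a minimal sketch of a read loop that does pull frames with ExtAudioFileRead and copies only the frames that were actually returned. It assumes a mono, 44.1 kHz Float32 client format; the helper name readAllSamples, the 1024-frame buffer size, and the error handling are illustrative, not taken from the question's project:
import AudioToolbox
import Foundation
// Hypothetical helper (not from the original project): decode an entire file
// into a [Float], assuming a mono 44.1 kHz Float32 client format.
func readAllSamples(from url: CFURL) -> [Float]? {
    var fileRef: ExtAudioFileRef? = nil
    guard ExtAudioFileOpenURL(url, &fileRef) == noErr, let file = fileRef else { return nil }
    defer { ExtAudioFileDispose(file) }
    // The format the file is converted to as it is read.
    var clientFormat = AudioStreamBasicDescription(
        mSampleRate:       44100,
        mFormatID:         kAudioFormatLinearPCM,
        mFormatFlags:      kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked,
        mBytesPerPacket:   4,
        mFramesPerPacket:  1,
        mBytesPerFrame:    4,
        mChannelsPerFrame: 1,
        mBitsPerChannel:   32,
        mReserved:         0)
    guard ExtAudioFileSetProperty(file,
                                  kExtAudioFileProperty_ClientDataFormat,
                                  UInt32(MemoryLayout<AudioStreamBasicDescription>.size),
                                  &clientFormat) == noErr else { return nil }
    let framesPerRead: UInt32 = 1024
    var readBuffer = [Float](repeating: 0, count: Int(framesPerRead))
    var samples: [Float] = []
    while true {
        var frameCount = framesPerRead
        let status: OSStatus = readBuffer.withUnsafeMutableBufferPointer { ptr in
            // Reset the byte size to the buffer's full capacity before every read;
            // ExtAudioFileRead shrinks it to the number of bytes actually produced.
            var bufferList = AudioBufferList(
                mNumberBuffers: 1,
                mBuffers: AudioBuffer(mNumberChannels: 1,
                                      mDataByteSize: framesPerRead * 4,
                                      mData: UnsafeMutableRawPointer(ptr.baseAddress!)))
            return ExtAudioFileRead(file, &frameCount, &bufferList)
        }
        guard status == noErr else { return nil }
        if frameCount == 0 { break } // end of file
        // Only the first frameCount samples of the buffer are valid on this pass.
        samples.append(contentsOf: readBuffer[0..<Int(frameCount)])
    }
    return samples
}
The two details that matter here are resetting mDataByteSize before each read and copying only frameCount frames afterwards; wrapping a 4096-byte buffer in an UnsafeBufferPointer with count: 882000, as in the question, walks far past the data ExtAudioFileRead actually wrote.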