我正在尝试使用我的 RØDE iXY USB 麦克风以 96kHz 的采样率录音。
录音没有错误,当我启动连接麦克风的应用程序时,我看到AVAudioSession以96kHz的采样率成功运行.
但是,如果我看一下频谱,很明显 20kHz 以上除了重采样噪声之外什么都没有:
录音没有错误,当我启动连接麦克风的应用程序时,我看到AVAudioSession以96kHz的采样率成功运行.
但是,如果我看一下频谱,很明显 20kHz 以上除了重采样噪声之外什么都没有:
为了进行比较,这是使用与USB麦克风捆绑的应用程序(RØDERec)进行相同录制的频谱:
还有什么我必须做的,以原生96kHz录制?
或者也许RØDERec应用程序通过USB通过一些专有协议与麦克风通信,我在这里运气不好?
我包含了我使用的源代码:
// Packed 24-bit signed-integer interleaved PCM, 2 channels, 96 kHz.
// 2 channels * 3 bytes/sample = 6 bytes per frame; 1 frame per packet.
static AudioStreamBasicDescription AudioDescription24BitStereo96000 = (AudioStreamBasicDescription) {
    .mFormatID         = kAudioFormatLinearPCM,
    .mFormatFlags      = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger,
    .mChannelsPerFrame = 2,
    .mBytesPerPacket   = 6,
    .mFramesPerPacket  = 1,
    .mBytesPerFrame    = 6,
    .mBitsPerChannel   = 24,
    .mSampleRate       = 96000.0
};

/// Configures the shared AVAudioSession for 96 kHz recording.
/// The preferred sample rate must be requested BEFORE the session is
/// activated; a preference set after activation may not be applied to the
/// current route, which leaves the HW at 48 kHz and forces a resample —
/// the likely source of the >20 kHz resampling noise observed.
- (void)setupAudioSession {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;  // FIX: 'error' was used but never declared.

    // Check the BOOL return values, not the error pointer.
    if (![session setCategory:AVAudioSessionCategoryRecord error:&error]) {
        NSLog(@"setCategory failed: %@", error);
    }
    // FIX: request the rate before -setActive:, not after.
    if (![session setPreferredSampleRate:96000.0 error:&error]) {
        NSLog(@"setPreferredSampleRate failed: %@", error);
    }
    if (![session setActive:YES error:&error]) {
        NSLog(@"setActive failed: %@", error);
    }

    //I got my 96000Hz with the USB mic plugged in!
    NSLog(@"sampleRate = %lf",session.sampleRate);
}

/// Creates a RemoteIO audio unit, enables input on bus 1, installs the
/// 24-bit/96 kHz stream format and the input render callback, then starts it.
- (void)startRecording {
    AudioComponentDescription desc;
    desc.componentType         = kAudioUnitType_Output;
    desc.componentSubType      = kAudioUnitSubType_RemoteIO;
    desc.componentFlags        = 0;
    desc.componentFlagsMask    = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Bus 1 is the input (microphone) side of RemoteIO.
    // FIX: this is an element index, so type it AudioUnitElement,
    // not AudioUnitScope.
    AudioUnitElement inputBus = 1;
    UInt32 flag = 1;
    AudioUnitSetProperty(audioUnit,
                         kAudioOutputUnitProperty_EnableIO,
                         kAudioUnitScope_Input,
                         inputBus,
                         &flag,
                         sizeof(flag));

    // Apply our format on the OUTPUT scope of the input bus — the side the
    // render callback pulls from.
    // FIX: the original call omitted the AudioUnitElement argument entirely;
    // AudioUnitSetProperty takes (unit, propID, scope, element, data, size).
    audioDescription = AudioDescription24BitStereo96000;
    AudioUnitSetProperty(audioUnit,
                         kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Output,
                         inputBus,
                         &audioDescription,
                         sizeof(audioDescription));

    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc       = recordingCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)(self);
    // FIX: element argument was missing here as well.
    AudioUnitSetProperty(audioUnit,
                         kAudioOutputUnitProperty_SetInputCallback,
                         kAudioUnitScope_Global,
                         inputBus,
                         &callbackStruct,
                         sizeof(callbackStruct));

    AudioOutputUnitStart(audioUnit);
}

/// Render-input callback: pulls one buffer of microphone frames from the
/// RemoteIO unit and hands the samples on for WAV writing.
/// NOTE(review): `audioUnit` and `audioDescription` are referenced here as
/// if file-scope/global — presumably declared elsewhere in this file; a
/// static C callback cannot see Objective-C ivars directly (that is what
/// `inRefCon` carrying `self` is for). Confirm their declarations.
static OSStatus recordingCallback(void                       *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp       *inTimeStamp,
                                  UInt32                      inBusNumber,
                                  UInt32                      inNumberFrames,
                                  AudioBufferList            *ioData) {
    AudioBuffer audioBuffer;
    // FIX: was hard-coded to 1, contradicting the interleaved stereo format;
    // an interleaved buffer carries all channels in one AudioBuffer.
    audioBuffer.mNumberChannels = audioDescription.mChannelsPerFrame;
    audioBuffer.mDataByteSize   = inNumberFrames * audioDescription.mBytesPerFrame;
    audioBuffer.mData           = malloc(audioBuffer.mDataByteSize);

    // Put buffer in a AudioBufferList
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0]    = audioBuffer;

    AudioUnitRender(audioUnit,
                    ioActionFlags,
                    inTimeStamp,
                    inBusNumber,
                    inNumberFrames,
                    &bufferList);

    //I then take the samples and write them to WAV file

    // FIX: the original leaked this malloc on every callback (~100x/sec).
    // If the WAV writer keeps the buffer asynchronously, transfer ownership
    // there instead of freeing here.
    free(audioBuffer.mData);

    // FIX: the OSStatus-returning callback had no return statement.
    return noErr;
}