I'm trying to write an iOS app that passes the sound received from the microphone to the speaker without any changes. I've read the Apple docs and guides, and I picked the first pattern from this guide. But nothing happens – silence. As you can see, I also tried using an AUGraph (commented out) – same result (do I even need one for such a simple case?).
I've seen several examples on the internet that use render callbacks, but I don't want to use any callbacks. Is that possible?
Any suggestions?
Thanks for your attention.
The actual code:
#import "AudioController.h" #import <AudioToolBox/AudioToolBox.h> #import <AVFoundation/AVFoundation.h> #import <AudioToolBox/AudioServices.h> #define kInputBus 1 #define kOutputBus 0 @interface AudioController () { AudioComponentDescription desc; AudioComponent component; AudioUnit unit; AudioStreamBasicDescription audioFormat; double rate; //AUGraph graph; } @end @implementation AudioController - (void) setUp { AVAudioSession *sess = [AVAudioSession sharedInstance]; NSError *error = nil; rate = 44100.0; [sess setPreferredSampleRate:rate error:&error]; [sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]; [sess setActive:YES error:&error]; rate = [sess sampleRate]; if (error) { NSLog(@"%@",error); } NSLog(@"Init..."); [self createUnitDesc]; [self getComponent]; [self getAudioUnit]; [self enableIORec]; [self enableIOPb]; [self createFormat]; [self applyFormat]; OSStatus err = AudioUnitInitialize(unit); if (noErr != err) { [self showStatus:err]; } /*NewAUGraph(&graph); AUNode node; AUGraphAddNode(graph,&desc,&node); AUGraphInitialize(graph); AUGraphOpen(graph);*/ } - (void) createUnitDesc { desc.componentType = kAudioUnitType_Output; desc.componentSubType = kAudioUnitSubType_RemoteIO; desc.componentFlags = 0; desc.componentFlagsMask = 0; desc.componentManufacturer = kAudioUnitManufacturer_Apple; } - (void) getComponent { component = AudioComponentFindNext(NULL,&desc); } - (void) getAudioUnit { OSStatus res = AudioComponentInstanceNew(component,&unit); if (noErr != res) { [self showStatus:res]; } } - (void) enableIORec { UInt32 flag = 1; OSStatus err = AudioUnitSetProperty(unit,kAudioOutputUnitProperty_EnableIO,kAudioUnitScope_Input,kInputBus,&flag,sizeof(flag)); if (noErr != err) { [self showStatus:err]; } } - (void) enableIOPb { UInt32 flag = 1; OSStatus err = AudioUnitSetProperty(unit,kAudioUnitScope_Output,kOutputBus,sizeof(flag)); if (noErr != err) { [self showStatus:err]; } } - (void) createFormat { // Describe format audioFormat.mSampleRate = rate;//44100.00; audioFormat.mFormatID = kAudioFormatLinearPCM; audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; audioFormat.mFramesPerPacket = 1; audioFormat.mChannelsPerFrame = 1; audioFormat.mBitsPerChannel = 16; audioFormat.mBytesPerPacket = 2; audioFormat.mBytesPerFrame = 2; } - (void) applyFormat { OSStatus err = AudioUnitSetProperty(unit,kAudioUnitProperty_StreamFormat,&audioFormat,sizeof(audioFormat)); if (noErr != err) { [self showStatus:err]; } } - (void) start { NSLog(@"starting"); OSStatus err = AudioOutputUnitStart(unit); //AUGraphStart(graph); if (noErr != err) { [self showStatus:err]; } } - (void) end { NSLog(@"ending"); OSStatus err = AudioOutputUnitStop(unit); //AUGraphStop(graph); if (noErr != err) { [self showStatus:err]; } } - (void) showStatus:(OSStatus) st{ NSString *text = nil; switch (st) { case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break; case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break; case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break; case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break; case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break; case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break; case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break; case kAudioUnitErr_InvalidElement: text = 
@"kAudioUnitErr_InvalidElement"; break; case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break; case kAudioUnitErr_InvalidOfflineRender: text = @"kAudioUnitErr_InvalidOfflineRender"; break; case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break; case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break; case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break; case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break; case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break; case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break; case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break; case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break; case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break; case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break; case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break; default: text = @"unknown error"; } NSLog(@"TRANSLATED_ERROR = %li = %@",st,text); } - (void) dealloc { AudioUnitUninitialize(unit); [super dealloc]; } @end
Solution
As warrenm said, making a connection between the Remote IO unit's elements helped: the output element then pulls its audio directly from the input element, so no render callback is needed.
So here is the code, placed after all of the initialization is done:
AudioUnitConnection conn;
conn.destInputNumber = kOutputBus;
conn.sourceAudioUnit = unit;
conn.sourceOutputNumber = kInputBus;
err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection,
                           kAudioUnitScope_Input, kOutputBus,
                           &conn, sizeof(conn));
if (noErr != err) {
    [self showStatus:err];
}
UPDATE
For the convenience of anyone else who wants to use this solution, here is the complete code:
.h file
#import <Foundation/Foundation.h>

@interface AudioController : NSObject

- (void)setUp;
- (void)start;
- (void)end;

@end
.m file
#import "AudioController.h" #import <AudioToolBox/AudioToolBox.h> #import <AVFoundation/AVFoundation.h> #import <AudioToolBox/AudioServices.h> #define kInputBus 1 #define kOutputBus 0 @interface AudioController () { AudioComponentDescription desc; AudioComponent component; AudioUnit unit; AudioStreamBasicDescription audioFormat; double rate; } @end @implementation AudioController - (void)setUp { AVAudioSession *sess = [AVAudioSession sharedInstance]; NSError *error = nil; rate = 44100.0; [sess setPreferredSampleRate:rate error:&error]; [sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]; [sess setActive:YES error:&error]; rate = [sess sampleRate]; if (error) { NSLog(@"%@",error); } NSLog(@"Initing"); [self createUnitDesc]; [self getComponent]; [self getAudioUnit]; [self enableIORec]; [self enableIOPb]; [self createFormat]; [self applyFormat]; OSStatus err = AudioUnitInitialize(unit); if (noErr != err) { [self showStatus:err]; } AudioUnitConnection conn; conn.destInputNumber = 0; conn.sourceAudioUnit = unit; conn.sourceOutputNumber = 1; err = AudioUnitSetProperty(unit,sizeof(conn)); if (noErr != err) { [self showStatus:err]; } } - (void)createUnitDesc { desc.componentType = kAudioUnitType_Output; desc.componentSubType = kAudioUnitSubType_RemoteIO; desc.componentFlags = 0; desc.componentFlagsMask = 0; desc.componentManufacturer = kAudioUnitManufacturer_Apple; } - (void)getComponent { component = AudioComponentFindNext(NULL,&desc); } - (void)getAudioUnit { OSStatus res = AudioComponentInstanceNew(component,&unit); if (noErr != res) { [self showStatus:res]; } } - (void)enableIORec { UInt32 flag = 1; OSStatus err = AudioUnitSetProperty(unit,sizeof(flag)); if (noErr != err) { [self showStatus:err]; } } - (void)enableIOPb { UInt32 flag = 1; OSStatus err = AudioUnitSetProperty(unit,sizeof(flag)); if (noErr != err) { [self showStatus:err]; } } - (void)createFormat { // Describe format audioFormat.mSampleRate = rate; audioFormat.mFormatID = kAudioFormatLinearPCM; audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; audioFormat.mFramesPerPacket = 1; audioFormat.mChannelsPerFrame = 1; audioFormat.mBitsPerChannel = 16; audioFormat.mBytesPerPacket = 2; audioFormat.mBytesPerFrame = 2; } - (void)applyFormat { OSStatus err = AudioUnitSetProperty(unit,sizeof(audioFormat)); if (noErr != err) { [self showStatus:err]; } } - (void)start { NSLog(@"starting"); OSStatus err = AudioOutputUnitStart(unit); if (noErr != err) { [self showStatus:err]; } } - (void)end { NSLog(@"ending"); OSStatus err = AudioOutputUnitStop(unit); if (noErr != err) { [self showStatus:err]; } } - (void)showStatus:(OSStatus)st { NSString *text = nil; switch (st) { case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break; case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break; case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break; case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break; case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break; case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break; case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break; case kAudioUnitErr_InvalidElement: text = @"kAudioUnitErr_InvalidElement"; break; case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break; case kAudioUnitErr_InvalidOfflineRender: text = 
@"kAudioUnitErr_InvalidOfflineRender"; break; case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break; case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break; case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break; case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break; case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break; case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break; case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break; case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break; case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break; case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break; case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break; default: text = @"unknown error"; } NSLog(@"TRANSLATED_ERROR = %li = %@",text); } - (void)dealloc { AudioUnitUninitialize(unit); [super dealloc]; } @end