I am building a music app in Objective-C. I need to play several sounds at the same time, and I can do that with the DiracLE audio player.
However, the library's volume-control method - (void)setVolume:(float)volume; does not seem to work.
DiracAudioPlayer *player1, *player2;
NSURL *url1, *url2;
NSError *error = nil;

url1 = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"song1" ofType:@"mp3"]];
url2 = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"song2" ofType:@"mp3"]];

player1 = [[DiracAudioPlayer alloc] initWithContentsOfURL:url1 channels:1 error:&error];
[player1 setDelegate:self];

player2 = [[DiracAudioPlayer alloc] initWithContentsOfURL:url2 channels:1 error:&error];
[player2 setDelegate:self];

The method that sets the player volumes:
-(void)setPlayerVolume
{
    [player1 setVolume:0.5];
    [player2 setVolume:0.2];
}

setVolume: has no effect even for a single player, and no exception is thrown.
How can I fix this?
Solution
You can also achieve this by initializing two AVAudioPlayer instances, one per sound, each held in its own strong property so the first player is not released when the second is created, and controlling each one's volume with setVolume:.
...
NSString *songA = [[NSBundle mainBundle] pathForResource:@"songA" ofType:@"mp3"];
NSError *soundError = nil;

// Keep the first player in its own property so it is not deallocated
// (and silenced) when the second player is created.
self.playerA = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:songA] error:&soundError];
if (self.playerA == nil)
    NSLog(@"%@", soundError);
else
{
    [self.playerA setDelegate:self];
    [self.playerA setVolume:0.75];
    [self.playerA play];
}

NSString *songB = [[NSBundle mainBundle] pathForResource:@"songB" ofType:@"mp3"];
soundError = nil;

self.playerB = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:songB] error:&soundError];
if (self.playerB == nil)
    NSLog(@"%@", soundError);
else
{
    [self.playerB setDelegate:self];
    [self.playerB setVolume:0.25];
    [self.playerB play];
}
...
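The snippet above assumes AVFoundation is imported and that both players live in strong properties so they stay alive while playing; a minimal sketch (the class name MyViewController and the property names playerA/playerB are only illustrative):

#import <AVFoundation/AVFoundation.h>

@interface MyViewController () <AVAudioPlayerDelegate>

// Strong references keep both players alive so the two sounds
// can play simultaneously at independent volumes.
@property (nonatomic, strong) AVAudioPlayer *playerA;
@property (nonatomic, strong) AVAudioPlayer *playerB;

@end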
If you decide to stay with DiracAudioPlayer, you can add your own method that re-initializes the AudioUnit with the correct volume parameter, based on the setup code in DiracAudioPlayerBase.mm:
-(void)setupInstanceWithUrl:(NSURL*)inUrl numChannels:(int)channels volume:(float)volume
{
    mDelegate = nil;
    mInUrl = [inUrl copy];
    mIsPrepared = NO;
    mIsProcessing = NO;
    mWorkerThread = nil;
    mTotalFramesInFile = 0;
    mIsRunning = NO;
    mVolume = volume;
    mLoopCount = mNumberOfLoops = 0;
    mHasFinishedPlaying = YES;

    if (channels < 1) channels = 1;
    else if (channels > 2) channels = 2;

    mNumChannels = channels;

    mPeak = new SInt16[mNumChannels];
    mPeakOut = new SInt16[mNumChannels];

    for (long v = 0; v < mNumChannels; v++) {
        mPeakOut[v] = 0;
        mPeak[v] = -1;
    }

    OSStatus status = noErr;

    mTimeFactor = 1./kOversample;
    mPitchFactor = kOversample;

    // This is boilerplate code to set up CoreAudio on iOS in order to play audio via its default output

    // Desired audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
#if TARGET_OS_IPHONE
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
#else
    desc.componentSubType = kAudioUnitSubType_HALOutput;
#endif
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    // Get ref to component
    AudioComponent defaultOutput = AudioComponentFindNext(NULL, &desc);

    // Get matching audio unit
    status = AudioComponentInstanceNew(defaultOutput, &mAudioUnit);
    checkStatus(status);

    // this is the format we want
    AudioStreamBasicDescription audioFormat;
    mSampleRate = audioFormat.mSampleRate = 44100.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = mNumChannels;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = sizeof(short)*mNumChannels;
    audioFormat.mBytesPerFrame = sizeof(short)*mNumChannels;

    status = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, sizeof(audioFormat));
    checkStatus(status);

    // here we set up CoreAudio in order to call our PlaybackCallback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = PlaybackCallback;
    callbackStruct.inputProcRefCon = (__bridge void*) self;
    status = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &callbackStruct, sizeof(callbackStruct));
    checkStatus(status);

    // Initialize unit
    status = AudioUnitInitialize(mAudioUnit);
    checkStatus(status);

    // here we allocate our audio cache
    mAudioBuffer = AllocateAudioBufferSInt16(mNumChannels, kAudioBufferNumFrames);

    // Avoid delay when hitting play by making sure the graph is pre-initialized
    AudioOutputUnitStart(mAudioUnit);
    AudioOutputUnitStop(mAudioUnit);

    [self prepareToPlay];
}
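To call this you would also need an initializer that accepts the volume. A minimal, hypothetical sketch (this initializer is not part of the stock Dirac API, and it assumes DiracAudioPlayerBase ultimately inherits from NSObject so that [super init] is sufficient):

// Hypothetical convenience initializer (your own addition, not the stock API);
// it simply forwards the desired volume to the setup method shown above.
-(id)initWithContentsOfURL:(NSURL*)inUrl channels:(int)channels volume:(float)volume
{
    self = [super init];
    if (self) {
        [self setupInstanceWithUrl:inUrl numChannels:channels volume:volume];
    }
    return self;
}

A call site would then look something like player1 = [[DiracAudioPlayer alloc] initWithContentsOfURL:url1 channels:1 volume:0.5f];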
Double-check that your volumes are actually being set by logging them:

NSLog(@"Volume P1: %f", [player1 volume]);
NSLog(@"Volume P2: %f", [player2 volume]);

In addition, you can control the output volume of the whole mix through the same mechanism the hardware volume buttons use (this will bring up the iOS volume-change UI):
-(void)addVolumeObserver
{
    MPVolumeView *volumeView = [MPVolumeView new];
    volumeView.showsRouteButton = NO;
    volumeView.showsVolumeSlider = NO;

    AppDelegate *appDelegate = (AppDelegate *)[[UIApplication sharedApplication] delegate];
    [appDelegate.window.rootViewController.view addSubview:volumeView];

    __weak __typeof(self)weakSelf = self;
    [[volumeView subviews] enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) {
        if ([obj isKindOfClass:[UISlider class]]) {
            __strong __typeof(weakSelf)strongSelf = weakSelf;
            strongSelf->volumeSlider = obj;
            [obj addTarget:strongSelf action:@selector(handleVolumeChanged:) forControlEvents:UIControlEventValueChanged];
            *stop = YES;
        }
    }];
}

- (void)handleVolumeChanged:(id)sender
{
    NSLog(@"Volume: %f", volumeSlider.value);
}

- (void)setVolumeHandlerTo:(float)volume
{
    volumeSlider.value = volume;
}
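This assumes the MediaPlayer framework is imported and that the class declares a UISlider instance variable to hold the slider found inside the MPVolumeView. A minimal sketch of those surrounding pieces, with a hypothetical call site (viewDidLoad is just one possible place to install the observer):

#import <MediaPlayer/MediaPlayer.h>

@implementation MyViewController
{
    UISlider *volumeSlider;   // the slider dug out of the MPVolumeView above
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self addVolumeObserver];       // start observing hardware-volume changes
    [self setVolumeHandlerTo:0.5f]; // programmatically set the output volume of the mix
}

@end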