- //
- // QRDMicrophoneSource.m
- // QNRTCKit
- //
- // Created by lawder on 16/5/20.
- // Copyright © 2018年 Pili Engineering, Qiniu Inc. All rights reserved.
- //
- #import "QRDMicrophoneSource.h"
- #import <AudioToolbox/AudioToolbox.h>
- #import <AVFoundation/AVFoundation.h>
- const NSInteger kQNAudioCaptureSampleRate = 48000;
/// Private class extension: internal capture state for the microphone source.
@interface QRDMicrophoneSource ()
/// The RemoteIO audio unit doing the actual capture. ("componet" is a
/// historical typo preserved because the whole file references it.)
@property (nonatomic, assign) AudioComponentInstance componetInstance;
/// The audio component the unit was instantiated from.
@property (nonatomic, assign) AudioComponent component;
/// Serial queue that serializes start/stop/dispose of the audio unit.
@property (nonatomic, strong) dispatch_queue_t taskQueue;
/// YES while capture is active (set/cleared on taskQueue and in start/stop).
@property (nonatomic, assign) BOOL isRunning;
/// Capture stream format: 48 kHz, mono, 16-bit signed integer PCM (see -setupASBD).
@property (nonatomic, assign) AudioStreamBasicDescription asbd;
@end
- @implementation QRDMicrophoneSource
/// Designated initializer: builds the capture format, the RemoteIO audio
/// unit, and the audio session, then subscribes to session notifications.
- (instancetype)init {
    self = [super init];
    if (self) {
        self.isRunning = NO;
        // Serial queue guarding all audio-unit lifecycle calls.
        self.taskQueue = dispatch_queue_create("com.qiniu.rtc.audiocapture", DISPATCH_QUEUE_SERIAL);
        _allowAudioMixWithOthers = YES;
        [self setupASBD];
        [self setupAudioComponent];
        [self setupAudioSession];
        [self addObservers];
    }
    return self;
}
/// Populates the capture stream description: 48 kHz, mono, packed 16-bit
/// signed-integer native-endian linear PCM (2 bytes per frame and packet).
- (void)setupASBD {
    AudioStreamBasicDescription format = {0};
    format.mSampleRate       = kQNAudioCaptureSampleRate;
    format.mFormatID         = kAudioFormatLinearPCM;
    format.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
    format.mChannelsPerFrame = 1;
    format.mFramesPerPacket  = 1;
    format.mBitsPerChannel   = 16;
    // Derived sizes: 16 bits -> 2 bytes/frame; 1 frame/packet -> 2 bytes/packet.
    format.mBytesPerFrame    = format.mBitsPerChannel / 8 * format.mChannelsPerFrame;
    format.mBytesPerPacket   = format.mBytesPerFrame * format.mFramesPerPacket;
    _asbd = format;
}
/// Returns the capture stream format, lazily (re)building it if it has
/// never been populated (mBitsPerChannel is 0 only before -setupASBD runs).
- (AudioStreamBasicDescription)getASBD {
    if (_asbd.mBitsPerChannel == 0) {
        [self setupASBD];
    }
    return _asbd;
}
/// Creates and initializes the RemoteIO audio unit used for capture:
/// enables input on bus 1, sets the capture stream format, and installs the
/// input render callback. Bails out early if any step fails.
- (void)setupAudioComponent {
    AudioComponentDescription acd;
    acd.componentType = kAudioUnitType_Output;
    //acd.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
    acd.componentSubType = kAudioUnitSubType_RemoteIO;
    acd.componentManufacturer = kAudioUnitManufacturer_Apple;
    acd.componentFlags = 0;
    acd.componentFlagsMask = 0;

    self.component = AudioComponentFindNext(NULL, &acd);
    if (NULL == self.component) {
        // No matching audio component available; nothing to instantiate.
        return;
    }

    OSStatus status = AudioComponentInstanceNew(self.component, &_componetInstance);
    if (noErr != status) {
        //NSLog(@"AudioComponentInstanceNew error, status: %d", status);
        return;
    }

    // Enable recording on the input scope of bus 1 (the input element).
    UInt32 flagOne = 1;
    status = AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flagOne, sizeof(flagOne));
    if (noErr != status) {
        //NSLog(@"enable IO error, status: %d", status);
        return;
    }

    // Captured data is delivered on the output scope of bus 1, so the
    // desired capture format is set there.
    status = AudioUnitSetProperty(self.componetInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &_asbd, sizeof(_asbd));
    if (noErr != status) {
        //NSLog(@"set stream format error, status: %d", status);
        return;
    }

    AURenderCallbackStruct cb;
    cb.inputProcRefCon = (__bridge void *)(self);
    cb.inputProc = handleInputBuffer;
    status = AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &cb, sizeof(cb));
    if (noErr != status) {
        //NSLog(@"set input callback error, status: %d", status);
        return;
    }

    status = AudioUnitInitialize(self.componetInstance);
    if (noErr != status) {
        //NSLog(@"AudioUnitInitialize error, status: %d", status);
        return;
    }
}
/// Configures the shared AVAudioSession for 48 kHz capture: category, video
/// recording mode, activation, preferred sample rate / IO buffer duration,
/// and the bottom microphone as input data source when available.
/// NOTE: each call's BOOL return value is checked rather than the NSError
/// out-parameter — AVAudioSession may leave the error untouched on success.
- (void)setupAudioSession {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![self resetAudioSessionCategorySettings]) {
        return;
    }
    if (![session setMode:AVAudioSessionModeVideoRecording error:&sessionError]) {
        //NSLog(@"error:%ld, set session mode error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    if (![session setActive:YES error:&sessionError]) {
        //NSLog(@"error:%ld, set session active error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    if (![session setPreferredSampleRate:kQNAudioCaptureSampleRate error:&sessionError]) {
        //NSLog(@"error:%ld, setPreferredSampleRate error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    // 1024 frames per IO cycle at the capture sample rate.
    if (![session setPreferredIOBufferDuration:1024.0 / kQNAudioCaptureSampleRate error:&sessionError]) {
        //NSLog(@"error:%ld, setPreferredIOBufferDuration error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    // use bottom microphone for capture by default
    if (AVAudioSessionOrientationBottom != [session inputDataSource].orientation) {
        for (AVAudioSessionDataSourceDescription *dataSource in [session inputDataSources]) {
            if (AVAudioSessionOrientationBottom == dataSource.orientation) {
                if (![session setInputDataSource:dataSource error:&sessionError]) {
                    //NSLog(@"error:%ld, set input data source error : %@", sessionError.code, sessionError.localizedDescription);
                }
                break; // bottom microphone found; no need to keep scanning
            }
        }
    }
}
/// Subscribes to audio-session route-change and interruption notifications.
/// Balanced by the removeObserver: call in -clear.
- (void)addObservers {
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(handleRouteChange:)
                   name:AVAudioSessionRouteChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleInterruption:)
                   name:AVAudioSessionInterruptionNotification
                 object:nil];
}
/// Applies the PlayAndRecord category with speaker + Bluetooth options,
/// adding MixWithOthers when allowAudioMixWithOthers is set.
/// @return YES when the category was applied successfully.
- (BOOL)resetAudioSessionCategorySettings {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    AVAudioSessionCategoryOptions options = AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth;
    if (self.allowAudioMixWithOthers) {
        options |= AVAudioSessionCategoryOptionMixWithOthers;
    }
    // Check the BOOL return value, not the NSError pointer: the out-param may
    // be left untouched on success.
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:options error:&sessionError]) {
        //NSLog(@"error:%ld, set session category error : %@", sessionError.code, sessionError.localizedDescription);
        return NO;
    }
    return YES;
}
/// Tears down the capture pipeline: unsubscribes from notifications, then
/// (synchronously, on the task queue) stops and disposes of the audio unit.
- (void)clear {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    dispatch_sync(self.taskQueue, ^{
        if (!self.componetInstance) {
            return;
        }
        self.isRunning = NO;
        AudioOutputUnitStop(self.componetInstance);
        AudioComponentInstanceDispose(self.componetInstance);
        self.componetInstance = NULL;
        self.component = NULL;
    });
}
/// Starts capture asynchronously on the task queue; no-op if already running.
- (void)startRunning {
    //NSLog(@"call startRunning");
    if (self.isRunning) {
        return;
    }
    dispatch_async(self.taskQueue, ^{
        // Reactivate the audio session before spinning up the audio unit.
        if (![self resetAudioSession]) {
            return;
        }
        AudioOutputUnitStart(self.componetInstance);
        //NSLog(@"AudioOutputUnitStart");
        self.isRunning = YES;
    });
}
/// Stops capture; synchronous so the unit is fully stopped on return.
- (void)stopRunning {
    //NSLog(@"call stopRunning");
    if (!self.isRunning) {
        return;
    }
    dispatch_sync(self.taskQueue, ^{
        AudioOutputUnitStop(self.componetInstance);
        //NSLog(@"AudioOutputUnitStop");
        self.isRunning = NO;
    });
}
- #pragma mark - NSNotification
/// Reacts to audio route changes: most reasons trigger a session reset so
/// capture keeps following the active route. The description string feeds the
/// (currently disabled) diagnostic log.
- (void)handleRouteChange:(NSNotification *)notification {
    NSString *reasonDescription = @"";
    NSInteger changeReason = [notification.userInfo[AVAudioSessionRouteChangeReasonKey] integerValue];
    switch (changeReason) {
        case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
            [self resetAudioSession];
            reasonDescription = @"The route changed because no suitable route is now available for the specified category.";
            break;
        case AVAudioSessionRouteChangeReasonWakeFromSleep:
            [self resetAudioSession];
            reasonDescription = @"The route changed when the device woke up from sleep.";
            break;
        case AVAudioSessionRouteChangeReasonOverride:
            [self resetAudioSession];
            reasonDescription = @"The output route was overridden by the app.";
            break;
        case AVAudioSessionRouteChangeReasonCategoryChange:
            // Category changes do not require a session reset.
            reasonDescription = @"The category of the session object changed.";
            break;
        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
            [self resetAudioSession];
            reasonDescription = @"The previous audio output path is no longer available.";
            break;
        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
            [self resetAudioSession];
            reasonDescription = @"A preferred new audio output path is now available.";
            break;
        case AVAudioSessionRouteChangeReasonUnknown:
        default:
            reasonDescription = @"The reason for the change is unknown.";
            break;
    }
    //NSLog(@"handleRouteChange: %@ reason %@", [notification name], reasonDescription);
}
/// Reactivates the shared audio session and re-selects the bottom microphone
/// as the input data source when available.
/// NOTE: checks setActive:'s BOOL return value, not the NSError out-param —
/// the error may be left untouched on success.
/// @return YES when the session was activated successfully.
- (BOOL)resetAudioSession {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![session setActive:YES error:&sessionError]) {
        //NSLog(@"error:%ld, set session active error : %@", sessionError.code, sessionError.localizedDescription);
        return NO;
    }
    // use bottom microphone for capture by default
    if (AVAudioSessionOrientationBottom != [session inputDataSource].orientation) {
        for (AVAudioSessionDataSourceDescription *dataSource in [session inputDataSources]) {
            if (AVAudioSessionOrientationBottom == dataSource.orientation) {
                if (![session setInputDataSource:dataSource error:&sessionError]) {
                    //NSLog(@"error:%ld, set input data source error : %@", sessionError.code, sessionError.localizedDescription);
                }
                break; // bottom microphone found; stop scanning
            }
        }
    }
    return YES;
}
/// Pauses the audio unit when an interruption (e.g. a phone call) begins and
/// restarts it when the interruption ends with the should-resume option.
/// Note: isRunning is intentionally left untouched so the post-interruption
/// state mirrors the pre-interruption one.
- (void)handleInterruption:(NSNotification *)notification {
    NSString *reasonString;
    if ([notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
        NSInteger interruptionType = [notification.userInfo[AVAudioSessionInterruptionTypeKey] integerValue];
        if (interruptionType == AVAudioSessionInterruptionTypeBegan) {
            reasonString = @"AVAudioSessionInterruptionTypeBegan";
            if (self.isRunning) {
                // Synchronous: capture must be fully paused before returning.
                dispatch_sync(self.taskQueue, ^{
                    //NSLog(@"stopRunning");
                    AudioOutputUnitStop(self.componetInstance);
                });
            }
        }
        if (interruptionType == AVAudioSessionInterruptionTypeEnded) {
            reasonString = @"AVAudioSessionInterruptionTypeEnded";
            NSNumber *interruptionOption = notification.userInfo[AVAudioSessionInterruptionOptionKey];
            if (AVAudioSessionInterruptionOptionShouldResume == [interruptionOption integerValue]) {
                if (self.isRunning) {
                    dispatch_async(self.taskQueue, ^{
                        //NSLog(@"startRunning");
                        AudioOutputUnitStart(self.componetInstance);
                    });
                }
            }
        }
    }
    //NSLog(@"handleInterruption: %@ reason %@", [notification name], reasonString);
}
- #pragma mark - CallBack
/// Input render callback installed on the RemoteIO unit (bus 1).
/// Allocates a transient PCM buffer (2 bytes/frame: 16-bit mono, matching the
/// ASBD set in -setupASBD), renders the captured audio into it, optionally
/// zeroes it when muted, and hands it to the delegate.
static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    @autoreleasepool {
        QRDMicrophoneSource *source = (__bridge QRDMicrophoneSource *)inRefCon;
        if (!source) {
            return -1;
        }
        AudioBuffer buffer;
        buffer.mDataByteSize = inNumberFrames * 2;
        buffer.mData = malloc(buffer.mDataByteSize);
        if (!buffer.mData) {
            // Allocation failed; report out-of-memory to Core Audio.
            return kAudio_MemFullError;
        }
        buffer.mNumberChannels = 1;
        AudioBufferList bufferList;
        bufferList.mNumberBuffers = 1;
        bufferList.mBuffers[0] = buffer;
        OSStatus status = AudioUnitRender(source.componetInstance,
                                          ioActionFlags,
                                          inTimeStamp,
                                          inBusNumber,
                                          inNumberFrames,
                                          &bufferList);
        // AudioUnitRender updates the buffer list in place (it may adjust
        // mDataByteSize and can even substitute its own mData), so inspect and
        // deliver the post-render buffer, not the stale local copy.
        AudioBuffer rendered = bufferList.mBuffers[0];
        if (status != noErr || rendered.mDataByteSize == 0) {
            //NSLog(@"AudioUnitRender error, status: %d", status);
            free(buffer.mData);
            return status;
        }
        if (source.muted) {
            // Keep the capture cadence but deliver silence.
            memset(rendered.mData, 0, rendered.mDataByteSize);
        }
        if (source.delegate && [source.delegate respondsToSelector:@selector(microphoneSource:didGetAudioBuffer:)]) {
            [source.delegate microphoneSource:source didGetAudioBuffer:&rendered];
        }
        // Free the pointer we allocated (rendered.mData may differ).
        free(buffer.mData);
        return status;
    }
}
- @end