QRDMicrophoneSource.m

//
//  QRDMicrophoneSource.m
//  QNRTCKit
//
//  Created by lawder on 16/5/20.
//  Copyright © 2018 Pili Engineering, Qiniu Inc. All rights reserved.
//
#import "QRDMicrophoneSource.h"
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>

const NSInteger kQNAudioCaptureSampleRate = 48000;

@interface QRDMicrophoneSource ()

@property (nonatomic, assign) AudioComponentInstance componetInstance;
@property (nonatomic, assign) AudioComponent component;
@property (nonatomic, strong) dispatch_queue_t taskQueue;
@property (nonatomic, assign) BOOL isRunning;
@property (nonatomic, assign) AudioStreamBasicDescription asbd;

@end

@implementation QRDMicrophoneSource

- (instancetype)init {
    if (self = [super init]) {
        self.isRunning = NO;
        self.taskQueue = dispatch_queue_create("com.qiniu.rtc.audiocapture", NULL);
        _allowAudioMixWithOthers = YES;
        [self setupASBD];
        [self setupAudioComponent];
        [self setupAudioSession];
        [self addObservers];
    }
    return self;
}
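
// Describes the capture format: 48 kHz, mono, 16-bit signed integer,
// interleaved (packed) linear PCM, one frame per packet, i.e. 2 bytes
// per frame and per packet.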
- (void)setupASBD {
    _asbd.mSampleRate = kQNAudioCaptureSampleRate;
    _asbd.mFormatID = kAudioFormatLinearPCM;
    _asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
    _asbd.mChannelsPerFrame = 1;
    _asbd.mFramesPerPacket = 1;
    _asbd.mBitsPerChannel = 16;
    _asbd.mBytesPerFrame = _asbd.mBitsPerChannel / 8 * _asbd.mChannelsPerFrame;
    _asbd.mBytesPerPacket = _asbd.mBytesPerFrame * _asbd.mFramesPerPacket;
}
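
// Returns the current capture format, rebuilding it first if it has never
// been initialized.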
- (AudioStreamBasicDescription)getASBD {
    if (!_asbd.mBitsPerChannel) {
        [self setupASBD];
    }
    return _asbd;
}
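
// Builds a RemoteIO audio unit for capture: enables input on bus 1, applies
// the PCM format to the output scope of the input bus, and registers the
// input callback that pulls the captured samples each IO cycle.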
- (void)setupAudioComponent {
    AudioComponentDescription acd;
    acd.componentType = kAudioUnitType_Output;
    //acd.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
    acd.componentSubType = kAudioUnitSubType_RemoteIO;
    acd.componentManufacturer = kAudioUnitManufacturer_Apple;
    acd.componentFlags = 0;
    acd.componentFlagsMask = 0;

    self.component = AudioComponentFindNext(NULL, &acd);

    OSStatus status = noErr;
    status = AudioComponentInstanceNew(self.component, &_componetInstance);
    if (noErr != status) {
        //NSLog(@"AudioComponentInstanceNew error, status: %d", status);
        return;
    }

    UInt32 flagOne = 1;
    AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flagOne, sizeof(flagOne));

    AURenderCallbackStruct cb;
    cb.inputProcRefCon = (__bridge void *)(self);
    cb.inputProc = handleInputBuffer;
    AudioUnitSetProperty(self.componetInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &_asbd, sizeof(_asbd));
    AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &cb, sizeof(cb));

    status = AudioUnitInitialize(self.componetInstance);
    if (noErr != status) {
        //NSLog(@"AudioUnitInitialize error, status: %d", status);
        return;
    }
}
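
// Configures the shared AVAudioSession: play-and-record with the current
// category options, video-recording mode, a preferred 48 kHz sample rate,
// and a preferred IO buffer of 1024 frames (about 21 ms at 48 kHz).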
- (void)setupAudioSession {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![self resetAudioSessionCategorySettings]) {
        return;
    }
    [session setMode:AVAudioSessionModeVideoRecording error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, set session mode error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    [session setActive:YES error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, set session active error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    [session setPreferredSampleRate:kQNAudioCaptureSampleRate error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, setPreferredSampleRate error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    [session setPreferredIOBufferDuration:1024.0 / kQNAudioCaptureSampleRate error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, setPreferredIOBufferDuration error : %@", sessionError.code, sessionError.localizedDescription);
        return;
    }
    // use bottom microphone for capture by default
    if (AVAudioSessionOrientationBottom != [session inputDataSource].orientation) {
        for (AVAudioSessionDataSourceDescription *dataSource in [session inputDataSources]) {
            if (AVAudioSessionOrientationBottom == dataSource.orientation) {
                [session setInputDataSource:dataSource error:&sessionError];
                if (sessionError) {
                    //NSLog(@"error:%ld, set input data source error : %@", sessionError.code, sessionError.localizedDescription);
                }
            }
        }
    }
}
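
// Subscribes to route-change and interruption notifications so capture can
// react when audio routes change or another app takes over the audio session.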
- (void)addObservers {
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleRouteChange:)
                                                 name:AVAudioSessionRouteChangeNotification
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleInterruption:)
                                                 name:AVAudioSessionInterruptionNotification
                                               object:nil];
}
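
// Applies the play-and-record category; MixWithOthers is included only when
// allowAudioMixWithOthers is set, so other apps' audio can keep playing.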
- (BOOL)resetAudioSessionCategorySettings {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    AVAudioSessionCategoryOptions options = AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth;
    if (self.allowAudioMixWithOthers) {
        options = AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth;
    }
    [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:options error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, set session category error : %@", sessionError.code, sessionError.localizedDescription);
        return NO;
    }
    return YES;
}
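
// Stops and disposes of the audio unit and removes all observers. Must not
// be called from taskQueue itself: dispatch_sync onto the same serial queue
// would deadlock.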
- (void)clear {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    dispatch_sync(self.taskQueue, ^{
        if (self.componetInstance) {
            self.isRunning = NO;
            AudioOutputUnitStop(self.componetInstance);
            AudioComponentInstanceDispose(self.componetInstance);
            self.componetInstance = nil;
            self.component = nil;
        }
    });
}
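
// Starts capture asynchronously on taskQueue after re-activating the audio
// session; the input callback begins firing once AudioOutputUnitStart succeeds.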
- (void)startRunning {
    //NSLog(@"call startRunning");
    if (self.isRunning) {
        return;
    }
    dispatch_async(self.taskQueue, ^{
        if ([self resetAudioSession]) {
            AudioOutputUnitStart(self.componetInstance);
            //NSLog(@"AudioOutputUnitStart");
            self.isRunning = YES;
        }
    });
}
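
// Stops capture synchronously but keeps the audio unit alive; clear is
// responsible for final disposal.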
- (void)stopRunning {
    //NSLog(@"call stopRunning");
    if (!self.isRunning) {
        return;
    }
    dispatch_sync(self.taskQueue, ^{
        AudioOutputUnitStop(self.componetInstance);
        //NSLog(@"AudioOutputUnitStop");
        self.isRunning = NO;
    });
}

#pragma mark - NSNotification
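
// Re-activates the audio session for route changes that can leave it in a
// bad state (old device unavailable, override, wake from sleep, etc.);
// a plain category change needs no reset.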
- (void)handleRouteChange:(NSNotification *)notification {
    NSString *reasonString = @"";
    NSInteger reason = [[[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
    switch (reason) {
        case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
            [self resetAudioSession];
            reasonString = @"The route changed because no suitable route is now available for the specified category.";
            break;
        case AVAudioSessionRouteChangeReasonWakeFromSleep:
            [self resetAudioSession];
            reasonString = @"The route changed when the device woke up from sleep.";
            break;
        case AVAudioSessionRouteChangeReasonOverride:
            [self resetAudioSession];
            reasonString = @"The output route was overridden by the app.";
            break;
        case AVAudioSessionRouteChangeReasonCategoryChange:
            reasonString = @"The category of the session object changed.";
            break;
        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
            [self resetAudioSession];
            reasonString = @"The previous audio output path is no longer available.";
            break;
        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
            [self resetAudioSession];
            reasonString = @"A preferred new audio output path is now available.";
            break;
        case AVAudioSessionRouteChangeReasonUnknown:
        default:
            reasonString = @"The reason for the change is unknown.";
            break;
    }
    //NSLog(@"handleRouteChange: %@ reason %@", [notification name], reasonString);
}
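
// Re-activates the session and re-selects the bottom microphone, mirroring
// the data-source logic used during initial session setup.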
- (BOOL)resetAudioSession {
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setActive:YES error:&sessionError];
    if (sessionError) {
        //NSLog(@"error:%ld, set session active error : %@", sessionError.code, sessionError.localizedDescription);
        return NO;
    }
    // use bottom microphone for capture by default
    if (AVAudioSessionOrientationBottom != [session inputDataSource].orientation) {
        for (AVAudioSessionDataSourceDescription *dataSource in [session inputDataSources]) {
            if (AVAudioSessionOrientationBottom == dataSource.orientation) {
                [session setInputDataSource:dataSource error:&sessionError];
                if (sessionError) {
                    //NSLog(@"error:%ld, set input data source error : %@", sessionError.code, sessionError.localizedDescription);
                }
            }
        }
    }
    return YES;
}
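
// Pauses the audio unit when an interruption (e.g. a phone call) begins and
// restarts it when the interruption ends with the ShouldResume option.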
- (void)handleInterruption:(NSNotification *)notification {
    NSString *reasonString;
    if ([notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
        NSInteger reason = [[[notification userInfo] objectForKey:AVAudioSessionInterruptionTypeKey] integerValue];
        if (reason == AVAudioSessionInterruptionTypeBegan) {
            reasonString = @"AVAudioSessionInterruptionTypeBegan";
            if (self.isRunning) {
                dispatch_sync(self.taskQueue, ^{
                    //NSLog(@"stopRunning");
                    AudioOutputUnitStop(self.componetInstance);
                });
            }
        }
        if (reason == AVAudioSessionInterruptionTypeEnded) {
            reasonString = @"AVAudioSessionInterruptionTypeEnded";
            NSNumber *secondReason = [[notification userInfo] objectForKey:AVAudioSessionInterruptionOptionKey];
            switch ([secondReason integerValue]) {
                case AVAudioSessionInterruptionOptionShouldResume:
                    if (self.isRunning) {
                        dispatch_async(self.taskQueue, ^{
                            //NSLog(@"startRunning");
                            AudioOutputUnitStart(self.componetInstance);
                        });
                    }
                    break;
                default:
                    break;
            }
        }
    }
    //NSLog(@"handleInterruption: %@ reason %@", [notification name], reasonString);
}

#pragma mark - CallBack
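
// Input render callback, invoked once per IO cycle on the audio unit's
// real-time thread. It allocates a 16-bit mono buffer, pulls the captured
// samples with AudioUnitRender, zero-fills them when the source is muted,
// and hands the buffer to the delegate before freeing it; the delegate must
// copy the data if it needs to keep it past the callback.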
static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    @autoreleasepool {
        QRDMicrophoneSource *source = (__bridge QRDMicrophoneSource *)inRefCon;
        if (!source) {
            return -1;
        }
        AudioBuffer buffer;
        buffer.mDataByteSize = inNumberFrames * 2; // 16-bit mono: 2 bytes per frame
        buffer.mData = malloc(buffer.mDataByteSize);
        buffer.mNumberChannels = 1;

        AudioBufferList bufferList;
        bufferList.mNumberBuffers = 1;
        bufferList.mBuffers[0] = buffer;
        OSStatus status = AudioUnitRender(source.componetInstance,
                                          ioActionFlags,
                                          inTimeStamp,
                                          inBusNumber,
                                          inNumberFrames,
                                          &bufferList);
        // AudioUnitRender reports the rendered byte count in the buffer list,
        // not in our local copy, so pick it up before validating.
        buffer.mDataByteSize = bufferList.mBuffers[0].mDataByteSize;
        if (status || buffer.mDataByteSize <= 0) {
            //NSLog(@"AudioUnitRender error, status: %d", status);
            free(buffer.mData);
            return status;
        }
        if (source.muted) {
            memset(buffer.mData, 0, buffer.mDataByteSize);
        }
        if (source.delegate && [source.delegate respondsToSelector:@selector(microphoneSource:didGetAudioBuffer:)]) {
            [source.delegate microphoneSource:source didGetAudioBuffer:&buffer];
        }
        free(buffer.mData);
        return status;
    }
}

@end