diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.h b/ios/RCTWebRTC/AudioDeviceModuleObserver.h new file mode 100644 index 000000000..a1f415909 --- /dev/null +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.h @@ -0,0 +1,20 @@ +#import <WebRTC/WebRTC.h> +#import "WebRTCModule.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface AudioDeviceModuleObserver : NSObject <RTCAudioDeviceModuleDelegate> + +- (instancetype)initWithWebRTCModule:(WebRTCModule *)module; + +// Methods to receive results from JS +- (void)resolveEngineCreatedWithResult:(NSInteger)result; +- (void)resolveWillEnableEngineWithResult:(NSInteger)result; +- (void)resolveWillStartEngineWithResult:(NSInteger)result; +- (void)resolveDidStopEngineWithResult:(NSInteger)result; +- (void)resolveDidDisableEngineWithResult:(NSInteger)result; +- (void)resolveWillReleaseEngineWithResult:(NSInteger)result; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/RCTWebRTC/AudioDeviceModuleObserver.m b/ios/RCTWebRTC/AudioDeviceModuleObserver.m new file mode 100644 index 000000000..b619a5c37 --- /dev/null +++ b/ios/RCTWebRTC/AudioDeviceModuleObserver.m @@ -0,0 +1,227 @@ +#import "AudioDeviceModuleObserver.h" +#import <React/RCTLog.h> + +NS_ASSUME_NONNULL_BEGIN + +@interface AudioDeviceModuleObserver () + +@property(weak, nonatomic) WebRTCModule *module; +@property(nonatomic, strong) dispatch_semaphore_t engineCreatedSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willEnableEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willStartEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t didStopEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t didDisableEngineSemaphore; +@property(nonatomic, strong) dispatch_semaphore_t willReleaseEngineSemaphore; + +@property(nonatomic, assign) NSInteger engineCreatedResult; +@property(nonatomic, assign) NSInteger willEnableEngineResult; +@property(nonatomic, assign) NSInteger willStartEngineResult; +@property(nonatomic, assign) NSInteger didStopEngineResult; +@property(nonatomic, assign) NSInteger didDisableEngineResult; +@property(nonatomic, assign) NSInteger willReleaseEngineResult; + +@end + +@implementation AudioDeviceModuleObserver + +- (instancetype)initWithWebRTCModule:(WebRTCModule *)module { + self = [super init]; + if (self) { + self.module = module; + _engineCreatedSemaphore = dispatch_semaphore_create(0); + _willEnableEngineSemaphore = dispatch_semaphore_create(0); + _willStartEngineSemaphore = dispatch_semaphore_create(0); + _didStopEngineSemaphore = dispatch_semaphore_create(0); + _didDisableEngineSemaphore = dispatch_semaphore_create(0); + _willReleaseEngineSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +#pragma mark - RTCAudioDeviceModuleDelegate + +- (void)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didReceiveSpeechActivityEvent:(RTCSpeechActivityEvent)speechActivityEvent { + NSString *eventType = speechActivityEvent == RTCSpeechActivityEventStarted ?
@"started" : @"ended"; + + [self.module sendEventWithName:kEventAudioDeviceModuleSpeechActivity + body:@{ + @"event" : eventType, + }]; + + RCTLog(@"[AudioDeviceModuleObserver] Speech activity event: %@", eventType); +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule didCreateEngine:(AVAudioEngine *)engine { + RCTLog(@"[AudioDeviceModuleObserver] Engine created - waiting for JS response"); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineCreated body:@{}]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.engineCreatedSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine created - JS returned: %ld", (long)self.engineCreatedResult); + return self.engineCreatedResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + willEnableEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillEnable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willEnableEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will enable - JS returned: %ld", (long)self.willEnableEngineResult); + + AVAudioSession *audioSession = [AVAudioSession sharedInstance]; + RCTLog(@"[AudioDeviceModuleObserver] Audio session category: %@", audioSession.category); + + return self.willEnableEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + willStartEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillStart + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willStartEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will start - JS returned: %ld", (long)self.willStartEngineResult); + return self.willStartEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didStopEngine:(AVAudioEngine *)engine + isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidStop + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didStopEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine did stop - JS returned: %ld", (long)self.didStopEngineResult); + return self.didStopEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + didDisableEngine:(AVAudioEngine *)engine + 
isPlayoutEnabled:(BOOL)isPlayoutEnabled + isRecordingEnabled:(BOOL)isRecordingEnabled { + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - playout: %d, recording: %d - waiting for JS response", + isPlayoutEnabled, + isRecordingEnabled); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineDidDisable + body:@{ + @"isPlayoutEnabled" : @(isPlayoutEnabled), + @"isRecordingEnabled" : @(isRecordingEnabled), + }]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.didDisableEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine did disable - JS returned: %ld", (long)self.didDisableEngineResult); + return self.didDisableEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule willReleaseEngine:(AVAudioEngine *)engine { + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - waiting for JS response"); + + [self.module sendEventWithName:kEventAudioDeviceModuleEngineWillRelease body:@{}]; + + // Wait indefinitely for JS to respond + dispatch_semaphore_wait(self.willReleaseEngineSemaphore, DISPATCH_TIME_FOREVER); + + RCTLog(@"[AudioDeviceModuleObserver] Engine will release - JS returned: %ld", (long)self.willReleaseEngineResult); + return self.willReleaseEngineResult; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + engine:(AVAudioEngine *)engine + configureInputFromSource:(nullable AVAudioNode *)source + toDestination:(AVAudioNode *)destination + withFormat:(AVAudioFormat *)format + context:(NSDictionary *)context { + RCTLog(@"[AudioDeviceModuleObserver] Configure input - format: %@", format); + return 0; +} + +- (NSInteger)audioDeviceModule:(RTCAudioDeviceModule *)audioDeviceModule + engine:(AVAudioEngine *)engine + configureOutputFromSource:(AVAudioNode *)source + toDestination:(nullable AVAudioNode *)destination + withFormat:(AVAudioFormat *)format + context:(NSDictionary *)context { + RCTLog(@"[AudioDeviceModuleObserver] Configure output - format: %@", format); + return 0; +} + +- (void)audioDeviceModuleDidUpdateDevices:(RTCAudioDeviceModule *)audioDeviceModule { + [self.module sendEventWithName:kEventAudioDeviceModuleDevicesUpdated body:@{}]; + + RCTLog(@"[AudioDeviceModuleObserver] Devices updated"); +} + +#pragma mark - Resolve methods from JS + +- (void)resolveEngineCreatedWithResult:(NSInteger)result { + self.engineCreatedResult = result; + dispatch_semaphore_signal(self.engineCreatedSemaphore); +} + +- (void)resolveWillEnableEngineWithResult:(NSInteger)result { + self.willEnableEngineResult = result; + dispatch_semaphore_signal(self.willEnableEngineSemaphore); +} + +- (void)resolveWillStartEngineWithResult:(NSInteger)result { + self.willStartEngineResult = result; + dispatch_semaphore_signal(self.willStartEngineSemaphore); +} + +- (void)resolveDidStopEngineWithResult:(NSInteger)result { + self.didStopEngineResult = result; + dispatch_semaphore_signal(self.didStopEngineSemaphore); +} + +- (void)resolveDidDisableEngineWithResult:(NSInteger)result { + self.didDisableEngineResult = result; + dispatch_semaphore_signal(self.didDisableEngineSemaphore); +} + +- (void)resolveWillReleaseEngineWithResult:(NSInteger)result { + self.willReleaseEngineResult = result; + dispatch_semaphore_signal(self.willReleaseEngineSemaphore); +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h new file mode 100644 index 000000000..32fcd47f5 --- /dev/null +++ 
b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.h @@ -0,0 +1,5 @@ +#import "WebRTCModule.h" + +@interface WebRTCModule (RTCAudioDeviceModule) + +@end diff --git a/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m new file mode 100644 index 000000000..24a0199ba --- /dev/null +++ b/ios/RCTWebRTC/WebRTCModule+RTCAudioDeviceModule.m @@ -0,0 +1,262 @@ +#import <Foundation/Foundation.h> + +#import <React/RCTBridge.h> +#import <WebRTC/WebRTC.h> + +#import "AudioDeviceModuleObserver.h" +#import "WebRTCModule.h" + +@implementation WebRTCModule (RTCAudioDeviceModule) + +#pragma mark - Recording & Playback Control + +RCT_EXPORT_METHOD(audioDeviceModuleStartPlayout + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule startPlayout]; + if (result == 0) { + resolve(nil); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to start playout: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStopPlayout + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule stopPlayout]; + if (result == 0) { + resolve(nil); + } else { + reject(@"playout_error", [NSString stringWithFormat:@"Failed to stop playout: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStartRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule startRecording]; + if (result == 0) { + resolve(nil); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to start recording: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStopRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(nil); + } else { + reject(@"recording_error", [NSString stringWithFormat:@"Failed to stop recording: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStartLocalRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule initAndStartRecording]; + if (result == 0) { + resolve(nil); + } else { + reject( + @"recording_error", [NSString stringWithFormat:@"Failed to start local recording: %ld", (long)result], nil); + } +} + +RCT_EXPORT_METHOD(audioDeviceModuleStopLocalRecording + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule stopRecording]; + if (result == 0) { + resolve(nil); + } else { + reject( + @"recording_error", [NSString stringWithFormat:@"Failed to stop local recording: %ld", (long)result], nil); + } +} + +#pragma mark - Microphone Control + +RCT_EXPORT_METHOD(audioDeviceModuleSetMicrophoneMuted + : (BOOL)muted resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setMicrophoneMuted:muted]; + if (result == 0) { + resolve(nil); + } else { + reject(@"mute_error", [NSString stringWithFormat:@"Failed to set microphone mute: %ld", (long)result], nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsMicrophoneMuted) { + return @(self.audioDeviceModule.isMicrophoneMuted); +} + +#pragma mark - Voice Processing + +RCT_EXPORT_METHOD(audioDeviceModuleSetVoiceProcessingEnabled + : (BOOL)enabled resolver + : (RCTPromiseResolveBlock)resolve rejecter + :
(RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setVoiceProcessingEnabled:enabled]; + if (result == 0) { + resolve(nil); + } else { + reject(@"voice_processing_error", + [NSString stringWithFormat:@"Failed to set voice processing: %ld", (long)result], + nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingEnabled) { + return @(self.audioDeviceModule.isVoiceProcessingEnabled); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingBypassed : (BOOL)bypassed) { + self.audioDeviceModule.voiceProcessingBypassed = bypassed; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingBypassed) { + return @(self.audioDeviceModule.isVoiceProcessingBypassed); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetVoiceProcessingAGCEnabled : (BOOL)enabled) { + self.audioDeviceModule.voiceProcessingAGCEnabled = enabled; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsVoiceProcessingAGCEnabled) { + return @(self.audioDeviceModule.isVoiceProcessingAGCEnabled); +} + +#pragma mark - Status + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsPlaying) { + return @(self.audioDeviceModule.isPlaying); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecording) { + return @(self.audioDeviceModule.isRecording); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsEngineRunning) { + return @(self.audioDeviceModule.isEngineRunning); +} + +#pragma mark - Advanced Features + +RCT_EXPORT_METHOD(audioDeviceModuleSetMuteMode + : (NSInteger)mode resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setMuteMode:(RTCAudioEngineMuteMode)mode]; + if (result == 0) { + resolve(nil); + } else { + reject(@"mute_mode_error", [NSString stringWithFormat:@"Failed to set mute mode: %ld", (long)result], nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetMuteMode) { + return @(self.audioDeviceModule.muteMode); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetAdvancedDuckingEnabled : (BOOL)enabled) { + self.audioDeviceModule.advancedDuckingEnabled = enabled; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsAdvancedDuckingEnabled) { + return @(self.audioDeviceModule.isAdvancedDuckingEnabled); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleSetDuckingLevel : (NSInteger)level) { + self.audioDeviceModule.duckingLevel = level; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetDuckingLevel) { + return @(self.audioDeviceModule.duckingLevel); +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleIsRecordingAlwaysPreparedMode) { + return @(self.audioDeviceModule.recordingAlwaysPreparedMode); +} + +RCT_EXPORT_METHOD(audioDeviceModuleSetRecordingAlwaysPreparedMode + : (BOOL)enabled resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + NSInteger result = [self.audioDeviceModule setRecordingAlwaysPreparedMode:enabled]; + if (result == 0) { + resolve(nil); + } else { + reject(@"recording_always_prepared_mode_error", + [NSString stringWithFormat:@"Failed to set recording always prepared mode: %ld", (long)result], + nil); + } +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleGetEngineAvailability) { + RTCAudioEngineAvailability availability = self.audioDeviceModule.engineAvailability; + return @{ + 
@"isInputAvailable" : @(availability.isInputAvailable), + @"isOutputAvailable" : @(availability.isOutputAvailable) + }; +} + +RCT_EXPORT_METHOD(audioDeviceModuleSetEngineAvailability + : (NSDictionary *)availabilityDict resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) { + RTCAudioEngineAvailability availability; + availability.isInputAvailable = [availabilityDict[@"isInputAvailable"] boolValue]; + availability.isOutputAvailable = [availabilityDict[@"isOutputAvailable"] boolValue]; + [self.audioDeviceModule setEngineAvailability:availability]; + NSInteger result = [self.audioDeviceModule setEngineAvailability:availability]; + if (result == 0) { + resolve(nil); + } else { + reject(@"engine_availability_error", + [NSString stringWithFormat:@"Failed to set engine availability: %ld", (long)result], + nil); + } +} + +#pragma mark - Observer Delegate Response Methods + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveEngineCreated : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveEngineCreatedWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillEnableEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillEnableEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillStartEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillStartEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidStopEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveDidStopEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveDidDisableEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveDidDisableEngineWithResult:result]; + return nil; +} + +RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(audioDeviceModuleResolveWillReleaseEngine : (NSInteger)result) { + [self.audioDeviceModuleObserver resolveWillReleaseEngineWithResult:result]; + return nil; +} + +@end diff --git a/ios/RCTWebRTC/WebRTCModule.h b/ios/RCTWebRTC/WebRTCModule.h index 4e0767743..7badb2552 100644 --- a/ios/RCTWebRTC/WebRTCModule.h +++ b/ios/RCTWebRTC/WebRTCModule.h @@ -22,6 +22,16 @@ static NSString *const kEventMediaStreamTrackEnded = @"mediaStreamTrackEnded"; static NSString *const kEventPeerConnectionOnRemoveTrack = @"peerConnectionOnRemoveTrack"; static NSString *const kEventPeerConnectionOnTrack = @"peerConnectionOnTrack"; static NSString *const kEventFrameCryptionStateChanged = @"frameCryptionStateChanged"; +static NSString *const kEventAudioDeviceModuleSpeechActivity = @"audioDeviceModuleSpeechActivity"; +static NSString *const kEventAudioDeviceModuleEngineCreated = @"audioDeviceModuleEngineCreated"; +static NSString *const kEventAudioDeviceModuleEngineWillEnable = @"audioDeviceModuleEngineWillEnable"; +static NSString *const kEventAudioDeviceModuleEngineWillStart = @"audioDeviceModuleEngineWillStart"; +static NSString *const kEventAudioDeviceModuleEngineDidStop = @"audioDeviceModuleEngineDidStop"; +static NSString *const kEventAudioDeviceModuleEngineDidDisable = @"audioDeviceModuleEngineDidDisable"; +static NSString *const kEventAudioDeviceModuleEngineWillRelease = @"audioDeviceModuleEngineWillRelease"; +static NSString *const kEventAudioDeviceModuleDevicesUpdated = @"audioDeviceModuleDevicesUpdated"; + +@class AudioDeviceModuleObserver; @interface WebRTCModule : RCTEventEmitter @@ -39,6 +49,9 @@ 
static NSString *const kEventFrameCryptionStateChanged = @"frameCryptionStateCha @property(nonatomic, strong) NSMutableDictionary *keyProviders; @property(nonatomic, strong) NSMutableDictionary *dataPacketCryptors; +@property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; +@property(nonatomic, strong) AudioDeviceModuleObserver *audioDeviceModuleObserver; + - (RTCMediaStream *)streamForReactTag:(NSString *)reactTag; @end diff --git a/ios/RCTWebRTC/WebRTCModule.m b/ios/RCTWebRTC/WebRTCModule.m index 51397a17b..ce54d8db9 100644 --- a/ios/RCTWebRTC/WebRTCModule.m +++ b/ios/RCTWebRTC/WebRTCModule.m @@ -7,6 +7,7 @@ #import #import +#import "AudioDeviceModuleObserver.h" #import "WebRTCModule+RTCPeerConnection.h" #import "WebRTCModule.h" #import "WebRTCModuleOptions.h" @@ -70,11 +71,7 @@ - (instancetype)init { RCTLogInfo(@"Using video encoder factory: %@", NSStringFromClass([encoderFactory class])); RCTLogInfo(@"Using video decoder factory: %@", NSStringFromClass([decoderFactory class])); - if (audioProcessingModule != nil) { - if (audioDevice != nil) { - NSLog(@"Both audioProcessingModule and audioDevice are provided, but only one can be used. Ignoring " - @"audioDevice."); - } + if (audioDevice == nil) { RCTLogInfo(@"Using audio processing module: %@", NSStringFromClass([audioProcessingModule class])); _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithAudioDeviceModuleType:RTCAudioDeviceModuleTypeAudioEngine @@ -97,6 +94,10 @@ - (instancetype)init { _keyProviders = [NSMutableDictionary new]; _dataPacketCryptors = [NSMutableDictionary new]; + _audioDeviceModule = _peerConnectionFactory.audioDeviceModule; + _audioDeviceModuleObserver = [[AudioDeviceModuleObserver alloc] initWithWebRTCModule:self]; + _audioDeviceModule.observer = _audioDeviceModuleObserver; + dispatch_queue_attr_t attributes = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, -1); _workerQueue = dispatch_queue_create("WebRTCModule.queue", attributes); @@ -141,7 +142,15 @@ - (dispatch_queue_t)methodQueue { kEventMediaStreamTrackEnded, kEventPeerConnectionOnRemoveTrack, kEventPeerConnectionOnTrack, - kEventFrameCryptionStateChanged + kEventFrameCryptionStateChanged, + kEventAudioDeviceModuleSpeechActivity, + kEventAudioDeviceModuleEngineCreated, + kEventAudioDeviceModuleEngineWillEnable, + kEventAudioDeviceModuleEngineWillStart, + kEventAudioDeviceModuleEngineDidStop, + kEventAudioDeviceModuleEngineDidDisable, + kEventAudioDeviceModuleEngineWillRelease, + kEventAudioDeviceModuleDevicesUpdated ]; } diff --git a/src/AudioDeviceModule.ts b/src/AudioDeviceModule.ts new file mode 100644 index 000000000..a00fdce13 --- /dev/null +++ b/src/AudioDeviceModule.ts @@ -0,0 +1,329 @@ +import { NativeModules, Platform } from 'react-native'; + +const { WebRTCModule } = NativeModules; + +export enum AudioEngineMuteMode { + Unknown = -1, + VoiceProcessing = 0, + RestartEngine = 1, + InputMixer = 2, +} + +export interface AudioEngineAvailability { + isInputAvailable: boolean; + isOutputAvailable: boolean; +} + +export const AudioEngineAvailability = { + default: { + isInputAvailable: true, + isOutputAvailable: true, + }, + none: { + isInputAvailable: false, + isOutputAvailable: false, + }, +} as const; + +/** + * Audio Device Module API for controlling audio devices and settings. + * iOS/macOS only - will throw on Android. 
+ */ +export class AudioDeviceModule { + /** + * Start audio playback + */ + static async startPlayout(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartPlayout(); + } + + /** + * Stop audio playback + */ + static async stopPlayout(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopPlayout(); + } + + /** + * Start audio recording + */ + static async startRecording(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartRecording(); + } + + /** + * Stop audio recording + */ + static async stopRecording(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopRecording(); + } + + /** + * Initialize and start local audio recording (calls initAndStartRecording) + */ + static async startLocalRecording(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStartLocalRecording(); + } + + /** + * Stop local audio recording + */ + static async stopLocalRecording(): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleStopLocalRecording(); + } + + /** + * Mute or unmute the microphone + */ + static async setMicrophoneMuted(muted: boolean): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetMicrophoneMuted(muted); + } + + /** + * Check if microphone is currently muted + */ + static isMicrophoneMuted(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsMicrophoneMuted(); + } + + /** + * Enable or disable voice processing (requires engine restart) + */ + static async setVoiceProcessingEnabled(enabled: boolean): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetVoiceProcessingEnabled(enabled); + } + + /** + * Check if voice processing is enabled + */ + static isVoiceProcessingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingEnabled(); + } + + /** + * Temporarily bypass voice processing without restarting the engine + */ + static setVoiceProcessingBypassed(bypassed: boolean): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + WebRTCModule.audioDeviceModuleSetVoiceProcessingBypassed(bypassed); + } + + /** + * Check if voice processing is currently bypassed + */ + static isVoiceProcessingBypassed(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingBypassed(); + } + + /** + * Enable or disable Automatic Gain Control (AGC) + */ + static setVoiceProcessingAGCEnabled(enabled:
boolean): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetVoiceProcessingAGCEnabled(enabled); + } + + /** + * Check if AGC is enabled + */ + static isVoiceProcessingAGCEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsVoiceProcessingAGCEnabled(); + } + + /** + * Check if audio is currently playing + */ + static isPlaying(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsPlaying(); + } + + /** + * Check if audio is currently recording + */ + static isRecording(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsRecording(); + } + + /** + * Check if the audio engine is running + */ + static isEngineRunning(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsEngineRunning(); + } + + /** + * Set the microphone mute mode + */ + static async setMuteMode(mode: AudioEngineMuteMode): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetMuteMode(mode); + } + + /** + * Get the current mute mode + */ + static getMuteMode(): AudioEngineMuteMode { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleGetMuteMode(); + } + + /** + * Enable or disable advanced audio ducking + */ + static setAdvancedDuckingEnabled(enabled: boolean): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetAdvancedDuckingEnabled(enabled); + } + + /** + * Check if advanced ducking is enabled + */ + static isAdvancedDuckingEnabled(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsAdvancedDuckingEnabled(); + } + + /** + * Set the audio ducking level (0-100) + */ + static setDuckingLevel(level: number): void { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetDuckingLevel(level); + } + + /** + * Get the current ducking level + */ + static getDuckingLevel(): number { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleGetDuckingLevel(); + } + + /** + * Check if recording always prepared mode is enabled + */ + static isRecordingAlwaysPreparedMode(): boolean { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleIsRecordingAlwaysPreparedMode(); + } + + /** + * Enable or disable recording always prepared mode + */ + static async setRecordingAlwaysPreparedMode(enabled: boolean): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return
WebRTCModule.audioDeviceModuleSetRecordingAlwaysPreparedMode(enabled); + } + + /** + * Get the current engine availability (input/output availability) + */ + static getEngineAvailability(): AudioEngineAvailability { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleGetEngineAvailability(); + } + + /** + * Set the engine availability (input/output availability) + */ + static async setEngineAvailability(availability: AudioEngineAvailability): Promise<void> { + if (Platform.OS === 'android') { + throw new Error('AudioDeviceModule is only available on iOS/macOS'); + } + + return WebRTCModule.audioDeviceModuleSetEngineAvailability(availability); + } +} diff --git a/src/AudioDeviceModuleEvents.ts b/src/AudioDeviceModuleEvents.ts new file mode 100644 index 000000000..e0997d87e --- /dev/null +++ b/src/AudioDeviceModuleEvents.ts @@ -0,0 +1,233 @@ +import { NativeEventEmitter, NativeModules, Platform } from 'react-native'; + +const { WebRTCModule } = NativeModules; + +export type SpeechActivityEvent = 'started' | 'ended'; + +export interface SpeechActivityEventData { + event: SpeechActivityEvent; +} + +export interface EngineStateEventData { + isPlayoutEnabled: boolean; + isRecordingEnabled: boolean; +} + +export type AudioDeviceModuleEventType = + | 'speechActivity' + | 'devicesUpdated'; + +export type AudioDeviceModuleEventData = + | SpeechActivityEventData + | EngineStateEventData + | Record<string, never>; // Empty object for events with no data + +export type AudioDeviceModuleEventListener = (data: AudioDeviceModuleEventData) => void; + +/** + * Handler function that must return a number (0 for success, non-zero for error) + */ +export type AudioEngineEventNoParamsHandler = () => Promise<number>; +export type AudioEngineEventHandler = (params: { + isPlayoutEnabled: boolean; + isRecordingEnabled: boolean; +}) => Promise<number>; + +/** + * Event emitter for RTCAudioDeviceModule delegate callbacks. + * iOS/macOS only. + */ +class AudioDeviceModuleEventEmitter { + private eventEmitter: NativeEventEmitter | null = null; + private engineCreatedHandler: AudioEngineEventNoParamsHandler | null = null; + private willEnableEngineHandler: AudioEngineEventHandler | null = null; + private willStartEngineHandler: AudioEngineEventHandler | null = null; + private didStopEngineHandler: AudioEngineEventHandler | null = null; + private didDisableEngineHandler: AudioEngineEventHandler | null = null; + private willReleaseEngineHandler: AudioEngineEventNoParamsHandler | null = null; + + public setupListeners() { + if (Platform.OS !== 'android' && WebRTCModule) { + this.eventEmitter = new NativeEventEmitter(WebRTCModule); + + // Setup handlers for blocking delegate methods + this.eventEmitter.addListener('audioDeviceModuleEngineCreated', async () => { + let result = 0; + + if (this.engineCreatedHandler) { + try { + result = await this.engineCreatedHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ?
error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveEngineCreated(result); + }); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineWillEnable', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.willEnableEngineHandler) { + try { + result = await this.willEnableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillEnableEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineWillStart', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.willStartEngineHandler) { + try { + result = await this.willStartEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillStartEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineDidStop', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.didStopEngineHandler) { + try { + result = await this.didStopEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveDidStopEngine(result); + }, + ); + + this.eventEmitter.addListener( + 'audioDeviceModuleEngineDidDisable', + async (params: { isPlayoutEnabled: boolean; isRecordingEnabled: boolean }) => { + const { isPlayoutEnabled, isRecordingEnabled } = params; + let result = 0; + + if (this.didDisableEngineHandler) { + try { + result = await this.didDisableEngineHandler({ isPlayoutEnabled, isRecordingEnabled }); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ? error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveDidDisableEngine(result); + }, + ); + + this.eventEmitter.addListener('audioDeviceModuleEngineWillRelease', async () => { + let result = 0; + + if (this.willReleaseEngineHandler) { + try { + result = await this.willReleaseEngineHandler(); + } catch (error) { + // If error is a number, use it as the error code, otherwise use -1 + result = typeof error === 'number' ?
error : -1; + } + } + + WebRTCModule.audioDeviceModuleResolveWillReleaseEngine(result); + }); + } + } + + /** + * Subscribe to speech activity events (started/ended) + */ + addSpeechActivityListener(listener: (data: SpeechActivityEventData) => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + + return this.eventEmitter.addListener('audioDeviceModuleSpeechActivity', listener); + } + + /** + * Subscribe to devices updated event (input/output devices changed) + */ + addDevicesUpdatedListener(listener: () => void) { + if (!this.eventEmitter) { + throw new Error('AudioDeviceModuleEvents is only available on iOS/macOS'); + } + + return this.eventEmitter.addListener('audioDeviceModuleDevicesUpdated', listener); + } + + /** + * Set the handler for the engineCreated delegate callback - it MUST resolve to 0 for success or a non-zero error code. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setEngineCreatedHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.engineCreatedHandler = handler; + } + + /** + * Set the handler for the willEnableEngine delegate callback - it MUST resolve to 0 for success or a non-zero error code. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setWillEnableEngineHandler(handler: AudioEngineEventHandler | null) { + this.willEnableEngineHandler = handler; + } + + /** + * Set the handler for the willStartEngine delegate callback - it MUST resolve to 0 for success or a non-zero error code. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setWillStartEngineHandler(handler: AudioEngineEventHandler | null) { + this.willStartEngineHandler = handler; + } + + /** + * Set the handler for the didStopEngine delegate callback - it MUST resolve to 0 for success or a non-zero error code. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setDidStopEngineHandler(handler: AudioEngineEventHandler | null) { + this.didStopEngineHandler = handler; + } + + /** + * Set the handler for the didDisableEngine delegate callback - it MUST resolve to 0 for success or a non-zero error code. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setDidDisableEngineHandler(handler: AudioEngineEventHandler | null) { + this.didDisableEngineHandler = handler; + } + + /** + * Set the handler for the willReleaseEngine delegate callback. + * This handler blocks the native audio thread until it settles; throw to cancel the engine operation. + */ + setWillReleaseEngineHandler(handler: AudioEngineEventNoParamsHandler | null) { + this.willReleaseEngineHandler = handler; + } +} + +export const audioDeviceModuleEvents = new AudioDeviceModuleEventEmitter(); diff --git a/src/index.ts b/src/index.ts index eaedc773e..dacb06d08 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,6 +8,8 @@ if (WebRTCModule === null) { }`); } +import { AudioDeviceModule, AudioEngineMuteMode, type AudioEngineAvailability } from './AudioDeviceModule'; +import { audioDeviceModuleEvents } from './AudioDeviceModuleEvents'; import { setupNativeEvents } from './EventEmitter'; import Logger from './Logger'; import mediaDevices from './MediaDevices'; @@ -68,6 +70,10 @@ export { registerGlobals, startIOSPIP, stopIOSPIP, + AudioDeviceModule, + AudioEngineMuteMode, + type AudioEngineAvailability, + audioDeviceModuleEvents, }; declare const global: any; @@ -98,4 +104,7 @@ function registerGlobals(): void { global.RTCRtpReceiver = RTCRtpReceiver; global.RTCRtpSender =
RTCRtpSender; global.RTCErrorEvent = RTCErrorEvent; + + // Ensure audioDeviceModuleEvents is initialized and event listeners are registered + audioDeviceModuleEvents.setupListeners(); }
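Example usage (a minimal sketch, not part of the diff): driving the new AudioDeviceModule API from application code. The import path assumes the package is consumed as `react-native-webrtc` via the re-exports added to src/index.ts above; substitute your fork's module name.

```typescript
import { AudioDeviceModule, AudioEngineMuteMode } from 'react-native-webrtc';

async function configureAudio(): Promise<void> {
  // Voice processing toggles restart the audio engine, so set them up front.
  await AudioDeviceModule.setVoiceProcessingEnabled(true);
  AudioDeviceModule.setVoiceProcessingAGCEnabled(true);

  // Mute via voice processing instead of tearing the engine down.
  await AudioDeviceModule.setMuteMode(AudioEngineMuteMode.VoiceProcessing);

  await AudioDeviceModule.startPlayout();
  await AudioDeviceModule.startRecording();

  // Getters are synchronous (RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD on the native side).
  console.log('engine running:', AudioDeviceModule.isEngineRunning());
  console.log('mic muted:', AudioDeviceModule.isMicrophoneMuted());
}
```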
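The engine-lifecycle handlers complete a blocking round trip: the native delegate emits an event, parks on a semaphore, and resumes with whatever integer the JS side passes back through the audioDeviceModuleResolve* methods. A sketch of that contract (resolving 0 continues the operation; a thrown numeric code cancels it); `hasMicPermission` is a hypothetical placeholder:

```typescript
import { audioDeviceModuleEvents } from 'react-native-webrtc';

// Hypothetical permission check; replace with your own implementation.
async function hasMicPermission(): Promise<boolean> {
  return true;
}

audioDeviceModuleEvents.setWillEnableEngineHandler(async ({ isPlayoutEnabled, isRecordingEnabled }) => {
  // The native audio thread is parked in willEnableEngine while this runs; keep it fast.
  if (isRecordingEnabled && !(await hasMicPermission())) {
    throw -1; // Thrown numbers are forwarded to native as the delegate's error code.
  }
  return 0; // 0 lets the engine proceed.
});
```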
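Speech-activity and device-change notifications, by contrast, are fire-and-forget events. The subscription methods return standard NativeEventEmitter subscriptions, so keep the handles for cleanup:

```typescript
import { audioDeviceModuleEvents } from 'react-native-webrtc';

const speechSub = audioDeviceModuleEvents.addSpeechActivityListener(({ event }) => {
  console.log(`speech ${event}`); // 'started' | 'ended'
});

const devicesSub = audioDeviceModuleEvents.addDevicesUpdatedListener(() => {
  console.log('audio devices changed');
});

// On teardown:
speechSub.remove();
devicesSub.remove();
```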