Skip to content

Commit 160d8b1

Browse files
committed
chore: made mixer initialization lazy
1 parent b099459 commit 160d8b1

3 files changed

Lines changed: 1 addition & 18 deletions

File tree

ios/RCTWebRTC/Utils/AudioDeviceModule/AudioDeviceModule.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ import WebRTC
181181

182182
/// Delegate that receives synchronous input graph configuration callbacks.
183183
/// Used by `ScreenShareAudioMixer` to modify the engine graph during mixing.
184-
@objc public weak var audioGraphDelegate: AudioGraphConfigurationDelegate?
184+
@objc public var audioGraphDelegate: AudioGraphConfigurationDelegate?
185185

186186
/// Cached input context from the last `configureInputFromSource` callback.
187187
/// These allow `startMixing` to configure the graph immediately when the

ios/RCTWebRTC/WebRTCModule.m

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -86,12 +86,6 @@ - (instancetype)init {
8686
RCTLogInfo(@"Using video encoder factory: %@", NSStringFromClass([encoderFactory class]));
8787
RCTLogInfo(@"Using video decoder factory: %@", NSStringFromClass([decoderFactory class]));
8888

89-
// Always create the screen share audio mixer eagerly.
90-
// It stays dormant (isMixing=false) until startMixing is called.
91-
// It will be wired as audioGraphDelegate on the ADM after factory creation.
92-
ScreenShareAudioMixer *mixer = [[ScreenShareAudioMixer alloc] init];
93-
options.screenShareAudioMixer = mixer;
94-
9589
if (audioProcessingModule != nil) {
9690
if (audioDevice != nil) {
9791
NSLog(@"Both audioProcessingModule and audioDevice are provided, but only one can be used. Ignoring audioDevice.");
@@ -122,10 +116,6 @@ - (instancetype)init {
122116
_audioDeviceModule = [[AudioDeviceModule alloc] initWithSource:_peerConnectionFactory.audioDeviceModule
123117
delegateObserver:_rtcAudioDeviceModuleObserver];
124118

125-
// Wire the mixer as the audio graph delegate so it receives
126-
// onConfigureInputFromSource callbacks to modify the engine graph.
127-
_audioDeviceModule.audioGraphDelegate = mixer;
128-
129119
_peerConnections = [NSMutableDictionary new];
130120
_localStreams = [NSMutableDictionary new];
131121
_localTracks = [NSMutableDictionary new];

ios/RCTWebRTC/WebRTCModuleOptions.h

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,6 @@
44
@class InAppScreenCapturer;
55
NS_ASSUME_NONNULL_BEGIN
66

7-
// Forward declare the Swift class — the actual import happens in the .m file.
8-
@class ScreenShareAudioMixer;
9-
107
@interface WebRTCModuleOptions : NSObject
118

129
@property(nonatomic, strong, nullable) id<RTCVideoDecoderFactory> videoDecoderFactory;
@@ -25,10 +22,6 @@ NS_ASSUME_NONNULL_BEGIN
2522
/// When YES, in-app screen capture will route .audioApp buffers to the audio mixer.
2623
@property(nonatomic, assign) BOOL includeScreenShareAudio;
2724

28-
/// The screen share audio mixer instance. Created eagerly during WebRTCModule
29-
/// init and retained for the lifetime of the module (never cleared).
30-
@property(nonatomic, strong, nullable) ScreenShareAudioMixer *screenShareAudioMixer;
31-
3225
/// Weak reference to the current in-app screen capturer, set during
3326
/// `createScreenCaptureVideoTrack` when in-app mode is used.
3427
@property(nonatomic, weak, nullable) InAppScreenCapturer *activeInAppScreenCapturer;

0 commit comments

Comments (0)