|
/* global MessageChannel, navigator, setTimeout, URL, window */
2 | 2 |
|
/**
 * Mocks browser audio APIs for speechToSpeech testing.
 *
 * - Intercepts AudioContext.audioWorklet.addModule() to prevent blob execution
 * - Mocks AudioWorkletNode for the 'audio-recorder' processor
 * - Mocks navigator.mediaDevices.getUserMedia() to return a test audio stream
 */
export function setupMockMediaDevices() {
  const OriginalAudioContext = window.AudioContext;

  // Intercept AudioContext so audioWorklet.addModule never executes the
  // worklet code; blob: module URLs are revoked to avoid leaking object URLs.
  window.AudioContext = function (options) {
    const ctx = new OriginalAudioContext(options);

    ctx.audioWorklet.addModule = url => {
      if (url.startsWith('blob:')) {
        URL.revokeObjectURL(url);
      }
      return Promise.resolve();
    };

    return ctx;
  };

  // Keep static members and the prototype chain of the patched constructor
  // consistent with the original so instanceof checks still pass.
  Object.setPrototypeOf(window.AudioContext, OriginalAudioContext);
  window.AudioContext.prototype = OriginalAudioContext.prototype;

  // Mock AudioWorkletNode - uses GainNode as base so source.connect() works.
  // The simulated processor listens for commands on port2; after START it
  // delivers one silent buffer to the consumer listening on node.port (port1).
  window.AudioWorkletNode = function (context, name, options) {
    const node = context.createGain();
    const channel = new MessageChannel();
    let recording = false;

    node.port = channel.port1;

    channel.port2.onmessage = ({ data }) => {
      if (data.command === 'START') {
        recording = true;
        const bufferSize = options?.processorOptions?.bufferSize || 2400;
        setTimeout(() => {
          if (recording) {
            // Must post from port2 so the consumer's node.port (port1)
            // onmessage handler receives it. Posting from port1 would loop
            // the message straight back into this command handler, and the
            // consumer would never see any audio data.
            channel.port2.postMessage({ eventType: 'audio', audioData: new Float32Array(bufferSize) });
          }
        }, 100);
      } else if (data.command === 'STOP') {
        recording = false;
      }
    };

    return node;
  };

  // Mock getUserMedia with an oscillator-based test stream. Stopping any
  // track tears down the oscillator and closes the backing AudioContext so
  // tests don't leak live audio graphs.
  navigator.mediaDevices.getUserMedia = constraints => {
    const sampleRate = constraints?.audio?.sampleRate || 24000;
    const ctx = new OriginalAudioContext({ sampleRate });
    const oscillator = ctx.createOscillator();
    const destination = ctx.createMediaStreamDestination();

    oscillator.connect(destination);
    oscillator.start();

    destination.stream.getTracks().forEach(track => {
      const originalStop = track.stop.bind(track);
      track.stop = () => {
        oscillator.stop();
        ctx.close();
        originalStop();
      };
    });

    // Real getUserMedia is async; resolve so `await` at call sites works.
    return Promise.resolve(destination.stream);
  };
}
0 commit comments