/**
 * Captures microphone audio via getUserMedia and streams raw PCM frames
 * to a caller-supplied callback.
 *
 * Note: ScriptProcessorNode is deprecated in favor of AudioWorklet, but it is
 * kept here for its simplicity and broad browser support.
 */
class WebRTCAudioProcessor {
  constructor() {
    this.audioContext = null;
    this.scriptProcessor = null;
    this.source = null;
    this.stream = null;
    this.onProcessAudio = null; // Audio-processing callback supplied by the caller
  }

  /**
   * Initializes the audio graph and starts capturing the audio stream.
   * @param {Function} onProcessAudio - Callback that receives each PCM frame (Float32Array)
   * @returns {Promise<void>}
   */
  async start(onProcessAudio) {
    if (this.audioContext) {
      console.warn('Audio processor is already running.');
      return;
    }
    if (typeof onProcessAudio !== 'function') {
      throw new Error('onProcessAudio must be a function');
    }
    this.onProcessAudio = onProcessAudio;

    try {
      // Request the user's microphone stream
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
      this.stream = stream;
      this._setupAudioContext(stream);
    } catch (error) {
      console.error('Error accessing microphone:', error);
      throw error;
    }
  }

  /**
   * Sets up the AudioContext and the audio-processing node.
   * @param {MediaStream} stream - Microphone audio stream
   */
  _setupAudioContext(stream) {
    this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    this.source = this.audioContext.createMediaStreamSource(stream);

    // Create a ScriptProcessorNode to receive audio data (4096-sample buffer, mono in/out)
    this.scriptProcessor = this.audioContext.createScriptProcessor(4096, 1, 1);
    this.source.connect(this.scriptProcessor);
    this.scriptProcessor.connect(this.audioContext.destination);

    // Forward each buffer of PCM samples to the caller's callback
    this.scriptProcessor.onaudioprocess = (event) => {
      const inputBuffer = event.inputBuffer;
      const pcmData = inputBuffer.getChannelData(0); // Float32 PCM samples, channel 0
      this.onProcessAudio(pcmData);
    };
  }

  /**
   * Stops audio processing and releases all resources.
   */
  stop() {
    if (this.scriptProcessor) {
      this.scriptProcessor.onaudioprocess = null;
      this.scriptProcessor.disconnect();
      this.scriptProcessor = null;
    }
    if (this.source) {
      this.source.disconnect();
      this.source = null;
    }
    if (this.stream) {
      // Stop the microphone tracks so the browser releases the device
      this.stream.getTracks().forEach((track) => track.stop());
      this.stream = null;
    }
    if (this.audioContext) {
      this.audioContext.close().then(() => {
        this.audioContext = null;
      });
    }
    this.onProcessAudio = null;
  }
}
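
// --- Usage sketch (illustrative, not part of the class above) ---
// A minimal example of how WebRTCAudioProcessor might be driven. The Int16
// conversion and the `sendToServer` function are hypothetical assumptions;
// they stand in for whatever transport (WebSocket, data channel, etc.)
// consumes the PCM frames in your application.

function floatTo16BitPCM(float32Array) {
  // Convert Float32 samples in [-1, 1] to signed 16-bit PCM
  const int16 = new Int16Array(float32Array.length);
  for (let i = 0; i < float32Array.length; i++) {
    const s = Math.max(-1, Math.min(1, float32Array[i]));
    int16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
  }
  return int16;
}

const processor = new WebRTCAudioProcessor();

// Start capturing; each 4096-sample frame is converted and handed off
processor.start((pcmData) => {
  const frame = floatTo16BitPCM(pcmData);
  sendToServer(frame.buffer); // hypothetical transport function
}).catch((err) => {
  console.error('Failed to start audio capture:', err);
});

// Later, e.g. on a "Stop" button click:
// processor.stop();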