From d1db590d6fb02424b30165213cdfc2088dfc51b0 Mon Sep 17 00:00:00 2001
From: WolverinDEV
Date: Sun, 5 Aug 2018 18:59:24 +0200
Subject: [PATCH] Fixed AudioContext not being initialized in accordance with
 the Chrome M70 Web Audio API autoplay policy

---
 js/codec/CodecWrapper.ts    |  1 +
 js/main.ts                  |  5 +++-
 js/voice/AudioController.ts | 51 ++++++++++++++++++++++++++++++---
 js/voice/VoiceHandler.ts    | 22 +++++++++------
 js/voice/VoiceRecorder.ts   | 56 ++++++++++++++++++++++++-------------
 5 files changed, 101 insertions(+), 34 deletions(-)
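Note: Under the autoplay policy Chrome announced for M70, an AudioContext
created outside of a user gesture starts out in the "suspended" state, and
resume() only fulfills when it is called while a user gesture (such as a
click) is being handled. A minimal standalone sketch of the pattern this
patch adopts (identifier names are illustrative, not part of this codebase):

    // Create the context eagerly; under the policy it may start out "suspended".
    const audio_context = new AudioContext();

    function resume_from_gesture() {
        if(audio_context.state !== "suspended")
            return;
        // resume() returns a Promise<void>; it only fulfills when invoked
        // while a user gesture is being processed.
        audio_context.resume()
            .then(() => console.log("AudioContext state: %s", audio_context.state))
            .catch(error => console.error("Failed to resume context: %o", error));
    }

    // A click counts as a qualifying gesture; {once: true} mirrors jQuery's $(document).one(...).
    document.addEventListener("click", resume_from_gesture, {once: true});
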
(" + error + ")", false); + }); + } + this._globalContext.resume(); //We already have our listener + return undefined; + } + + if(this._globalContext.state == "running") { + this.fire_initialized(); + return this._globalContext; + } + return undefined; } + + + private static fire_initialized() { + while(this._initialized_listener.length > 0) + this._initialized_listener.pop_front()(); + } + + static on_initialized(callback: () => any) { + if(this.globalContext) + callback(); + else + this._initialized_listener.push(callback); + } + + static initializeFromGesture() { + AudioController.globalContext; + } + static initializeAudioController() { //this._globalReplayScheduler = setInterval(() => { AudioController.invokeNextReplay(); }, 20); //Fix me } diff --git a/js/voice/VoiceHandler.ts b/js/voice/VoiceHandler.ts index 9b8336f6..569f4a4b 100644 --- a/js/voice/VoiceHandler.ts +++ b/js/voice/VoiceHandler.ts @@ -128,8 +128,10 @@ class VoiceConnection { this.voiceRecorder.on_end = this.handleVoiceEnded.bind(this); this.voiceRecorder.reinitialiseVAD(); - this.codecPool[4].initialize(2); - this.codecPool[5].initialize(2); + AudioController.on_initialized(() => { + this.codecPool[4].initialize(2); + this.codecPool[5].initialize(2); + }); this.send_task = setInterval(this.sendNextVoicePacket.bind(this), 20); } @@ -179,7 +181,7 @@ class VoiceConnection { this.dataChannel = this.rtcPeerConnection.createDataChannel('main', dataChannelConfig); this.dataChannel.onmessage = this.onDataChannelMessage.bind(this); this.dataChannel.onopen = this.onDataChannelOpen.bind(this); - this.dataChannel.binaryType = "arraybuffer"; + //this.dataChannel.binaryType = "arraybuffer"; let sdpConstraints : RTCOfferOptions = {}; sdpConstraints.offerToReceiveAudio = 0; @@ -198,10 +200,12 @@ class VoiceConnection { } handleControlPacket(json) { - if(json["request"] === "create") { - this.rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({type: "answer", sdp: json["sdp"]})); + if(json["request"] === "answer") { + console.log("Set remote sdp! (%o)", json["msg"]); + this.rtcPeerConnection.setRemoteDescription(new RTCSessionDescription(json["msg"])); } else if(json["request"] === "ice") { - this.rtcPeerConnection.addIceCandidate(new RTCIceCandidate({candidate: json["candidate"],sdpMid: json["session"],sdpMLineIndex: json["line"]})); + console.log("Add remote ice! (%s)", json["candidate"]); + this.rtcPeerConnection.addIceCandidate(new RTCIceCandidate({candidate: json["candidate"],sdpMid: json["session"], sdpMLineIndex: json["line"]})); } } @@ -213,9 +217,7 @@ class VoiceConnection { this.client.serverConnection.sendData(JSON.stringify({ type: 'WebRTC', request: "ice", - candidate: event.candidate.candidate, - line: event.candidate.sdpMLineIndex, - session: event.candidate.sdpMid + msg: event.candidate, })); } } @@ -224,6 +226,7 @@ class VoiceConnection { console.log("Offer created and accepted"); this.rtcPeerConnection.setLocalDescription(localSession); + console.log("Send offer: %o", localSession); this.client.serverConnection.sendData(JSON.stringify({type: 'WebRTC', request: "create", session: localSession})); } @@ -232,6 +235,7 @@ class VoiceConnection { } onDataChannelMessage(message) { + console.log("Got message! 
%o", message); if(this.client.controlBar.muteOutput) return; let bin = new Uint8Array(message.data); diff --git a/js/voice/VoiceRecorder.ts b/js/voice/VoiceRecorder.ts index 24c53317..41ffa8e7 100644 --- a/js/voice/VoiceRecorder.ts +++ b/js/voice/VoiceRecorder.ts @@ -55,25 +55,33 @@ class VoiceRecorder { this._deviceId = settings.global("microphone_device_id", "default"); this._deviceGroup = settings.global("microphone_device_group", "default"); - this.audioContext = AudioController.globalContext; - this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS); + AudioController.on_initialized(() => { + this.audioContext = AudioController.globalContext; + this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS); - this.processor.addEventListener('audioprocess', ev => { - if(this.microphoneStream && this.vadHandler.shouldRecord(ev.inputBuffer)) - this.on_data(ev.inputBuffer, this._chunkCount++ == 0); - else { - if(this._chunkCount != 0) this.on_end(); - this._chunkCount = 0 + this.processor.addEventListener('audioprocess', ev => { + if(this.microphoneStream && this.vadHandler.shouldRecord(ev.inputBuffer)) + this.on_data(ev.inputBuffer, this._chunkCount++ == 0); + else { + if(this._chunkCount != 0) this.on_end(); + this._chunkCount = 0 + } + }); + + //Not needed but make sure we have data for the preprocessor + this.mute = this.audioContext.createGain(); + this.mute.gain.setValueAtTime(0, 0); + this.mute.connect(this.audioContext.destination); + + this.processor.connect(this.audioContext.destination); + + if(this.vadHandler) { + this.vadHandler.initialise(); + if(this.microphoneStream) + this.vadHandler.initialiseNewStream(undefined, this.microphoneStream); } }); - //Not needed but make sure we have data for the preprocessor - this.mute = this.audioContext.createGain(); - this.mute.gain.setValueAtTime(0, 0); - - this.processor.connect(this.audioContext.destination); - this.mute.connect(this.audioContext.destination); - this.setVADHandler(new PassThroughVAD()); } @@ -123,8 +131,11 @@ class VoiceRecorder { } this.vadHandler = handler; this.vadHandler.changeHandle(this, false); - this.vadHandler.initialise(); - this.vadHandler.initialiseNewStream(undefined, this.microphoneStream); + if(this.audioContext) { + this.vadHandler.initialise(); + if(this.microphoneStream) + this.vadHandler.initialiseNewStream(undefined, this.microphoneStream); + } } getVADHandler() : VoiceActivityDetector { @@ -154,16 +165,21 @@ class VoiceRecorder { this._deviceId = device; console.log("Attempt recording! (Device: %o | Group: %o)", device, groupId); this._recording = true; - AudioController.userMedia({ + console.log("Function: %o", AudioController.userMedia); + let result = AudioController.userMedia({ + /* audio: { - deviceId: device, - groupId: groupId + deviceId: device + //groupId: groupId } + */ + audio: true }, this.on_microphone.bind(this), error => { createErrorModal("Could not resolve microphone!", "Could not resolve microphone!
Message: " + error).open(); console.error("Could not get microphone!"); console.error(error); }); + console.log(result); } stop(){