Fix: initialize the AudioContext from a user gesture, as required by the Chrome M70 Web Audio API (autoplay) policy

This commit is contained in:
WolverinDEV 2018-08-05 18:59:24 +02:00
parent e532acd82d
commit d1db590d6f
5 changed files with 101 additions and 34 deletions

View file

@ -192,6 +192,7 @@ class CodecWrapper extends BasicCodec {
this._worker = new Worker(settings.static("worker_directory", "js/workers/") + "WorkerCodec.js"); this._worker = new Worker(settings.static("worker_directory", "js/workers/") + "WorkerCodec.js");
this._worker.onmessage = event => this.onWorkerMessage(event.data); this._worker.onmessage = event => this.onWorkerMessage(event.data);
this._worker.onerror = (error: ErrorEvent) => reject("Failed to load worker (" + error.message + ")");
}); });
} }
} }

View file

@ -91,4 +91,7 @@ function main() {
*/ */
} }
app.loadedListener.push(() => main()); app.loadedListener.push(() => {
main();
$(document).one('click', event => AudioController.initializeFromGesture());
});

View file

@ -7,18 +7,61 @@ enum PlayerState {
} }
class AudioController { class AudioController {
public static userMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia; private static getUserMediaFunction() {
if(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
return (settings, success, fail) => navigator.mediaDevices.getUserMedia(settings).then(success).catch(fail);
return navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
}
public static userMedia = AudioController.getUserMediaFunction();
private static _globalContext: AudioContext; private static _globalContext: AudioContext;
private static _globalContextPromise: Promise<void>;
private static _audioInstances: AudioController[] = []; private static _audioInstances: AudioController[] = [];
private static _initialized_listener: (() => any)[] = [];
private static _globalReplayScheduler: NodeJS.Timer; private static _globalReplayScheduler: NodeJS.Timer;
private static _timeIndex: number = 0; private static _timeIndex: number = 0;
private static _audioDestinationStream: MediaStream; private static _audioDestinationStream: MediaStream;
static get globalContext() : AudioContext { static get globalContext() : AudioContext {
if(this._globalContext) return this._globalContext; if(this._globalContext && this._globalContext.state != "suspended") return this._globalContext;
this._globalContext = new AudioContext();
return this._globalContext; if(!this._globalContext)
this._globalContext = new AudioContext();
if(this._globalContext.state == "suspended") {
if(!this._globalContextPromise) {
(this._globalContextPromise = this._globalContext.resume()).then(() => {
this.fire_initialized();
}).catch(error => {
displayCriticalError("Failed to initialize global audio context! (" + error + ")", false);
});
}
this._globalContext.resume(); //We already have our listener
return undefined;
}
if(this._globalContext.state == "running") {
this.fire_initialized();
return this._globalContext;
}
return undefined;
} }
private static fire_initialized() {
while(this._initialized_listener.length > 0)
this._initialized_listener.pop_front()();
}
static on_initialized(callback: () => any) {
if(this.globalContext)
callback();
else
this._initialized_listener.push(callback);
}
static initializeFromGesture() {
AudioController.globalContext;
}
static initializeAudioController() { static initializeAudioController() {
//this._globalReplayScheduler = setInterval(() => { AudioController.invokeNextReplay(); }, 20); //Fix me //this._globalReplayScheduler = setInterval(() => { AudioController.invokeNextReplay(); }, 20); //Fix me
} }

View file

@ -128,8 +128,10 @@ class VoiceConnection {
this.voiceRecorder.on_end = this.handleVoiceEnded.bind(this); this.voiceRecorder.on_end = this.handleVoiceEnded.bind(this);
this.voiceRecorder.reinitialiseVAD(); this.voiceRecorder.reinitialiseVAD();
this.codecPool[4].initialize(2); AudioController.on_initialized(() => {
this.codecPool[5].initialize(2); this.codecPool[4].initialize(2);
this.codecPool[5].initialize(2);
});
this.send_task = setInterval(this.sendNextVoicePacket.bind(this), 20); this.send_task = setInterval(this.sendNextVoicePacket.bind(this), 20);
} }
@ -179,7 +181,7 @@ class VoiceConnection {
this.dataChannel = this.rtcPeerConnection.createDataChannel('main', dataChannelConfig); this.dataChannel = this.rtcPeerConnection.createDataChannel('main', dataChannelConfig);
this.dataChannel.onmessage = this.onDataChannelMessage.bind(this); this.dataChannel.onmessage = this.onDataChannelMessage.bind(this);
this.dataChannel.onopen = this.onDataChannelOpen.bind(this); this.dataChannel.onopen = this.onDataChannelOpen.bind(this);
this.dataChannel.binaryType = "arraybuffer"; //this.dataChannel.binaryType = "arraybuffer";
let sdpConstraints : RTCOfferOptions = {}; let sdpConstraints : RTCOfferOptions = {};
sdpConstraints.offerToReceiveAudio = 0; sdpConstraints.offerToReceiveAudio = 0;
@ -198,10 +200,12 @@ class VoiceConnection {
} }
handleControlPacket(json) { handleControlPacket(json) {
if(json["request"] === "create") { if(json["request"] === "answer") {
this.rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({type: "answer", sdp: json["sdp"]})); console.log("Set remote sdp! (%o)", json["msg"]);
this.rtcPeerConnection.setRemoteDescription(new RTCSessionDescription(json["msg"]));
} else if(json["request"] === "ice") { } else if(json["request"] === "ice") {
this.rtcPeerConnection.addIceCandidate(new RTCIceCandidate({candidate: json["candidate"],sdpMid: json["session"],sdpMLineIndex: json["line"]})); console.log("Add remote ice! (%s)", json["candidate"]);
this.rtcPeerConnection.addIceCandidate(new RTCIceCandidate({candidate: json["candidate"],sdpMid: json["session"], sdpMLineIndex: json["line"]}));
} }
} }
@ -213,9 +217,7 @@ class VoiceConnection {
this.client.serverConnection.sendData(JSON.stringify({ this.client.serverConnection.sendData(JSON.stringify({
type: 'WebRTC', type: 'WebRTC',
request: "ice", request: "ice",
candidate: event.candidate.candidate, msg: event.candidate,
line: event.candidate.sdpMLineIndex,
session: event.candidate.sdpMid
})); }));
} }
} }
@ -224,6 +226,7 @@ class VoiceConnection {
console.log("Offer created and accepted"); console.log("Offer created and accepted");
this.rtcPeerConnection.setLocalDescription(localSession); this.rtcPeerConnection.setLocalDescription(localSession);
console.log("Send offer: %o", localSession);
this.client.serverConnection.sendData(JSON.stringify({type: 'WebRTC', request: "create", session: localSession})); this.client.serverConnection.sendData(JSON.stringify({type: 'WebRTC', request: "create", session: localSession}));
} }
@ -232,6 +235,7 @@ class VoiceConnection {
} }
onDataChannelMessage(message) { onDataChannelMessage(message) {
console.log("Got message! %o", message);
if(this.client.controlBar.muteOutput) return; if(this.client.controlBar.muteOutput) return;
let bin = new Uint8Array(message.data); let bin = new Uint8Array(message.data);

View file

@ -55,25 +55,33 @@ class VoiceRecorder {
this._deviceId = settings.global("microphone_device_id", "default"); this._deviceId = settings.global("microphone_device_id", "default");
this._deviceGroup = settings.global("microphone_device_group", "default"); this._deviceGroup = settings.global("microphone_device_group", "default");
this.audioContext = AudioController.globalContext; AudioController.on_initialized(() => {
this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS); this.audioContext = AudioController.globalContext;
this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS);
this.processor.addEventListener('audioprocess', ev => { this.processor.addEventListener('audioprocess', ev => {
if(this.microphoneStream && this.vadHandler.shouldRecord(ev.inputBuffer)) if(this.microphoneStream && this.vadHandler.shouldRecord(ev.inputBuffer))
this.on_data(ev.inputBuffer, this._chunkCount++ == 0); this.on_data(ev.inputBuffer, this._chunkCount++ == 0);
else { else {
if(this._chunkCount != 0) this.on_end(); if(this._chunkCount != 0) this.on_end();
this._chunkCount = 0 this._chunkCount = 0
}
});
//Not needed but make sure we have data for the preprocessor
this.mute = this.audioContext.createGain();
this.mute.gain.setValueAtTime(0, 0);
this.mute.connect(this.audioContext.destination);
this.processor.connect(this.audioContext.destination);
if(this.vadHandler) {
this.vadHandler.initialise();
if(this.microphoneStream)
this.vadHandler.initialiseNewStream(undefined, this.microphoneStream);
} }
}); });
//Not needed but make sure we have data for the preprocessor
this.mute = this.audioContext.createGain();
this.mute.gain.setValueAtTime(0, 0);
this.processor.connect(this.audioContext.destination);
this.mute.connect(this.audioContext.destination);
this.setVADHandler(new PassThroughVAD()); this.setVADHandler(new PassThroughVAD());
} }
@ -123,8 +131,11 @@ class VoiceRecorder {
} }
this.vadHandler = handler; this.vadHandler = handler;
this.vadHandler.changeHandle(this, false); this.vadHandler.changeHandle(this, false);
this.vadHandler.initialise(); if(this.audioContext) {
this.vadHandler.initialiseNewStream(undefined, this.microphoneStream); this.vadHandler.initialise();
if(this.microphoneStream)
this.vadHandler.initialiseNewStream(undefined, this.microphoneStream);
}
} }
getVADHandler() : VoiceActivityDetector { getVADHandler() : VoiceActivityDetector {
@ -154,16 +165,21 @@ class VoiceRecorder {
this._deviceId = device; this._deviceId = device;
console.log("Attempt recording! (Device: %o | Group: %o)", device, groupId); console.log("Attempt recording! (Device: %o | Group: %o)", device, groupId);
this._recording = true; this._recording = true;
AudioController.userMedia({ console.log("Function: %o", AudioController.userMedia);
let result = AudioController.userMedia({
/*
audio: { audio: {
deviceId: device, deviceId: device
groupId: groupId //groupId: groupId
} }
*/
audio: true
}, this.on_microphone.bind(this), error => { }, this.on_microphone.bind(this), error => {
createErrorModal("Could not resolve microphone!", "Could not resolve microphone!<br>Message: " + error).open(); createErrorModal("Could not resolve microphone!", "Could not resolve microphone!<br>Message: " + error).open();
console.error("Could not get microphone!"); console.error("Could not get microphone!");
console.error(error); console.error(error);
}); });
console.log(result);
} }
stop(){ stop(){