Fixed voice initializing

WolverinDEV 2018-09-26 15:30:22 +02:00
parent 90840cf08b
commit d9c858315f
7 changed files with 27 additions and 14 deletions

View file

@@ -18,7 +18,6 @@ class AVGCalculator {
     }
 }
-declare class webkitOfflineAudioContext extends OfflineAudioContext {}
 abstract class BasicCodec implements Codec {
     protected _audioContext: OfflineAudioContext;
     protected _decodeResampler: AudioResampler;
@@ -33,7 +32,7 @@ abstract class BasicCodec implements Codec {
     constructor(codecSampleRate: number) {
         this.channelCount = 1;
         this.samplesPerUnit = 960;
-        this._audioContext = new (webkitOfflineAudioContext || OfflineAudioContext)(AudioController.globalContext.destination.channelCount, 1024,AudioController.globalContext.sampleRate );
+        this._audioContext = new (window.webkitOfflineAudioContext || window.OfflineAudioContext)(AudioController.globalContext.destination.channelCount, 1024,AudioController.globalContext.sampleRate );
         this._codecSampleRate = codecSampleRate;
         this._decodeResampler = new AudioResampler(AudioController.globalContext.sampleRate);
         this._encodeResampler = new AudioResampler(codecSampleRate);
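The switch from bare identifiers to window properties matters because a bare reference to an undeclared webkitOfflineAudioContext throws a ReferenceError in browsers that lack the prefix, while a missing window property just evaluates to undefined and falls through to the unprefixed constructor. A minimal sketch of that lookup pattern (pick_offline_context is a hypothetical helper, not part of this commit):

function pick_offline_context(): typeof OfflineAudioContext {
    // Property access stays safe even when the prefixed constructor does not exist.
    const ctor = (window as any).webkitOfflineAudioContext || window.OfflineAudioContext;
    if(!ctor) throw new Error("OfflineAudioContext is not supported in this browser");
    return ctor;
}

// Same argument order as the constructor call above: channels, frame count, sample rate.
const offline_context = new (pick_offline_context())(1, 1024, 44100);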

View file

@@ -4,7 +4,8 @@ enum LogCategory {
     SERVER,
     PERMISSIONS,
     GENERAL,
-    NETWORKING
+    NETWORKING,
+    VOICE
 }
 namespace log {
@@ -22,7 +23,8 @@ namespace log {
         [LogCategory.SERVER, "Server "],
         [LogCategory.PERMISSIONS, "Permission "],
         [LogCategory.GENERAL, "General "],
-        [LogCategory.NETWORKING, "Network "]
+        [LogCategory.NETWORKING, "Network "],
+        [LogCategory.VOICE, "Voice "]
     ]);
     function logDirect(type: LogType, message: string, ...optionalParams: any[]) {
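With the new category in place, voice-related messages can be tagged so they pick up the "Voice " prefix from the map above, for example:

log.info(LogCategory.VOICE, "Initialize audio controller later!");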

View file

@@ -92,8 +92,8 @@ function main() {
 app.loadedListener.push(() => {
     try {
         main();
-        if(!AudioController.initialized) {
-            console.log("Initialize audio controller later!");
+        if(!AudioController.initialized()) {
+            log.info(LogCategory.VOICE, "Initialize audio controller later!");
             $(document).one('click', event => AudioController.initializeFromGesture());
         }
     } catch (ex) {
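This is the core of the fix: many browsers (Chrome in particular) keep a freshly created AudioContext "suspended" until a user gesture, so audio setup has to wait for the first click. A standalone sketch of the same idea, without the project's AudioController wrapper (names here are illustrative):

// Create the context eagerly, but only resume it on the first user gesture;
// otherwise autoplay policies keep it suspended and no audio is produced.
const audio_ctx = new ((window as any).webkitAudioContext || window.AudioContext)();
if(audio_ctx.state === "suspended") {
    document.addEventListener("click", () => {
        audio_ctx.resume().then(() => console.log("AudioContext is running"));
    }, { once: true });
}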

View file

@@ -172,4 +172,14 @@ function calculate_width(text: string) : number {
     let size = element.width();
     element.detach();
     return size;
 }
+declare class webkitAudioContext extends AudioContext {}
+declare class webkitOfflineAudioContext extends OfflineAudioContext {}
+interface Window {
+    readonly webkitAudioContext: typeof webkitAudioContext;
+    readonly AudioContext: typeof webkitAudioContext;
+    readonly OfflineAudioContext: typeof OfflineAudioContext;
+    readonly webkitOfflineAudioContext: typeof webkitOfflineAudioContext;
+    readonly RTCPeerConnection: typeof RTCPeerConnection;
+}
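These ambient declarations merge into the global Window type, which is what lets the window.webkit* fallbacks elsewhere in this commit compile without casts. A brief illustration of how they are consumed (assuming the declarations above are in scope):

// Pick the prefixed constructor where it exists, the standard one otherwise.
const context_ctor = window.webkitAudioContext || window.AudioContext;
const audio_context = new context_ctor();

const offline_ctor = window.webkitOfflineAudioContext || window.OfflineAudioContext;
const offline_context = new offline_ctor(2, 1024, 44100);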

View file

@@ -11,8 +11,6 @@ interface Navigator {
     webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
 }
-declare class webkitAudioContext extends AudioContext {}
 class AudioController {
     private static getUserMediaFunction() {
         if(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
@@ -32,7 +30,7 @@ class AudioController {
         if(this._globalContext && this._globalContext.state != "suspended") return this._globalContext;
         if(!this._globalContext)
-            this._globalContext = new (webkitAudioContext || AudioContext)();
+            this._globalContext = new (window.webkitAudioContext || window.AudioContext)();
         if(this._globalContext.state == "suspended") {
             if(!this._globalContextPromise) {
                 (this._globalContextPromise = this._globalContext.resume()).then(() => {
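The getter also handles the case where the context comes up "suspended": resume() returns a promise, and caching it in _globalContextPromise ensures resume() is issued only once while later callers still get notified. A reduced sketch of that caching logic (hypothetical names, not the class's actual fields):

let cached_resume: Promise<void> | undefined;

function ensure_running(ctx: AudioContext): Promise<void> {
    if(ctx.state !== "suspended") return Promise.resolve();
    // Issue resume() once; every further caller awaits the same promise.
    if(!cached_resume)
        cached_resume = ctx.resume();
    return cached_resume;
}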

View file

@@ -17,7 +17,7 @@ class AudioResampler {
             return new Promise<AudioBuffer>(resolve => resolve(buffer));
         let context;
-        context = new (webkitOfflineAudioContext || OfflineAudioContext)(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
+        context = new (window.webkitOfflineAudioContext || window.OfflineAudioContext)(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
         let source = context.createBufferSource();
         source.buffer = buffer;
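AudioResampler uses an OfflineAudioContext as the resampler: the output length is scaled by targetSampleRate / sourceSampleRate and startRendering() resolves with the converted buffer. A self-contained sketch of the same technique (resample is an illustrative helper, not the class's API):

function resample(buffer: AudioBuffer, target_rate: number): Promise<AudioBuffer> {
    if(buffer.sampleRate === target_rate)
        return Promise.resolve(buffer);
    // The rendered buffer needs length * (target rate / source rate) frames.
    const length = Math.ceil(buffer.length * target_rate / buffer.sampleRate);
    const ctx = new ((window as any).webkitOfflineAudioContext || window.OfflineAudioContext)(
        buffer.numberOfChannels, length, target_rate);
    const source = ctx.createBufferSource();
    source.buffer = buffer;
    source.connect(ctx.destination);
    source.start();
    // startRendering() resolves once the offline graph has produced the resampled audio.
    return ctx.startRendering();
}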

View file

@@ -117,7 +117,6 @@ interface RTCPeerConnection {
     createOffer(successCallback?: RTCSessionDescriptionCallback, failureCallback?: RTCPeerConnectionErrorCallback, options?: RTCOfferOptions): Promise<RTCSessionDescription>;
 }
 class VoiceConnection {
     client: TSClient;
     rtcPeerConnection: RTCPeerConnection;
@@ -150,6 +149,7 @@ class VoiceConnection {
         this.voiceRecorder.reinitialiseVAD();
         AudioController.on_initialized(() => {
+            log.info(LogCategory.VOICE, "Initializing voice handler after AudioController has been initialized!");
             this.codec_pool[4].initialize(2);
             this.codec_pool[5].initialize(2);
@@ -163,12 +163,12 @@ class VoiceConnection {
     }
     native_encoding_supported() : boolean {
-        if(!(webkitAudioContext || AudioContext).prototype.createMediaStreamDestination) return false; //Required, but not available within edge
+        if(!(window.webkitAudioContext || window.AudioContext || {prototype: {}} as typeof AudioContext).prototype.createMediaStreamDestination) return false; //Required, but not available within edge
         return true;
     }
     javascript_encoding_supported() : boolean {
-        if(!RTCPeerConnection.prototype.createDataChannel) return false;
+        if(!(window.RTCPeerConnection || {prototype: {}} as typeof RTCPeerConnection).prototype.createDataChannel) return false;
         return true;
     }
@@ -183,7 +183,11 @@ class VoiceConnection {
     }
     private setup_native() {
-        if(!this.native_encoding_supported()) return;
+        log.info(LogCategory.VOICE, "Setting up native voice stream!");
+        if(!this.native_encoding_supported()) {
+            log.warn(LogCategory.VOICE, "Native codec isnt supported!");
+            return;
+        }
         this.voiceRecorder.on_data = undefined;
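Both support checks now go through window and fall back to an empty-prototype stub, so a browser without RTCPeerConnection, or Edge's missing createMediaStreamDestination, makes the check return false instead of throwing. A minimal sketch of that defensive pattern (supports_data_channel is a hypothetical helper):

function supports_data_channel(): boolean {
    // If the constructor is absent, the stub's empty prototype turns the
    // property lookup into undefined rather than a ReferenceError.
    const ctor = (window as any).RTCPeerConnection || ({ prototype: {} } as typeof RTCPeerConnection);
    return !!ctor.prototype.createDataChannel;
}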