Made the microphone selectable

WolverinDEV 2018-06-19 20:31:05 +02:00
parent 460f815152
commit 7fc922552a
6 changed files with 61 additions and 48 deletions

View file

@@ -75,8 +75,8 @@ abstract class BasicCodec implements Codec {
             let time = Date.now() - encodeBegin;
             if(time > 20)
                 console.error("Required time: %d", time);
-            if(time > 20)
-                chat.serverChat().appendMessage("Required decode time: " + time);
+            //if(time > 20)
+            //    chat.serverChat().appendMessage("Required decode time: " + time);
             this.on_encoded_data(result);
         }
         else console.error("[Codec][" + this.name() + "] Could not encode buffer. Result: " + result);

View file

@@ -148,10 +148,10 @@ function loadDebug() {
         "js/ui/modal/ModalBanClient.js",
         "js/ui/channel.js",
         "js/ui/client.js",
-        "js/ui/MusicClient.js",
         "js/ui/server.js",
         "js/ui/view.js",
         "js/ui/ControlBar.js",
+        "js/ui/MusicClient.js",

         //Load permissions
         "js/permission/PermissionManager.js",

View file

@@ -58,7 +58,6 @@ class StaticSettings {
     static?<T>(key: string, _default?: T) : T {
         if(this._handle) return this._handle.static<T>(key, _default);

         let result = this._staticPropsTag.find("[key='" + key + "']");
-        console.log("%d | %o", result.length, result);
         return StaticSettings.transformStO(result.length > 0 ? decodeURIComponent(result.last().attr("value")) : undefined, _default);
     }

View file

@@ -148,8 +148,9 @@ namespace Modals {
         select_microphone.change(event => {
             let deviceSelected = select_microphone.find("option:selected");
             let deviceId = deviceSelected.attr("device-id");
-            console.log("Selected microphone device: " + deviceId);
-            globalClient.voiceConnection.voiceRecorder.changeDevice(deviceId);
+            let groupId = deviceSelected.attr("device-group");
+            console.log("Selected microphone device: id: %o group: %o", deviceId, groupId);
+            globalClient.voiceConnection.voiceRecorder.changeDevice(deviceId, groupId);
         });

         //Initialise speakers
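
Note: the change handler above only reads the "device-id" and "device-group" attributes of the selected option; the code that fills the select box is not part of this diff. A minimal sketch of how it could be populated, assuming jQuery (which the handler already uses) and the standard navigator.mediaDevices.enumerateDevices() API; the function name is a placeholder:

function populate_microphones(select_microphone: JQuery) {
    //Sketch only: one option per audio input device, storing the ids in the
    //attributes that the change handler above reads back.
    navigator.mediaDevices.enumerateDevices().then(devices => {
        for(const device of devices) {
            if(device.kind != "audioinput") continue;
            $("<option>")
                .text(device.label || device.deviceId)
                .attr("device-id", device.deviceId)
                .attr("device-group", device.groupId)
                .appendTo(select_microphone);
        }
    });
}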

View file

@@ -73,7 +73,7 @@ class CodecPool {
             freeSlot = this.entries.length;
             let entry = new CodecPoolEntry();
             entry.instance = this.creator();
-            entry.instance.on_encoded_data = buffer => this.handle.sendVoicePacket(buffer, this.codecIndex);
+            entry.instance.on_encoded_data = buffer => this.handle.handleEncodedVoicePacket(buffer, this.codecIndex);
             this.entries.push(entry);
         }
         this.entries[freeSlot].owner = clientId;
@@ -119,6 +119,7 @@ class VoiceConnection {
     private vpacketId: number = 0;
     private chunkVPacketId: number = 0;
+    private send_task: number = 0;

     constructor(client) {
         this.client = client;
@@ -130,26 +131,23 @@ class VoiceConnection {
         this.codecPool[4].initialize(2);
         this.codecPool[5].initialize(2);

-        setTimeout(() => {
-            //if(Date.now() - this.last != 20)
-            //    chat.serverChat().appendError("INVALID LAST: " + (Date.now() - this.last));
-            this.last = Date.now();
-            if(this.encodedCache.length == 0){
-                //console.log("MISSING VOICE!");
-                //chat.serverChat().appendError("MISSING VOICE!");
-            } else this.sendVoicePacket(this.encodedCache[0].data, this.encodedCache[0].codec);
-            this.encodedCache.pop_front();
-        }, 20);
+        this.send_task = setInterval(this.sendNextVoicePacket.bind(this), 20);
     }

     codecSupported(type: number) : boolean {
         return this.codecPool.length > type && this.codecPool[type].supported();
     }

-    encodedCache: {data: Uint8Array, codec: number}[] = [];
-    last: number;
+    private voice_send_queue: {data: Uint8Array, codec: number}[] = [];

     handleEncodedVoicePacket(data: Uint8Array, codec: number){
-        this.encodedCache.push({data: data, codec: codec});
+        this.voice_send_queue.push({data: data, codec: codec});
+    }
+
+    private sendNextVoicePacket() {
+        let buffer = this.voice_send_queue.pop_front();
+        if(!buffer) return;
+        console.log("Sending packet!");
+        this.sendVoicePacket(buffer.data, buffer.codec);
     }

     sendVoicePacket(data: Uint8Array, codec: number) {
@@ -163,7 +161,11 @@ class VoiceConnection {
             packet[3] = (this.vpacketId >> 0) & 0xFF; //LOW (voiceID)
             packet[4] = codec; //Codec
             packet.set(data, 5);
-            this.dataChannel.send(packet);
+            try {
+                this.dataChannel.send(packet);
+            } catch (e) {
+                //TODO may handle error?
+            }
         } else {
             console.warn("Could not transfer audio (not connected)");
         }
@@ -279,12 +281,12 @@ class VoiceConnection {
         //TODO Use channel codec!
         this.codecPool[4].ownCodec(this.client.getClientId())
-            .then(encoder => encoder.encodeSamples(this.client.getClient().getAudioController().codecCache(4),data));
-        //this.client.getClient().getAudioController().play(data);
+            .then(encoder => encoder.encodeSamples(this.client.getClient().getAudioController().codecCache(4), data));
     }

     private handleVoiceEnded() {
         if(!this.voiceRecorder) return;
+        if(!this.client.connected) return;

         console.log("Voice ended");
         this.client.getClient().speaking = false;
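
With this commit the encoder no longer sends frames directly: handleEncodedVoicePacket() pushes them into voice_send_queue and the new 20 ms setInterval drains one frame per tick via sendNextVoicePacket(). pop_front() is not a standard Array method, so the project presumably ships a small prototype extension roughly equivalent to shift(); a sketch of such a helper (not the project's actual code):

interface Array<T> {
    pop_front(): T | undefined;
}

Array.prototype.pop_front = function<T>(this: T[]): T | undefined {
    //Remove and return the first element, or undefined when the array is empty
    return this.shift();
};

The fixed 20 ms pacing presumably matches the length of the encoded audio chunks, so the queue normally stays only a frame or two deep.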

View file

@@ -1,6 +1,8 @@
 /// <reference path="VoiceHandler.ts" />
 /// <reference path="../utils/modal.ts" />

+import group = log.group;
+
 abstract class VoiceActivityDetector {
     protected handle: VoiceRecorder;
@@ -20,6 +22,7 @@ abstract class VoiceActivityDetector {

 //A small class extention
 interface MediaStreamConstraints {
     deviceId?: string;
+    groupId?: string;
 }

 class VoiceRecorder {
@@ -44,11 +47,13 @@ class VoiceRecorder {
     private _chunkCount: number = 0;

     private _deviceId: string;
+    private _deviceGroup: string;

     constructor(handle: VoiceConnection) {
         this.handle = handle;

-        this._deviceId = settings.global("microphone_id", "default");
+        this._deviceId = settings.global("microphone_device_id", "default");
+        this._deviceGroup = settings.global("microphone_device_group", "default");

         this.audioContext = AudioController.globalContext;
         this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS);
@@ -66,14 +71,13 @@ class VoiceRecorder {
         this.mute = this.audioContext.createGain();
         this.mute.gain.setValueAtTime(0, 0);

-        this.processor.connect(this.mute);
+        this.processor.connect(this.audioContext.destination);
         this.mute.connect(this.audioContext.destination);

-        //this.setVADHander(new MuteVAD());
-        this.setVADHander(new PassThroughVAD());
+        this.setVADHandler(new PassThroughVAD());
     }

-    avariable() : boolean {
+    available() : boolean {
         return !!AudioController.userMedia;
     }
@@ -98,22 +102,21 @@ class VoiceRecorder {
         if(type == "ppt") {
             let keyCode: number = parseInt(settings.global("vad_ppt_key", JQuery.Key.T.toString()));
             if(!(this.getVADHandler() instanceof PushToTalkVAD))
-                this.setVADHander(new PushToTalkVAD(keyCode));
+                this.setVADHandler(new PushToTalkVAD(keyCode));
             else (this.getVADHandler() as PushToTalkVAD).key = keyCode;
         } else if(type == "pt") {
             if(!(this.getVADHandler() instanceof PassThroughVAD))
-                this.setVADHander(new PassThroughVAD());
+                this.setVADHandler(new PassThroughVAD());
         } else if(type == "vad") {
             if(!(this.getVADHandler() instanceof VoiceActivityDetectorVAD))
-                this.setVADHander(new VoiceActivityDetectorVAD());
-            let threshold = parseInt(settings.global("vad_threshold", "50"));
-            (this.getVADHandler() as VoiceActivityDetectorVAD).percentageThreshold = threshold;
+                this.setVADHandler(new VoiceActivityDetectorVAD());
+            (this.getVADHandler() as VoiceActivityDetectorVAD).percentageThreshold = settings.global("vad_threshold", 50);
         } else {
-            console.warn("Invalid VAD handler! (" + type + ")");
+            console.warn("Invalid VAD (Voice activation detector) handler! (" + type + ")");
         }
     }

-    setVADHander(handler: VoiceActivityDetector) {
+    setVADHandler(handler: VoiceActivityDetector) {
         if(this.vadHandler) {
             this.vadHandler.changeHandle(null, true);
             this.vadHandler.finalize();
@@ -130,27 +133,32 @@ class VoiceRecorder {
     update(flag: boolean) {
         if(this._recording == flag) return;
-        if(flag) this.start(this._deviceId);
+        if(flag) this.start(this._deviceId, this._deviceGroup);
         else this.stop();
     }

-    changeDevice(device: string) {
-        if(this._deviceId == device) return;
+    changeDevice(device: string, group: string) {
+        if(this._deviceId == device && this._deviceGroup == group) return;
         this._deviceId = device;
-        settings.changeGlobal("microphone_id", device);
+        this._deviceGroup = group;
+
+        settings.changeGlobal("microphone_device_id", device);
+        settings.changeServer("microphone_device_group", group);
         if(this._recording) {
             this.stop();
-            this.start(device);
+            this.start(device, group);
         }
     }

-    start(device: string){
+    start(device: string, groupId: string){
         this._deviceId = device;
-        console.log("Attempt recording!");
+        console.log("Attempt recording! (Device: %o | Group: %o)", device, groupId);
         this._recording = true;
         AudioController.userMedia({
-            audio: true,
-            deviceId: device
+            audio: {
+                deviceId: device,
+                groupId: groupId
+            }
         }, this.on_microphone.bind(this), error => {
             createErrorModal("Could not resolve microphone!", "Could not resolve microphone!<br>Message: " + error).open();
             console.error("Could not get microphone!");
@@ -164,6 +172,8 @@ class VoiceRecorder {
         if(this.microphoneStream) this.microphoneStream.disconnect();
         this.microphoneStream = undefined;

+        /*
         if(this.mediaStream) {
             if(this.mediaStream.stop)
                 this.mediaStream.stop();
@@ -172,23 +182,24 @@ class VoiceRecorder {
                 value.stop();
             });
         }
+        */
         this.mediaStream = undefined;
     }

     private on_microphone(stream: MediaStream) {
-        if(this.microphoneStream) {
+        const oldStream = this.microphoneStream;
+        if(oldStream)
             this.stop(); //Disconnect old stream
-        }

         console.log("Start recording!");
-        this.mediaStream = stream as MediaStream;
+        this.mediaStream = stream;

-        const oldStream = this.microphoneStream;
         this.microphoneStream = this.audioContext.createMediaStreamSource(stream);
         this.microphoneStream.connect(this.processor);

         chat.serverChat().appendMessage("Mic channels " + this.microphoneStream.channelCount);
         chat.serverChat().appendMessage("Mic channel mode " + this.microphoneStream.channelCountMode);
         chat.serverChat().appendMessage("Max channel count " + this.audioContext.destination.maxChannelCount);
         chat.serverChat().appendMessage("Sample rate " + this.audioContext.sampleRate);
+        chat.serverChat().appendMessage("Stream ID " + stream.id);

         this.vadHandler.initialiseNewStream(oldStream, this.microphoneStream);
     }
 }
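
start() now hands the selected ids to AudioController.userMedia as audio track constraints (audio: { deviceId, groupId }). For reference, the standard MediaTrackConstraints shape also accepts an exact form, which makes the request fail instead of silently falling back to another input when the chosen microphone is unavailable; a sketch using the plain promise-based API rather than the project's AudioController wrapper:

function open_microphone(deviceId: string, groupId: string): Promise<MediaStream> {
    //Standard constraint shape; { exact: ... } pins the request to this device
    return navigator.mediaDevices.getUserMedia({
        audio: {
            deviceId: { exact: deviceId },
            groupId: { exact: groupId }
        }
    });
}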