Made the microphone selectable

canary
WolverinDEV 2018-06-19 20:31:05 +02:00
parent 460f815152
commit 7fc922552a
6 changed files with 61 additions and 48 deletions

View File

@ -75,8 +75,8 @@ abstract class BasicCodec implements Codec {
     let time = Date.now() - encodeBegin;
     if(time > 20)
         console.error("Required time: %d", time);
-    if(time > 20)
-        chat.serverChat().appendMessage("Required decode time: " + time);
+    //if(time > 20)
+    // chat.serverChat().appendMessage("Required decode time: " + time);
     this.on_encoded_data(result);
 }
 else console.error("[Codec][" + this.name() + "] Could not encode buffer. Result: " + result);
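
The 20 ms threshold above corresponds to one voice frame: the send loop introduced later in this commit also ticks every 20 ms, so an encode that takes longer than that cannot keep up in real time. A minimal sketch of such a budget check using the higher-resolution performance.now() instead of Date.now() (the function and constant names are illustrative, not part of the commit):

    const FRAME_BUDGET_MS = 20; // one voice frame, matching the 20 ms send interval

    function encodeWithBudget(encode: () => Uint8Array): Uint8Array {
        const begin = performance.now();            // sub-millisecond timer
        const result = encode();                    // run the actual codec work
        const elapsed = performance.now() - begin;
        if(elapsed > FRAME_BUDGET_MS)
            console.warn("Encoding took %d ms, budget is %d ms", Math.round(elapsed), FRAME_BUDGET_MS);
        return result;
    }
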

View File

@ -148,10 +148,10 @@ function loadDebug() {
"js/ui/modal/ModalBanClient.js",
"js/ui/channel.js",
"js/ui/client.js",
"js/ui/MusicClient.js",
"js/ui/server.js",
"js/ui/view.js",
"js/ui/ControlBar.js",
"js/ui/MusicClient.js",
//Load permissions
"js/permission/PermissionManager.js",

View File

@ -58,7 +58,6 @@ class StaticSettings {
 static?<T>(key: string, _default?: T) : T {
     if(this._handle) return this._handle.static<T>(key, _default);
     let result = this._staticPropsTag.find("[key='" + key + "']");
-    console.log("%d | %o", result.length, result);
     return StaticSettings.transformStO(result.length > 0 ? decodeURIComponent(result.last().attr("value")) : undefined, _default);
 }
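
For context: static() resolves its value from DOM elements that carry a key attribute and a URI-encoded value attribute, which is what the removed console.log was inspecting. A minimal jQuery sketch of that lookup pattern (the #properties container and tag layout are assumptions for illustration, not taken from this commit):

    // Assumed markup rendered into the page by the server:
    //   <div id="properties" style="display: none">
    //       <x-property key="example_key" value="example%20value"></x-property>
    //   </div>
    const propsTag = $("#properties");

    function readStaticProperty(key: string, fallback?: string): string | undefined {
        const entry = propsTag.find("[key='" + key + "']");
        if(entry.length == 0) return fallback;
        const raw = entry.last().attr("value");
        return raw !== undefined ? decodeURIComponent(raw) : fallback; // values are stored URI-encoded
    }
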

View File

@ -148,8 +148,9 @@ namespace Modals {
 select_microphone.change(event => {
     let deviceSelected = select_microphone.find("option:selected");
     let deviceId = deviceSelected.attr("device-id");
-    console.log("Selected microphone device: " + deviceId);
-    globalClient.voiceConnection.voiceRecorder.changeDevice(deviceId);
+    let groupId = deviceSelected.attr("device-group");
+    console.log("Selected microphone device: id: %o group: %o", deviceId, groupId);
+    globalClient.voiceConnection.voiceRecorder.changeDevice(deviceId, groupId);
 });
 //Initialise speakers
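
The change handler above expects every <option> to carry device-id and device-group attributes. A minimal sketch of how that list can be filled from the standard MediaDevices API (select_microphone is the select element from this modal; the rest is illustrative):

    navigator.mediaDevices.enumerateDevices().then(devices => {
        for(const device of devices) {
            if(device.kind != "audioinput") continue;       // microphones only
            const option = $(document.createElement("option"));
            option.attr("device-id", device.deviceId);
            option.attr("device-group", device.groupId);
            // labels are only populated once the user has granted microphone access
            option.text(device.label || device.deviceId);
            select_microphone.append(option);
        }
    });
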

View File

@ -73,7 +73,7 @@ class CodecPool {
         freeSlot = this.entries.length;
         let entry = new CodecPoolEntry();
         entry.instance = this.creator();
-        entry.instance.on_encoded_data = buffer => this.handle.sendVoicePacket(buffer, this.codecIndex);
+        entry.instance.on_encoded_data = buffer => this.handle.handleEncodedVoicePacket(buffer, this.codecIndex);
         this.entries.push(entry);
     }
     this.entries[freeSlot].owner = clientId;
@ -119,6 +119,7 @@ class VoiceConnection {
     private vpacketId: number = 0;
     private chunkVPacketId: number = 0;
+    private send_task: number = 0;
     constructor(client) {
         this.client = client;
@ -130,26 +131,23 @@ class VoiceConnection {
         this.codecPool[4].initialize(2);
         this.codecPool[5].initialize(2);
-        setTimeout(() => {
-            //if(Date.now() - this.last != 20)
-            // chat.serverChat().appendError("INVALID LAST: " + (Date.now() - this.last));
-            this.last = Date.now();
-            if(this.encodedCache.length == 0){
-                //console.log("MISSING VOICE!");
-                //chat.serverChat().appendError("MISSING VOICE!");
-            } else this.sendVoicePacket(this.encodedCache[0].data, this.encodedCache[0].codec);
-            this.encodedCache.pop_front();
-        }, 20);
+        this.send_task = setInterval(this.sendNextVoicePacket.bind(this), 20);
     }
     codecSupported(type: number) : boolean {
         return this.codecPool.length > type && this.codecPool[type].supported();
     }
-    encodedCache: {data: Uint8Array, codec: number}[] = [];
-    last: number;
+    private voice_send_queue: {data: Uint8Array, codec: number}[] = [];
     handleEncodedVoicePacket(data: Uint8Array, codec: number){
-        this.encodedCache.push({data: data, codec: codec});
+        this.voice_send_queue.push({data: data, codec: codec});
     }
+    private sendNextVoicePacket() {
+        let buffer = this.voice_send_queue.pop_front();
+        if(!buffer) return;
+        console.log("Sending packet!");
+        this.sendVoicePacket(buffer.data, buffer.codec);
+    }
     sendVoicePacket(data: Uint8Array, codec: number) {
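
The rewrite above replaces the one-shot setTimeout with a fixed send loop: encoded frames are appended to voice_send_queue and the setInterval task started in the constructor drains one frame every 20 ms. pop_front() is a small Array extension defined elsewhere in the project; the same pattern in plain TypeScript, with illustrative names, looks roughly like this:

    type EncodedFrame = { data: Uint8Array, codec: number };

    class VoiceSendLoop {
        private queue: EncodedFrame[] = [];
        private task: number;

        constructor(private send: (frame: EncodedFrame) => void) {
            this.task = setInterval(() => this.tick(), 20); // one frame per 20 ms tick
        }

        enqueue(data: Uint8Array, codec: number) {
            this.queue.push({data: data, codec: codec});
        }

        private tick() {
            const frame = this.queue.shift();               // FIFO: oldest frame first
            if(frame) this.send(frame);
        }

        stop() { clearInterval(this.task); }
    }
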
@ -163,7 +161,11 @@ class VoiceConnection {
             packet[3] = (this.vpacketId >> 0) & 0xFF; //LOW (voiceID)
             packet[4] = codec; //Codec
             packet.set(data, 5);
-            this.dataChannel.send(packet);
+            try {
+                this.dataChannel.send(packet);
+            } catch (e) {
+                //TODO may handle error?
+            }
         } else {
             console.warn("Could not transfer audio (not connected)");
         }
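
The try/catch above swallows the exception RTCDataChannel.send() throws when the channel is no longer open. An equivalent guard is to check the standard readyState property first; a small sketch (the helper is illustrative, not part of this commit):

    function trySendVoicePacket(channel: RTCDataChannel, packet: Uint8Array): boolean {
        if(channel.readyState !== "open") {
            console.warn("Dropping voice packet, data channel state: " + channel.readyState);
            return false;
        }
        try {
            channel.send(packet);                           // may still throw on a closing channel
            return true;
        } catch(error) {
            console.warn("Failed to send voice packet", error);
            return false;
        }
    }
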
@ -279,12 +281,12 @@ class VoiceConnection {
         //TODO Use channel codec!
         this.codecPool[4].ownCodec(this.client.getClientId())
-            .then(encoder => encoder.encodeSamples(this.client.getClient().getAudioController().codecCache(4),data));
             //this.client.getClient().getAudioController().play(data);
+            .then(encoder => encoder.encodeSamples(this.client.getClient().getAudioController().codecCache(4), data));
     }
     private handleVoiceEnded() {
         if(!this.voiceRecorder) return;
         if(!this.client.connected) return;
         console.log("Voice ended");
         this.client.getClient().speaking = false;

View File

@ -1,6 +1,8 @@
/// <reference path="VoiceHandler.ts" />
/// <reference path="../utils/modal.ts" />
import group = log.group;
abstract class VoiceActivityDetector {
protected handle: VoiceRecorder;
@ -20,6 +22,7 @@ abstract class VoiceActivityDetector {
 //A small class extention
 interface MediaStreamConstraints {
     deviceId?: string;
+    groupId?: string;
 }
 class VoiceRecorder {
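
The interface extension above adds deviceId/groupId directly to MediaStreamConstraints so the constraint object built in start() type-checks. In the W3C shape (and current lib.dom.d.ts) these fields belong to the track constraints nested under audio; for comparison, a spec-shaped constraint object would look like this (selectedDeviceId/selectedGroupId are illustrative inputs):

    declare const selectedDeviceId: string;     // illustrative inputs, assumed in scope
    declare const selectedGroupId: string;

    const constraints: MediaStreamConstraints = {
        audio: {
            deviceId: { exact: selectedDeviceId },  // ConstrainDOMString: plain string or { exact / ideal }
            groupId: { exact: selectedGroupId }
        }
    };
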
@ -44,11 +47,13 @@ class VoiceRecorder {
     private _chunkCount: number = 0;
     private _deviceId: string;
+    private _deviceGroup: string;
     constructor(handle: VoiceConnection) {
         this.handle = handle;
-        this._deviceId = settings.global("microphone_id", "default");
+        this._deviceId = settings.global("microphone_device_id", "default");
+        this._deviceGroup = settings.global("microphone_device_group", "default");
         this.audioContext = AudioController.globalContext;
         this.processor = this.audioContext.createScriptProcessor(VoiceRecorder.BUFFER_SIZE, VoiceRecorder.CHANNELS, VoiceRecorder.CHANNELS);
@ -66,14 +71,13 @@ class VoiceRecorder {
         this.mute = this.audioContext.createGain();
         this.mute.gain.setValueAtTime(0, 0);
         this.processor.connect(this.mute);
-        this.processor.connect(this.audioContext.destination);
+        this.mute.connect(this.audioContext.destination);
-        //this.setVADHander(new MuteVAD());
-        this.setVADHander(new PassThroughVAD());
+        this.setVADHandler(new PassThroughVAD());
     }
-    avariable() : boolean {
+    available() : boolean {
         return !!AudioController.userMedia;
     }
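
The rewiring above sends the ScriptProcessor's output through the zero-gain node instead of straight into the speakers: in most browsers the processor only fires audioprocess events while it is connected into the output graph, and the GainNode at 0 keeps the microphone from being audibly played back. A standalone sketch of that graph (names illustrative):

    const context = new AudioContext();
    const processor = context.createScriptProcessor(4096, 1, 1);   // bufferSize, input/output channels
    const mute = context.createGain();
    mute.gain.setValueAtTime(0, 0);                                 // silence the monitoring path

    processor.addEventListener("audioprocess", event => {
        const samples = event.inputBuffer.getChannelData(0);
        // ... hand the samples to the encoder / voice activity detection ...
    });

    // microphone source -> processor -> mute (gain 0) -> destination
    processor.connect(mute);
    mute.connect(context.destination);
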
@ -98,22 +102,21 @@ class VoiceRecorder {
if(type == "ppt") {
let keyCode: number = parseInt(settings.global("vad_ppt_key", JQuery.Key.T.toString()));
if(!(this.getVADHandler() instanceof PushToTalkVAD))
this.setVADHander(new PushToTalkVAD(keyCode));
this.setVADHandler(new PushToTalkVAD(keyCode));
else (this.getVADHandler() as PushToTalkVAD).key = keyCode;
} else if(type == "pt") {
if(!(this.getVADHandler() instanceof PassThroughVAD))
this.setVADHander(new PassThroughVAD());
this.setVADHandler(new PassThroughVAD());
} else if(type == "vad") {
if(!(this.getVADHandler() instanceof VoiceActivityDetectorVAD))
this.setVADHander(new VoiceActivityDetectorVAD());
let threshold = parseInt(settings.global("vad_threshold", "50"));
(this.getVADHandler() as VoiceActivityDetectorVAD).percentageThreshold = threshold;
this.setVADHandler(new VoiceActivityDetectorVAD());
(this.getVADHandler() as VoiceActivityDetectorVAD).percentageThreshold = settings.global("vad_threshold", 50);
} else {
console.warn("Invalid VAD handler! (" + type + ")");
console.warn("Invalid VAD (Voice activation detector) handler! (" + type + ")");
}
}
setVADHander(handler: VoiceActivityDetector) {
setVADHandler(handler: VoiceActivityDetector) {
if(this.vadHandler) {
this.vadHandler.changeHandle(null, true);
this.vadHandler.finalize();
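
In the "vad" branch above, percentageThreshold is compared against the current microphone level by VoiceActivityDetectorVAD. As an illustration of the idea (not the project's implementation), a level-based detector can be built on an AnalyserNode roughly like this:

    class SimpleLevelVAD {
        private analyser: AnalyserNode;
        private buffer: Uint8Array;
        percentageThreshold: number = 50;                   // same scale as the vad_threshold setting

        constructor(context: AudioContext, source: AudioNode) {
            this.analyser = context.createAnalyser();
            this.analyser.fftSize = 512;
            this.buffer = new Uint8Array(this.analyser.fftSize);
            source.connect(this.analyser);
        }

        shouldRecord(): boolean {
            this.analyser.getByteTimeDomainData(this.buffer);
            let peak = 0;
            for(let index = 0; index < this.buffer.length; index++)
                peak = Math.max(peak, Math.abs(this.buffer[index] - 128));  // 128 = silence midpoint
            return (peak / 128) * 100 >= this.percentageThreshold;
        }
    }
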
@ -130,27 +133,32 @@ class VoiceRecorder {
     update(flag: boolean) {
         if(this._recording == flag) return;
-        if(flag) this.start(this._deviceId);
+        if(flag) this.start(this._deviceId, this._deviceGroup);
         else this.stop();
     }
-    changeDevice(device: string) {
-        if(this._deviceId == device) return;
+    changeDevice(device: string, group: string) {
+        if(this._deviceId == device && this._deviceGroup == group) return;
         this._deviceId = device;
-        settings.changeGlobal("microphone_id", device);
+        this._deviceGroup = group;
+        settings.changeGlobal("microphone_device_id", device);
+        settings.changeServer("microphone_device_group", group);
         if(this._recording) {
             this.stop();
-            this.start(device);
+            this.start(device, group);
         }
     }
-    start(device: string){
+    start(device: string, groupId: string){
         this._deviceId = device;
-        console.log("Attempt recording!");
+        console.log("Attempt recording! (Device: %o | Group: %o)", device, groupId);
         this._recording = true;
         AudioController.userMedia({
-            audio: true,
-            deviceId: device
+            audio: {
+                deviceId: device,
+                groupId: groupId
+            }
         }, this.on_microphone.bind(this), error => {
             createErrorModal("Could not resolve microphone!", "Could not resolve microphone!<br>Message: " + error).open();
             console.error("Could not get microphone!");
@ -164,6 +172,8 @@ class VoiceRecorder {
         if(this.microphoneStream) this.microphoneStream.disconnect();
         this.microphoneStream = undefined;
+        /*
         if(this.mediaStream) {
             if(this.mediaStream.stop)
                 this.mediaStream.stop();
@ -172,23 +182,24 @@ class VoiceRecorder {
                 value.stop();
             });
         }
+        */
         this.mediaStream = undefined;
     }
     private on_microphone(stream: MediaStream) {
-        if(this.microphoneStream) {
+        const oldStream = this.microphoneStream;
+        if(oldStream)
             this.stop(); //Disconnect old stream
-        }
         console.log("Start recording!");
-        this.mediaStream = stream as MediaStream;
-        const oldStream = this.microphoneStream;
+        this.mediaStream = stream;
         this.microphoneStream = this.audioContext.createMediaStreamSource(stream);
         this.microphoneStream.connect(this.processor);
         chat.serverChat().appendMessage("Mic channels " + this.microphoneStream.channelCount);
         chat.serverChat().appendMessage("Mic channel mode " + this.microphoneStream.channelCountMode);
         chat.serverChat().appendMessage("Max channel count " + this.audioContext.destination.maxChannelCount);
         chat.serverChat().appendMessage("Sample rate " + this.audioContext.sampleRate);
         chat.serverChat().appendMessage("Stream ID " + stream.id);
         this.vadHandler.initialiseNewStream(oldStream, this.microphoneStream);
     }
 }
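
on_microphone() above swaps the MediaStreamAudioSourceNode feeding the processing chain and passes both the previous and the new source to the active voice activity detector via initialiseNewStream(). A condensed sketch of that swap (the function and parameter names are illustrative; the chat debug messages are left out):

    function attachMicrophone(context: AudioContext,
                              processor: ScriptProcessorNode,
                              stream: MediaStream,
                              previous?: MediaStreamAudioSourceNode): MediaStreamAudioSourceNode {
        if(previous)
            previous.disconnect();                  // detach the old microphone source

        const source = context.createMediaStreamSource(stream);
        source.connect(processor);                  // the new source now feeds the ScriptProcessor
        return source;
    }
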