Adding the new echo test

parent: 714a4e0a34
commit: b99d5cd6b9
@@ -1,4 +1,7 @@
 # Changelog:
+* **05.09.20**
+    - Smoother voice playback start (web client only)
+
 * **02.09.20**
    - Fixed web client hangup on no device error
    - Improved default recorder device detection (selects by default the best device)
file.ts
@@ -82,7 +82,7 @@ const APP_FILE_LIST_SHARED_SOURCE: ProjectResource[] = [
     "type": "img",
     "search-pattern": /.*\.(svg|png|gif)/,
     "build-target": "dev|rel",
-    "search-exclude": /.*(client-icons|style)\/.*/,
+    "search-exclude": /.*(client-icons)\/.*/,

     "path": "img/",
     "local-path": "./shared/img/"
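The effect of the narrower exclude pattern can be checked by running both regular expressions against a few representative asset paths. The snippet below is a throwaway sanity check; the sample paths are invented for illustration.

```typescript
// Compare the old and the new "search-exclude" pattern against sample paths.
const oldExclude = /.*(client-icons|style)\/.*/;
const newExclude = /.*(client-icons)\/.*/;

const samplePaths = [
    "shared/img/client-icons/about.svg", // hypothetical path
    "shared/img/style/loading.png",      // hypothetical path
    "shared/img/favicon/teacup.png"      // hypothetical path
];

for(const path of samplePaths) {
    console.log(path, "| old:", oldExclude.test(path), "| new:", newExclude.test(path));
}
// Paths below a style/ directory were excluded before and are picked up by the
// "img" resource after this change; client-icons/ stays excluded in both versions.
```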
@@ -442,7 +442,6 @@ html:root {
    justify-content: stretch;

    > .icon_em, > .container-icon {
        margin-top: .1em;
        margin-bottom: .1em;

        font-size: 2em;

@@ -8,15 +8,15 @@ import {LocalClientEntry} from "tc-shared/ui/client";
 import {ConnectionProfile} from "tc-shared/profiles/ConnectionProfile";
 import {ServerAddress} from "tc-shared/ui/server";
 import * as log from "tc-shared/log";
-import {LogCategory, logError, logInfo} from "tc-shared/log";
+import {LogCategory, logError, logInfo, logWarn} from "tc-shared/log";
 import {createErrorModal, createInfoModal, createInputModal, Modal} from "tc-shared/ui/elements/Modal";
 import {hashPassword} from "tc-shared/utils/helpers";
 import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
 import * as htmltags from "./ui/htmltags";
 import {ChannelEntry} from "tc-shared/ui/channel";
-import {InputStartResult, InputState} from "tc-shared/voice/RecorderBase";
+import {FilterMode, InputStartResult, InputState} from "tc-shared/voice/RecorderBase";
 import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
-import {default_recorder, RecorderProfile} from "tc-shared/voice/RecorderProfile";
+import {defaultRecorder, RecorderProfile} from "tc-shared/voice/RecorderProfile";
 import {Frame} from "tc-shared/ui/frames/chat_frame";
 import {Hostbanner} from "tc-shared/ui/frames/hostbanner";
 import {server_connections} from "tc-shared/ui/frames/connection_handlers";
|
@@ -37,7 +37,9 @@ import {PluginCmdRegistry} from "tc-shared/connection/PluginCmdHandler";
 import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
 import {VoiceConnectionStatus, WhisperSessionInitializeData} from "tc-shared/connection/VoiceConnection";
 import {getServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
-import {WhisperSession} from "tc-shared/voice/Whisper";
+import {WhisperSession} from "tc-shared/voice/VoiceWhisper";
+import {spawnEchoTestModal} from "tc-shared/ui/modal/echo-test/Controller";
+import {ServerFeature, ServerFeatures} from "tc-shared/connection/ServerFeatures";

 export enum InputHardwareState {
     MISSING,
|
@ -110,12 +112,7 @@ export interface LocalClientStatus {
|
|||
input_muted: boolean;
|
||||
output_muted: boolean;
|
||||
|
||||
channel_codec_encoding_supported: boolean;
|
||||
channel_codec_decoding_supported: boolean;
|
||||
sound_playback_supported: boolean;
|
||||
|
||||
sound_record_supported;
|
||||
|
||||
lastChannelCodecWarned: number,
|
||||
away: boolean | string;
|
||||
|
||||
channel_subscribe_all: boolean;
|
||||
|
@ -156,6 +153,8 @@ export class ConnectionHandler {
|
|||
|
||||
tag_connection_handler: JQuery;
|
||||
|
||||
serverFeatures: ServerFeatures;
|
||||
|
||||
private _clientId: number = 0;
|
||||
private _local_client: LocalClientEntry;
|
||||
|
||||
|
@ -163,6 +162,7 @@ export class ConnectionHandler {
|
|||
private _reconnect_attempt: boolean = false;
|
||||
|
||||
private _connect_initialize_id: number = 1;
|
||||
private echoTestRunning = false;
|
||||
|
||||
private pluginCmdRegistry: PluginCmdRegistry;
|
||||
|
||||
|
@ -174,10 +174,7 @@ export class ConnectionHandler {
|
|||
channel_subscribe_all: true,
|
||||
queries_visible: false,
|
||||
|
||||
sound_playback_supported: undefined,
|
||||
sound_record_supported: undefined,
|
||||
channel_codec_encoding_supported: undefined,
|
||||
channel_codec_decoding_supported: undefined
|
||||
lastChannelCodecWarned: -1
|
||||
};
|
||||
|
||||
private inputHardwareState: InputHardwareState = InputHardwareState.MISSING;
|
||||
|
@ -199,9 +196,10 @@ export class ConnectionHandler {
|
|||
this.update_voice_status();
|
||||
});
|
||||
this.serverConnection.getVoiceConnection().events.on("notify_connection_status_changed", () => this.update_voice_status());
|
||||
|
||||
this.serverConnection.getVoiceConnection().setWhisperSessionInitializer(this.initializeWhisperSession.bind(this));
|
||||
|
||||
this.serverFeatures = new ServerFeatures(this);
|
||||
|
||||
this.channelTree = new ChannelTree(this);
|
||||
this.fileManager = new FileManager(this);
|
||||
this.permissions = new PermissionManager(this);
|
||||
|
@ -413,6 +411,20 @@ export class ConnectionHandler {
|
|||
if(control_bar.current_connection_handler() === this)
|
||||
control_bar.apply_server_voice_state();
|
||||
*/
|
||||
|
||||
/*
|
||||
this.serverConnection.getVoiceConnection().startWhisper({ target: "echo" }).catch(error => {
|
||||
logError(LogCategory.CLIENT, tr("Failed to start local echo: %o"), error);
|
||||
});
|
||||
*/
|
||||
this.serverFeatures.awaitFeatures().then(result => {
|
||||
if(!result) {
|
||||
return;
|
||||
}
|
||||
if(this.serverFeatures.supportsFeature(ServerFeature.WHISPER_ECHO)) {
|
||||
spawnEchoTestModal(this);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.setInputHardwareState(this.getVoiceRecorder() ? InputHardwareState.VALID : InputHardwareState.MISSING);
|
||||
}
|
||||
|
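The echo test modal is only spawned once the server's feature list has arrived and ServerFeature.WHISPER_ECHO is reported as supported. The sketch below additionally respects the voice_echo_test_enabled setting introduced later in this commit; combining the two here is an assumption, as is the exact settings.global(...) accessor, and `handler` stands for the ConnectionHandler instance.

```typescript
// Sketch: gate the echo test on the server feature and on the user setting.
handler.serverFeatures.awaitFeatures().then(featuresReceived => {
    if(!featuresReceived) {
        /* feature negotiation failed or the handler got destroyed */
        return;
    }

    const serverSupport = handler.serverFeatures.supportsFeature(ServerFeature.WHISPER_ECHO);
    const userEnabled = settings.global(Settings.KEY_VOICE_ECHO_TEST_ENABLED);

    if(serverSupport && userEnabled) {
        spawnEchoTestModal(handler);
    }
});
```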
@ -652,6 +664,7 @@ export class ConnectionHandler {
|
|||
this.serverConnection.disconnect();
|
||||
|
||||
this.hostbanner.update();
|
||||
this.client_status.lastChannelCodecWarned = 0;
|
||||
|
||||
if(auto_reconnect) {
|
||||
if(!this.serverConnection) {
|
||||
|
@ -692,115 +705,98 @@ export class ConnectionHandler {
|
|||
});
|
||||
}
|
||||
|
||||
private _last_record_error_popup: number = 0;
|
||||
update_voice_status(targetChannel?: ChannelEntry) {
|
||||
private updateVoiceStatus() {
|
||||
if(!this._local_client) {
|
||||
/* we've been destroyed */
|
||||
return;
|
||||
}
|
||||
|
||||
if(typeof targetChannel === "undefined")
|
||||
targetChannel = this.getClient().currentChannel();
|
||||
let shouldRecord = false;
|
||||
|
||||
const vconnection = this.serverConnection.getVoiceConnection();
|
||||
const voiceConnection = this.serverConnection.getVoiceConnection();
|
||||
if(this.serverConnection.connected()) {
|
||||
let localClientUpdates: {
|
||||
client_output_hardware?: boolean,
|
||||
client_input_hardware?: boolean
|
||||
} = {};
|
||||
|
||||
const codecEncodeSupported = !targetChannel || vconnection.encodingSupported(targetChannel.properties.channel_codec);
|
||||
const codecDecodeSupported = !targetChannel || vconnection.decodingSupported(targetChannel.properties.channel_codec);
|
||||
const currentChannel = this.getClient().currentChannel();
|
||||
|
||||
const property_update = {
|
||||
client_input_muted: this.client_status.input_muted,
|
||||
client_output_muted: this.client_status.output_muted
|
||||
};
|
||||
if(!currentChannel) {
|
||||
/* Don't update the voice state, firstly await for us to be fully connected */
|
||||
} else if(voiceConnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
|
||||
/* We're currently not having a valid voice connection. We need to await that. */
|
||||
} else {
|
||||
let codecSupportEncode = voiceConnection.encodingSupported(currentChannel.properties.channel_codec);
|
||||
let codecSupportDecode = voiceConnection.decodingSupported(currentChannel.properties.channel_codec);
|
||||
|
||||
/* update the encoding codec */
|
||||
if(codecEncodeSupported && targetChannel) {
|
||||
vconnection.setEncoderCodec(targetChannel.properties.channel_codec);
|
||||
}
|
||||
localClientUpdates.client_input_hardware = codecSupportEncode;
|
||||
localClientUpdates.client_output_hardware = codecSupportDecode;
|
||||
|
||||
if(!this.serverConnection.connected() || vconnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
|
||||
property_update["client_input_hardware"] = false;
|
||||
property_update["client_output_hardware"] = false;
|
||||
} else {
|
||||
const recording_supported =
|
||||
this.getInputHardwareState() === InputHardwareState.VALID &&
|
||||
(!targetChannel || vconnection.encodingSupported(targetChannel.properties.channel_codec)) &&
|
||||
vconnection.getConnectionState() === VoiceConnectionStatus.Connected;
|
||||
if(this.client_status.lastChannelCodecWarned !== currentChannel.getChannelId()) {
|
||||
this.client_status.lastChannelCodecWarned = currentChannel.getChannelId();
|
||||
|
||||
const playback_supported = this.hasOutputHardware() && (!targetChannel || vconnection.decodingSupported(targetChannel.properties.channel_codec));
|
||||
if(!codecSupportEncode || !codecSupportDecode) {
|
||||
let message;
|
||||
if(!codecSupportEncode && !codecSupportDecode) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant speak or listen to anybody within this channel!");
|
||||
} else if(!codecSupportEncode) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant speak within this channel!");
|
||||
} else if(!codecSupportDecode) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant listen to anybody within this channel!");
|
||||
}
|
||||
|
||||
property_update["client_input_hardware"] = recording_supported;
|
||||
property_update["client_output_hardware"] = playback_supported;
|
||||
}
|
||||
createErrorModal(tr("Channel codec unsupported"), message).open();
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
const client_properties = this.getClient().properties;
|
||||
for(const key of Object.keys(property_update)) {
|
||||
if(client_properties[key] === property_update[key])
|
||||
delete property_update[key];
|
||||
shouldRecord = codecSupportEncode && !!voiceConnection.voiceRecorder()?.input;
|
||||
}
|
||||
|
||||
if(Object.keys(property_update).length > 0) {
|
||||
this.serverConnection.send_command("clientupdate", property_update).catch(error => {
|
||||
log.warn(LogCategory.GENERAL, tr("Failed to update client audio hardware properties. Error: %o"), error);
|
||||
this.log.log(EventType.ERROR_CUSTOM, { message: tr("Failed to update audio hardware properties.") });
|
||||
/* update our owns client properties */
|
||||
{
|
||||
const currentClientProperties = this.getClient().properties;
|
||||
for(const key of Object.keys(localClientUpdates)) {
|
||||
if(currentClientProperties[key] === localClientUpdates[key])
|
||||
delete localClientUpdates[key];
|
||||
}
|
||||
|
||||
/* Update these properties anyways (for case the server fails to handle the command) */
|
||||
const updates = [];
|
||||
for(const key of Object.keys(property_update))
|
||||
updates.push({key: key, value: (property_update[key]) + ""});
|
||||
this.getClient().updateVariables(...updates);
|
||||
if(Object.keys(localClientUpdates).length > 0) {
|
||||
this.serverConnection.send_command("clientupdate", localClientUpdates).catch(error => {
|
||||
log.warn(LogCategory.GENERAL, tr("Failed to update client audio hardware properties. Error: %o"), error);
|
||||
this.log.log(EventType.ERROR_CUSTOM, { message: tr("Failed to update audio hardware properties.") });
|
||||
|
||||
/* Update these properties anyways (for case the server fails to handle the command) */
|
||||
const updates = [];
|
||||
for(const key of Object.keys(localClientUpdates))
|
||||
updates.push({ key: key, value: localClientUpdates[key] ? "1" : "0" });
|
||||
this.getClient().updateVariables(...updates);
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
/* we're not connect, so we should not record either */
|
||||
}
|
||||
|
||||
/* update the recorder state */
|
||||
const currentInput = voiceConnection.voiceRecorder()?.input;
|
||||
if(currentInput) {
|
||||
if(shouldRecord) {
|
||||
if(this.getInputHardwareState() !== InputHardwareState.START_FAILED) {
|
||||
this.startVoiceRecorder(Date.now() - this._last_record_error_popup > 10 * 1000).then(() => {});
|
||||
}
|
||||
} else {
|
||||
currentInput.stop().catch(error => {
|
||||
logWarn(LogCategory.AUDIO, tr("Failed to stop the microphone input recorder: %o"), error);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(targetChannel) {
|
||||
if(this.client_status.channel_codec_decoding_supported !== codecDecodeSupported || this.client_status.channel_codec_encoding_supported !== codecEncodeSupported) {
|
||||
this.client_status.channel_codec_decoding_supported = codecDecodeSupported;
|
||||
this.client_status.channel_codec_encoding_supported = codecEncodeSupported;
|
||||
|
||||
let message;
|
||||
if(!codecEncodeSupported && !codecDecodeSupported) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant speak or listen to anybody within this channel!");
|
||||
} else if(!codecEncodeSupported) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant speak within this channel!");
|
||||
} else if(!codecDecodeSupported) {
|
||||
message = tr("This channel has an unsupported codec.<br>You cant listen to anybody within this channel!");
|
||||
}
|
||||
|
||||
if(message) {
|
||||
createErrorModal(tr("Channel codec unsupported"), message).open();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.client_status = this.client_status || {} as any;
|
||||
this.client_status.sound_record_supported = codecEncodeSupported;
|
||||
this.client_status.sound_playback_supported = codecDecodeSupported;
|
||||
|
||||
{
|
||||
const enableRecording = !this.client_status.input_muted && !this.client_status.output_muted;
|
||||
/* No need to start the microphone when we're not even connected */
|
||||
|
||||
const input = vconnection.voiceRecorder()?.input;
|
||||
if(input) {
|
||||
if(enableRecording && this.serverConnection.connected()) {
|
||||
if(this.getInputHardwareState() !== InputHardwareState.START_FAILED)
|
||||
this.startVoiceRecorder(Date.now() - this._last_record_error_popup > 10 * 1000);
|
||||
} else {
|
||||
input.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: Only trigger events for stuff which has been updated
|
||||
this.event_registry.fire("notify_state_updated", {
|
||||
state: "microphone"
|
||||
});
|
||||
|
||||
this.event_registry.fire("notify_state_updated", {
|
||||
state: "speaker"
|
||||
});
|
||||
top_menu.update_state(); //TODO: Top-Menu should register their listener
|
||||
private _last_record_error_popup: number = 0;
|
||||
update_voice_status(targetChannel?: ChannelEntry) {
|
||||
this.updateVoiceStatus();
|
||||
return;
|
||||
}
|
||||
|
||||
sync_status_with_server() {
|
||||
|
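The rewritten updateVoiceStatus() only sends a clientupdate for hardware flags that actually differ from the client's known properties, and still applies the values locally when the command fails. Below is a minimal sketch of that pattern pulled out into a hypothetical helper; sendHardwareState does not exist in the commit, the real code inlines this logic inside the handler.

```typescript
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
import {LogCategory, logWarn} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";

async function sendHardwareState(handler: ConnectionHandler, updates: Record<string, boolean>) {
    const currentProperties = handler.getClient().properties;

    /* drop values the server already knows */
    for(const key of Object.keys(updates)) {
        if(currentProperties[key] === updates[key]) {
            delete updates[key];
        }
    }

    if(Object.keys(updates).length === 0) {
        return;
    }

    try {
        await handler.serverConnection.send_command("clientupdate", updates);
    } catch (error) {
        logWarn(LogCategory.GENERAL, tr("Failed to update client audio hardware properties: %o"), error);

        /* apply the values locally anyway, in case the server mishandled the command */
        const variables = Object.keys(updates).map(key => ({ key: key, value: updates[key] ? "1" : "0" }));
        handler.getClient().updateVariables(...variables);
    }
}
```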
@ -810,8 +806,9 @@ export class ConnectionHandler {
|
|||
client_output_muted: this.client_status.output_muted,
|
||||
client_away: typeof(this.client_status.away) === "string" || this.client_status.away,
|
||||
client_away_message: typeof(this.client_status.away) === "string" ? this.client_status.away : "",
|
||||
client_input_hardware: this.client_status.sound_record_supported && this.getInputHardwareState() === InputHardwareState.VALID,
|
||||
client_output_hardware: this.client_status.sound_playback_supported
|
||||
/* TODO: Somehow store this? */
|
||||
//client_input_hardware: this.client_status.sound_record_supported && this.getInputHardwareState() === InputHardwareState.VALID,
|
||||
//client_output_hardware: this.client_status.sound_playback_supported
|
||||
}).catch(error => {
|
||||
log.warn(LogCategory.GENERAL, tr("Failed to sync handler state with server. Error: %o"), error);
|
||||
this.log.log(EventType.ERROR_CUSTOM, {message: tr("Failed to sync handler state with server.")});
|
||||
|
@ -821,7 +818,7 @@ export class ConnectionHandler {
|
|||
/* can be called as much as you want, does nothing if nothing changed */
|
||||
async acquireInputHardware() {
|
||||
/* if we're having multiple recorders, try to get the right one */
|
||||
let recorder: RecorderProfile = default_recorder;
|
||||
let recorder: RecorderProfile = defaultRecorder;
|
||||
|
||||
try {
|
||||
await this.serverConnection.getVoiceConnection().acquireVoiceRecorder(recorder);
|
||||
|
@ -838,9 +835,11 @@ export class ConnectionHandler {
|
|||
}
|
||||
}
|
||||
|
||||
async startVoiceRecorder(notifyError: boolean) {
|
||||
async startVoiceRecorder(notifyError: boolean) : Promise<{ state: "success" | "no-input" } | { state: "error", message: string }> {
|
||||
const input = this.getVoiceRecorder()?.input;
|
||||
if(!input) return;
|
||||
if(!input) {
|
||||
return { state: "no-input" };
|
||||
}
|
||||
|
||||
if(input.currentState() === InputState.PAUSED && this.connection_state === ConnectionState.CONNECTED) {
|
||||
try {
|
||||
|
@ -851,6 +850,7 @@ export class ConnectionHandler {
|
|||
|
||||
this.setInputHardwareState(InputHardwareState.VALID);
|
||||
this.update_voice_status();
|
||||
return { state: "success" };
|
||||
} catch (error) {
|
||||
this.setInputHardwareState(InputHardwareState.START_FAILED);
|
||||
this.update_voice_status();
|
||||
|
@ -871,14 +871,17 @@ export class ConnectionHandler {
|
|||
} else {
|
||||
errorMessage = tr("lookup the console");
|
||||
}
|
||||
|
||||
log.warn(LogCategory.VOICE, tr("Failed to start microphone input (%s)."), error);
|
||||
if(notifyError) {
|
||||
this._last_record_error_popup = Date.now();
|
||||
createErrorModal(tr("Failed to start recording"), tra("Microphone start failed.\nError: {}", errorMessage)).open();
|
||||
}
|
||||
return { state: "error", message: errorMessage };
|
||||
}
|
||||
} else {
|
||||
this.setInputHardwareState(InputHardwareState.VALID);
|
||||
return { state: "success" };
|
||||
}
|
||||
}
|
||||
|
||||
|
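startVoiceRecorder() now reports its outcome as a small discriminated union instead of returning nothing. A sketch of how a caller can branch on it; `handler` is assumed to be the active ConnectionHandler and the snippet runs in an async context.

```typescript
const result = await handler.startVoiceRecorder(false);

switch (result.state) {
    case "success":
        /* the recorder input is running */
        break;

    case "no-input":
        /* no recorder input attached, e.g. no default recorder configured */
        break;

    case "error":
        /* the union narrows here, so the message field is available */
        createErrorModal(tr("Failed to start recording"), result.message).open();
        break;
}
```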
@ -985,10 +988,10 @@ export class ConnectionHandler {
|
|||
clientName: session.getClientName(),
|
||||
clientUniqueId: session.getClientUniqueId(),
|
||||
|
||||
blocked: false,
|
||||
blocked: session.getClientId() !== this.getClient().clientId(),
|
||||
volume: 1,
|
||||
|
||||
sessionTimeout: 60 * 1000
|
||||
sessionTimeout: 5 * 1000
|
||||
}
|
||||
}
|
||||
|
||||
|
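The default whisper session initializer now only leaves the own client's echo-back session unblocked and shortens the session timeout. The sketch below registers an equivalent initializer from the outside; the exact callback signature of WhisperSessionInitializer is assumed from the imports in this commit, and `handler` stands for the ConnectionHandler.

```typescript
import {WhisperSessionInitializer} from "tc-shared/connection/VoiceConnection";

const initializeWhisperSession: WhisperSessionInitializer = async session => ({
    clientName: session.getClientName(),
    clientUniqueId: session.getClientUniqueId(),

    /* block every session except the echo of the own client */
    blocked: session.getClientId() !== handler.getClient().clientId(),
    volume: 1,

    sessionTimeout: 5 * 1000
});

handler.serverConnection.getVoiceConnection().setWhisperSessionInitializer(initializeWhisperSession);
```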
@ -996,36 +999,39 @@ export class ConnectionHandler {
|
|||
this.event_registry.unregister_handler(this);
|
||||
this.cancel_reconnect(true);
|
||||
|
||||
this.tag_connection_handler && this.tag_connection_handler.remove();
|
||||
this.tag_connection_handler?.remove();
|
||||
this.tag_connection_handler = undefined;
|
||||
|
||||
this.hostbanner && this.hostbanner.destroy();
|
||||
this.hostbanner?.destroy();
|
||||
this.hostbanner = undefined;
|
||||
|
||||
this.pluginCmdRegistry && this.pluginCmdRegistry.destroy();
|
||||
this.pluginCmdRegistry?.destroy();
|
||||
this.pluginCmdRegistry = undefined;
|
||||
|
||||
this._local_client && this._local_client.destroy();
|
||||
this._local_client?.destroy();
|
||||
this._local_client = undefined;
|
||||
|
||||
this.channelTree && this.channelTree.destroy();
|
||||
this.channelTree?.destroy();
|
||||
this.channelTree = undefined;
|
||||
|
||||
this.side_bar && this.side_bar.destroy();
|
||||
this.side_bar?.destroy();
|
||||
this.side_bar = undefined;
|
||||
|
||||
this.log && this.log.destroy();
|
||||
this.log?.destroy();
|
||||
this.log = undefined;
|
||||
|
||||
this.permissions && this.permissions.destroy();
|
||||
this.permissions?.destroy();
|
||||
this.permissions = undefined;
|
||||
|
||||
this.groups && this.groups.destroy();
|
||||
this.groups?.destroy();
|
||||
this.groups = undefined;
|
||||
|
||||
this.fileManager && this.fileManager.destroy();
|
||||
this.fileManager?.destroy();
|
||||
this.fileManager = undefined;
|
||||
|
||||
this.serverFeatures?.destroy();
|
||||
this.serverFeatures = undefined;
|
||||
|
||||
this.settings && this.settings.destroy();
|
||||
this.settings = undefined;
|
||||
|
||||
|
@ -1136,6 +1142,37 @@ export class ConnectionHandler {
|
|||
hasOutputHardware() : boolean { return true; }
|
||||
|
||||
getPluginCmdRegistry() : PluginCmdRegistry { return this.pluginCmdRegistry; }
|
||||
|
||||
async startEchoTest() : Promise<void> {
|
||||
await this.serverConnection.getVoiceConnection().startWhisper({ target: "echo" });
|
||||
|
||||
/* TODO: store and later restore microphone status! */
|
||||
this.client_status.input_muted = false;
|
||||
this.update_voice_status();
|
||||
|
||||
try {
|
||||
this.echoTestRunning = true;
|
||||
const startResult = await this.startVoiceRecorder(false);
|
||||
|
||||
/* FIXME: Don't do it like that! */
|
||||
this.getVoiceRecorder()?.input?.setFilterMode(FilterMode.Bypass);
|
||||
|
||||
if(startResult.state === "error") {
|
||||
throw startResult.message;
|
||||
}
|
||||
} catch (error) {
|
||||
this.echoTestRunning = false;
|
||||
/* TODO: Restore voice recorder state! */
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
stopEchoTest() {
|
||||
this.echoTestRunning = false;
|
||||
this.serverConnection.getVoiceConnection().stopWhisper();
|
||||
this.getVoiceRecorder()?.input?.setFilterMode(FilterMode.Filter);
|
||||
this.update_voice_status();
|
||||
}
|
||||
}
|
||||
|
||||
export type ConnectionStateUpdateType = "microphone" | "speaker" | "away" | "subscribe" | "query";
|
||||
|
|
|
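startEchoTest() and stopEchoTest() are the new public entry points for the echo test. A sketch of how a caller, for example the echo test modal controller, might drive them; the wrapper function itself is illustrative.

```typescript
import {ConnectionHandler} from "tc-shared/ConnectionHandler";

async function runEchoTest(handler: ConnectionHandler) {
    try {
        /* starts the "echo" whisper, unmutes the input and bypasses the input filter */
        await handler.startEchoTest();

        /* ... let the user speak and listen to their own voice ... */
    } finally {
        /* stops the whisper and restores the regular filter mode */
        handler.stopEchoTest();
    }
}
```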
@ -149,36 +149,40 @@ export interface KeyEvent extends KeyDescriptor {
|
|||
export interface KeyHook extends KeyDescriptor {
|
||||
cancel: boolean;
|
||||
|
||||
|
||||
callback_press: () => any;
|
||||
callback_release: () => any;
|
||||
}
|
||||
|
||||
export function key_description(key: KeyDescriptor) {
|
||||
let result = "";
|
||||
if(key.key_shift)
|
||||
if(key.key_shift) {
|
||||
result += " + " + tr("Shift");
|
||||
if(key.key_alt)
|
||||
result += " + " + tr("Alt");
|
||||
if(key.key_ctrl)
|
||||
result += " + " + tr("CTRL");
|
||||
if(key.key_windows)
|
||||
result += " + " + tr("Win");
|
||||
}
|
||||
|
||||
if(!result && !key.key_code)
|
||||
return tr("unset");
|
||||
if(key.key_alt) {
|
||||
result += " + " + tr("Alt");
|
||||
}
|
||||
|
||||
if(key.key_ctrl) {
|
||||
result += " + " + tr("CTRL");
|
||||
}
|
||||
|
||||
if(key.key_windows) {
|
||||
result += " + " + tr("Win");
|
||||
}
|
||||
|
||||
if(key.key_code) {
|
||||
let key_name;
|
||||
if(key.key_code.startsWith("Key"))
|
||||
if(key.key_code.startsWith("Key")) {
|
||||
key_name = key.key_code.substr(3);
|
||||
else if(key.key_code.startsWith("Digit"))
|
||||
} else if(key.key_code.startsWith("Digit")) {
|
||||
key_name = key.key_code.substr(5);
|
||||
else if(key.key_code.startsWith("Numpad"))
|
||||
} else if(key.key_code.startsWith("Numpad")) {
|
||||
key_name = "Numpad " + key.key_code.substr(6);
|
||||
else
|
||||
} else {
|
||||
key_name = key.key_code;
|
||||
}
|
||||
result += " + " + key_name;
|
||||
}
|
||||
return result.substr(3);
|
||||
return result ? result.substr(3) : tr("unset");
|
||||
}
|
|
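A small usage example of the reworked key_description(); the descriptor literal is illustrative and only sets the fields the function reads.

```typescript
const description = key_description({
    key_shift: true,
    key_alt: false,
    key_ctrl: true,
    key_windows: false,
    key_code: "KeyT"
} as KeyDescriptor);

console.log(description);                          // "Shift + CTRL + T"
console.log(key_description({} as KeyDescriptor)); // "unset"
```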
@ -478,7 +478,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
|
|||
value: string
|
||||
}[] = [];
|
||||
|
||||
for(let key in entry) {
|
||||
for(let key of Object.keys(entry)) {
|
||||
if(key == "cfid") continue;
|
||||
if(key == "ctid") continue;
|
||||
if(key === "invokerid") continue;
|
||||
|
@ -609,10 +609,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
|
|||
this.connection_handler.update_voice_status(channel_to);
|
||||
|
||||
for(const entry of client.channelTree.clientsByChannel(channelFrom)) {
|
||||
if(entry !== client && entry.get_audio_handle()) {
|
||||
entry.get_audio_handle().abort_replay();
|
||||
entry.speaking = false;
|
||||
}
|
||||
entry.getVoiceClient()?.abortReplay();
|
||||
}
|
||||
|
||||
const side_bar = this.connection_handler.side_bar;
|
||||
|
|
|
@ -8,13 +8,12 @@ import {
|
|||
QueryList,
|
||||
QueryListEntry, ServerGroupClient
|
||||
} from "tc-shared/connection/ServerConnectionDeclaration";
|
||||
import {ChannelEntry} from "tc-shared/ui/channel";
|
||||
import {AbstractCommandHandler} from "tc-shared/connection/AbstractCommandHandler";
|
||||
import {tr} from "tc-shared/i18n/localize";
|
||||
import {ErrorCode} from "tc-shared/connection/ErrorCode";
|
||||
|
||||
export class CommandHelper extends AbstractCommandHandler {
|
||||
private _who_am_i: any;
|
||||
private whoAmIResponse: any;
|
||||
private infoByUniqueIdRequest: {[unique_id: string]:((resolved: ClientNameInfo) => any)[]} = {};
|
||||
private infoByDatabaseIdRequest: {[database_id: number]:((resolved: ClientNameInfo) => any)[]} = {};
|
||||
|
||||
|
@ -32,42 +31,37 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
destroy() {
|
||||
if(this.connection) {
|
||||
const hboss = this.connection.command_handler_boss();
|
||||
hboss && hboss.unregister_handler(this);
|
||||
hboss?.unregister_handler(this);
|
||||
}
|
||||
|
||||
this.infoByUniqueIdRequest = undefined;
|
||||
this.infoByDatabaseIdRequest = undefined;
|
||||
}
|
||||
|
||||
handle_command(command: ServerCommand): boolean {
|
||||
if(command.command == "notifyclientnamefromuid")
|
||||
this.handle_notifyclientnamefromuid(command.arguments);
|
||||
if(command.command == "notifyclientgetnamefromdbid")
|
||||
this.handle_notifyclientgetnamefromdbid(command.arguments);
|
||||
else
|
||||
if(command.command == "notifyclientnamefromuid") {
|
||||
this.handleNotifyClientNameFromUniqueId(command.arguments);
|
||||
} else if(command.command == "notifyclientgetnamefromdbid") {
|
||||
this.handleNotifyClientGetNameFromDatabaseId(command.arguments);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
joinChannel(channel: ChannelEntry, password?: string) : Promise<CommandResult> {
|
||||
return this.connection.send_command("clientmove", {
|
||||
"clid": this.connection.client.getClientId(),
|
||||
"cid": channel.getChannelId(),
|
||||
"cpw": password || ""
|
||||
});
|
||||
}
|
||||
|
||||
async info_from_uid(..._unique_ids: string[]) : Promise<ClientNameInfo[]> {
|
||||
async getInfoFromUniqueId(...uniqueIds: string[]) : Promise<ClientNameInfo[]> {
|
||||
const response: ClientNameInfo[] = [];
|
||||
const request = [];
|
||||
const unique_ids = new Set(_unique_ids);
|
||||
if(!unique_ids.size) return [];
|
||||
const uniqueUniqueIds = new Set(uniqueIds);
|
||||
if(uniqueUniqueIds.size === 0) return [];
|
||||
|
||||
const unique_id_resolvers: {[unique_id: string]: (resolved: ClientNameInfo) => any} = {};
|
||||
const resolvers: {[uniqueId: string]: (resolved: ClientNameInfo) => any} = {};
|
||||
|
||||
for(const uniqueId of uniqueUniqueIds) {
|
||||
request.push({ cluid: uniqueId });
|
||||
|
||||
for(const unique_id of unique_ids) {
|
||||
request.push({'cluid': unique_id});
|
||||
(this.infoByUniqueIdRequest[unique_id] || (this.infoByUniqueIdRequest[unique_id] = []))
|
||||
.push(unique_id_resolvers[unique_id] = info => response.push(info));
|
||||
const requestCallbacks = this.infoByUniqueIdRequest[uniqueId] || (this.infoByUniqueIdRequest[uniqueId] = []);
|
||||
requestCallbacks.push(resolvers[uniqueId] = info => response.push(info));
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -80,42 +74,43 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
} finally {
|
||||
/* cleanup */
|
||||
for(const unique_id of Object.keys(unique_id_resolvers))
|
||||
(this.infoByUniqueIdRequest[unique_id] || []).remove(unique_id_resolvers[unique_id]);
|
||||
for(const uniqueId of Object.keys(resolvers)) {
|
||||
this.infoByUniqueIdRequest[uniqueId]?.remove(resolvers[uniqueId]);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private handle_notifyclientgetnamefromdbid(json: any[]) {
|
||||
private handleNotifyClientGetNameFromDatabaseId(json: any[]) {
|
||||
for(const entry of json) {
|
||||
const info: ClientNameInfo = {
|
||||
client_unique_id: entry["cluid"],
|
||||
client_nickname: entry["clname"],
|
||||
client_database_id: parseInt(entry["cldbid"])
|
||||
clientUniqueId: entry["cluid"],
|
||||
clientNickname: entry["clname"],
|
||||
clientDatabaseId: parseInt(entry["cldbid"])
|
||||
};
|
||||
|
||||
const functions = this.infoByDatabaseIdRequest[info.client_database_id] || [];
|
||||
delete this.infoByDatabaseIdRequest[info.client_database_id];
|
||||
const callbacks = this.infoByDatabaseIdRequest[info.clientDatabaseId] || [];
|
||||
delete this.infoByDatabaseIdRequest[info.clientDatabaseId];
|
||||
|
||||
for(const fn of functions)
|
||||
fn(info);
|
||||
callbacks.forEach(callback => callback(info));
|
||||
}
|
||||
}
|
||||
|
||||
async info_from_cldbid(..._cldbid: number[]) : Promise<ClientNameInfo[]> {
|
||||
async getInfoFromClientDatabaseId(...clientDatabaseIds: number[]) : Promise<ClientNameInfo[]> {
|
||||
const response: ClientNameInfo[] = [];
|
||||
const request = [];
|
||||
const unique_cldbid = new Set(_cldbid);
|
||||
if(!unique_cldbid.size) return [];
|
||||
const uniqueClientDatabaseIds = new Set(clientDatabaseIds);
|
||||
if(!uniqueClientDatabaseIds.size) return [];
|
||||
|
||||
const unique_cldbid_resolvers: {[dbid: number]: (resolved: ClientNameInfo) => any} = {};
|
||||
const resolvers: {[dbid: number]: (resolved: ClientNameInfo) => any} = {};
|
||||
|
||||
|
||||
for(const cldbid of unique_cldbid) {
|
||||
request.push({'cldbid': cldbid});
|
||||
(this.infoByDatabaseIdRequest[cldbid] || (this.infoByDatabaseIdRequest[cldbid] = []))
|
||||
.push(unique_cldbid_resolvers[cldbid] = info => response.push(info));
|
||||
for(const clientDatabaseId of uniqueClientDatabaseIds) {
|
||||
request.push({ cldbid: clientDatabaseId });
|
||||
|
||||
const requestCallbacks = this.infoByDatabaseIdRequest[clientDatabaseId] || (this.infoByDatabaseIdRequest[clientDatabaseId] = []);
|
||||
requestCallbacks.push(resolvers[clientDatabaseId] = info => response.push(info));
|
||||
}
|
||||
|
||||
try {
|
||||
|
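The name-resolution helpers were renamed to camelCase (getInfoFromUniqueId, getInfoFromClientDatabaseId) and the ClientNameInfo fields along with them. A usage sketch; the database ids are placeholders taken from comments elsewhere in this commit and `handler` is the ConnectionHandler.

```typescript
const commandHelper = handler.getServerConnection().command_helper;
const infos = await commandHelper.getInfoFromClientDatabaseId(2, 9);

for(const info of infos) {
    console.log(info.clientDatabaseId, "->", info.clientNickname, "(" + info.clientUniqueId + ")");
}
```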
@ -128,30 +123,32 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
} finally {
|
||||
/* cleanup */
|
||||
for(const cldbid of Object.keys(unique_cldbid_resolvers))
|
||||
(this.infoByDatabaseIdRequest[cldbid] || []).remove(unique_cldbid_resolvers[cldbid]);
|
||||
for(const cldbid of Object.keys(resolvers)) {
|
||||
this.infoByDatabaseIdRequest[cldbid]?.remove(resolvers[cldbid]);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private handle_notifyclientnamefromuid(json: any[]) {
|
||||
private handleNotifyClientNameFromUniqueId(json: any[]) {
|
||||
for(const entry of json) {
|
||||
const info: ClientNameInfo = {
|
||||
client_unique_id: entry["cluid"],
|
||||
client_nickname: entry["clname"],
|
||||
client_database_id: parseInt(entry["cldbid"])
|
||||
clientUniqueId: entry["cluid"],
|
||||
clientNickname: entry["clname"],
|
||||
clientDatabaseId: parseInt(entry["cldbid"])
|
||||
};
|
||||
|
||||
const functions = this.infoByUniqueIdRequest[entry["cluid"]] || [];
|
||||
delete this.infoByUniqueIdRequest[entry["cluid"]];
|
||||
|
||||
for(const fn of functions)
|
||||
for(const fn of functions) {
|
||||
fn(info);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
request_query_list(server_id: number = undefined) : Promise<QueryList> {
|
||||
requestQueryList(server_id: number = undefined) : Promise<QueryList> {
|
||||
return new Promise<QueryList>((resolve, reject) => {
|
||||
const single_handler = {
|
||||
command: "notifyquerylist",
|
||||
|
@ -180,12 +177,11 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
let data = {};
|
||||
if(server_id !== undefined)
|
||||
if(server_id !== undefined) {
|
||||
data["server_id"] = server_id;
|
||||
}
|
||||
|
||||
this.connection.send_command("querylist", data).catch(error => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
|
||||
if(error instanceof CommandResult) {
|
||||
if(error.id == ErrorCode.DATABASE_EMPTY_RESULT) {
|
||||
resolve(undefined);
|
||||
|
@ -193,11 +189,13 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
}
|
||||
reject(error);
|
||||
}).then(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
request_playlist_list() : Promise<Playlist[]> {
|
||||
requestPlaylistList() : Promise<Playlist[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const single_handler: SingleCommandHandler = {
|
||||
command: "notifyplaylistlist",
|
||||
|
@ -234,8 +232,6 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
this.connection.send_command("playlistlist").catch(error => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
|
||||
if(error instanceof CommandResult) {
|
||||
if(error.id == ErrorCode.DATABASE_EMPTY_RESULT) {
|
||||
resolve([]);
|
||||
|
@ -243,11 +239,13 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
}
|
||||
reject(error);
|
||||
})
|
||||
}).then(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
request_playlist_songs(playlist_id: number, process_result?: boolean) : Promise<PlaylistSong[]> {
|
||||
requestPlaylistSongs(playlist_id: number, process_result?: boolean) : Promise<PlaylistSong[]> {
|
||||
let bulked_response = false;
|
||||
let bulk_index = 0;
|
||||
|
||||
|
@ -300,7 +298,6 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
this.connection.send_command("playlistsonglist", {playlist_id: playlist_id}, { process_result: process_result }).catch(error => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
if(error instanceof CommandResult) {
|
||||
if(error.id == ErrorCode.DATABASE_EMPTY_RESULT) {
|
||||
resolve([]);
|
||||
|
@ -308,7 +305,9 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
}
|
||||
reject(error);
|
||||
})
|
||||
}).catch(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -326,8 +325,9 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
|
||||
const result: number[] = [];
|
||||
|
||||
for(const entry of json)
|
||||
for(const entry of json) {
|
||||
result.push(parseInt(entry["cldbid"]));
|
||||
}
|
||||
|
||||
resolve(result.filter(e => !isNaN(e)));
|
||||
return true;
|
||||
|
@ -336,17 +336,18 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
this.connection.send_command("playlistclientlist", {playlist_id: playlist_id}).catch(error => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
if(error instanceof CommandResult && error.id == ErrorCode.DATABASE_EMPTY_RESULT) {
|
||||
resolve([]);
|
||||
return;
|
||||
}
|
||||
reject(error);
|
||||
})
|
||||
}).then(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
request_clients_by_server_group(group_id: number) : Promise<ServerGroupClient[]> {
|
||||
requestClientsByServerGroup(group_id: number) : Promise<ServerGroupClient[]> {
|
||||
//servergroupclientlist sgid=2
|
||||
//notifyservergroupclientlist sgid=6 cldbid=2 client_nickname=WolverinDEV client_unique_identifier=xxjnc14LmvTk+Lyrm8OOeo4tOqw=
|
||||
return new Promise<ServerGroupClient[]>((resolve, reject) => {
|
||||
|
@ -380,14 +381,13 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
};
|
||||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
this.connection.send_command("servergroupclientlist", {sgid: group_id}).catch(error => {
|
||||
this.connection.send_command("servergroupclientlist", {sgid: group_id}).catch(reject).then(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
reject(error);
|
||||
})
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
request_playlist_info(playlist_id: number) : Promise<PlaylistInfo> {
|
||||
requestPlaylistInfo(playlist_id: number) : Promise<PlaylistInfo> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const single_handler: SingleCommandHandler = {
|
||||
command: "notifyplaylistinfo",
|
||||
|
@ -399,7 +399,6 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
}
|
||||
|
||||
try {
|
||||
//resolve
|
||||
resolve({
|
||||
playlist_id: parseInt(json["playlist_id"]),
|
||||
playlist_title: json["playlist_title"],
|
||||
|
@ -426,10 +425,9 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
};
|
||||
this.handler_boss.register_single_handler(single_handler);
|
||||
|
||||
this.connection.send_command("playlistinfo", {playlist_id: playlist_id}).catch(error => {
|
||||
this.connection.send_command("playlistinfo", { playlist_id: playlist_id }).catch(reject).then(() => {
|
||||
this.handler_boss.remove_single_handler(single_handler);
|
||||
reject(error);
|
||||
})
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -438,9 +436,10 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
* Its just a workaround for the query management.
|
||||
* There is no garantee that the whoami trick will work forever
|
||||
*/
|
||||
current_virtual_server_id() : Promise<number> {
|
||||
if(this._who_am_i)
|
||||
return Promise.resolve(parseInt(this._who_am_i["virtualserver_id"]));
|
||||
getCurrentVirtualServerId() : Promise<number> {
|
||||
if(this.whoAmIResponse) {
|
||||
return Promise.resolve(parseInt(this.whoAmIResponse["virtualserver_id"]));
|
||||
}
|
||||
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
const single_handler: SingleCommandHandler = {
|
||||
|
@ -448,8 +447,8 @@ export class CommandHelper extends AbstractCommandHandler {
|
|||
if(command.command != "" && command.command.indexOf("=") == -1)
|
||||
return false;
|
||||
|
||||
this._who_am_i = command.arguments[0];
|
||||
resolve(parseInt(this._who_am_i["virtualserver_id"]));
|
||||
this.whoAmIResponse = command.arguments[0];
|
||||
resolve(parseInt(this.whoAmIResponse["virtualserver_id"]));
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,62 +1,46 @@
|
|||
import {
|
||||
AbstractVoiceConnection, LatencySettings,
|
||||
PlayerState,
|
||||
VoiceClient,
|
||||
AbstractVoiceConnection,
|
||||
VoiceConnectionStatus, WhisperSessionInitializer
|
||||
} from "tc-shared/connection/VoiceConnection";
|
||||
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
|
||||
import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
|
||||
import {WhisperSession} from "tc-shared/voice/Whisper";
|
||||
import {VoiceClient} from "tc-shared/voice/VoiceClient";
|
||||
import {VoicePlayerLatencySettings, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
|
||||
import {WhisperSession} from "tc-shared/voice/VoiceWhisper";
|
||||
|
||||
class DummyVoiceClient implements VoiceClient {
|
||||
client_id: number;
|
||||
|
||||
callback_playback: () => any;
|
||||
callback_stopped: () => any;
|
||||
|
||||
callback_state_changed: (new_state: PlayerState) => any;
|
||||
|
||||
private readonly clientId: number;
|
||||
private volume: number;
|
||||
|
||||
constructor(clientId: number) {
|
||||
this.client_id = clientId;
|
||||
|
||||
this.clientId = clientId;
|
||||
this.volume = 1;
|
||||
this.reset_latency_settings();
|
||||
}
|
||||
|
||||
abort_replay() { }
|
||||
|
||||
flush() {
|
||||
throw "flush isn't supported";}
|
||||
|
||||
get_state(): PlayerState {
|
||||
return PlayerState.STOPPED;
|
||||
getClientId(): number {
|
||||
return this.clientId;
|
||||
}
|
||||
|
||||
latency_settings(settings?: LatencySettings): LatencySettings {
|
||||
throw "latency settings are not supported";
|
||||
}
|
||||
|
||||
reset_latency_settings() {
|
||||
throw "latency settings are not supported";
|
||||
}
|
||||
|
||||
set_volume(volume: number): void {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
get_volume(): number {
|
||||
getVolume(): number {
|
||||
return this.volume;
|
||||
}
|
||||
|
||||
support_flush(): boolean {
|
||||
return false;
|
||||
setVolume(volume: number) {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
support_latency_settings(): boolean {
|
||||
return false;
|
||||
getState(): VoicePlayerState {
|
||||
return VoicePlayerState.STOPPED;
|
||||
}
|
||||
|
||||
getLatencySettings(): Readonly<VoicePlayerLatencySettings> {
|
||||
return { maxBufferTime: 0, minBufferTime: 0 };
|
||||
}
|
||||
|
||||
setLatencySettings(settings) { }
|
||||
|
||||
flushBuffer() { }
|
||||
abortReplay() { }
|
||||
}
|
||||
|
||||
export class DummyVoiceConnection extends AbstractVoiceConnection {
|
||||
|
@ -89,7 +73,7 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
|
|||
this.events.fire("notify_recorder_changed", {});
|
||||
}
|
||||
|
||||
availableClients(): VoiceClient[] {
|
||||
availableVoiceClients(): VoiceClient[] {
|
||||
return this.voiceClients;
|
||||
}
|
||||
|
||||
|
@ -109,7 +93,7 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
|
|||
return 0;
|
||||
}
|
||||
|
||||
registerClient(clientId: number): VoiceClient {
|
||||
async registerVoiceClient(clientId: number): Promise<VoiceClient> {
|
||||
const client = new DummyVoiceClient(clientId);
|
||||
this.voiceClients.push(client);
|
||||
return client;
|
||||
|
@ -117,7 +101,7 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
|
|||
|
||||
setEncoderCodec(codec: number) {}
|
||||
|
||||
async unregister_client(client: VoiceClient): Promise<void> {
|
||||
async unregisterVoiceClient(client: VoiceClient): Promise<void> {
|
||||
this.voiceClients.remove(client as any);
|
||||
}
|
||||
|
||||
|
|
|
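Voice client registration is asynchronous after this change and uses the renamed registerVoiceClient/unregisterVoiceClient pair. A short usage sketch; `voiceConnection` is assumed to be the AbstractVoiceConnection of the current server connection and the client id is made up.

```typescript
const clientId = 42; /* hypothetical client id */

const voiceClient = await voiceConnection.registerVoiceClient(clientId);
voiceClient.setVolume(0.5);

/* later, when the client leaves view */
await voiceConnection.unregisterVoiceClient(voiceClient);
```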
@ -32,10 +32,9 @@ export class CommandResult {
|
|||
}
|
||||
|
||||
export interface ClientNameInfo {
|
||||
//cluid=tYzKUryn\/\/Y8VBMf8PHUT6B1eiE= name=Exp clname=Exp cldbid=9
|
||||
client_unique_id: string;
|
||||
client_nickname: string;
|
||||
client_database_id: number;
|
||||
clientUniqueId: string;
|
||||
clientNickname: string;
|
||||
clientDatabaseId: number;
|
||||
}
|
||||
|
||||
export interface ClientNameFromUid {
|
||||
|
|
|
@ -0,0 +1,168 @@
|
|||
import {ConnectionEvents, ConnectionHandler, ConnectionState} from "tc-shared/ConnectionHandler";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
|
||||
import {ErrorCode} from "tc-shared/connection/ErrorCode";
|
||||
import {LogCategory, logDebug, logTrace, logWarn} from "tc-shared/log";
|
||||
import {ExplicitCommandHandler} from "tc-shared/connection/AbstractCommandHandler";
|
||||
|
||||
export type ServerFeatureSupport = "unsupported" | "supported" | "experimental" | "deprecated";
|
||||
|
||||
export enum ServerFeature {
|
||||
ERROR_BULKS= "error-bulks", /* Current version is 1 */
|
||||
ADVANCED_CHANNEL_CHAT= "advanced-channel-chat", /* Current version is 1 */
|
||||
LOG_QUERY= "log-query", /* Current version is 1 */
|
||||
WHISPER_ECHO = "whisper-echo" /* Current version is 1 */
|
||||
}
|
||||
|
||||
export interface ServerFeatureEvents {
|
||||
notify_state_changed: {
|
||||
feature: ServerFeature,
|
||||
version?: number,
|
||||
support: ServerFeatureSupport
|
||||
}
|
||||
}
|
||||
|
||||
export class ServerFeatures {
|
||||
readonly events: Registry<ServerFeatureEvents>;
|
||||
private readonly connection: ConnectionHandler;
|
||||
private readonly explicitCommandHandler: ExplicitCommandHandler;
|
||||
private readonly stateChangeListener: (event: ConnectionEvents["notify_connection_state_changed"]) => void;
|
||||
|
||||
private featureAwait: Promise<boolean>;
|
||||
private featureAwaitCallback: (success: boolean) => void;
|
||||
private featuresSet = false;
|
||||
|
||||
private featureStates: {[key: string]: { version?: number, support: ServerFeatureSupport }} = {};
|
||||
|
||||
constructor(connection: ConnectionHandler) {
|
||||
this.events = new Registry<ServerFeatureEvents>();
|
||||
this.connection = connection;
|
||||
|
||||
this.connection.getServerConnection().command_handler_boss().register_explicit_handler("notifyfeaturesupport", this.explicitCommandHandler = command => {
|
||||
for(const set of command.arguments) {
|
||||
let support: ServerFeatureSupport;
|
||||
switch (parseInt(set["support"])) {
|
||||
case 0:
|
||||
support = "unsupported";
|
||||
break;
|
||||
|
||||
case 1:
|
||||
support = "supported";
|
||||
break;
|
||||
|
||||
case 2:
|
||||
support = "experimental";
|
||||
break;
|
||||
|
||||
case 3:
|
||||
support = "deprecated";
|
||||
break;
|
||||
|
||||
default:
|
||||
logWarn(LogCategory.SERVER, tr("Received feature %s with unknown support state: %s"), set["name"], set["support"])
|
||||
}
|
||||
this.setFeatureSupport(set["name"], support, parseInt(set["version"]));
|
||||
}
|
||||
});
|
||||
|
||||
this.connection.events().on("notify_connection_state_changed", this.stateChangeListener = event => {
|
||||
if(event.new_state === ConnectionState.CONNECTED) {
|
||||
this.connection.getServerConnection().send_command("listfeaturesupport").catch(error => {
|
||||
this.disableAllFeatures();
|
||||
if(error instanceof CommandResult) {
|
||||
if(error.id === ErrorCode.COMMAND_NOT_FOUND) {
|
||||
logDebug(LogCategory.SERVER, tr("Target server does not support the feature list command. Disabling all features."));
|
||||
return;
|
||||
}
|
||||
}
|
||||
logWarn(LogCategory.SERVER, tr("Failed to query server features: %o"), error);
|
||||
}).then(() => {
|
||||
this.featuresSet = true;
|
||||
if(this.featureAwaitCallback) {
|
||||
this.featureAwaitCallback(true);
|
||||
}
|
||||
});
|
||||
} else if(event.new_state === ConnectionState.DISCONNECTING || event.new_state === ConnectionState.UNCONNECTED) {
|
||||
this.disableAllFeatures();
|
||||
this.featureAwait = undefined;
|
||||
this.featureAwaitCallback = undefined;
|
||||
this.featuresSet = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.connection.events().off(this.stateChangeListener);
|
||||
this.connection.getServerConnection()?.command_handler_boss()?.unregister_explicit_handler("notifyfeaturesupport", this.explicitCommandHandler);
|
||||
|
||||
if(this.featureAwaitCallback) {
|
||||
this.featureAwaitCallback(false);
|
||||
}
|
||||
|
||||
this.events.destroy();
|
||||
}
|
||||
|
||||
supportsFeature(feature: ServerFeature, version?: number) : boolean {
|
||||
const support = this.featureStates[feature];
|
||||
if(!support) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(support.support === "supported" || support.support === "experimental" || support.support === "deprecated") {
|
||||
return typeof version === "number" ? version >= support.version : true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
awaitFeatures() : Promise<boolean> {
|
||||
if(this.featureAwait) {
|
||||
return this.featureAwait;
|
||||
} else if(this.featuresSet) {
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
|
||||
return this.featureAwait = new Promise<boolean>(resolve => this.featureAwaitCallback = resolve);
|
||||
}
|
||||
|
||||
listenSupportChange(feature: ServerFeature, listener: (support: boolean) => void, version?: number) : () => void {
|
||||
return this.events.on("notify_state_changed", event => {
|
||||
if(event.feature !== feature) {
|
||||
return;
|
||||
}
|
||||
|
||||
listener(this.supportsFeature(feature, version));
|
||||
});
|
||||
}
|
||||
|
||||
private disableAllFeatures() {
|
||||
for(const feature of Object.keys(this.featureStates) as ServerFeature[]) {
|
||||
this.setFeatureSupport(feature, "unsupported");
|
||||
}
|
||||
}
|
||||
|
||||
private setFeatureSupport(feature: ServerFeature, support: ServerFeatureSupport, version?: number) {
|
||||
logTrace(LogCategory.SERVER, tr("Setting server feature %s to %s (version %d)"), feature, support, version);
|
||||
if(support === "unsupported") {
|
||||
if(!this.featureStates[feature]) {
|
||||
return;
|
||||
}
|
||||
|
||||
delete this.featureStates[feature];
|
||||
this.events.fire("notify_state_changed", { feature: feature, support: "unsupported" });
|
||||
} else {
|
||||
if(!this.featureStates[feature] || this.featureStates[feature].version !== version || this.featureStates[feature].support !== support) {
|
||||
this.featureStates[feature] = {
|
||||
support: support,
|
||||
version: version
|
||||
};
|
||||
|
||||
this.events.fire("notify_state_changed", {
|
||||
feature: feature,
|
||||
support: support,
|
||||
version: version
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
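A sketch of how the new ServerFeatures helper is consumed. In this commit the instance is created and owned by the ConnectionHandler (handler.serverFeatures); constructing one manually here is only for illustration.

```typescript
import {ServerFeature, ServerFeatures} from "tc-shared/connection/ServerFeatures";

const features = new ServerFeatures(handler);

/* one shot: resolves true once the feature list arrived, false if the instance gets destroyed first */
features.awaitFeatures().then(received => {
    if(received && features.supportsFeature(ServerFeature.WHISPER_ECHO)) {
        /* e.g. offer the echo test */
    }
});

/* continuous: invoked whenever the support state of the feature changes, optionally requiring a minimum version */
const unregisterListener = features.listenSupportChange(ServerFeature.LOG_QUERY, supported => {
    console.log("log-query supported:", supported);
}, 1);

/* cleanup */
unregisterListener();
features.destroy();
```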
@ -1,44 +1,8 @@
|
|||
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
|
||||
import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {WhisperSession} from "tc-shared/voice/Whisper";
|
||||
|
||||
export enum PlayerState {
|
||||
PREBUFFERING,
|
||||
PLAYING,
|
||||
BUFFERING,
|
||||
STOPPING,
|
||||
STOPPED
|
||||
}
|
||||
|
||||
export type LatencySettings = {
|
||||
min_buffer: number; /* milliseconds */
|
||||
max_buffer: number; /* milliseconds */
|
||||
}
|
||||
|
||||
export interface VoiceClient {
|
||||
client_id: number;
|
||||
|
||||
callback_playback: () => any;
|
||||
callback_stopped: () => any;
|
||||
|
||||
callback_state_changed: (new_state: PlayerState) => any;
|
||||
|
||||
get_state() : PlayerState;
|
||||
|
||||
get_volume() : number;
|
||||
set_volume(volume: number) : void;
|
||||
|
||||
abort_replay();
|
||||
|
||||
support_latency_settings() : boolean;
|
||||
|
||||
reset_latency_settings();
|
||||
latency_settings(settings?: LatencySettings) : LatencySettings;
|
||||
|
||||
support_flush() : boolean;
|
||||
flush();
|
||||
}
|
||||
import {VoiceClient} from "tc-shared/voice/VoiceClient";
|
||||
import {WhisperSession, WhisperTarget} from "tc-shared/voice/VoiceWhisper";
|
||||
|
||||
export enum VoiceConnectionStatus {
|
||||
ClientUnsupported,
|
||||
|
@ -95,9 +59,9 @@ export abstract class AbstractVoiceConnection {
|
|||
abstract encodingSupported(codec: number) : boolean;
|
||||
abstract decodingSupported(codec: number) : boolean;
|
||||
|
||||
abstract registerClient(client_id: number) : VoiceClient;
|
||||
abstract availableClients() : VoiceClient[];
|
||||
abstract unregister_client(client: VoiceClient) : Promise<void>;
|
||||
abstract registerVoiceClient(clientId: number);
|
||||
abstract availableVoiceClients() : VoiceClient[];
|
||||
abstract unregisterVoiceClient(client: VoiceClient);
|
||||
|
||||
abstract voiceRecorder() : RecorderProfile;
|
||||
abstract acquireVoiceRecorder(recorder: RecorderProfile | undefined) : Promise<void>;
|
||||
|
@ -111,4 +75,8 @@ export abstract class AbstractVoiceConnection {
|
|||
|
||||
abstract setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined);
|
||||
abstract getWhisperSessionInitializer() : WhisperSessionInitializer | undefined;
|
||||
|
||||
abstract startWhisper(target: WhisperTarget) : Promise<void>;
|
||||
abstract getWhisperTarget() : WhisperTarget | undefined;
|
||||
abstract stopWhisper();
|
||||
}
|
|
@ -148,7 +148,11 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
|
|||
|
||||
event_registry.on("action_open_window_connect", event => {
|
||||
spawnConnectModal({
|
||||
default_connect_new_tab: event.new_tab
|
||||
default_connect_new_tab: event.newTab
|
||||
});
|
||||
});
|
||||
|
||||
event_registry.on("action_open_window_settings", event => {
|
||||
spawnSettingsModal(event.defaultCategory);
|
||||
});
|
||||
}
|
|
@ -5,14 +5,14 @@ export interface ClientGlobalControlEvents {
|
|||
/* open a basic window */
|
||||
action_open_window: {
|
||||
window:
|
||||
"settings" | /* use action_open_window_settings! */
|
||||
"bookmark-manage" |
|
||||
"query-manage" |
|
||||
"query-create" |
|
||||
"ban-list" |
|
||||
"permissions" |
|
||||
"token-list" |
|
||||
"token-use" |
|
||||
"settings",
|
||||
"token-use",
|
||||
connection?: ConnectionHandler
|
||||
},
|
||||
|
||||
|
@ -26,7 +26,11 @@ export interface ClientGlobalControlEvents {
|
|||
|
||||
/* some more specific window openings */
|
||||
action_open_window_connect: {
|
||||
new_tab: boolean
|
||||
newTab: boolean
|
||||
}
|
||||
|
||||
action_open_window_settings: {
|
||||
defaultCategory?: string
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
|||
import {createInfoModal} from "tc-shared/ui/elements/Modal";
|
||||
import * as stats from "./stats";
|
||||
import * as fidentity from "./profiles/identities/TeaForumIdentity";
|
||||
import {default_recorder, RecorderProfile, set_default_recorder} from "tc-shared/voice/RecorderProfile";
|
||||
import {defaultRecorder, RecorderProfile, setDefaultRecorder} from "tc-shared/voice/RecorderProfile";
|
||||
import * as cmanager from "tc-shared/ui/frames/connection_handlers";
|
||||
import {server_connections} from "tc-shared/ui/frames/connection_handlers";
|
||||
import {spawnConnectModal} from "tc-shared/ui/modal/ModalConnect";
|
||||
|
@ -99,8 +99,8 @@ async function initialize_app() {
|
|||
log.warn(LogCategory.GENERAL, tr("Client does not support aplayer.set_master_volume()... May client is too old?"));
|
||||
});
|
||||
|
||||
set_default_recorder(new RecorderProfile("default"));
|
||||
default_recorder.initialize().catch(error => {
|
||||
setDefaultRecorder(new RecorderProfile("default"));
|
||||
defaultRecorder.initialize().catch(error => {
|
||||
log.error(LogCategory.AUDIO, tr("Failed to initialize default recorder: %o"), error);
|
||||
});
|
||||
|
||||
|
|
|
@ -162,7 +162,9 @@ export interface SettingsEvents {
|
|||
mode: "global" | "server",
|
||||
|
||||
oldValue: string,
|
||||
newValue: string
|
||||
newValue: string,
|
||||
|
||||
newCastedValue: any
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -483,6 +485,12 @@ export class Settings extends StaticSettings {
|
|||
valueType: "boolean",
|
||||
};
|
||||
|
||||
static readonly KEY_VOICE_ECHO_TEST_ENABLED: ValuedSettingsKey<boolean> = {
|
||||
key: 'voice_echo_test_enabled',
|
||||
defaultValue: true,
|
||||
valueType: "boolean",
|
||||
};
|
||||
|
||||
static readonly FN_LOG_ENABLED: (category: string) => SettingsKey<boolean> = category => {
|
||||
return {
|
||||
key: "log." + category.toLowerCase() + ".enabled",
|
||||
|
@ -661,12 +669,21 @@ export class Settings extends StaticSettings {
|
|||
mode: "global",
|
||||
newValue: this.cacheGlobal[key.key],
|
||||
oldValue: oldValue,
|
||||
setting: key.key
|
||||
setting: key.key,
|
||||
newCastedValue: value
|
||||
});
|
||||
if(Settings.UPDATE_DIRECT)
|
||||
this.save();
|
||||
}
|
||||
|
||||
globalChangeListener<T extends ConfigValueTypes>(key: SettingsKey<T>, listener: (newValue: T) => void) : () => void {
|
||||
return this.events.on("notify_setting_changed", event => {
|
||||
if(event.setting === key.key && event.mode === "global") {
|
||||
listener(event.newCastedValue);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
save() {
|
||||
this.updated = false;
|
||||
let global = JSON.stringify(this.cacheGlobal);
|
||||
|
|
|
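The new globalChangeListener() pairs naturally with the echo test setting added above. A short sketch; `settings` is assumed to be the shared Settings instance.

```typescript
const unregister = settings.globalChangeListener(Settings.KEY_VOICE_ECHO_TEST_ENABLED, enabled => {
    console.log("echo test on connect is now", enabled ? "enabled" : "disabled");
});

/* stop listening when the owning component is torn down */
unregister();
```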
@ -22,6 +22,7 @@ import {ChannelEntryView as ChannelEntryView} from "./tree/Channel";
|
|||
import {spawnFileTransferModal} from "tc-shared/ui/modal/transfer/ModalFileTransfer";
|
||||
import {ViewReasonId} from "tc-shared/ConnectionHandler";
|
||||
import {EventChannelData} from "tc-shared/ui/frames/log/Definitions";
|
||||
import {ErrorCode} from "tc-shared/connection/ErrorCode";
|
||||
|
||||
export enum ChannelType {
|
||||
PERMANENT,
|
||||
|
@ -653,11 +654,15 @@ export class ChannelEntry extends ChannelTreeEntry<ChannelEvents> {
|
|||
return;
|
||||
}
|
||||
|
||||
this.channelTree.client.getServerConnection().command_helper.joinChannel(this, this.cachedPasswordHash).then(() => {
|
||||
this.channelTree.client.serverConnection.send_command("clientmove", {
|
||||
"clid": this.channelTree.client.getClientId(),
|
||||
"cid": this.getChannelId(),
|
||||
"cpw": this.cachedPasswordHash || ""
|
||||
}).then(() => {
|
||||
this.channelTree.client.sound.play(Sound.CHANNEL_JOINED);
|
||||
}).catch(error => {
|
||||
if(error instanceof CommandResult) {
|
||||
if(error.id == 781) { //Invalid password
|
||||
if(error.id == ErrorCode.CHANNEL_INVALID_PASSWORD) { //Invalid password
|
||||
this.invalidateCachedPassword();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,7 +28,8 @@ import {EventClient, EventType} from "tc-shared/ui/frames/log/Definitions";
|
|||
import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
|
||||
import {global_client_actions} from "tc-shared/events/GlobalEvents";
|
||||
import {ClientIcon} from "svg-sprites/client-icons";
|
||||
import {VoiceClient} from "tc-shared/connection/VoiceConnection";
|
||||
import {VoiceClient} from "tc-shared/voice/VoiceClient";
|
||||
import {VoicePlayerEvents, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
|
||||
|
||||
export enum ClientType {
|
||||
CLIENT_VOICE,
|
||||
|
@@ -138,9 +139,9 @@ export class ClientConnectionInfo {
|
|||
}
|
||||
|
||||
export interface ClientEvents extends ChannelTreeEntryEvents {
|
||||
"notify_enter_view": {},
|
||||
notify_enter_view: {},
|
||||
notify_client_moved: { oldChannel: ChannelEntry, newChannel: ChannelEntry }
|
||||
"notify_left_view": {
|
||||
notify_left_view: {
|
||||
reason: ViewReasonId;
|
||||
message?: string;
|
||||
serverLeave: boolean;
|
||||
|
@@ -152,27 +153,27 @@ export interface ClientEvents extends ChannelTreeEntryEvents {
|
|||
},
|
||||
notify_mute_state_change: { muted: boolean }
|
||||
notify_speak_state_change: { speaking: boolean },
|
||||
"notify_audio_level_changed": { newValue: number },
|
||||
notify_audio_level_changed: { newValue: number },
|
||||
|
||||
"music_status_update": {
|
||||
music_status_update: {
|
||||
player_buffered_index: number,
|
||||
player_replay_index: number
|
||||
},
|
||||
"music_song_change": {
|
||||
music_song_change: {
|
||||
"song": SongInfo
|
||||
},
|
||||
|
||||
/* TODO: Move this out of the music bots interface? */
|
||||
"playlist_song_add": { song: PlaylistSong },
|
||||
"playlist_song_remove": { song_id: number },
|
||||
"playlist_song_reorder": { song_id: number, previous_song_id: number },
|
||||
"playlist_song_loaded": { song_id: number, success: boolean, error_msg?: string, metadata?: string },
|
||||
|
||||
playlist_song_add: { song: PlaylistSong },
|
||||
playlist_song_remove: { song_id: number },
|
||||
playlist_song_reorder: { song_id: number, previous_song_id: number },
|
||||
playlist_song_loaded: { song_id: number, success: boolean, error_msg?: string, metadata?: string },
|
||||
}
|
||||
|
||||
export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
||||
readonly events: Registry<ClientEvents>;
|
||||
readonly view: React.RefObject<ClientEntryView> = React.createRef<ClientEntryView>();
|
||||
channelTree: ChannelTree;
|
||||
|
||||
protected _clientId: number;
|
||||
protected _channel: ChannelEntry;
|
||||
|
@@ -182,19 +183,18 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
protected _speaking: boolean;
|
||||
protected _listener_initialized: boolean;
|
||||
|
||||
protected _audio_handle: VoiceClient;
|
||||
protected _audio_volume: number;
|
||||
protected _audio_muted: boolean;
|
||||
protected voiceHandle: VoiceClient;
|
||||
protected voiceVolume: number;
|
||||
protected voiceMuted: boolean;
|
||||
private readonly voiceCallbackStateChanged;
|
||||
|
||||
private _info_variables_promise: Promise<void>;
|
||||
private _info_variables_promise_timestamp: number;
|
||||
private promiseClientInfo: Promise<void>;
|
||||
private promiseClientInfoTimestamp: number;
|
||||
|
||||
private _info_connection_promise: Promise<ClientConnectionInfo>;
|
||||
private _info_connection_promise_timestamp: number;
|
||||
private _info_connection_promise_resolve: any;
|
||||
private _info_connection_promise_reject: any;
|
||||
|
||||
channelTree: ChannelTree;
|
||||
private promiseConnectionInfo: Promise<ClientConnectionInfo>;
|
||||
private promiseConnectionInfoTimestamp: number;
|
||||
private promiseConnectionInfoResolve: any;
|
||||
private promiseConnectionInfoReject: any;
|
||||
|
||||
constructor(clientId: number, clientName, properties: ClientProperties = new ClientProperties()) {
|
||||
super();
|
||||
|
@@ -205,61 +205,59 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
this._clientId = clientId;
|
||||
this.channelTree = null;
|
||||
this._channel = null;
|
||||
|
||||
this.voiceCallbackStateChanged = this.handleVoiceStateChange.bind(this);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if(this._audio_handle) {
|
||||
if(this.voiceHandle) {
|
||||
log.warn(LogCategory.AUDIO, tr("Destroying client with an active audio handle. This could cause memory leaks!"));
|
||||
try {
|
||||
this._audio_handle.abort_replay();
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to abort replay: %o"), error);
|
||||
}
|
||||
this._audio_handle.callback_playback = undefined;
|
||||
this._audio_handle.callback_stopped = undefined;
|
||||
this._audio_handle = undefined;
|
||||
/* TODO: Unregister all voice events? */
|
||||
this.voiceHandle.abortReplay();
|
||||
this.voiceHandle = undefined;
|
||||
}
|
||||
|
||||
this._channel = undefined;
|
||||
}
|
||||
|
||||
tree_unregistered() {
|
||||
this.channelTree = undefined;
|
||||
if(this._audio_handle) {
|
||||
try {
|
||||
this._audio_handle.abort_replay();
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to abort replay: %o"), error);
|
||||
}
|
||||
this._audio_handle.callback_playback = undefined;
|
||||
this._audio_handle.callback_stopped = undefined;
|
||||
this._audio_handle = undefined;
|
||||
}
|
||||
|
||||
this._channel = undefined;
|
||||
}
|
||||
|
||||
set_audio_handle(handle: VoiceClient) {
|
||||
if(this._audio_handle === handle)
|
||||
setVoiceClient(handle: VoiceClient) {
|
||||
if(this.voiceHandle === handle)
|
||||
return;
|
||||
|
||||
if(this._audio_handle) {
|
||||
this._audio_handle.callback_playback = undefined;
|
||||
this._audio_handle.callback_stopped = undefined;
|
||||
}
|
||||
//TODO: maybe ensure that the id is the same?
|
||||
this._audio_handle = handle;
|
||||
if(!handle) {
|
||||
this.speaking = false;
|
||||
return;
|
||||
if(this.voiceHandle) {
|
||||
this.voiceHandle.events.off(this.voiceCallbackStateChanged);
|
||||
}
|
||||
|
||||
handle.callback_playback = () => this.speaking = true;
|
||||
handle.callback_stopped = () => this.speaking = false;
|
||||
this.voiceHandle = handle;
|
||||
if(handle) {
|
||||
this.voiceHandle.events.on("notify_state_changed", this.voiceCallbackStateChanged);
|
||||
this.handleVoiceStateChange({ oldState: VoicePlayerState.STOPPED, newState: handle.getState() });
|
||||
}
|
||||
}
|
||||
|
||||
get_audio_handle() : VoiceClient {
|
||||
return this._audio_handle;
|
||||
private handleVoiceStateChange(event: VoicePlayerEvents["notify_state_changed"]) {
|
||||
switch (event.newState) {
|
||||
case VoicePlayerState.PLAYING:
|
||||
case VoicePlayerState.STOPPING:
|
||||
this.speaking = true;
|
||||
break;
|
||||
|
||||
case VoicePlayerState.STOPPED:
|
||||
case VoicePlayerState.INITIALIZING:
|
||||
this.speaking = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private updateVoiceVolume() {
|
||||
let volume = this.voiceMuted ? 0 : this.voiceVolume;
|
||||
|
||||
/* TODO: If a whisper session has been set, update this as well */
|
||||
this.voiceHandle?.setVolume(volume);
|
||||
}
|
||||
|
||||
getVoiceClient() : VoiceClient {
|
||||
return this.voiceHandle;
|
||||
}
|
||||
|
||||
get properties() : ClientProperties {
|
||||
|
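handleVoiceStateChange above is now the single place that derives the speaking flag from the voice player state: PLAYING and STOPPING count as speaking, STOPPED and INITIALIZING do not. The same mapping, pulled out as a hypothetical helper for reference when wiring other consumers of VoicePlayerEvents:

import {VoicePlayerState} from "tc-shared/voice/VoicePlayer";

/* Mirrors ClientEntry.handleVoiceStateChange: which player states should light up the talk indicator. */
function isAudiblySpeaking(state: VoicePlayerState) : boolean {
    switch (state) {
        case VoicePlayerState.PLAYING:
        case VoicePlayerState.STOPPING:
            return true;

        case VoicePlayerState.STOPPED:
        case VoicePlayerState.INITIALIZING:
        default:
            return false;
    }
}
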
@@ -271,36 +269,33 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
clientUid(){ return this.properties.client_unique_identifier; }
|
||||
clientId(){ return this._clientId; }
|
||||
|
||||
is_muted() { return !!this._audio_muted; }
|
||||
set_muted(flag: boolean, force: boolean) {
|
||||
if(this._audio_muted === flag && !force)
|
||||
return;
|
||||
isMuted() { return !!this.voiceMuted; }
|
||||
|
||||
if(flag) {
|
||||
/* TODO: Move this method to the view (e.g. channel tree) and rename with to setClientMuted */
|
||||
setMuted(flagMuted: boolean, force: boolean) {
|
||||
if(this.voiceMuted === flagMuted && !force) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(flagMuted) {
|
||||
this.channelTree.client.serverConnection.send_command('clientmute', {
|
||||
clid: this.clientId()
|
||||
}).then(() => {});
|
||||
} else if(this._audio_muted) {
|
||||
} else if(this.voiceMuted) {
|
||||
this.channelTree.client.serverConnection.send_command('clientunmute', {
|
||||
clid: this.clientId()
|
||||
}).then(() => {});
|
||||
}
|
||||
this._audio_muted = flag;
|
||||
this.voiceMuted = flagMuted;
|
||||
|
||||
this.channelTree.client.settings.changeServer(Settings.FN_CLIENT_MUTED(this.clientUid()), flag);
|
||||
if(this._audio_handle) {
|
||||
if(flag) {
|
||||
this._audio_handle.set_volume(0);
|
||||
} else {
|
||||
this._audio_handle.set_volume(this._audio_volume);
|
||||
}
|
||||
}
|
||||
this.channelTree.client.settings.changeServer(Settings.FN_CLIENT_MUTED(this.clientUid()), flagMuted);
|
||||
this.updateVoiceVolume();
|
||||
|
||||
this.events.fire("notify_mute_state_change", { muted: flag });
|
||||
this.events.fire("notify_mute_state_change", { muted: flagMuted });
|
||||
for(const client of this.channelTree.clients) {
|
||||
if(client === this || client.properties.client_unique_identifier !== this.properties.client_unique_identifier)
|
||||
continue;
|
||||
client.set_muted(flag, false);
|
||||
client.setMuted(flagMuted, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
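setMuted now handles the clientmute/clientunmute command, persists the flag via Settings.FN_CLIENT_MUTED and re-applies the playback volume through updateVoiceVolume, so callers only need the public isMuted()/setMuted() pair. A hypothetical context-menu style toggle built on nothing but those two methods:

/* Flip the local mute state of a client entry; the entry itself sends the command,
   stores the setting and updates the voice volume. */
function toggleClientMute(client: ClientEntry) {
    client.setMuted(!client.isMuted(), false);
}
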
@@ -676,26 +671,24 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
type: contextmenu.MenuEntryType.ENTRY,
|
||||
name: tr("Change playback latency"),
|
||||
callback: () => {
|
||||
spawnChangeLatency(this, this._audio_handle.latency_settings(), () => {
|
||||
this._audio_handle.reset_latency_settings();
|
||||
return this._audio_handle.latency_settings();
|
||||
}, settings => this._audio_handle.latency_settings(settings), this._audio_handle.support_flush ? () => {
|
||||
this._audio_handle.flush();
|
||||
} : undefined);
|
||||
spawnChangeLatency(this, this.voiceHandle.getLatencySettings(), () => {
|
||||
this.voiceHandle.resetLatencySettings();
|
||||
return this.voiceHandle.getLatencySettings();
|
||||
}, settings => this.voiceHandle.setLatencySettings(settings), () => this.voiceHandle.flushBuffer());
|
||||
},
|
||||
visible: this._audio_handle && this._audio_handle.support_latency_settings()
|
||||
visible: !!this.voiceHandle
|
||||
}, {
|
||||
type: contextmenu.MenuEntryType.ENTRY,
|
||||
icon_class: ClientIcon.InputMutedLocal,
|
||||
name: tr("Mute client"),
|
||||
visible: !this._audio_muted,
|
||||
callback: () => this.set_muted(true, false)
|
||||
visible: !this.voiceMuted,
|
||||
callback: () => this.setMuted(true, false)
|
||||
}, {
|
||||
type: contextmenu.MenuEntryType.ENTRY,
|
||||
icon_class: ClientIcon.InputMutedLocal,
|
||||
name: tr("Unmute client"),
|
||||
visible: this._audio_muted,
|
||||
callback: () => this.set_muted(false, false)
|
||||
visible: this.voiceMuted,
|
||||
callback: () => this.setMuted(false, false)
|
||||
},
|
||||
contextmenu.Entry.CLOSE(() => trigger_close && on_close ? on_close() : {})
|
||||
);
|
||||
|
@@ -767,14 +760,11 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
reorder_channel = true;
|
||||
}
|
||||
if(variable.key == "client_unique_identifier") {
|
||||
this._audio_volume = this.channelTree.client.settings.server(Settings.FN_CLIENT_VOLUME(this.clientUid()), 1);
|
||||
this.voiceVolume = this.channelTree.client.settings.server(Settings.FN_CLIENT_VOLUME(this.clientUid()), 1);
|
||||
const mute_status = this.channelTree.client.settings.server(Settings.FN_CLIENT_MUTED(this.clientUid()), false);
|
||||
this.set_muted(mute_status, mute_status); /* force only needed when we want to mute the client */
|
||||
|
||||
if(this._audio_handle)
|
||||
this._audio_handle.set_volume(this._audio_muted ? 0 : this._audio_volume);
|
||||
|
||||
log.debug(LogCategory.CLIENT, tr("Loaded client (%s) server specific properties. Volume: %o Muted: %o."), this.clientUid(), this._audio_volume, this._audio_muted);
|
||||
this.setMuted(mute_status, mute_status); /* force only needed when we want to mute the client */
|
||||
this.updateVoiceVolume();
|
||||
log.debug(LogCategory.CLIENT, tr("Loaded client (%s) server specific properties. Volume: %o Muted: %o."), this.clientUid(), this.voiceVolume, this.voiceMuted);
|
||||
}
|
||||
if(variable.key == "client_talk_power") {
|
||||
reorder_channel = true;
|
||||
|
@@ -815,13 +805,13 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
}
|
||||
|
||||
updateClientVariables(force_update?: boolean) : Promise<void> {
|
||||
if(Date.now() - 10 * 60 * 1000 < this._info_variables_promise_timestamp && this._info_variables_promise && (typeof(force_update) !== "boolean" || force_update))
|
||||
return this._info_variables_promise;
|
||||
if(Date.now() - 10 * 60 * 1000 < this.promiseClientInfoTimestamp && this.promiseClientInfo && (typeof(force_update) !== "boolean" || force_update))
|
||||
return this.promiseClientInfo;
|
||||
|
||||
this._info_variables_promise_timestamp = Date.now();
|
||||
return (this._info_variables_promise = new Promise<void>((resolve, reject) => {
|
||||
this.promiseClientInfoTimestamp = Date.now();
|
||||
return (this.promiseClientInfo = new Promise<void>((resolve, reject) => {
|
||||
this.channelTree.client.serverConnection.send_command("clientgetvariables", {clid: this.clientId()}).then(() => resolve()).catch(error => {
|
||||
this._info_connection_promise_timestamp = 0; /* not succeeded */
|
||||
this.promiseConnectionInfoTimestamp = 0; /* not succeeded */
|
||||
reject(error);
|
||||
});
|
||||
}));
|
||||
|
@@ -896,46 +886,46 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
|
|||
|
||||
/* last request is at most ~1s old, so we can update roughly once per second */
|
||||
request_connection_info() : Promise<ClientConnectionInfo> {
|
||||
if(Date.now() - 900 < this._info_connection_promise_timestamp && this._info_connection_promise)
|
||||
return this._info_connection_promise;
|
||||
if(Date.now() - 900 < this.promiseConnectionInfoTimestamp && this.promiseConnectionInfo)
|
||||
return this.promiseConnectionInfo;
|
||||
|
||||
if(this._info_connection_promise_reject)
|
||||
this._info_connection_promise_resolve("timeout");
|
||||
if(this.promiseConnectionInfoReject)
|
||||
this.promiseConnectionInfoResolve("timeout");
|
||||
|
||||
let _local_reject; /* to ensure we're using the right resolve! */
|
||||
this._info_connection_promise = new Promise<ClientConnectionInfo>((resolve, reject) => {
|
||||
this._info_connection_promise_resolve = resolve;
|
||||
this._info_connection_promise_reject = reject;
|
||||
this.promiseConnectionInfo = new Promise<ClientConnectionInfo>((resolve, reject) => {
|
||||
this.promiseConnectionInfoResolve = resolve;
|
||||
this.promiseConnectionInfoReject = reject;
|
||||
_local_reject = reject;
|
||||
});
|
||||
|
||||
this._info_connection_promise_timestamp = Date.now();
|
||||
this.promiseConnectionInfoTimestamp = Date.now();
|
||||
this.channelTree.client.serverConnection.send_command("getconnectioninfo", {clid: this._clientId}).catch(error => _local_reject(error));
|
||||
return this._info_connection_promise;
|
||||
return this.promiseConnectionInfo;
|
||||
}
|
||||
|
||||
set_connection_info(info: ClientConnectionInfo) {
|
||||
if(!this._info_connection_promise_resolve)
|
||||
if(!this.promiseConnectionInfoResolve)
|
||||
return;
|
||||
this._info_connection_promise_resolve(info);
|
||||
this._info_connection_promise_resolve = undefined;
|
||||
this._info_connection_promise_reject = undefined;
|
||||
this.promiseConnectionInfoResolve(info);
|
||||
this.promiseConnectionInfoResolve = undefined;
|
||||
this.promiseConnectionInfoReject = undefined;
|
||||
}
|
||||
|
||||
setAudioVolume(value: number) {
|
||||
if(this._audio_volume == value)
|
||||
if(this.voiceVolume == value)
|
||||
return;
|
||||
|
||||
this._audio_volume = value;
|
||||
this.voiceVolume = value;
|
||||
|
||||
this.get_audio_handle()?.set_volume(value);
|
||||
this.updateVoiceVolume();
|
||||
this.channelTree.client.settings.changeServer(Settings.FN_CLIENT_VOLUME(this.clientUid()), value);
|
||||
|
||||
this.events.fire("notify_audio_level_changed", { newValue: value });
|
||||
}
|
||||
|
||||
getAudioVolume() {
|
||||
return this._audio_volume;
|
||||
return this.voiceVolume;
|
||||
}
|
||||
}
|
||||
|
||||
|
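setAudioVolume stores the new value, re-applies it through updateVoiceVolume and persists it with Settings.FN_CLIENT_VOLUME; volumes are linear factors where 1 means 100%. A small, hypothetical slider handler that relies only on the getter and setter shown above:

/* Convert a 0-200% slider position into the linear volume factor the entry expects. */
function applyVolumeSlider(client: ClientEntry, sliderPercent: number) {
    client.setAudioVolume(sliderPercent / 100);
    console.log("Client %o now plays at %d%%", client.clientId(), client.getAudioVolume() * 100);
}
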
@@ -1021,8 +1011,17 @@ export class LocalClientEntry extends ClientEntry {
|
|||
}
|
||||
}
|
||||
|
||||
export enum MusicClientPlayerState {
|
||||
SLEEPING,
|
||||
LOADING,
|
||||
|
||||
PLAYING,
|
||||
PAUSED,
|
||||
STOPPED
|
||||
}
|
||||
|
||||
export class MusicClientProperties extends ClientProperties {
|
||||
player_state: number = 0;
|
||||
player_state: number = 0; /* MusicClientPlayerState */
|
||||
player_volume: number = 0;
|
||||
|
||||
client_playlist_id: number = 0;
|
||||
|
@@ -1033,26 +1032,6 @@ export class MusicClientProperties extends ClientProperties {
|
|||
client_uptime_mode: number = 0;
|
||||
}
|
||||
|
||||
/*
|
||||
* command[index]["song_id"] = element ? element->getSongId() : 0;
|
||||
command[index]["song_url"] = element ? element->getUrl() : "";
|
||||
command[index]["song_invoker"] = element ? element->getInvoker() : 0;
|
||||
command[index]["song_loaded"] = false;
|
||||
|
||||
auto entry = dynamic_pointer_cast<ts::music::PlayableSong>(element);
|
||||
if(entry) {
|
||||
auto data = entry->song_loaded_data();
|
||||
command[index]["song_loaded"] = entry->song_loaded() && data;
|
||||
|
||||
if(entry->song_loaded() && data) {
|
||||
command[index]["song_title"] = data->title;
|
||||
command[index]["song_description"] = data->description;
|
||||
command[index]["song_thumbnail"] = data->thumbnail;
|
||||
command[index]["song_length"] = data->length.count();
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
export class SongInfo {
|
||||
song_id: number = 0;
|
||||
song_url: string = "";
|
||||
|
@@ -1220,14 +1199,12 @@ export class MusicClientEntry extends ClientEntry {
|
|||
type: contextmenu.MenuEntryType.ENTRY,
|
||||
name: tr("Change playback latency"),
|
||||
callback: () => {
|
||||
spawnChangeLatency(this, this._audio_handle.latency_settings(), () => {
|
||||
this._audio_handle.reset_latency_settings();
|
||||
return this._audio_handle.latency_settings();
|
||||
}, settings => this._audio_handle.latency_settings(settings), this._audio_handle.support_flush ? () => {
|
||||
this._audio_handle.flush();
|
||||
} : undefined);
|
||||
spawnChangeLatency(this, this.voiceHandle.getLatencySettings(), () => {
|
||||
this.voiceHandle.resetLatencySettings();
|
||||
return this.voiceHandle.getLatencySettings();
|
||||
}, settings => this.voiceHandle.setLatencySettings(settings), () => this.voiceHandle.flushBuffer());
|
||||
},
|
||||
visible: this._audio_handle && this._audio_handle.support_latency_settings()
|
||||
visible: !!this.voiceHandle
|
||||
},
|
||||
contextmenu.Entry.HR(),
|
||||
{
|
||||
|
@@ -1276,4 +1253,15 @@ export class MusicClientEntry extends ClientEntry {
|
|||
this.channelTree.client.serverConnection.send_command("musicbotplayerinfo", {bot_id: this.properties.client_database_id }).then(() => {});
|
||||
return this._info_promise;
|
||||
}
|
||||
|
||||
isCurrentlyPlaying() {
|
||||
switch (this.properties.player_state) {
|
||||
case MusicClientPlayerState.PLAYING:
|
||||
case MusicClientPlayerState.LOADING:
|
||||
return true;
|
||||
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
|
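isCurrentlyPlaying counts both PLAYING and LOADING as active playback, which is exactly the decision the music sidebar below uses to flip its play/pause buttons. A tiny, hypothetical helper expressing the same rule:

/* Pick the control icon for a music bot based on the new helper. */
function playbackButtonIcon(bot: MusicClientEntry) : "pause" | "play" {
    return bot.isCurrentlyPlaying() ? "pause" : "play";
}
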
@@ -6,7 +6,6 @@ import {ReactComponentBase} from "tc-shared/ui/react-elements/ReactComponentBase
|
|||
import {
|
||||
ConnectionEvents,
|
||||
ConnectionHandler,
|
||||
ConnectionState as CConnectionState,
|
||||
ConnectionStateUpdateType
|
||||
} from "tc-shared/ConnectionHandler";
|
||||
import {Event, EventHandler, ReactEventHandler, Registry} from "tc-shared/events";
|
||||
|
@@ -23,10 +22,8 @@ import {
|
|||
} from "tc-shared/bookmarks";
|
||||
import * as contextmenu from "tc-shared/ui/elements/ContextMenu";
|
||||
import {createInputModal} from "tc-shared/ui/elements/Modal";
|
||||
import {default_recorder} from "tc-shared/voice/RecorderProfile";
|
||||
import {global_client_actions} from "tc-shared/events/GlobalEvents";
|
||||
import {icon_cache_loader} from "tc-shared/file/Icons";
|
||||
import {InputState} from "tc-shared/voice/RecorderBase";
|
||||
|
||||
const cssStyle = require("./index.scss");
|
||||
const cssButtonStyle = require("./button.scss");
|
||||
|
@@ -51,7 +48,7 @@ class ConnectButton extends ReactComponentBase<{ multiSession: boolean; event_re
|
|||
if(!this.state.connected) {
|
||||
subentries.push(
|
||||
<DropdownEntry key={"connect-server"} icon={"client-connect"} text={<Translatable>Connect to a server</Translatable>}
|
||||
onClick={ () => global_client_actions.fire("action_open_window_connect", {new_tab: false }) } />
|
||||
onClick={ () => global_client_actions.fire("action_open_window_connect", {newTab: false }) } />
|
||||
);
|
||||
} else {
|
||||
subentries.push(
|
||||
|
@@ -67,14 +64,14 @@ class ConnectButton extends ReactComponentBase<{ multiSession: boolean; event_re
|
|||
}
|
||||
subentries.push(
|
||||
<DropdownEntry key={"connect-new-tab"} icon={"client-connect"} text={<Translatable>Connect to a server in another tab</Translatable>}
|
||||
onClick={ () => global_client_actions.fire("action_open_window_connect", { new_tab: true }) } />
|
||||
onClick={ () => global_client_actions.fire("action_open_window_connect", { newTab: true }) } />
|
||||
);
|
||||
}
|
||||
|
||||
if(!this.state.connected) {
|
||||
return (
|
||||
<Button colorTheme={"default"} autoSwitch={false} iconNormal={"client-connect"} tooltip={tr("Connect to a server")}
|
||||
onToggle={ () => global_client_actions.fire("action_open_window_connect", { new_tab: false }) }>
|
||||
onToggle={ () => global_client_actions.fire("action_open_window_connect", { newTab: false }) }>
|
||||
{subentries}
|
||||
</Button>
|
||||
);
|
||||
|
|
|
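The connect-window action payload switches from new_tab to the camelCase newTab here, so every dispatcher has to follow. A minimal dispatch sketch using the global action registry already imported in this file:

import {global_client_actions} from "tc-shared/events/GlobalEvents";

/* Open the connect window in a fresh tab; the payload key is now camelCase. */
global_client_actions.fire("action_open_window_connect", { newTab: true });
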
@@ -136,7 +136,7 @@ export class ClientInfo {
|
|||
}
|
||||
|
||||
const volume = this._html_tag.find(".client-local-volume");
|
||||
volume.text((client && client.get_audio_handle() ? (client.get_audio_handle().get_volume() * 100) : -1).toFixed(0) + "%");
|
||||
volume.text((client && client.getVoiceClient() ? (client.getVoiceClient().getVolume() * 100) : -1).toFixed(0) + "%");
|
||||
}
|
||||
|
||||
/* teaspeak forum */
|
||||
|
@@ -184,7 +184,7 @@ export class ClientInfo {
|
|||
)
|
||||
)
|
||||
}
|
||||
if(client.is_muted()) {
|
||||
if(client.isMuted()) {
|
||||
container_status_entries.append(
|
||||
$.spawn("div").addClass("status-entry").append(
|
||||
$.spawn("div").addClass("icon_em client-input_muted_local"),
|
||||
|
|
|
@@ -1,13 +1,13 @@
|
|||
import {Frame, FrameContent} from "tc-shared/ui/frames/chat_frame";
|
||||
import {ClientEvents, MusicClientEntry, SongInfo} from "tc-shared/ui/client";
|
||||
import {ClientEvents, MusicClientEntry, MusicClientPlayerState, SongInfo} from "tc-shared/ui/client";
|
||||
import {LogCategory} from "tc-shared/log";
|
||||
import {CommandResult, PlaylistSong} from "tc-shared/connection/ServerConnectionDeclaration";
|
||||
import {createErrorModal, createInputModal} from "tc-shared/ui/elements/Modal";
|
||||
import * as log from "tc-shared/log";
|
||||
import * as image_preview from "../image_preview";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {PlayerState} from "tc-shared/connection/VoiceConnection";
|
||||
import {ErrorCode} from "tc-shared/connection/ErrorCode";
|
||||
import {VoicePlayerState} from "tc-shared/voice/VoicePlayer";
|
||||
|
||||
export interface MusicSidebarEvents {
|
||||
"open": {}, /* triggers when frame should be shown */
|
||||
|
@@ -72,7 +72,7 @@ export class MusicInfo {
|
|||
private _html_tag: JQuery;
|
||||
private _container_playlist: JQuery;
|
||||
|
||||
private _current_bot: MusicClientEntry | undefined;
|
||||
private currentMusicBot: MusicClientEntry | undefined;
|
||||
private update_song_info: number = 0; /* timestamp when we force update the info */
|
||||
private time_select: {
|
||||
active: boolean,
|
||||
|
@@ -113,7 +113,7 @@ export class MusicInfo {
|
|||
this._html_tag && this._html_tag.remove();
|
||||
this._html_tag = undefined;
|
||||
|
||||
this._current_bot = undefined;
|
||||
this.currentMusicBot = undefined;
|
||||
this.previous_frame_content = undefined;
|
||||
}
|
||||
|
||||
|
@@ -167,15 +167,13 @@ export class MusicInfo {
|
|||
this.events.on(["bot_change", "bot_property_update"], event => {
|
||||
if(event.type === "bot_property_update" && event.as<"bot_property_update">().properties.indexOf("player_state") == -1) return;
|
||||
|
||||
/* FIXME: Is this right, using our player state?! */
|
||||
button_play.toggleClass("hidden", this._current_bot === undefined || this._current_bot.properties.player_state < PlayerState.STOPPING);
|
||||
button_play.toggleClass("hidden", this.currentMusicBot === undefined || this.currentMusicBot.isCurrentlyPlaying());
|
||||
});
|
||||
|
||||
this.events.on(["bot_change", "bot_property_update"], event => {
|
||||
if(event.type === "bot_property_update" && event.as<"bot_property_update">().properties.indexOf("player_state") == -1) return;
|
||||
|
||||
/* FIXME: Is this right, using our player state?! */
|
||||
button_pause.toggleClass("hidden", this._current_bot !== undefined && this._current_bot.properties.player_state >= PlayerState.STOPPING);
|
||||
button_pause.toggleClass("hidden", this.currentMusicBot !== undefined && !this.currentMusicBot.isCurrentlyPlaying());
|
||||
});
|
||||
|
||||
this._html_tag.find(".control-buttons .button-rewind").on('click', () => this.events.fire("action_rewind"));
|
||||
|
@@ -197,7 +195,7 @@ export class MusicInfo {
|
|||
thumb.on('mousedown', event => event.button === 0 && this.events.fire("playtime_move_begin"));
|
||||
|
||||
this.events.on(["bot_change", "player_song_change", "player_time_update", "playtime_move_end"], event => {
|
||||
if(!this._current_bot) {
|
||||
if(!this.currentMusicBot) {
|
||||
this.time_select.max_time = 0;
|
||||
indicator_buffered.each((_, e) => { e.style.width = "0%"; });
|
||||
indicator_playtime.each((_, e) => { e.style.width = "0%"; });
|
||||
|
@@ -210,7 +208,7 @@ export class MusicInfo {
|
|||
if(event.type === "playtime_move_end" && !event.as<"playtime_move_end">().canceled) return;
|
||||
|
||||
const update_info = Date.now() > this.update_song_info;
|
||||
this._current_bot.requestPlayerInfo(update_info ? 1000 : 60 * 1000).then(data => {
|
||||
this.currentMusicBot.requestPlayerInfo(update_info ? 1000 : 60 * 1000).then(data => {
|
||||
if(update_info)
|
||||
this.display_song_info(data);
|
||||
|
||||
|
@@ -313,9 +311,9 @@ export class MusicInfo {
|
|||
let song: SongInfo;
|
||||
|
||||
/* update the player info so we don't get old data */
|
||||
if(this._current_bot) {
|
||||
if(this.currentMusicBot) {
|
||||
this.update_song_info = 0;
|
||||
this._current_bot.requestPlayerInfo(1000).then(data => {
|
||||
this.currentMusicBot.requestPlayerInfo(1000).then(data => {
|
||||
this.display_song_info(data);
|
||||
}).catch(error => {
|
||||
log.warn(LogCategory.CLIENT, tr("Failed to update current song for side bar: %o"), error);
|
||||
|
@@ -366,9 +364,9 @@ export class MusicInfo {
|
|||
private initialize_listener() {
|
||||
//Must come first!
|
||||
this.events.on("player_song_change", event => {
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
this._current_bot.requestPlayerInfo(0); /* enforce an info refresh */
|
||||
this.currentMusicBot.requestPlayerInfo(0); /* enforce an info refresh */
|
||||
});
|
||||
|
||||
/* bot property listener */
|
||||
|
@@ -414,7 +412,7 @@ export class MusicInfo {
|
|||
};
|
||||
|
||||
this.events.on(Object.keys(action_map) as any, event => {
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
const action_id = action_map[event.type];
|
||||
if(typeof action_id === "undefined") {
|
||||
|
@@ -422,7 +420,7 @@ export class MusicInfo {
|
|||
return;
|
||||
}
|
||||
const data = {
|
||||
bot_id: this._current_bot.properties.client_database_id,
|
||||
bot_id: this.currentMusicBot.properties.client_database_id,
|
||||
action: action_id,
|
||||
units: event.units
|
||||
};
|
||||
|
@@ -437,13 +435,13 @@ export class MusicInfo {
|
|||
}
|
||||
|
||||
this.events.on("action_song_set", event => {
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
const connection = this.handle.handle.serverConnection;
|
||||
if(!connection || !connection.connected()) return;
|
||||
|
||||
connection.send_command("playlistsongsetcurrent", {
|
||||
playlist_id: this._current_bot.properties.client_playlist_id,
|
||||
playlist_id: this.currentMusicBot.properties.client_playlist_id,
|
||||
song_id: event.song_id
|
||||
}).catch(error => {
|
||||
if(error instanceof CommandResult && error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) return;
|
||||
|
@@ -455,7 +453,7 @@ export class MusicInfo {
|
|||
});
|
||||
|
||||
this.events.on("action_song_add", () => {
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
createInputModal(tr("Enter song URL"), tr("Please enter the target song URL"), text => {
|
||||
try {
|
||||
|
@@ -465,11 +463,11 @@ export class MusicInfo {
|
|||
return false;
|
||||
}
|
||||
}, result => {
|
||||
if(!result || !this._current_bot) return;
|
||||
if(!result || !this.currentMusicBot) return;
|
||||
|
||||
const connection = this.handle.handle.serverConnection;
|
||||
connection.send_command("playlistsongadd", {
|
||||
playlist_id: this._current_bot.properties.client_playlist_id,
|
||||
playlist_id: this.currentMusicBot.properties.client_playlist_id,
|
||||
url: result
|
||||
}).catch(error => {
|
||||
if(error instanceof CommandResult && error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) return;
|
||||
|
@@ -483,13 +481,13 @@ export class MusicInfo {
|
|||
});
|
||||
|
||||
this.events.on("action_song_delete", event => {
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
const connection = this.handle.handle.serverConnection;
|
||||
if(!connection || !connection.connected()) return;
|
||||
|
||||
connection.send_command("playlistsongremove", {
|
||||
playlist_id: this._current_bot.properties.client_playlist_id,
|
||||
playlist_id: this.currentMusicBot.properties.client_playlist_id,
|
||||
song_id: event.song_id
|
||||
}).catch(error => {
|
||||
if(error instanceof CommandResult && error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) return;
|
||||
|
@@ -506,7 +504,7 @@ export class MusicInfo {
|
|||
const connection = this.handle.handle.serverConnection;
|
||||
if(!connection || !connection.connected()) return;
|
||||
|
||||
const bot_id = this._current_bot ? this._current_bot.properties.client_database_id : 0;
|
||||
const bot_id = this.currentMusicBot ? this.currentMusicBot.properties.client_database_id : 0;
|
||||
this.handle.handle.serverConnection.send_command("musicbotsetsubscription", { bot_id: bot_id }).catch(error => {
|
||||
log.warn(LogCategory.CLIENT, tr("Failed to subscribe to displayed bot within the side bar: %o"), error);
|
||||
});
|
||||
|
@@ -682,10 +680,10 @@ export class MusicInfo {
|
|||
|
||||
const connection = this.handle.handle.serverConnection;
|
||||
if(!connection || !connection.connected()) return;
|
||||
if(!this._current_bot) return;
|
||||
if(!this.currentMusicBot) return;
|
||||
|
||||
connection.send_command("playlistsongreorder", {
|
||||
playlist_id: this._current_bot.properties.client_playlist_id,
|
||||
playlist_id: this.currentMusicBot.properties.client_playlist_id,
|
||||
song_id: data.song_id,
|
||||
song_previous_song_id: data.previous_entry
|
||||
}).catch(error => {
|
||||
|
@@ -701,12 +699,12 @@ export class MusicInfo {
|
|||
});
|
||||
|
||||
this.events.on(["bot_change", "player_song_change"], event => {
|
||||
if(!this._current_bot) {
|
||||
if(!this.currentMusicBot) {
|
||||
this._html_tag.find(".playlist .current-song").removeClass("current-song");
|
||||
return;
|
||||
}
|
||||
|
||||
this._current_bot.requestPlayerInfo(1000).then(data => {
|
||||
this.currentMusicBot.requestPlayerInfo(1000).then(data => {
|
||||
const song_id = data ? data.song_id : 0;
|
||||
this._html_tag.find(".playlist .current-song").removeClass("current-song");
|
||||
this._html_tag.find(".playlist .entry[song-id=" + song_id + "]").addClass("current-song");
|
||||
|
@@ -717,11 +715,11 @@ export class MusicInfo {
|
|||
|
||||
set_current_bot(client: MusicClientEntry | undefined, enforce?: boolean) {
|
||||
if(client) client.updateClientVariables(); /* just to ensure */
|
||||
if(client === this._current_bot && (typeof(enforce) === "undefined" || !enforce))
|
||||
if(client === this.currentMusicBot && (typeof(enforce) === "undefined" || !enforce))
|
||||
return;
|
||||
|
||||
const old = this._current_bot;
|
||||
this._current_bot = client;
|
||||
const old = this.currentMusicBot;
|
||||
this.currentMusicBot = client;
|
||||
this.events.fire("bot_change", {
|
||||
new: client,
|
||||
old: old
|
||||
|
@@ -729,7 +727,7 @@ export class MusicInfo {
|
|||
}
|
||||
|
||||
current_bot() : MusicClientEntry | undefined {
|
||||
return this._current_bot;
|
||||
return this.currentMusicBot;
|
||||
}
|
||||
|
||||
private sort_songs(data: PlaylistSong[]) {
|
||||
|
@@ -776,7 +774,7 @@ export class MusicInfo {
|
|||
const playlist = this._container_playlist.find(".playlist");
|
||||
playlist.empty();
|
||||
|
||||
if(!this.handle.handle.serverConnection || !this.handle.handle.serverConnection.connected() || !this._current_bot) {
|
||||
if(!this.handle.handle.serverConnection || !this.handle.handle.serverConnection.connected() || !this.currentMusicBot) {
|
||||
this._container_playlist.find(".overlay-empty").removeClass("hidden");
|
||||
return;
|
||||
}
|
||||
|
@@ -784,10 +782,10 @@ export class MusicInfo {
|
|||
const overlay_loading = this._container_playlist.find(".overlay-loading");
|
||||
overlay_loading.removeClass("hidden");
|
||||
|
||||
this._current_bot.updateClientVariables(true).catch(error => {
|
||||
this.currentMusicBot.updateClientVariables(true).catch(error => {
|
||||
log.warn(LogCategory.CLIENT, tr("Failed to update music bot variables: %o"), error);
|
||||
}).then(() => {
|
||||
this.handle.handle.serverConnection.command_helper.request_playlist_songs(this._current_bot.properties.client_playlist_id, false).then(songs => {
|
||||
this.handle.handle.serverConnection.command_helper.requestPlaylistSongs(this.currentMusicBot.properties.client_playlist_id, false).then(songs => {
|
||||
this.playlist_subscribe(false); /* we're allowed to see the playlist */
|
||||
if(!songs) {
|
||||
this._container_playlist.find(".overlay-empty").removeClass("hidden");
|
||||
|
@@ -813,7 +811,7 @@ export class MusicInfo {
|
|||
private playlist_subscribe(unsubscribe: boolean) {
|
||||
if(!this.handle.handle.serverConnection) return;
|
||||
|
||||
if(unsubscribe || !this._current_bot) {
|
||||
if(unsubscribe || !this.currentMusicBot) {
|
||||
if(!this._playlist_subscribed) return;
|
||||
this._playlist_subscribed = false;
|
||||
|
||||
|
@@ -822,7 +820,7 @@ export class MusicInfo {
|
|||
});
|
||||
} else {
|
||||
this.handle.handle.serverConnection.send_command("playlistsetsubscription", {
|
||||
playlist_id: this._current_bot.properties.client_playlist_id
|
||||
playlist_id: this.currentMusicBot.properties.client_playlist_id
|
||||
}).then(() => this._playlist_subscribed = true).catch(error => {
|
||||
log.warn(LogCategory.CLIENT, tr("Failed to subscribe to bots playlist: %o"), error);
|
||||
});
|
||||
|
@@ -891,8 +889,8 @@ export class MusicInfo {
|
|||
document.addEventListener("mousemove", move_listener);
|
||||
});
|
||||
|
||||
if(this._current_bot) {
|
||||
this._current_bot.requestPlayerInfo(60 * 1000).then(pdata => {
|
||||
if(this.currentMusicBot) {
|
||||
this.currentMusicBot.requestPlayerInfo(60 * 1000).then(pdata => {
|
||||
if(pdata.song_id === data.song_id)
|
||||
tag.addClass("current-song");
|
||||
});
|
||||
|
|
|
@@ -141,10 +141,10 @@ export function spawnAvatarList(client: ConnectionHandler) {
|
|||
if(container_list.hasScrollBar())
|
||||
container_list.addClass("scrollbar");
|
||||
|
||||
client.serverConnection.command_helper.info_from_uid(...Object.keys(username_resolve)).then(result => {
|
||||
client.serverConnection.command_helper.getInfoFromUniqueId(...Object.keys(username_resolve)).then(result => {
|
||||
for(const info of result) {
|
||||
username_resolve[info.client_unique_id].forEach(e => e(info.client_nickname));
|
||||
delete username_resolve[info.client_unique_id];
|
||||
username_resolve[info.clientUniqueId].forEach(e => e(info.clientNickname));
|
||||
delete username_resolve[info.clientUniqueId];
|
||||
}
|
||||
for(const uid of Object.keys(username_resolve)) {
|
||||
(username_resolve[uid] || []).forEach(e => e(undefined));
|
||||
|
|
|
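The command helper renames above also switch the result objects to camelCase fields (clientUniqueId, clientNickname, clientDatabaseId). A hedged lookup sketch in which client stands in for the surrounding ConnectionHandler and uniqueId for any base64 unique id:

/* Resolve a unique id to display data via the renamed helper. */
client.serverConnection.command_helper.getInfoFromUniqueId(uniqueId).then(result => {
    for(const info of result) {
        console.log("%s -> %s (database id %o)", info.clientUniqueId, info.clientNickname, info.clientDatabaseId);
    }
});
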
@@ -2,16 +2,18 @@ import {createModal, Modal} from "tc-shared/ui/elements/Modal";
|
|||
import {ClientEntry} from "tc-shared/ui/client";
|
||||
import {Slider, sliderfy} from "tc-shared/ui/elements/Slider";
|
||||
import * as htmltags from "tc-shared/ui/htmltags";
|
||||
import {LatencySettings} from "tc-shared/connection/VoiceConnection";
|
||||
import {VoicePlayerLatencySettings} from "tc-shared/voice/VoicePlayer";
|
||||
|
||||
let modal: Modal;
|
||||
export function spawnChangeLatency(client: ClientEntry, current: LatencySettings, reset: () => LatencySettings, apply: (settings: LatencySettings) => any, callback_flush?: () => any) {
|
||||
if(modal) modal.close();
|
||||
let modalInstance: Modal;
|
||||
export function spawnChangeLatency(client: ClientEntry, current: VoicePlayerLatencySettings, reset: () => VoicePlayerLatencySettings, apply: (settings: VoicePlayerLatencySettings) => void, callback_flush?: () => any) {
|
||||
if(modalInstance) {
|
||||
modalInstance.close();
|
||||
}
|
||||
|
||||
const begin = Object.assign({}, current);
|
||||
current = Object.assign({}, current);
|
||||
|
||||
modal = createModal({
|
||||
modalInstance = createModal({
|
||||
header: tr("Change playback latency"),
|
||||
body: function () {
|
||||
let tag = $("#tmpl_change_latency").renderTag({
|
||||
|
@@ -26,10 +28,10 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
});
|
||||
|
||||
const update_value = () => {
|
||||
const valid = current.min_buffer < current.max_buffer;
|
||||
const valid = current.minBufferTime < current.maxBufferTime;
|
||||
|
||||
modal.htmlTag.find(".modal-body").toggleClass("modal-red", !valid);
|
||||
modal.htmlTag.find(".modal-body").toggleClass("modal-green", valid);
|
||||
modalInstance.htmlTag.find(".modal-body").toggleClass("modal-red", !valid);
|
||||
modalInstance.htmlTag.find(".modal-body").toggleClass("modal-green", valid);
|
||||
|
||||
if(!valid)
|
||||
return;
|
||||
|
@@ -44,7 +46,7 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
|
||||
const slider_tag = container.find(".container-slider");
|
||||
slider_min = sliderfy(slider_tag, {
|
||||
initial_value: current.min_buffer,
|
||||
initial_value: current.minBufferTime,
|
||||
step: 20,
|
||||
max_value: 1000,
|
||||
min_value: 0,
|
||||
|
@@ -52,12 +54,12 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
unit: 'ms'
|
||||
});
|
||||
slider_tag.on('change', event => {
|
||||
current.min_buffer = parseInt(slider_tag.attr("value"));
|
||||
tag_value.text(current.min_buffer + "ms");
|
||||
current.minBufferTime = parseInt(slider_tag.attr("value"));
|
||||
tag_value.text(current.minBufferTime + "ms");
|
||||
update_value();
|
||||
});
|
||||
|
||||
tag_value.text(current.min_buffer + "ms");
|
||||
tag_value.text(current.minBufferTime + "ms");
|
||||
}
|
||||
|
||||
{
|
||||
|
@@ -66,7 +68,7 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
|
||||
const slider_tag = container.find(".container-slider");
|
||||
slider_max = sliderfy(slider_tag, {
|
||||
initial_value: current.max_buffer,
|
||||
initial_value: current.maxBufferTime,
|
||||
step: 20,
|
||||
max_value: 1020,
|
||||
min_value: 20,
|
||||
|
@@ -75,28 +77,28 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
});
|
||||
|
||||
slider_tag.on('change', event => {
|
||||
current.max_buffer = parseInt(slider_tag.attr("value"));
|
||||
tag_value.text(current.max_buffer + "ms");
|
||||
current.maxBufferTime = parseInt(slider_tag.attr("value"));
|
||||
tag_value.text(current.maxBufferTime + "ms");
|
||||
update_value();
|
||||
});
|
||||
|
||||
tag_value.text(current.max_buffer + "ms");
|
||||
tag_value.text(current.maxBufferTime + "ms");
|
||||
}
|
||||
setTimeout(update_value, 0);
|
||||
|
||||
tag.find(".button-close").on('click', event => {
|
||||
modal.close();
|
||||
modalInstance.close();
|
||||
});
|
||||
|
||||
tag.find(".button-cancel").on('click', event => {
|
||||
apply(begin);
|
||||
modal.close();
|
||||
modalInstance.close();
|
||||
});
|
||||
|
||||
tag.find(".button-reset").on('click', event => {
|
||||
current = Object.assign({}, reset());
|
||||
slider_max.value(current.max_buffer);
|
||||
slider_min.value(current.min_buffer);
|
||||
slider_max.value(current.maxBufferTime);
|
||||
slider_min.value(current.minBufferTime);
|
||||
});
|
||||
|
||||
tag.find(".button-flush").on('click', event => callback_flush());
|
||||
|
@@ -108,7 +110,7 @@ export function spawnChangeLatency(client: ClientEntry, current: LatencySettings
|
|||
width: 600
|
||||
});
|
||||
|
||||
modal.close_listener.push(() => modal = undefined);
|
||||
modal.open();
|
||||
modal.htmlTag.find(".modal-body").addClass("modal-latency");
|
||||
modalInstance.close_listener.push(() => modalInstance = undefined);
|
||||
modalInstance.open();
|
||||
modalInstance.htmlTag.find(".modal-body").addClass("modal-latency");
|
||||
}
|
|
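The latency modal now edits VoicePlayerLatencySettings with minBufferTime/maxBufferTime and only marks the dialog valid while the minimum stays below the maximum. The same check, extracted as a hypothetical helper:

import {VoicePlayerLatencySettings} from "tc-shared/voice/VoicePlayer";

/* A latency configuration may only be applied while the minimum buffer stays below the maximum. */
function isValidLatencyConfig(settings: VoicePlayerLatencySettings) : boolean {
    return settings.minBufferTime < settings.maxBufferTime;
}
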
@@ -55,7 +55,7 @@ function permission_controller(event_registry: Registry<modal.music_manage>, bot
|
|||
{
|
||||
event_registry.on("query_playlist_status", event => {
|
||||
const playlist_id = bot.properties.client_playlist_id;
|
||||
client.serverConnection.command_helper.request_playlist_info(playlist_id).then(result => {
|
||||
client.serverConnection.command_helper.requestPlaylistInfo(playlist_id).then(result => {
|
||||
event_registry.fire("playlist_status", {
|
||||
status: "success",
|
||||
data: {
|
||||
|
@@ -285,15 +285,15 @@ function permission_controller(event_registry: Registry<modal.music_manage>, bot
|
|||
event_registry.on("query_special_clients", event => {
|
||||
const playlist_id = bot.properties.client_playlist_id;
|
||||
client.serverConnection.command_helper.request_playlist_client_list(playlist_id).then(clients => {
|
||||
return client.serverConnection.command_helper.info_from_cldbid(...clients);
|
||||
return client.serverConnection.command_helper.getInfoFromClientDatabaseId(...clients);
|
||||
}).then(clients => {
|
||||
event_registry.fire("special_client_list", {
|
||||
status: "success",
|
||||
clients: clients.map(e => {
|
||||
return {
|
||||
name: e.client_nickname,
|
||||
unique_id: e.client_unique_id,
|
||||
database_id: e.client_database_id
|
||||
name: e.clientNickname,
|
||||
unique_id: e.clientUniqueId,
|
||||
database_id: e.clientDatabaseId
|
||||
}
|
||||
})
|
||||
});
|
||||
|
@@ -316,9 +316,9 @@ function permission_controller(event_registry: Registry<modal.music_manage>, bot
|
|||
is_uuid = atob(text).length === 32;
|
||||
} catch(e) {}
|
||||
if(is_uuid) {
|
||||
return client.serverConnection.command_helper.info_from_uid(text);
|
||||
return client.serverConnection.command_helper.getInfoFromUniqueId(text);
|
||||
} else if(text.match(/^[0-9]{1,7}$/) && !isNaN(parseInt(text))) {
|
||||
return client.serverConnection.command_helper.info_from_cldbid(parseInt(text));
|
||||
return client.serverConnection.command_helper.getInfoFromClientDatabaseId(parseInt(text));
|
||||
} else {
|
||||
//TODO: Database name lookup?
|
||||
return Promise.reject("no results");
|
||||
|
@@ -329,9 +329,9 @@ function permission_controller(event_registry: Registry<modal.music_manage>, bot
|
|||
event_registry.fire("search_client_result", {
|
||||
status: "success",
|
||||
client: {
|
||||
name: client.client_nickname,
|
||||
unique_id: client.client_unique_id,
|
||||
database_id: client.client_database_id
|
||||
name: client.clientNickname,
|
||||
unique_id: client.clientUniqueId,
|
||||
database_id: client.clientDatabaseId
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
|
|
@@ -224,10 +224,10 @@ export function spawnQueryManage(client: ConnectionHandler) {
|
|||
filter_callbacks = [];
|
||||
container_list.find(".entry").remove();
|
||||
|
||||
client.serverConnection.command_helper.current_virtual_server_id().then(server_id => {
|
||||
client.serverConnection.command_helper.getCurrentVirtualServerId().then(server_id => {
|
||||
current_server = server_id;
|
||||
|
||||
client.serverConnection.command_helper.request_query_list(server_id).then(result => {
|
||||
client.serverConnection.command_helper.requestQueryList(server_id).then(result => {
|
||||
if(!result || !result.queries.length) {
|
||||
container_list_empty.text(tr("No queries available"));
|
||||
return;
|
||||
|
|
|
@@ -0,0 +1,185 @@
|
|||
import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
|
||||
import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller";
|
||||
import * as React from "react";
|
||||
import {Translatable} from "tc-shared/ui/react-elements/i18n";
|
||||
import {EchoTestEventRegistry, EchoTestModal} from "tc-shared/ui/modal/echo-test/Renderer";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {EchoTestEvents, TestState} from "tc-shared/ui/modal/echo-test/Definitions";
|
||||
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
||||
import {global_client_actions} from "tc-shared/events/GlobalEvents";
|
||||
import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
|
||||
import {Settings, settings} from "tc-shared/settings";
|
||||
import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
|
||||
import {LogCategory, logError} from "tc-shared/log";
|
||||
import {ServerFeature} from "tc-shared/connection/ServerFeatures";
|
||||
|
||||
export function spawnEchoTestModal(connection: ConnectionHandler) {
|
||||
const events = new Registry<EchoTestEvents>();
|
||||
|
||||
initializeController(connection, events);
|
||||
|
||||
const modal = spawnReactModal(class extends InternalModal {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
renderBody(): React.ReactElement {
|
||||
return (
|
||||
<EchoTestEventRegistry.Provider value={events}>
|
||||
<EchoTestModal />
|
||||
</EchoTestEventRegistry.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
title(): string | React.ReactElement<Translatable> {
|
||||
return <Translatable>Voice echo test</Translatable>;
|
||||
}
|
||||
});
|
||||
|
||||
events.on("action_close", () => {
|
||||
modal.destroy();
|
||||
});
|
||||
|
||||
modal.events.on("close", () => events.fire("notify_close"));
|
||||
modal.events.on("destroy", () => {
|
||||
events.fire("notify_destroy");
|
||||
events.destroy();
|
||||
});
|
||||
|
||||
modal.show();
|
||||
}
|
||||
|
||||
function initializeController(connection: ConnectionHandler, events: Registry<EchoTestEvents>) {
|
||||
let testState: TestState = { state: "stopped" };
|
||||
|
||||
events.on("action_open_microphone_settings", () => {
|
||||
global_client_actions.fire("action_open_window_settings", { defaultCategory: "audio-microphone" });
|
||||
});
|
||||
|
||||
events.on("action_toggle_tests", event => {
|
||||
settings.changeGlobal(Settings.KEY_VOICE_ECHO_TEST_ENABLED, event.enabled);
|
||||
});
|
||||
|
||||
events.on("query_test_state", () => {
|
||||
events.fire_async("notify_tests_toggle", { enabled: settings.global(Settings.KEY_VOICE_ECHO_TEST_ENABLED) });
|
||||
});
|
||||
|
||||
events.on("notify_destroy", settings.globalChangeListener(Settings.KEY_VOICE_ECHO_TEST_ENABLED, value => {
|
||||
events.fire_async("notify_tests_toggle", { enabled: value });
|
||||
}));
|
||||
|
||||
events.on("action_test_result", event => {
|
||||
if(event.status === "success") {
|
||||
events.fire("action_close");
|
||||
} else {
|
||||
events.fire("action_stop_test");
|
||||
events.fire("notify_test_phase", { phase: "troubleshooting" });
|
||||
}
|
||||
});
|
||||
|
||||
events.on("action_troubleshooting_finished", event => {
|
||||
if(event.status === "aborted") {
|
||||
events.fire("action_close");
|
||||
} else {
|
||||
events.fire("notify_test_phase", { phase: "testing" });
|
||||
events.fire("action_start_test");
|
||||
}
|
||||
});
|
||||
|
||||
const reportVoiceConnectionState = (state: VoiceConnectionStatus) => {
|
||||
if(state === VoiceConnectionStatus.Connected) {
|
||||
beginTest();
|
||||
} else {
|
||||
endTest();
|
||||
}
|
||||
switch (state) {
|
||||
case VoiceConnectionStatus.Connected:
|
||||
events.fire("notify_voice_connection_state", { state: "connected" });
|
||||
break;
|
||||
|
||||
case VoiceConnectionStatus.Disconnected:
|
||||
case VoiceConnectionStatus.Disconnecting:
|
||||
events.fire("notify_voice_connection_state", { state: "disconnected" });
|
||||
break;
|
||||
|
||||
case VoiceConnectionStatus.Connecting:
|
||||
events.fire("notify_voice_connection_state", { state: "connecting" });
|
||||
break;
|
||||
|
||||
case VoiceConnectionStatus.ClientUnsupported:
|
||||
events.fire("notify_voice_connection_state", { state: "unsupported-client" });
|
||||
break;
|
||||
|
||||
case VoiceConnectionStatus.ServerUnsupported:
|
||||
events.fire("notify_voice_connection_state", { state: "unsupported-server" });
|
||||
break;
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
events.on("notify_destroy", connection.getServerConnection().getVoiceConnection().events.on("notify_connection_status_changed", event => {
|
||||
reportVoiceConnectionState(event.newStatus);
|
||||
}));
|
||||
|
||||
events.on("query_voice_connection_state", () => reportVoiceConnectionState(connection.getServerConnection().getVoiceConnection().getConnectionState()));
|
||||
|
||||
events.on("query_test_state", () => {
|
||||
events.fire_async("notify_test_state", { state: testState });
|
||||
});
|
||||
|
||||
events.on("action_start_test", () => {
|
||||
beginTest();
|
||||
});
|
||||
|
||||
const setTestState = (state: TestState) => {
|
||||
testState = state;
|
||||
events.fire("notify_test_state", { state: state });
|
||||
}
|
||||
|
||||
let testId = 0;
|
||||
const beginTest = () => {
|
||||
if(testState.state === "initializing" || testState.state === "running") {
|
||||
return;
|
||||
} else if(!connection.serverFeatures.supportsFeature(ServerFeature.WHISPER_ECHO)) {
|
||||
setTestState({ state: "unsupported" });
|
||||
return;
|
||||
}
|
||||
|
||||
setTestState({ state: "initializing" });
|
||||
|
||||
|
||||
const currentTestId = ++testId;
|
||||
connection.startEchoTest().then(() => {
|
||||
if(currentTestId !== testId) {
|
||||
return;
|
||||
}
|
||||
|
||||
setTestState({ state: "running" });
|
||||
}).catch(error => {
|
||||
if(currentTestId !== testId) {
|
||||
return;
|
||||
}
|
||||
|
||||
let message;
|
||||
if(error instanceof CommandResult) {
|
||||
message = error.formattedMessage();
|
||||
} else if(error instanceof Error) {
|
||||
message = error.message;
|
||||
} else if(typeof error === "string") {
|
||||
message = error;
|
||||
} else {
|
||||
message = tr("lookup the console");
|
||||
logError(LogCategory.AUDIO, tr("Failed to begin echo testing: %o"), error);
|
||||
}
|
||||
|
||||
setTestState({ state: "start-failed", error: message });
|
||||
});
|
||||
}
|
||||
|
||||
const endTest = () => {
|
||||
setTestState({ state: "stopped" });
|
||||
connection.stopEchoTest();
|
||||
}
|
||||
|
||||
events.on(["notify_destroy", "notify_close", "action_stop_test"], endTest);
|
||||
}
|
|
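beginTest above guards against out-of-order results: every start bumps testId, and the async continuation only applies its state when its own id is still the latest. The pattern in isolation, as a sketch with made-up names:

let currentRequestId = 0;

/* Run an async action; ignore its outcome when a newer action was started in the meantime. */
async function runLatest(action: () => Promise<void>, onFinished: () => void, onStale: () => void) {
    const requestId = ++currentRequestId;
    await action();
    if(requestId !== currentRequestId) {
        onStale(); /* superseded by a newer call */
        return;
    }
    onFinished();
}
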
@@ -0,0 +1,33 @@
|
|||
export type VoiceConnectionState = "connecting" | "connected" | "disconnected" | "unsupported-client" | "unsupported-server";
|
||||
export type TestState = { state: "initializing" | "running" | "stopped" | "microphone-invalid" | "unsupported" } | { state: "start-failed", error: string };
|
||||
|
||||
export interface EchoTestEvents {
|
||||
action_troubleshooting_finished: { status: "test-again" | "aborted" }
|
||||
action_close: {},
|
||||
action_test_result: { status: "success" | "fail" },
|
||||
action_open_microphone_settings: {},
|
||||
/* toggle the default test popup */
|
||||
action_toggle_tests: { enabled: boolean },
|
||||
action_start_test: {},
|
||||
action_stop_test: {},
|
||||
|
||||
query_voice_connection_state: {},
|
||||
query_test_state: {},
|
||||
query_test_toggle: {},
|
||||
|
||||
notify_destroy: {},
|
||||
notify_close: {},
|
||||
|
||||
notify_test_phase: {
|
||||
phase: "testing" | "troubleshooting"
|
||||
},
|
||||
notify_voice_connection_state: {
|
||||
state: VoiceConnectionState
|
||||
},
|
||||
notify_test_state: {
|
||||
state: TestState
|
||||
},
|
||||
notify_tests_toggle: {
|
||||
enabled: boolean
|
||||
}
|
||||
}
|
|
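These event names follow the query/notify pairing used throughout the modal: the renderer fires a query_* event and the controller answers asynchronously with the matching notify_* event (compare query_test_state and notify_test_state in the controller above). A minimal renderer-side sketch, assuming events is the modal's Registry<EchoTestEvents>:

/* Ask the controller for the current test state and react once it answers. */
events.on("notify_test_state", event => console.log("echo test state: %o", event.state));
events.fire("query_test_state");
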
@@ -0,0 +1,257 @@
|
|||
@import "../../../../css/static/mixin";
|
||||
@import "../../../../css/static/properties";
|
||||
|
||||
.container {
|
||||
@include user-select(none);
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: stretch;
|
||||
|
||||
position: relative;
|
||||
|
||||
width: 40em;
|
||||
height: 23em;
|
||||
|
||||
padding: 1em;
|
||||
|
||||
.header {
|
||||
flex-shrink: 0;
|
||||
all: unset;
|
||||
|
||||
display: block;
|
||||
|
||||
font-size: 1.3em;
|
||||
margin-top: 0;
|
||||
margin-bottom: .2em;
|
||||
}
|
||||
|
||||
.buttons {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-evenly;
|
||||
|
||||
position: relative;
|
||||
|
||||
margin-top: 2em;
|
||||
padding-bottom: 4.5em;
|
||||
|
||||
.buttonContainer {
|
||||
position: relative;
|
||||
|
||||
.button {
|
||||
font-size: 6.5em;
|
||||
|
||||
height: 1em;
|
||||
width: 1em;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
|
||||
border: 2px solid;
|
||||
border-radius: 50%;
|
||||
|
||||
box-sizing: content-box;
|
||||
padding: .1em;
|
||||
|
||||
cursor: pointer;
|
||||
|
||||
@include transition(ease-in-out $button_hover_animation_time);
|
||||
|
||||
&.success {
|
||||
border-color: #1ca037;
|
||||
|
||||
&:hover {
|
||||
background-color: rgba(28, 160, 55, .1);
|
||||
}
|
||||
}
|
||||
|
||||
&.fail {
|
||||
border-color: #c90709;
|
||||
|
||||
&:hover {
|
||||
background-color: #c907091a;
|
||||
}
|
||||
}
|
||||
|
||||
&:hover {
|
||||
@include transform(scale(1.05));
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
position: absolute;
|
||||
|
||||
margin-top: .3em;
|
||||
font-size: 1.1rem;
|
||||
|
||||
top: 100%;
|
||||
left: 0;
|
||||
right: 0;
|
||||
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
.overlay {
|
||||
z-index: 1;
|
||||
position: absolute;
|
||||
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
|
||||
pointer-events: none;
|
||||
opacity: 0;
|
||||
|
||||
background-color: #19191bcc;
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
|
||||
text-align: center;
|
||||
padding-bottom: 3.5em;
|
||||
|
||||
font-size: 1.2em;
|
||||
|
||||
@include transition(ease-in-out .2s);
|
||||
|
||||
&.shown {
|
||||
pointer-events: all;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.footer {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
|
||||
margin-top: auto;
|
||||
|
||||
label {
|
||||
align-self: flex-end;
|
||||
}
|
||||
}
|
||||
|
||||
> .overlay {
|
||||
z-index: 1;
|
||||
position: absolute;
|
||||
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
|
||||
display: none;
|
||||
background: #19191b;
|
||||
|
||||
&.shown {
|
||||
display: flex;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.troubleshoot {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: stretch;
|
||||
|
||||
padding: 1em;
|
||||
|
||||
.top {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: stretch;
|
||||
|
||||
min-height: 6em;
|
||||
|
||||
flex-shrink: 1;
|
||||
}
|
||||
|
||||
.containerIcon {
|
||||
padding: 0 2em;
|
||||
|
||||
flex-grow: 0;
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
|
||||
.icon {
|
||||
align-self: center;
|
||||
font-size: 12em;
|
||||
}
|
||||
}
|
||||
|
||||
.help {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: stretch;
|
||||
|
||||
min-height: 6em;
|
||||
|
||||
flex-shrink: 1;
|
||||
flex-grow: 1;
|
||||
|
||||
h1 {
|
||||
font-size: 1.4em;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
ol {
|
||||
overflow: auto;
|
||||
flex-shrink: 1;
|
||||
flex-grow: 1;
|
||||
min-height: 4em;
|
||||
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
padding-left: 1.1em;
|
||||
padding-right: .5em;
|
||||
|
||||
padding-inline-start: 1em;
|
||||
|
||||
@include chat-scrollbar-vertical();
|
||||
|
||||
li {
|
||||
color: #557EDC;
|
||||
margin-top: .5em;
|
||||
|
||||
p {
|
||||
margin: 0;
|
||||
color: #999;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
h2 {
|
||||
all: unset;
|
||||
display: block;
|
||||
position: relative;
|
||||
|
||||
button {
|
||||
vertical-align: middle;
|
||||
|
||||
position: absolute;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.buttons {
|
||||
flex-shrink: 0;
|
||||
padding: 0;
|
||||
margin-top: 1em;
|
||||
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,236 @@
|
|||
import * as React from "react";
|
||||
import {useContext, useState} from "react";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {EchoTestEvents, TestState, VoiceConnectionState} from "./Definitions";
|
||||
import {Translatable, VariadicTranslatable} from "tc-shared/ui/react-elements/i18n";
|
||||
import {ClientIcon} from "svg-sprites/client-icons";
|
||||
import {ClientIconRenderer} from "tc-shared/ui/react-elements/Icons";
|
||||
import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
|
||||
import {Button} from "tc-shared/ui/react-elements/Button";
|
||||
import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
|
||||
|
||||
const cssStyle = require("./Renderer.scss");
|
||||
|
||||
export const EchoTestEventRegistry = React.createContext<Registry<EchoTestEvents>>(undefined);
|
||||
|
||||
const VoiceStateOverlay = () => {
|
||||
const events = useContext(EchoTestEventRegistry);
|
||||
|
||||
const [ state, setState ] = useState<"loading" | VoiceConnectionState>(() => {
|
||||
events.fire("query_voice_connection_state");
|
||||
return "loading";
|
||||
});
|
||||
|
||||
events.reactUse("notify_voice_connection_state", event => setState(event.state));
|
||||
|
||||
let inner, shown = true;
|
||||
switch (state) {
|
||||
case "disconnected":
|
||||
inner = <a key={state}><Translatable>Voice connection has been disconnected.</Translatable></a>;
|
||||
break;
|
||||
|
||||
case "unsupported-server":
|
||||
inner = <a key={state}><Translatable>Voice connection isn't supported by the server.</Translatable></a>;
|
||||
break;
|
||||
|
||||
case "unsupported-client":
|
||||
inner = <a key={state}>
|
||||
<Translatable>Voice connection isn't supported by your browser.</Translatable><br />
|
||||
<Translatable>Please use another browser.</Translatable>
|
||||
</a>;
|
||||
break;
|
||||
|
||||
case "connecting":
|
||||
inner = <a key={state}><Translatable>establishing voice connection</Translatable> <LoadingDots /></a>;
|
||||
break;
|
||||
|
||||
case "loading":
|
||||
inner = <a key={state}><Translatable>loading</Translatable> <LoadingDots /></a>;
|
||||
break;
|
||||
|
||||
case "connected":
|
||||
shown = false;
|
||||
break;
|
||||
|
||||
default:
|
||||
shown = false;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cssStyle.overlay + " " + (shown ? cssStyle.shown : "")}>
|
||||
{inner}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const TestStateOverlay = () => {
|
||||
const events = useContext(EchoTestEventRegistry);
|
||||
|
||||
const [ state, setState ] = useState<{ state: "loading" } | TestState>(() => {
|
||||
events.fire("query_test_state");
|
||||
return { state: "loading" };
|
||||
});
|
||||
|
||||
const [ voiceConnected, setVoiceConnected ] = useState<"loading" | boolean>(() => {
|
||||
return "loading";
|
||||
});
|
||||
|
||||
events.reactUse("notify_voice_connection_state", event => setVoiceConnected(event.state === "connected"));
|
||||
events.reactUse("notify_test_state", event => setState(event.state));
|
||||
|
||||
let inner;
|
||||
switch (state.state) {
|
||||
case "loading":
|
||||
case "initializing":
|
||||
inner = <a key={"initializing"}><Translatable>initializing</Translatable> <LoadingDots/></a>;
|
||||
break;
|
||||
|
||||
case "start-failed":
|
||||
inner = <a key={"initializing"}>
|
||||
<VariadicTranslatable text={"Failed to start echo test:\n{0}"}>
|
||||
{state.error}
|
||||
</VariadicTranslatable>
|
||||
<br />
|
||||
<Button type={"small"} color={"green"} onClick={() => events.fire("action_start_test")}><Translatable>Try again</Translatable></Button>
|
||||
</a>;
|
||||
break;
|
||||
|
||||
case "unsupported":
|
||||
inner = <a key={"initializing"}><Translatable>Echo testing hasn't been supported by the server.</Translatable></a>;
|
||||
break;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cssStyle.overlay + " " + (state.state !== "running" && voiceConnected ? cssStyle.shown : "")}>
|
||||
{inner}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const TroubleshootingSoundOverlay = () => {
|
||||
const events = useContext(EchoTestEventRegistry);
|
||||
|
||||
const [ visible, setVisible ] = useState(false);
|
||||
|
||||
events.reactUse("notify_test_phase", event => setVisible(event.phase === "troubleshooting"));
|
||||
|
||||
return (
|
||||
<div className={cssStyle.overlay + " " + cssStyle.troubleshoot + " " + (visible ? cssStyle.shown : "")}>
|
||||
<div className={cssStyle.top}>
|
||||
<div className={cssStyle.containerIcon}>
|
||||
<ClientIconRenderer icon={ClientIcon.MicrophoneBroken} className={cssStyle.icon} />
|
||||
</div>
|
||||
<div className={cssStyle.help}>
|
||||
<h1><Translatable>Troubleshooting guide</Translatable></h1>
|
||||
<ol>
|
||||
<li>
|
||||
<h2><Translatable>Correct microphone selected?</Translatable>
|
||||
<Button type={"extra-small"} onClick={() => events.fire("action_open_microphone_settings")}>
|
||||
<Translatable>Open Microphone settings</Translatable>
|
||||
</Button>
|
||||
</h2>
|
||||
<p>
|
||||
<Translatable>Check within the settings whether the correct microphone has been selected.</Translatable>
|
||||
<Translatable>The indicators will show you any voice activity.</Translatable>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<h2><Translatable>Are any addons blocking the microphone access?</Translatable></h2>
|
||||
<p>
|
||||
<Translatable>Some addons might block access to your microphone. Try disabling all addons and reloading the site.</Translatable>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<h2><Translatable>Has WebRTC been enabled?</Translatable></h2>
|
||||
<p>
|
||||
<VariadicTranslatable text={"In some cases, WebRTC has been disabled. Click {0} to troubleshoot any WebRTC related issues."}>
|
||||
<a href={"https://test.webrtc.org"} hrefLang={"en"} target={"_blank"}><Translatable>here</Translatable></a>
|
||||
</VariadicTranslatable>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<h2><Translatable>Reload the site</Translatable></h2>
|
||||
<p>
|
||||
<Translatable>In some cases, simply reloading the site will solve the issue.</Translatable>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<h2><Translatable>Nothing worked? Submit an issue</Translatable></h2>
|
||||
<p>
|
||||
<VariadicTranslatable text={"If still nothing worked, try to seek help in our {0}."}>
|
||||
<a href={"https://forum.teaspeak.de"} hrefLang={"en"} target={"_blank"}><Translatable>forum</Translatable></a>
|
||||
</VariadicTranslatable>
|
||||
<VariadicTranslatable text={"You can also create a new issue/bug report {0}."}>
|
||||
<a href={"https://github.com/TeaSpeak/TeaWeb/issues"} hrefLang={"en"} target={"_blank"}><Translatable>here</Translatable></a>
|
||||
</VariadicTranslatable>
|
||||
</p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
</div>
|
||||
<div className={cssStyle.buttons}>
|
||||
<Button type={"small"} color={"red"} onClick={() => events.fire("action_troubleshooting_finished", { status: "aborted" })}>
|
||||
<Translatable>Abort test</Translatable>
|
||||
</Button>
|
||||
|
||||
<Button type={"small"} color={"green"} onClick={() => events.fire("action_troubleshooting_finished", { status: "test-again" })}>
|
||||
<Translatable>Test again</Translatable>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export const TestToggle = () => {
|
||||
const events = useContext(EchoTestEventRegistry);
|
||||
|
||||
const [ state, setState ] = useState<"loading" | boolean>(() => {
|
||||
events.fire("query_test_state");
|
||||
return "loading";
|
||||
});
|
||||
|
||||
events.reactUse("notify_tests_toggle", event => setState(event.enabled));
|
||||
|
||||
return (
|
||||
<Checkbox
|
||||
value={state === true}
|
||||
disabled={state === "loading"}
|
||||
onChange={() => events.fire("action_toggle_tests", { enabled: state === false })}
|
||||
label={<Translatable>Show this on the next connect</Translatable>}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export const EchoTestModal = () => {
|
||||
const events = useContext(EchoTestEventRegistry);
|
||||
|
||||
return (
|
||||
<div className={cssStyle.container}>
|
||||
<h1 className={cssStyle.header}>
|
||||
<Translatable>Welcome to the private echo test. Can you hear yourself speaking?</Translatable>
|
||||
</h1>
|
||||
<div className={cssStyle.buttons}>
|
||||
<div className={cssStyle.buttonContainer}>
|
||||
<div className={cssStyle.button + " " + cssStyle.success} title={tr("Yes")} onClick={() => events.fire("action_test_result", { status: "success" })}>
|
||||
<ClientIconRenderer icon={ClientIcon.Apply} className={cssStyle.icon} />
|
||||
</div>
|
||||
<a><Translatable>Yes</Translatable></a>
|
||||
</div>
|
||||
<div className={cssStyle.buttonContainer}>
|
||||
<div className={cssStyle.button + " " + cssStyle.fail} title={tr("No")} onClick={() => events.fire("action_test_result", { status: "fail" })}>
|
||||
<ClientIconRenderer icon={ClientIcon.Delete} className={cssStyle.icon} />
|
||||
</div>
|
||||
<a><Translatable>No</Translatable></a>
|
||||
</div>
|
||||
|
||||
<VoiceStateOverlay />
|
||||
<TestStateOverlay />
|
||||
</div>
|
||||
<div className={cssStyle.footer}>
|
||||
<TestToggle />
|
||||
<Button color={"red"} type={"small"} onClick={() => events.fire("action_close")}><Translatable>Close</Translatable></Button>
|
||||
</div>
|
||||
<TroubleshootingSoundOverlay />
|
||||
</div>
|
||||
);
|
||||
};
|
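// Usage sketch (not part of the diff above): wiring the new renderer to its event registry.
// The real entry point is spawnEchoTestModal() from the controller; the direct ReactDOM
// rendering, the Registry constructor call and the container element are assumptions for illustration.
import * as React from "react";
import * as ReactDOM from "react-dom";
import {Registry} from "tc-shared/events";
import {EchoTestEvents} from "./Definitions";
import {EchoTestEventRegistry, EchoTestModal} from "./Renderer";

const events = new Registry<EchoTestEvents>();
events.on("action_close", () => { /* tear the modal down here */ });

ReactDOM.render(
    <EchoTestEventRegistry.Provider value={events}>
        <EchoTestModal />
    </EchoTestEventRegistry.Provider>,
    document.getElementById("echo-test-container")
);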
|
@ -590,7 +590,7 @@ function initializePermissionModalController(connection: ConnectionHandler, even
|
|||
}
|
||||
|
||||
events.on("query_group_clients", event => {
|
||||
connection.serverConnection.command_helper.request_clients_by_server_group(event.id).then(clients => {
|
||||
connection.serverConnection.command_helper.requestClientsByServerGroup(event.id).then(clients => {
|
||||
events.fire("query_group_clients_result", { id: event.id, status: "success", clients: clients.map(e => {
|
||||
return {
|
||||
name: e.client_nickname,
|
||||
|
@ -614,7 +614,7 @@ function initializePermissionModalController(connection: ConnectionHandler, even
|
|||
if(typeof client === "number")
|
||||
return Promise.resolve(client);
|
||||
|
||||
return connection.serverConnection.command_helper.info_from_uid(client.trim()).then(info => info[0].client_database_id);
|
||||
return connection.serverConnection.command_helper.getInfoFromUniqueId(client.trim()).then(info => info[0].clientDatabaseId);
|
||||
}).then(clientDatabaseId => connection.serverConnection.send_command("servergroupaddclient", {
|
||||
sgid: event.id,
|
||||
cldbid: clientDatabaseId
|
||||
|
@ -667,9 +667,9 @@ function initializePermissionModalController(connection: ConnectionHandler, even
|
|||
events.on("query_client_info", event => {
|
||||
let promise: Promise<ClientNameInfo[]>;
|
||||
if(typeof event.client === "number") {
|
||||
promise = connection.serverConnection.command_helper.info_from_cldbid(event.client);
|
||||
promise = connection.serverConnection.command_helper.getInfoFromClientDatabaseId(event.client);
|
||||
} else {
|
||||
promise = connection.serverConnection.command_helper.info_from_uid(event.client.trim());
|
||||
promise = connection.serverConnection.command_helper.getInfoFromUniqueId(event.client.trim());
|
||||
}
|
||||
promise.then(result => {
|
||||
if(result.length === 0) {
|
||||
|
@ -682,7 +682,7 @@ function initializePermissionModalController(connection: ConnectionHandler, even
|
|||
events.fire("query_client_info_result", {
|
||||
client: event.client,
|
||||
state: "success",
|
||||
info: { name: result[0].client_nickname, databaseId: result[0].client_database_id, uniqueId: result[0].client_unique_id }
|
||||
info: { name: result[0].clientNickname, databaseId: result[0].clientDatabaseId, uniqueId: result[0].clientUniqueId }
|
||||
});
|
||||
}).catch(error => {
|
||||
if(error instanceof CommandResult) {
|
||||
|
|
|
@ -8,8 +8,6 @@
|
|||
display: flex;
|
||||
position: relative;
|
||||
|
||||
padding: .5em;
|
||||
|
||||
background-color: inherit;
|
||||
|
||||
.background {
|
||||
|
@ -71,6 +69,8 @@
|
|||
}
|
||||
|
||||
&.shown {
|
||||
padding: .5em;
|
||||
|
||||
.background {
|
||||
display: flex;
|
||||
z-index: 1;
|
||||
|
|
|
@ -4,7 +4,7 @@ import {Registry} from "tc-shared/events";
|
|||
import {LevelMeter} from "tc-shared/voice/RecorderBase";
|
||||
import * as log from "tc-shared/log";
|
||||
import {LogCategory, logWarn} from "tc-shared/log";
|
||||
import {default_recorder} from "tc-shared/voice/RecorderProfile";
|
||||
import {defaultRecorder} from "tc-shared/voice/RecorderProfile";
|
||||
import {DeviceListState, getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
|
||||
|
||||
export type MicrophoneSetting = "volume" | "vad-type" | "ppt-key" | "ppt-release-delay" | "ppt-release-delay-active" | "threshold-threshold";
|
||||
|
@ -98,7 +98,7 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
|
||||
for(const device of recorderBackend.getDeviceList().getDevices()) {
|
||||
let promise = recorderBackend.createLevelMeter(device).then(meter => {
|
||||
meter.set_observer(level => {
|
||||
meter.setObserver(level => {
|
||||
if(level_meters[device.deviceId] !== promise) return; /* old level meter */
|
||||
|
||||
level_info[device.deviceId] = {
|
||||
|
@ -172,7 +172,7 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
|
||||
events.fire_async("notify_devices", {
|
||||
status: "success",
|
||||
selectedDevice: default_recorder.getDeviceId(),
|
||||
selectedDevice: defaultRecorder.getDeviceId(),
|
||||
devices: devices.map(e => { return { id: e.deviceId, name: e.name, driver: e.driver }})
|
||||
});
|
||||
}
|
||||
|
@ -181,11 +181,11 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
events.on("action_set_selected_device", event => {
|
||||
const device = recorderBackend.getDeviceList().getDevices().find(e => e.deviceId === event.deviceId);
|
||||
if(!device && event.deviceId !== IDevice.NoDeviceId) {
|
||||
events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: default_recorder.getDeviceId() });
|
||||
events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: defaultRecorder.getDeviceId() });
|
||||
return;
|
||||
}
|
||||
|
||||
default_recorder.set_device(device).then(() => {
|
||||
defaultRecorder.setDevice(device).then(() => {
|
||||
console.debug(tr("Changed default microphone device to %s"), event.deviceId);
|
||||
events.fire_async("action_set_selected_device_result", { status: "success", deviceId: event.deviceId });
|
||||
}).catch((error) => {
|
||||
|
@ -201,27 +201,27 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
let value;
|
||||
switch (event.setting) {
|
||||
case "volume":
|
||||
value = default_recorder.get_volume();
|
||||
value = defaultRecorder.getVolume();
|
||||
break;
|
||||
|
||||
case "threshold-threshold":
|
||||
value = default_recorder.get_vad_threshold();
|
||||
value = defaultRecorder.getThresholdThreshold();
|
||||
break;
|
||||
|
||||
case "vad-type":
|
||||
value = default_recorder.get_vad_type();
|
||||
value = defaultRecorder.getVadType();
|
||||
break;
|
||||
|
||||
case "ppt-key":
|
||||
value = default_recorder.get_vad_ppt_key();
|
||||
value = defaultRecorder.getPushToTalkKey();
|
||||
break;
|
||||
|
||||
case "ppt-release-delay":
|
||||
value = Math.abs(default_recorder.get_vad_ppt_delay());
|
||||
value = Math.abs(defaultRecorder.getPushToTalkDelay());
|
||||
break;
|
||||
|
||||
case "ppt-release-delay-active":
|
||||
value = default_recorder.get_vad_ppt_delay() > 0;
|
||||
value = defaultRecorder.getPushToTalkDelay() > 0;
|
||||
break;
|
||||
|
||||
default:
|
||||
|
@ -246,17 +246,17 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
switch (event.setting) {
|
||||
case "volume":
|
||||
if(!ensure_type("number")) return;
|
||||
default_recorder.set_volume(event.value);
|
||||
defaultRecorder.setVolume(event.value);
|
||||
break;
|
||||
|
||||
case "threshold-threshold":
|
||||
if(!ensure_type("number")) return;
|
||||
default_recorder.set_vad_threshold(event.value);
|
||||
defaultRecorder.setThresholdThreshold(event.value);
|
||||
break;
|
||||
|
||||
case "vad-type":
|
||||
if(!ensure_type("string")) return;
|
||||
if(!default_recorder.set_vad_type(event.value)) {
|
||||
if(!defaultRecorder.setVadType(event.value)) {
|
||||
logWarn(LogCategory.GENERAL, tr("Failed to change recorders VAD type to %s"), event.value);
|
||||
return;
|
||||
}
|
||||
|
@ -264,18 +264,18 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
|
||||
case "ppt-key":
|
||||
if(!ensure_type("object")) return;
|
||||
default_recorder.set_vad_ppt_key(event.value);
|
||||
defaultRecorder.setPushToTalkKey(event.value);
|
||||
break;
|
||||
|
||||
case "ppt-release-delay":
|
||||
if(!ensure_type("number")) return;
|
||||
const sign = default_recorder.get_vad_ppt_delay() >= 0 ? 1 : -1;
|
||||
default_recorder.set_vad_ppt_delay(sign * event.value);
|
||||
const sign = defaultRecorder.getPushToTalkDelay() >= 0 ? 1 : -1;
|
||||
defaultRecorder.setPushToTalkDelay(sign * event.value);
|
||||
break;
|
||||
|
||||
case "ppt-release-delay-active":
|
||||
if(!ensure_type("boolean")) return;
|
||||
default_recorder.set_vad_ppt_delay(Math.abs(default_recorder.get_vad_ppt_delay()) * (event.value ? 1 : -1));
|
||||
defaultRecorder.setPushToTalkDelay(Math.abs(defaultRecorder.getPushToTalkDelay()) * (event.value ? 1 : -1));
|
||||
break;
|
||||
|
||||
default:
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import {ClientIcon} from "svg-sprites/client-icons";
|
||||
import * as React from "react";
|
||||
|
||||
export const ClientIconRenderer = (props: { icon: ClientIcon, size?: string | number, title?: string }) => (
|
||||
<div className={"icon_em " + props.icon} style={{ fontSize: props.size }} title={props.title} />
|
||||
export const ClientIconRenderer = (props: { icon: ClientIcon, size?: string | number, title?: string, className?: string }) => (
|
||||
<div className={"icon_em " + props.icon + " " + props.className} style={{ fontSize: props.size }} title={props.title} />
|
||||
);
|
|
@ -51,7 +51,7 @@ class ClientSpeakIcon extends ReactComponentBase<ClientIconProperties, {}> {
|
|||
} else {
|
||||
if (properties.client_away) {
|
||||
icon = ClientIcon.Away;
|
||||
} else if (!client.get_audio_handle() && !(this instanceof LocalClientEntry)) {
|
||||
} else if (!client.getVoiceClient() && !(this instanceof LocalClientEntry)) {
|
||||
icon = ClientIcon.InputMutedLocal;
|
||||
} else if(!properties.client_output_hardware) {
|
||||
icon = ClientIcon.HardwareOutputMuted;
|
||||
|
@ -338,7 +338,7 @@ class ClientNameEdit extends ReactComponentBase<ClientNameEditProps, {}> {
|
|||
contentEditable={true}
|
||||
ref={this.ref_div}
|
||||
dangerouslySetInnerHTML={{__html: DOMPurify.sanitize(this.props.initialName)}}
|
||||
onBlur={e => this.onBlur()}
|
||||
onBlur={() => this.onBlur()}
|
||||
onKeyPress={e => this.onKeyPress(e)}
|
||||
/>
|
||||
}
|
||||
|
|
|
@ -488,14 +488,16 @@ export class ChannelTree {
|
|||
|
||||
//FIXME: Trigger the notify_clients_changed event!
|
||||
const voice_connection = this.client.serverConnection.getVoiceConnection();
|
||||
if(client.get_audio_handle()) {
|
||||
if(client.getVoiceClient()) {
|
||||
const voiceClient = client.getVoiceClient();
|
||||
client.setVoiceClient(undefined);
|
||||
|
||||
if(!voice_connection) {
|
||||
log.warn(LogCategory.VOICE, tr("Deleting client with a voice handle, but we haven't a voice connection!"));
|
||||
} else {
|
||||
voice_connection.unregister_client(client.get_audio_handle());
|
||||
voice_connection.unregisterVoiceClient(voiceClient);
|
||||
}
|
||||
}
|
||||
client.set_audio_handle(undefined);
|
||||
client.destroy();
|
||||
}
|
||||
|
||||
|
@ -503,9 +505,10 @@ export class ChannelTree {
|
|||
this.clients.push(client);
|
||||
client.channelTree = this;
|
||||
|
||||
const voice_connection = this.client.serverConnection.getVoiceConnection();
|
||||
if(voice_connection)
|
||||
client.set_audio_handle(voice_connection.registerClient(client.clientId()));
|
||||
const voiceConnection = this.client.serverConnection.getVoiceConnection();
|
||||
if(voiceConnection) {
|
||||
client.setVoiceClient(voiceConnection.registerVoiceClient(client.clientId()));
|
||||
}
|
||||
}
|
||||
|
||||
unregisterClient(client: ClientEntry) {
|
||||
|
@ -852,9 +855,9 @@ export class ChannelTree {
|
|||
|
||||
const voice_connection = this.client.serverConnection ? this.client.serverConnection.getVoiceConnection() : undefined;
|
||||
for(const client of this.clients) {
|
||||
if(client.get_audio_handle() && voice_connection) {
|
||||
voice_connection.unregister_client(client.get_audio_handle());
|
||||
client.set_audio_handle(undefined);
|
||||
if(client.getVoiceClient() && voice_connection) {
|
||||
voice_connection.unregisterVoiceClient(client.getVoiceClient());
|
||||
client.setVoiceClient(undefined);
|
||||
}
|
||||
client.destroy();
|
||||
}
|
||||
|
|
|
@ -9,14 +9,14 @@ export enum InputConsumerType {
|
|||
}
|
||||
export interface CallbackInputConsumer {
|
||||
type: InputConsumerType.CALLBACK;
|
||||
callback_audio?: (buffer: AudioBuffer) => any;
|
||||
callback_buffer?: (buffer: Float32Array, samples: number, channels: number) => any;
|
||||
callbackAudio?: (buffer: AudioBuffer) => any;
|
||||
callbackBuffer?: (buffer: Float32Array, samples: number, channels: number) => any;
|
||||
}
|
||||
|
||||
export interface NodeInputConsumer {
|
||||
type: InputConsumerType.NODE;
|
||||
callback_node: (source_node: AudioNode) => any;
|
||||
callback_disconnect: (source_node: AudioNode) => any;
|
||||
callbackNode: (source_node: AudioNode) => any;
|
||||
callbackDisconnect: (source_node: AudioNode) => any;
|
||||
}
|
||||
|
||||
export interface NativeInputConsumer {
|
||||
|
@ -54,6 +54,23 @@ export interface InputEvents {
|
|||
notify_voice_end: {}
|
||||
}
|
||||
|
||||
export enum FilterMode {
|
||||
/**
|
||||
* Apply all filters and act according to the output
|
||||
*/
|
||||
Filter,
|
||||
|
||||
/**
|
||||
* Bypass all filters and replay the audio
|
||||
*/
|
||||
Bypass,
|
||||
|
||||
/**
|
||||
* Block all communication
|
||||
*/
|
||||
Block
|
||||
}
|
||||
|
||||
export interface AbstractInput {
|
||||
readonly events: Registry<InputEvents>;
|
||||
|
||||
|
@ -68,6 +85,9 @@ export interface AbstractInput {
|
|||
*/
|
||||
isFiltered() : boolean;
|
||||
|
||||
getFilterMode() : FilterMode;
|
||||
setFilterMode(mode: FilterMode);
|
||||
|
||||
currentDeviceId() : string | undefined;
|
||||
|
||||
/*
|
||||
|
@ -90,9 +110,9 @@ export interface AbstractInput {
|
|||
}
|
||||
|
||||
export interface LevelMeter {
|
||||
device() : IDevice;
|
||||
getDevice() : IDevice;
|
||||
|
||||
set_observer(callback: (value: number) => any);
|
||||
setObserver(callback: (value: number) => any);
|
||||
|
||||
destroy();
|
||||
}
|
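// Sketch (not part of the diff): how the new FilterMode is driven from the outside.
// The echo test needs the own voice looped back, so it bypasses the filters; Block is
// used while filters are being (re)initialized. `input` may be any AbstractInput implementation.
import {AbstractInput, FilterMode} from "tc-shared/voice/RecorderBase";

function applyEchoTestState(input: AbstractInput, testRunning: boolean) {
    if(testRunning) {
        /* replay everything, ignoring the threshold/PPT filters */
        input.setFilterMode(FilterMode.Bypass);
    } else {
        /* back to normal filtered recording */
        input.setFilterMode(FilterMode.Filter);
    }
}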
|
@ -1,6 +1,6 @@
|
|||
import * as log from "tc-shared/log";
|
||||
import {LogCategory, logWarn} from "tc-shared/log";
|
||||
import {AbstractInput} from "tc-shared/voice/RecorderBase";
|
||||
import {LogCategory, logError, logWarn} from "tc-shared/log";
|
||||
import {AbstractInput, FilterMode} from "tc-shared/voice/RecorderBase";
|
||||
import {KeyDescriptor, KeyHook} from "tc-shared/PPTListener";
|
||||
import {Settings, settings} from "tc-shared/settings";
|
||||
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
||||
|
@ -34,9 +34,9 @@ export interface RecorderProfileConfig {
|
|||
}
|
||||
}
|
||||
|
||||
export let default_recorder: RecorderProfile; /* needs initialize */
|
||||
export function set_default_recorder(recorder: RecorderProfile) {
|
||||
default_recorder = recorder;
|
||||
export let defaultRecorder: RecorderProfile; /* needs initialize */
|
||||
export function setDefaultRecorder(recorder: RecorderProfile) {
|
||||
defaultRecorder = recorder;
|
||||
}
|
||||
|
||||
export class RecorderProfile {
|
||||
|
@ -61,10 +61,7 @@ export class RecorderProfile {
|
|||
|
||||
private registeredFilter = {
|
||||
"ppt-gate": undefined as StateFilter,
|
||||
"threshold": undefined as ThresholdFilter,
|
||||
|
||||
/* disable voice transmission by default, e.g. when reinitializing filters etc. */
|
||||
"default-disabled": undefined as StateFilter
|
||||
"threshold": undefined as ThresholdFilter
|
||||
}
|
||||
|
||||
constructor(name: string, volatile?: boolean) {
|
||||
|
@ -148,10 +145,7 @@ export class RecorderProfile {
|
|||
this.callback_stop();
|
||||
});
|
||||
|
||||
this.registeredFilter["default-disabled"] = this.input.createFilter(FilterType.STATE, 20);
|
||||
await this.registeredFilter["default-disabled"].setState(true); /* filter */
|
||||
this.registeredFilter["default-disabled"].setEnabled(true);
|
||||
|
||||
this.input.setFilterMode(FilterMode.Block);
|
||||
this.registeredFilter["ppt-gate"] = this.input.createFilter(FilterType.STATE, 100);
|
||||
this.registeredFilter["ppt-gate"].setEnabled(false);
|
||||
|
||||
|
@ -173,21 +167,24 @@ export class RecorderProfile {
|
|||
}
|
||||
|
||||
private save() {
|
||||
if(!this.volatile)
|
||||
if(!this.volatile) {
|
||||
settings.changeGlobal(Settings.FN_PROFILE_RECORD(this.name), this.config);
|
||||
}
|
||||
}
|
||||
|
||||
private reinitializePPTHook() {
|
||||
if(this.config.vad_type !== "push_to_talk")
|
||||
if(this.config.vad_type !== "push_to_talk") {
|
||||
return;
|
||||
}
|
||||
|
||||
if(this.pptHookRegistered) {
|
||||
ppt.unregister_key_hook(this.pptHook);
|
||||
this.pptHookRegistered = false;
|
||||
}
|
||||
|
||||
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"]) {
|
||||
this.pptHook[key] = this.config.vad_push_to_talk[key];
|
||||
}
|
||||
|
||||
ppt.register_key_hook(this.pptHook);
|
||||
this.pptHookRegistered = true;
|
||||
|
@ -196,10 +193,11 @@ export class RecorderProfile {
|
|||
}
|
||||
|
||||
private async reinitializeFilter() {
|
||||
if(!this.input) return;
|
||||
if(!this.input) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* don't let any audio pass while we initialize the other filters */
|
||||
this.registeredFilter["default-disabled"].setEnabled(true);
|
||||
this.input.setFilterMode(FilterMode.Block);
|
||||
|
||||
/* disable all filter */
|
||||
this.registeredFilter["threshold"].setEnabled(false);
|
||||
|
@ -232,8 +230,7 @@ export class RecorderProfile {
|
|||
/* we don't have to initialize any filters */
|
||||
}
|
||||
|
||||
|
||||
this.registeredFilter["default-disabled"].setEnabled(false);
|
||||
this.input.setFilterMode(FilterMode.Filter);
|
||||
}
|
||||
|
||||
async unmount() : Promise<void> {
|
||||
|
@ -247,6 +244,8 @@ export class RecorderProfile {
|
|||
} catch(error) {
|
||||
log.warn(LogCategory.VOICE, tr("Failed to unmount input consumer for profile (%o)"), error);
|
||||
}
|
||||
|
||||
this.input.setFilterMode(FilterMode.Block);
|
||||
}
|
||||
|
||||
this.callback_input_initialized = undefined;
|
||||
|
@ -256,8 +255,8 @@ export class RecorderProfile {
|
|||
this.current_handler = undefined;
|
||||
}
|
||||
|
||||
get_vad_type() { return this.config.vad_type; }
|
||||
set_vad_type(type: VadType) : boolean {
|
||||
getVadType() { return this.config.vad_type; }
|
||||
setVadType(type: VadType) : boolean {
|
||||
if(this.config.vad_type === type)
|
||||
return true;
|
||||
|
||||
|
@ -265,13 +264,15 @@ export class RecorderProfile {
|
|||
return false;
|
||||
|
||||
this.config.vad_type = type;
|
||||
this.reinitializeFilter();
|
||||
this.reinitializeFilter().catch(error => {
|
||||
logError(LogCategory.AUDIO, tr("Failed to reinitialize filters after vad type change: %o"), error);
|
||||
});
|
||||
this.save();
|
||||
return true;
|
||||
}
|
||||
|
||||
get_vad_threshold() { return parseInt(this.config.vad_threshold.threshold as any); } /* for some reason it might be a string... */
|
||||
set_vad_threshold(value: number) {
|
||||
getThresholdThreshold() { return parseInt(this.config.vad_threshold.threshold as any); } /* for some reason it might be a string... */
|
||||
setThresholdThreshold(value: number) {
|
||||
if(this.config.vad_threshold.threshold === value)
|
||||
return;
|
||||
|
||||
|
@ -280,8 +281,8 @@ export class RecorderProfile {
|
|||
this.save();
|
||||
}
|
||||
|
||||
get_vad_ppt_key() : KeyDescriptor { return this.config.vad_push_to_talk; }
|
||||
set_vad_ppt_key(key: KeyDescriptor) {
|
||||
getPushToTalkKey() : KeyDescriptor { return this.config.vad_push_to_talk; }
|
||||
setPushToTalkKey(key: KeyDescriptor) {
|
||||
for(const _key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||
this.config.vad_push_to_talk[_key] = key[_key];
|
||||
|
||||
|
@ -289,8 +290,8 @@ export class RecorderProfile {
|
|||
this.save();
|
||||
}
|
||||
|
||||
get_vad_ppt_delay() { return this.config.vad_push_to_talk.delay; }
|
||||
set_vad_ppt_delay(value: number) {
|
||||
getPushToTalkDelay() { return this.config.vad_push_to_talk.delay; }
|
||||
setPushToTalkDelay(value: number) {
|
||||
if(this.config.vad_push_to_talk.delay === value)
|
||||
return;
|
||||
|
||||
|
@ -299,14 +300,14 @@ export class RecorderProfile {
|
|||
}
|
||||
|
||||
getDeviceId() : string { return this.config.device_id; }
|
||||
set_device(device: IDevice | undefined) : Promise<void> {
|
||||
setDevice(device: IDevice | undefined) : Promise<void> {
|
||||
this.config.device_id = device ? device.deviceId : IDevice.NoDeviceId;
|
||||
this.save();
|
||||
return this.input?.setDeviceId(this.config.device_id) || Promise.resolve();
|
||||
}
|
||||
|
||||
get_volume() : number { return this.input ? (this.input.getVolume() * 100) : this.config.volume; }
|
||||
set_volume(volume: number) {
|
||||
getVolume() : number { return this.input ? (this.input.getVolume() * 100) : this.config.volume; }
|
||||
setVolume(volume: number) {
|
||||
if(this.config.volume === volume)
|
||||
return;
|
||||
|
||||
|
|
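// Sketch (not part of the diff): the renamed camelCase accessors in use. The concrete
// values and the millisecond/percent units are assumptions based on the getters above.
import {defaultRecorder} from "tc-shared/voice/RecorderProfile";

if(defaultRecorder.getVadType() !== "push_to_talk") {
    defaultRecorder.setVadType("push_to_talk");
}
defaultRecorder.setPushToTalkDelay(300); /* release delay in milliseconds */
defaultRecorder.setVolume(100);          /* volume, mirrors getVolume() */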
|
@ -0,0 +1,5 @@
|
|||
import {VoicePlayer} from "tc-shared/voice/VoicePlayer";
|
||||
|
||||
export interface VoiceClient extends VoicePlayer {
|
||||
getClientId() : number;
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
import {Registry} from "tc-shared/events";
|
||||
|
||||
export enum VoicePlayerState {
|
||||
INITIALIZING,
|
||||
|
||||
PREBUFFERING,
|
||||
PLAYING,
|
||||
BUFFERING,
|
||||
STOPPING,
|
||||
STOPPED
|
||||
}
|
||||
|
||||
export interface VoicePlayerEvents {
|
||||
notify_state_changed: { oldState: VoicePlayerState, newState: VoicePlayerState }
|
||||
}
|
||||
|
||||
export interface VoicePlayerLatencySettings {
|
||||
/* time in milliseconds */
|
||||
minBufferTime: number;
|
||||
|
||||
/* time in milliseconds */
|
||||
maxBufferTime: number;
|
||||
}
|
||||
|
||||
export interface VoicePlayer {
|
||||
readonly events: Registry<VoicePlayerEvents>;
|
||||
|
||||
/**
|
||||
* @returns Returns the current voice player state.
|
||||
* Subscribe to the "notify_state_changed" event to receive player changes.
|
||||
*/
|
||||
getState() : VoicePlayerState;
|
||||
|
||||
/**
|
||||
* @returns The volume multiplier in a range from [0, 1]
|
||||
*/
|
||||
getVolume() : number;
|
||||
|
||||
/**
|
||||
* @param volume The volume multiplier in a range from [0, 1]
|
||||
*/
|
||||
setVolume(volume: number);
|
||||
|
||||
/**
|
||||
* Abort replaying the currently pending buffers.
|
||||
* If new buffers arrive, a new replay will be started.
|
||||
*/
|
||||
abortReplay();
|
||||
|
||||
/**
|
||||
* Flush the current buffer.
|
||||
* This will most likely set the player into the buffering mode.
|
||||
*/
|
||||
flushBuffer();
|
||||
|
||||
/**
|
||||
* Get the currently used latency settings
|
||||
*/
|
||||
getLatencySettings() : Readonly<VoicePlayerLatencySettings>;
|
||||
|
||||
/**
|
||||
* @param settings The new latency settings to be used
|
||||
*/
|
||||
setLatencySettings(settings: VoicePlayerLatencySettings);
|
||||
|
||||
/**
|
||||
* Reset the latency settings to the default
|
||||
*/
|
||||
resetLatencySettings();
|
||||
}
|
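// Sketch (not part of the diff): observing a VoicePlayer as declared above. `player`
// may be any implementation; the 60ms/400ms buffer window mirrors the old web client
// defaults and is only an assumption here.
import {VoicePlayer, VoicePlayerState} from "tc-shared/voice/VoicePlayer";

function watchPlayback(player: VoicePlayer) {
    player.events.on("notify_state_changed", event => {
        if(event.newState === VoicePlayerState.BUFFERING) {
            console.log("voice player ran out of buffers (was %s)", VoicePlayerState[event.oldState]);
        }
    });

    player.setLatencySettings({ minBufferTime: 60, maxBufferTime: 400 });
}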
|
@ -0,0 +1,73 @@
|
|||
import {Registry} from "tc-shared/events";
|
||||
import {VoicePlayer} from "tc-shared/voice/VoicePlayer";
|
||||
|
||||
export interface WhisperTargetChannelClients {
|
||||
target: "channel-clients",
|
||||
|
||||
channels: number[],
|
||||
clients: number[]
|
||||
}
|
||||
|
||||
export interface WhisperTargetGroups {
|
||||
target: "groups",
|
||||
/* TODO! */
|
||||
}
|
||||
|
||||
export interface WhisperTargetEcho {
|
||||
target: "echo",
|
||||
}
|
||||
|
||||
export type WhisperTarget = WhisperTargetGroups | WhisperTargetChannelClients | WhisperTargetEcho;
|
||||
|
||||
export interface WhisperSessionEvents {
|
||||
notify_state_changed: { oldState: WhisperSessionState, newState: WhisperSessionState },
|
||||
notify_blocked_state_changed: { oldState: boolean, newState: boolean },
|
||||
notify_timed_out: {}
|
||||
}
|
||||
|
||||
export enum WhisperSessionState {
|
||||
/* the session is getting initialized, not all variables may be set */
|
||||
INITIALIZING,
|
||||
|
||||
/* there is currently no whispering */
|
||||
PAUSED,
|
||||
|
||||
/* we're replaying some whisper */
|
||||
PLAYING,
|
||||
|
||||
/* Something in the initialize process went wrong. */
|
||||
INITIALIZE_FAILED
|
||||
}
|
||||
|
||||
export const kUnknownWhisperClientUniqueId = "unknown";
|
||||
|
||||
export interface WhisperSession {
|
||||
readonly events: Registry<WhisperSessionEvents>;
|
||||
|
||||
/* get information about the whisperer */
|
||||
getClientId() : number;
|
||||
|
||||
/* only ensured to be valid if session has been initialized */
|
||||
getClientName() : string | undefined;
|
||||
|
||||
/* only ensured to be valid if session has been initialized */
|
||||
getClientUniqueId() : string | undefined;
|
||||
|
||||
getSessionState() : WhisperSessionState;
|
||||
|
||||
isBlocked() : boolean;
|
||||
setBlocked(blocked: boolean);
|
||||
|
||||
getSessionTimeout() : number;
|
||||
setSessionTimeout(timeout: number);
|
||||
|
||||
getLastWhisperTimestamp() : number;
|
||||
|
||||
/**
|
||||
* This is only valid if the session has been initialized successfully,
|
||||
* and it hasn't been blocked
|
||||
*
|
||||
* @returns Returns the voice player
|
||||
*/
|
||||
getVoicePlayer() : VoicePlayer | undefined;
|
||||
}
|
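// Sketch (not part of the diff): the WhisperTarget union declared above. The "echo"
// target is what the new echo test uses to loop the own voice back; the ids below are
// example values only.
import {WhisperTarget} from "tc-shared/voice/VoiceWhisper";

const echoTarget: WhisperTarget = { target: "echo" };
const channelTarget: WhisperTarget = {
    target: "channel-clients",
    channels: [1],   /* target channel ids */
    clients: [2, 3]  /* target client ids */
};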
|
@ -1,48 +0,0 @@
|
|||
import {Registry} from "tc-shared/events";
|
||||
|
||||
export interface WhisperSessionEvents {
|
||||
notify_state_changed: { oldState: WhisperSessionState, newState: WhisperSessionState }
|
||||
}
|
||||
|
||||
export enum WhisperSessionState {
|
||||
/* the sesston is getting initialized, not all variables may be set */
|
||||
INITIALIZING,
|
||||
|
||||
/* there is currently no whispering */
|
||||
PAUSED,
|
||||
|
||||
/* we're currently buffering */
|
||||
BUFFERING,
|
||||
|
||||
/* we're replaying some whisper */
|
||||
PLAYING,
|
||||
|
||||
/* we're currently receiving a whisper, but it has been blocked */
|
||||
BLOCKED
|
||||
}
|
||||
|
||||
export const kUnknownWhisperClientUniqueId = "unknown";
|
||||
|
||||
export interface WhisperSession {
|
||||
readonly events: Registry<WhisperSessionEvents>;
|
||||
|
||||
/* get information about the whisperer */
|
||||
getClientId() : number;
|
||||
|
||||
/* only ensured to be valid if session has been initialized */
|
||||
getClientName() : string | undefined;
|
||||
|
||||
/* only ensured to be valid if session has been initialized */
|
||||
getClientUniqueId() : string | undefined;
|
||||
|
||||
isBlocked() : boolean;
|
||||
setBlocked(flag: boolean);
|
||||
|
||||
getSessionTimeout() : number;
|
||||
setSessionTimeout() : number;
|
||||
|
||||
getLastWhisperTimestamp() : number;
|
||||
|
||||
setVolume(volume: number);
|
||||
getVolume() : number;
|
||||
}
|
|
@ -18,12 +18,13 @@ export class AudioClient {
|
|||
this.handle.destroyClient(this.clientId);
|
||||
}
|
||||
|
||||
enqueueBuffer(buffer: Uint8Array, packetId: number, codec: number) {
|
||||
enqueueBuffer(buffer: Uint8Array, packetId: number, codec: number, head: boolean) {
|
||||
this.handle.getWorker().executeThrow("enqueue-audio-packet", {
|
||||
clientId: this.clientId,
|
||||
|
||||
codec: codec,
|
||||
packetId: packetId,
|
||||
head: head,
|
||||
|
||||
buffer: buffer.buffer,
|
||||
byteLength: buffer.byteLength,
|
||||
|
|
|
@ -7,6 +7,7 @@ export interface AWCommand {
|
|||
clientId: number,
|
||||
packetId: number,
|
||||
codec: number,
|
||||
head: boolean,
|
||||
|
||||
buffer: ArrayBuffer,
|
||||
byteLength: number,
|
||||
|
|
|
@ -53,10 +53,14 @@ workerHandler.registerMessageHandler("create-client", () => {
|
|||
}
|
||||
});
|
||||
|
||||
workerHandler.registerMessageHandler("destroy-client", payload => {
|
||||
audioLibrary.audio_client_destroy(payload.clientId);
|
||||
})
|
||||
|
||||
workerHandler.registerMessageHandler("initialize", async () => {
|
||||
await initializeAudioLib();
|
||||
})
|
||||
|
||||
workerHandler.registerMessageHandler("enqueue-audio-packet", payload => {
|
||||
audioLibrary.audio_client_enqueue_buffer(payload.clientId, new Uint8Array(payload.buffer, payload.byteOffset, payload.byteLength), payload.packetId, payload.codec);
|
||||
audioLibrary.audio_client_enqueue_buffer(payload.clientId, new Uint8Array(payload.buffer, payload.byteOffset, payload.byteLength), payload.packetId, payload.codec, payload.head);
|
||||
});
|
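// Sketch (not part of the diff): forwarding a received voice packet into the audio worker
// with the new `head` flag, which marks the first packet of a transmission so the decoder
// can reset its state. `audioClient` and `packet` are assumptions mirroring the voice bridge.
audioClient.enqueueBuffer(packet.payload, packet.voiceId, packet.codec, packet.head);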
|
@ -2,6 +2,7 @@ import {AudioRecorderBacked, DeviceList, IDevice,} from "tc-shared/audio/recorde
|
|||
import {Registry} from "tc-shared/events";
|
||||
import {
|
||||
AbstractInput,
|
||||
FilterMode,
|
||||
InputConsumer,
|
||||
InputConsumerType,
|
||||
InputEvents,
|
||||
|
@ -124,6 +125,7 @@ class JavascriptInput implements AbstractInput {
|
|||
|
||||
private registeredFilters: (Filter & JAbstractFilter<AudioNode>)[] = [];
|
||||
private inputFiltered: boolean = false;
|
||||
private filterMode: FilterMode = FilterMode.Block;
|
||||
|
||||
private startPromise: Promise<InputStartResult>;
|
||||
|
||||
|
@ -159,8 +161,13 @@ class JavascriptInput implements AbstractInput {
|
|||
private initializeFilters() {
|
||||
this.registeredFilters.forEach(e => e.finalize());
|
||||
this.registeredFilters.sort((a, b) => a.priority - b.priority);
|
||||
if(!this.audioContext || !this.audioNodeVolume) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(this.audioContext && this.audioNodeVolume) {
|
||||
if(this.filterMode === FilterMode.Block) {
|
||||
this.switchSourceNode(this.audioNodeMute);
|
||||
} else if(this.filterMode === FilterMode.Filter) {
|
||||
const activeFilters = this.registeredFilters.filter(e => e.isEnabled());
|
||||
|
||||
let chain = "output <- ";
|
||||
|
@ -176,7 +183,10 @@ class JavascriptInput implements AbstractInput {
|
|||
logDebug(LogCategory.AUDIO, tr("Input filter chain: %s"), chain);
|
||||
|
||||
this.switchSourceNode(currentSource);
|
||||
} else if(this.filterMode === FilterMode.Bypass) {
|
||||
this.switchSourceNode(this.audioNodeVolume);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private handleAudio(event: AudioProcessingEvent) {
|
||||
|
@ -184,11 +194,11 @@ class JavascriptInput implements AbstractInput {
|
|||
return;
|
||||
}
|
||||
|
||||
if(this.consumer.callback_audio) {
|
||||
this.consumer.callback_audio(event.inputBuffer);
|
||||
if(this.consumer.callbackAudio) {
|
||||
this.consumer.callbackAudio(event.inputBuffer);
|
||||
}
|
||||
|
||||
if(this.consumer.callback_buffer) {
|
||||
if(this.consumer.callbackBuffer) {
|
||||
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
|
||||
}
|
||||
}
|
||||
|
@ -245,7 +255,7 @@ class JavascriptInput implements AbstractInput {
|
|||
this.currentAudioStream.connect(this.audioNodeVolume);
|
||||
|
||||
this.state = InputState.RECORDING;
|
||||
this.recalculateFilterStatus(true);
|
||||
this.updateFilterStatus(true);
|
||||
|
||||
return InputStartResult.EOK;
|
||||
} catch(error) {
|
||||
|
@ -329,12 +339,12 @@ class JavascriptInput implements AbstractInput {
|
|||
throw tr("unknown filter type");
|
||||
}
|
||||
|
||||
filter.callback_active_change = () => this.recalculateFilterStatus(false);
|
||||
filter.callback_active_change = () => this.updateFilterStatus(false);
|
||||
filter.callback_enabled_change = () => this.initializeFilters();
|
||||
|
||||
this.registeredFilters.push(filter);
|
||||
this.initializeFilters();
|
||||
this.recalculateFilterStatus(false);
|
||||
this.updateFilterStatus(false);
|
||||
return filter as any;
|
||||
}
|
||||
|
||||
|
@ -356,7 +366,7 @@ class JavascriptInput implements AbstractInput {
|
|||
|
||||
this.registeredFilters = [];
|
||||
this.initializeFilters();
|
||||
this.recalculateFilterStatus(false);
|
||||
this.updateFilterStatus(false);
|
||||
}
|
||||
|
||||
removeFilter(filterInstance: Filter) {
|
||||
|
@ -368,11 +378,24 @@ class JavascriptInput implements AbstractInput {
|
|||
filter.enabled = false;
|
||||
|
||||
this.initializeFilters();
|
||||
this.recalculateFilterStatus(false);
|
||||
this.updateFilterStatus(false);
|
||||
}
|
||||
|
||||
private recalculateFilterStatus(forceUpdate: boolean) {
|
||||
let filtered = this.registeredFilters.filter(e => e.isEnabled()).filter(e => e.active).length > 0;
|
||||
private calculateCurrentFilterStatus() {
|
||||
switch (this.filterMode) {
|
||||
case FilterMode.Block:
|
||||
return true;
|
||||
|
||||
case FilterMode.Bypass:
|
||||
return false;
|
||||
|
||||
case FilterMode.Filter:
|
||||
return this.registeredFilters.filter(e => e.isEnabled()).filter(e => e.active).length > 0;
|
||||
}
|
||||
}
|
||||
|
||||
private updateFilterStatus(forceUpdate: boolean) {
|
||||
let filtered = this.calculateCurrentFilterStatus();
|
||||
if(filtered === this.inputFiltered && !forceUpdate)
|
||||
return;
|
||||
|
||||
|
@ -391,21 +414,25 @@ class JavascriptInput implements AbstractInput {
|
|||
async setConsumer(consumer: InputConsumer) {
|
||||
if(this.consumer) {
|
||||
if(this.consumer.type == InputConsumerType.NODE) {
|
||||
if(this.sourceNode)
|
||||
(this.consumer as NodeInputConsumer).callback_disconnect(this.sourceNode)
|
||||
if(this.sourceNode) {
|
||||
this.consumer.callbackDisconnect(this.sourceNode);
|
||||
}
|
||||
} else if(this.consumer.type === InputConsumerType.CALLBACK) {
|
||||
if(this.sourceNode)
|
||||
if(this.sourceNode) {
|
||||
this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(consumer) {
|
||||
if(consumer.type == InputConsumerType.CALLBACK) {
|
||||
if(this.sourceNode)
|
||||
if(this.sourceNode) {
|
||||
this.sourceNode.connect(this.audioNodeCallbackConsumer);
|
||||
}
|
||||
} else if(consumer.type == InputConsumerType.NODE) {
|
||||
if(this.sourceNode)
|
||||
(consumer as NodeInputConsumer).callback_node(this.sourceNode);
|
||||
if(this.sourceNode) {
|
||||
consumer.callbackNode(this.sourceNode);
|
||||
}
|
||||
} else {
|
||||
throw "native callback consumers are not supported!";
|
||||
}
|
||||
|
@ -418,11 +445,11 @@ class JavascriptInput implements AbstractInput {
|
|||
if(this.consumer.type == InputConsumerType.NODE) {
|
||||
const node_consumer = this.consumer as NodeInputConsumer;
|
||||
if(this.sourceNode) {
|
||||
node_consumer.callback_disconnect(this.sourceNode);
|
||||
node_consumer.callbackDisconnect(this.sourceNode);
|
||||
}
|
||||
|
||||
if(newNode) {
|
||||
node_consumer.callback_node(newNode);
|
||||
node_consumer.callbackNode(newNode);
|
||||
}
|
||||
} else if(this.consumer.type == InputConsumerType.CALLBACK) {
|
||||
this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
|
||||
|
@ -461,6 +488,20 @@ class JavascriptInput implements AbstractInput {
|
|||
isFiltered(): boolean {
|
||||
return this.state === InputState.RECORDING ? this.inputFiltered : true;
|
||||
}
|
||||
|
||||
getFilterMode(): FilterMode {
|
||||
return this.filterMode;
|
||||
}
|
||||
|
||||
setFilterMode(mode: FilterMode) {
|
||||
if(this.filterMode === mode) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.filterMode = mode;
|
||||
this.updateFilterStatus(false);
|
||||
this.initializeFilters();
|
||||
}
|
||||
}
|
||||
|
||||
class JavascriptLevelMeter implements LevelMeter {
|
||||
|
@ -570,11 +611,11 @@ class JavascriptLevelMeter implements LevelMeter {
|
|||
}
|
||||
}
|
||||
|
||||
device(): IDevice {
|
||||
getDevice(): IDevice {
|
||||
return this._device;
|
||||
}
|
||||
|
||||
set_observer(callback: (value: number) => any) {
|
||||
setObserver(callback: (value: number) => any) {
|
||||
this._callback = callback;
|
||||
}
|
||||
|
||||
|
|
|
@ -169,6 +169,10 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
|
|||
}
|
||||
|
||||
private updateGainNode(increaseSilenceCount: boolean) {
|
||||
if(!this.audioNode) {
|
||||
return;
|
||||
}
|
||||
|
||||
let state;
|
||||
if(this.currentLevel > this.threshold) {
|
||||
this.silenceCount = 0;
|
||||
|
@ -204,7 +208,10 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
|
|||
}
|
||||
|
||||
this.paused = flag;
|
||||
this.initializeAnalyzer();
|
||||
|
||||
if(!this.paused) {
|
||||
this.initializeAnalyzer();
|
||||
}
|
||||
}
|
||||
|
||||
registerLevelCallback(callback: (value: number) => void) {
|
||||
|
@ -216,7 +223,7 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
|
|||
}
|
||||
|
||||
private initializeAnalyzer() {
|
||||
if(this.analyzeTask) {
|
||||
if(this.analyzeTask || !this.audioNode) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,292 +1,15 @@
|
|||
import * as aplayer from "../audio/player";
|
||||
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
|
||||
import {LatencySettings, PlayerState, VoiceClient} from "tc-shared/connection/VoiceConnection";
|
||||
import {AudioResampler} from "tc-backend/web/voice/AudioResampler";
|
||||
import {AudioClient} from "tc-backend/web/audio-lib/AudioClient";
|
||||
import {getAudioLibrary} from "tc-backend/web/audio-lib";
|
||||
import {VoicePacket} from "tc-backend/web/voice/bridge/VoiceBridge";
|
||||
import {VoiceClient} from "tc-shared/voice/VoiceClient";
|
||||
import {WebVoicePlayer} from "tc-backend/web/voice/VoicePlayer";
|
||||
|
||||
export class VoiceClientController implements VoiceClient {
|
||||
callback_playback: () => any;
|
||||
callback_state_changed: (new_state: PlayerState) => any;
|
||||
callback_stopped: () => any;
|
||||
client_id: number;
|
||||
export class VoiceClientController extends WebVoicePlayer implements VoiceClient {
|
||||
private readonly clientId: number;
|
||||
|
||||
private speakerContext: AudioContext;
|
||||
private gainNode: GainNode;
|
||||
|
||||
private playerState: PlayerState = PlayerState.STOPPED;
|
||||
|
||||
private currentPlaybackTime: number = 0;
|
||||
private bufferTimeout: number;
|
||||
|
||||
private bufferQueueTime: number = 0;
|
||||
private bufferQueue: AudioBuffer[] = [];
|
||||
private playingNodes: AudioBufferSourceNode[] = [];
|
||||
|
||||
private currentVolume: number = 1;
|
||||
private latencySettings: LatencySettings;
|
||||
|
||||
private audioInitializePromise: Promise<void>;
|
||||
private audioClient: AudioClient;
|
||||
private resampler: AudioResampler;
|
||||
|
||||
constructor(client_id: number) {
|
||||
this.client_id = client_id;
|
||||
this.reset_latency_settings();
|
||||
|
||||
this.resampler = new AudioResampler(48000);
|
||||
aplayer.on_ready(() => {
|
||||
this.speakerContext = aplayer.context();
|
||||
this.gainNode = aplayer.context().createGain();
|
||||
this.gainNode.connect(this.speakerContext.destination);
|
||||
this.gainNode.gain.value = this.currentVolume;
|
||||
});
|
||||
constructor(clientId) {
|
||||
super();
|
||||
this.clientId = clientId;
|
||||
}
|
||||
|
||||
private initializeAudio() : Promise<void> {
|
||||
if(this.audioInitializePromise) {
|
||||
return this.audioInitializePromise;
|
||||
}
|
||||
|
||||
this.audioInitializePromise = (async () => {
|
||||
this.audioClient = await getAudioLibrary().createClient();
|
||||
this.audioClient.callback_decoded = buffer => {
|
||||
this.resampler.resample(buffer).then(buffer => {
|
||||
this.playbackAudioBuffer(buffer);
|
||||
});
|
||||
}
|
||||
this.audioClient.callback_ended = () => {
|
||||
this.stopAudio(false);
|
||||
};
|
||||
})();
|
||||
return this.audioInitializePromise;
|
||||
}
|
||||
|
||||
public enqueuePacket(packet: VoicePacket) {
|
||||
if(!this.audioClient && packet.payload.length === 0) {
|
||||
return;
|
||||
} else {
|
||||
this.initializeAudio().then(() => {
|
||||
if(!this.audioClient) {
|
||||
/* we've already been destroyed */
|
||||
return;
|
||||
}
|
||||
|
||||
this.audioClient.enqueueBuffer(packet.payload, packet.voiceId, packet.codec);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public destroy() {
|
||||
this.audioClient?.destroy();
|
||||
this.audioClient = undefined;
|
||||
}
|
||||
|
||||
playbackAudioBuffer(buffer: AudioBuffer) {
|
||||
if(!buffer) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Got empty or undefined buffer! Dropping it"));
|
||||
return;
|
||||
}
|
||||
|
||||
if(!this.speakerContext) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Failed to replay audio. Global audio context not initialized yet!"));
|
||||
return;
|
||||
}
|
||||
|
||||
if (buffer.sampleRate != this.speakerContext.sampleRate) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Source sample rate isn't equal to playback sample rate! (%o | %o)"), buffer.sampleRate, this.speakerContext.sampleRate);
|
||||
}
|
||||
|
||||
if(this.playerState == PlayerState.STOPPED || this.playerState == PlayerState.STOPPING) {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Starting new playback"));
|
||||
this.setPlayerState(PlayerState.PREBUFFERING);
|
||||
}
|
||||
|
||||
if(this.playerState === PlayerState.PREBUFFERING || this.playerState === PlayerState.BUFFERING) {
|
||||
this.resetBufferTimeout(true);
|
||||
this.bufferQueue.push(buffer);
|
||||
this.bufferQueueTime += buffer.duration;
|
||||
if(this.bufferQueueTime <= this.latencySettings.min_buffer / 1000) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* finished buffering */
|
||||
if(this.playerState == PlayerState.PREBUFFERING) {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Prebuffering succeeded (Replaying now)"));
|
||||
if(this.callback_playback) {
|
||||
this.callback_playback();
|
||||
}
|
||||
} else {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Buffering succeeded (Replaying now)"));
|
||||
}
|
||||
|
||||
this.replayBufferQueue();
|
||||
this.setPlayerState(PlayerState.PLAYING);
|
||||
} else if(this.playerState === PlayerState.PLAYING) {
|
||||
const latency = this.getCurrentPlaybackLatency();
|
||||
if(latency > (this.latencySettings.max_buffer / 1000)) {
|
||||
logWarn(LogCategory.VOICE, tr("Dropping replay buffer for client %d because of too high replay latency. (Current: %f, Max: %f)"),
|
||||
this.client_id, latency.toFixed(3), (this.latencySettings.max_buffer / 1000).toFixed(3));
|
||||
return;
|
||||
}
|
||||
this.enqueueBufferForPayback(buffer);
|
||||
} else {
|
||||
logError(LogCategory.AUDIO, tr("This block should be unreachable!"));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
getCurrentPlaybackLatency() {
|
||||
return Math.max(this.currentPlaybackTime - this.speakerContext.currentTime, 0);
|
||||
}
|
||||
|
||||
stopAudio(abortPlayback: boolean) {
|
||||
if(abortPlayback) {
|
||||
this.setPlayerState(PlayerState.STOPPED);
|
||||
this.flush();
|
||||
if(this.callback_stopped) {
|
||||
this.callback_stopped();
|
||||
}
|
||||
} else {
|
||||
this.setPlayerState(PlayerState.STOPPING);
|
||||
|
||||
/* replay all pending buffers */
|
||||
this.replayBufferQueue();
|
||||
|
||||
/* test if there are any buffers which are currently played, if not the state will change to stopped */
|
||||
this.testReplayState();
|
||||
}
|
||||
}
|
||||
|
||||
private replayBufferQueue() {
|
||||
for(const buffer of this.bufferQueue)
|
||||
this.enqueueBufferForPayback(buffer);
|
||||
this.bufferQueue = [];
|
||||
this.bufferQueueTime = 0;
|
||||
}
|
||||
|
||||
private enqueueBufferForPayback(buffer: AudioBuffer) {
|
||||
/* advance the playback time index, we seem to be behind a bit */
|
||||
if(this.currentPlaybackTime < this.speakerContext.currentTime)
|
||||
this.currentPlaybackTime = this.speakerContext.currentTime;
|
||||
|
||||
const player = this.speakerContext.createBufferSource();
|
||||
player.buffer = buffer;
|
||||
|
||||
player.onended = () => this.handleBufferPlaybackEnded(player);
|
||||
this.playingNodes.push(player);
|
||||
|
||||
player.connect(this.gainNode);
|
||||
player.start(this.currentPlaybackTime);
|
||||
|
||||
this.currentPlaybackTime += buffer.duration;
|
||||
}
|
||||
|
||||
private handleBufferPlaybackEnded(node: AudioBufferSourceNode) {
|
||||
this.playingNodes.remove(node);
|
||||
this.testReplayState();
|
||||
}
|
||||
|
||||
private testReplayState() {
|
||||
if(this.bufferQueue.length > 0 || this.playingNodes.length > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(this.playerState === PlayerState.STOPPING) {
|
||||
/* All buffers have been replayed successfully */
|
||||
this.setPlayerState(PlayerState.STOPPED);
|
||||
if(this.callback_stopped) {
|
||||
this.callback_stopped();
|
||||
}
|
||||
} else if(this.playerState === PlayerState.PLAYING) {
|
||||
logDebug(LogCategory.VOICE, tr("Client %d has a buffer underflow. Changing state to buffering."), this.client_id);
|
||||
this.setPlayerState(PlayerState.BUFFERING);
|
||||
}
|
||||
}
|
||||
|
||||
/***
|
||||
* Schedule a new buffer timeout.
|
||||
* The buffer timeout is used to playback even small amounts of audio, which are less than the min. buffer size.
|
||||
* @param scheduleNewTimeout
|
||||
* @private
|
||||
*/
|
||||
private resetBufferTimeout(scheduleNewTimeout: boolean) {
|
||||
clearTimeout(this.bufferTimeout);
|
||||
|
||||
if(scheduleNewTimeout) {
|
||||
this.bufferTimeout = setTimeout(() => {
|
||||
if(this.playerState == PlayerState.PREBUFFERING || this.playerState == PlayerState.BUFFERING) {
|
||||
logWarn(LogCategory.VOICE, tr("[Audio] Buffering exceeded timeout. Flushing and stopping replay."));
|
||||
this.stopAudio(false);
|
||||
}
|
||||
this.bufferTimeout = undefined;
|
||||
}, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
private setPlayerState(state: PlayerState) {
|
||||
if(this.playerState === state) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.playerState = state;
|
||||
if(this.callback_state_changed) {
|
||||
this.callback_state_changed(this.playerState);
|
||||
}
|
||||
}
|
||||
|
||||
get_state(): PlayerState {
|
||||
return this.playerState;
|
||||
}
|
||||
|
||||
get_volume(): number {
|
||||
return this.currentVolume;
|
||||
}
|
||||
|
||||
set_volume(volume: number): void {
|
||||
if(this.currentVolume == volume)
|
||||
return;
|
||||
|
||||
this.currentVolume = volume;
|
||||
if(this.gainNode) {
|
||||
this.gainNode.gain.value = volume;
|
||||
}
|
||||
}
|
||||
|
||||
abort_replay() {
|
||||
this.stopAudio(true);
|
||||
}
|
||||
|
||||
support_flush(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
flush() {
|
||||
this.bufferQueue = [];
|
||||
this.bufferQueueTime = 0;
|
||||
|
||||
for(const entry of this.playingNodes) {
|
||||
entry.stop(0);
|
||||
}
|
||||
this.playingNodes = [];
|
||||
}
|
||||
|
||||
latency_settings(settings?: LatencySettings): LatencySettings {
|
||||
if(typeof settings !== "undefined") {
|
||||
this.latencySettings = settings;
|
||||
}
|
||||
return this.latencySettings;
|
||||
}
|
||||
|
||||
reset_latency_settings() {
|
||||
this.latencySettings = {
|
||||
min_buffer: 60,
|
||||
max_buffer: 400
|
||||
};
|
||||
}
|
||||
|
||||
support_latency_settings(): boolean {
|
||||
return true;
|
||||
getClientId(): number {
|
||||
return this.clientId;
|
||||
}
|
||||
}
|
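// Sketch (not part of the diff): after the rewrite the controller only contributes the
// client id; playback state and volume come from WebVoicePlayer via the VoicePlayer
// interface. The wiring below is an assumption for illustration.
const voiceClient = new VoiceClientController(42);
voiceClient.events.on("notify_state_changed", event => {
    console.log("client %d playback state: %o -> %o", voiceClient.getClientId(), event.oldState, event.newState);
});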
|
@ -1,5 +1,5 @@
|
|||
import * as log from "tc-shared/log";
|
||||
import {LogCategory, logDebug, logInfo, logWarn} from "tc-shared/log";
|
||||
import {LogCategory, logDebug, logError, logInfo, logTrace, logWarn} from "tc-shared/log";
|
||||
import * as aplayer from "../audio/player";
|
||||
import {ServerConnection} from "../connection/ServerConnection";
|
||||
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
|
||||
|
@ -8,7 +8,6 @@ import {settings, ValuedSettingsKey} from "tc-shared/settings";
|
|||
import {tr} from "tc-shared/i18n/localize";
|
||||
import {
|
||||
AbstractVoiceConnection,
|
||||
VoiceClient,
|
||||
VoiceConnectionStatus,
|
||||
WhisperSessionInitializer
|
||||
} from "tc-shared/connection/VoiceConnection";
|
||||
|
@ -18,7 +17,14 @@ import {ConnectionState} from "tc-shared/ConnectionHandler";
|
|||
import {VoiceBridge, VoicePacket, VoiceWhisperPacket} from "./bridge/VoiceBridge";
|
||||
import {NativeWebRTCVoiceBridge} from "./bridge/NativeWebRTCVoiceBridge";
|
||||
import {EventType} from "tc-shared/ui/frames/log/Definitions";
|
||||
import {kUnknownWhisperClientUniqueId, WhisperSession} from "tc-shared/voice/Whisper";
|
||||
import {
|
||||
kUnknownWhisperClientUniqueId,
|
||||
WhisperSession,
|
||||
WhisperSessionState,
|
||||
WhisperTarget
|
||||
} from "tc-shared/voice/VoiceWhisper";
|
||||
import {VoiceClient} from "tc-shared/voice/VoiceClient";
|
||||
import {WebWhisperSession} from "tc-backend/web/voice/VoiceWhisper";
|
||||
|
||||
export enum VoiceEncodeType {
|
||||
JS_ENCODE,
|
||||
|
@ -31,6 +37,8 @@ const KEY_VOICE_CONNECTION_TYPE: ValuedSettingsKey<number> = {
|
|||
defaultValue: VoiceEncodeType.NATIVE_ENCODE
|
||||
};
|
||||
|
||||
type CancelableWhisperTarget = WhisperTarget & { canceled: boolean };
|
||||
|
||||
export class VoiceConnection extends AbstractVoiceConnection {
|
||||
readonly connection: ServerConnection;
|
||||
|
||||
|
@ -45,10 +53,13 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
private awaitingAudioInitialize = false;
|
||||
|
||||
private currentAudioSource: RecorderProfile;
|
||||
private voiceClients: VoiceClientController[] = [];
|
||||
private voiceClients: {[key: number]: VoiceClientController} = {};
|
||||
|
||||
private whisperSessionInitializer: WhisperSessionInitializer;
|
||||
private whisperSessions: {[key: number]: WhisperSession} = {};
|
||||
private whisperSessions: {[key: number]: WebWhisperSession} = {};
|
||||
|
||||
private whisperTarget: CancelableWhisperTarget | undefined;
|
||||
private whisperTargetInitialize: Promise<void>;
|
||||
|
||||
private voiceBridge: VoiceBridge;
|
||||
|
||||
|
@ -78,11 +89,8 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
this.acquireVoiceRecorder(undefined, true).catch(error => {
|
||||
log.warn(LogCategory.VOICE, tr("Failed to release voice recorder: %o"), error);
|
||||
}).then(() => {
|
||||
for(const client of this.voiceClients) {
|
||||
client.abort_replay();
|
||||
client.callback_playback = undefined;
|
||||
client.callback_state_changed = undefined;
|
||||
client.callback_stopped = undefined;
|
||||
for(const client of Object.values(this.voiceClients)) {
|
||||
client.abortReplay();
|
||||
}
|
||||
this.voiceClients = undefined;
|
||||
this.currentAudioSource = undefined;
|
||||
|
@ -229,13 +237,13 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
if(chandler.isSpeakerMuted() || chandler.isSpeakerDisabled()) /* we don't need to do anything with sound playback when we're not listening to it */
|
||||
return;
|
||||
|
||||
let client = this.find_client(packet.clientId);
|
||||
let client = this.findVoiceClient(packet.clientId);
|
||||
if(!client) {
|
||||
log.error(LogCategory.VOICE, tr("Having voice from unknown audio client? (ClientID: %o)"), packet.clientId);
|
||||
return;
|
||||
}
|
||||
|
||||
client.enqueuePacket(packet);
|
||||
client.enqueueAudioPacket(packet.voiceId, packet.codec, packet.head, packet.payload);
|
||||
}
|
||||
|
||||
private handleRecorderStop() {
|
||||
|
@ -296,29 +304,29 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
return this.currentAudioSource;
|
||||
}
|
||||
|
||||
availableClients(): VoiceClient[] {
|
||||
return this.voiceClients;
|
||||
availableVoiceClients(): VoiceClient[] {
|
||||
return Object.values(this.voiceClients);
|
||||
}
|
||||
|
||||
find_client(client_id: number) : VoiceClientController | undefined {
|
||||
for(const client of this.voiceClients)
|
||||
if(client.client_id === client_id)
|
||||
return client;
|
||||
return undefined;
|
||||
findVoiceClient(clientId: number) : VoiceClientController | undefined {
|
||||
return this.voiceClients[clientId];
|
||||
}
|
||||
|
||||
unregister_client(client: VoiceClient): Promise<void> {
|
||||
unregisterVoiceClient(client: VoiceClient) {
|
||||
if(!(client instanceof VoiceClientController))
|
||||
throw "Invalid client type";
|
||||
|
||||
delete this.voiceClients[client.getClientId()];
|
||||
client.destroy();
|
||||
this.voiceClients.remove(client);
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
registerClient(client_id: number): VoiceClient {
|
||||
const client = new VoiceClientController(client_id);
|
||||
this.voiceClients.push(client);
|
||||
registerVoiceClient(clientId: number): VoiceClient {
|
||||
if(typeof this.voiceClients[clientId] !== "undefined") {
|
||||
throw tr("voice client already registered");
|
||||
}
|
||||
|
||||
const client = new VoiceClientController(clientId);
|
||||
this.voiceClients[clientId] = client;
|
||||
return client;
|
||||
}
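Since the client list is now keyed by the client id, a register/find/unregister round trip looks like the sketch below (purely illustrative; the id is arbitrary):

/* sketch: voice client registry round trip with the map-based bookkeeping above */
function voiceClientRoundTrip(connection: VoiceConnection, clientId: number) {
    const client = connection.registerVoiceClient(clientId);         /* throws if the id is already registered */
    console.assert(connection.findVoiceClient(clientId) === client); /* constant-time lookup by client id */
    connection.unregisterVoiceClient(client);                        /* deletes the entry and destroys the controller */
}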
|
||||
|
||||
|
@ -339,7 +347,36 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
}
|
||||
|
||||
protected handleWhisperPacket(packet: VoiceWhisperPacket) {
|
||||
console.error("Received voice whisper packet: %o", packet);
|
||||
const clientId = packet.clientId;
|
||||
|
||||
let session = this.whisperSessions[clientId];
|
||||
if(typeof session !== "object") {
|
||||
logDebug(LogCategory.VOICE, tr("Received new whisper from %d (%s)"), packet.clientId, packet.clientNickname);
|
||||
session = (this.whisperSessions[clientId] = new WebWhisperSession(packet));
|
||||
this.whisperSessionInitializer(session).then(result => {
|
||||
session.initializeFromData(result).then(() => {
|
||||
if(this.whisperSessions[clientId] !== session) {
|
||||
/* seems to be an old session */
|
||||
return;
|
||||
}
|
||||
this.events.fire("notify_whisper_initialized", { session });
|
||||
}).catch(error => {
|
||||
logError(LogCategory.VOICE, tr("Failed to internally initialize a voice whisper session: %o"), error);
|
||||
session.setSessionState(WhisperSessionState.INITIALIZE_FAILED);
|
||||
});
|
||||
}).catch(error => {
|
||||
logError(LogCategory.VOICE, tr("Failed to initialize whisper session: %o."), error);
|
||||
session.initializeFailed();
|
||||
});
|
||||
|
||||
session.events.on("notify_timed_out", () => {
|
||||
logTrace(LogCategory.VOICE, tr("Whisper session %d timed out. Dropping session."), session.getClientId());
|
||||
this.dropWhisperSession(session);
|
||||
});
|
||||
this.events.fire("notify_whisper_created", { session: session });
|
||||
}
|
||||
|
||||
session.enqueueWhisperPacket(packet);
|
||||
}
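The flow above defers session metadata to a pluggable initializer: the connection creates a WebWhisperSession from the first packet, asks the registered initializer for the session data, and only then moves the session out of the initializing state. A hedged sketch of such an initializer is below; the field names mirror what initializeFromData reads later in this diff, the full WhisperSessionInitializeData shape may contain more fields, and accepting every whisperer is purely illustrative.

/* sketch: a permissive whisper session initializer (voiceConnection is assumed to be the VoiceConnection above) */
declare const voiceConnection: VoiceConnection;

voiceConnection.setWhisperSessionInitializer(async session => ({
    clientName: session.getClientName(),
    clientUniqueId: session.getClientUniqueId(),
    blocked: false,           /* accept audio from every whispering client */
    sessionTimeout: 5 * 1000  /* drop the session after five seconds without packets */
}));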
|
||||
|
||||
getWhisperSessions(): WhisperSession[] {
|
||||
|
@ -347,7 +384,12 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
}
|
||||
|
||||
dropWhisperSession(session: WhisperSession) {
|
||||
throw "this is currently not supported";
|
||||
if(!(session instanceof WebWhisperSession)) {
|
||||
throw tr("Session isn't an instance of the web whisper system");
|
||||
}
|
||||
|
||||
delete this.whisperSessions[session.getClientId()];
|
||||
session.destroy();
|
||||
}
|
||||
|
||||
setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined) {
|
||||
|
@ -371,6 +413,57 @@ export class VoiceConnection extends AbstractVoiceConnection {
|
|||
getWhisperSessionInitializer(): WhisperSessionInitializer | undefined {
|
||||
return this.whisperSessionInitializer;
|
||||
}
|
||||
|
||||
async startWhisper(target: WhisperTarget): Promise<void> {
|
||||
while(this.whisperTargetInitialize) {
|
||||
this.whisperTarget.canceled = true;
|
||||
await this.whisperTargetInitialize;
|
||||
}
|
||||
|
||||
this.whisperTarget = Object.assign({ canceled: false }, target);
|
||||
try {
|
||||
await (this.whisperTargetInitialize = this.doStartWhisper(this.whisperTarget));
|
||||
} finally {
|
||||
this.whisperTargetInitialize = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
private async doStartWhisper(target: CancelableWhisperTarget) {
|
||||
if(target.target === "echo") {
|
||||
await this.connection.send_command("setwhispertarget", {
|
||||
type: 0x10, /* self */
|
||||
target: 0,
|
||||
id: 0
|
||||
}, { flagset: ["new"] });
|
||||
} else if(target.target === "channel-clients") {
|
||||
throw "target not yet supported";
|
||||
} else if(target.target === "groups") {
|
||||
throw "target not yet supported";
|
||||
} else {
|
||||
throw "target not yet supported";
|
||||
}
|
||||
|
||||
if(target.canceled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.voiceBridge.startWhispering();
|
||||
}
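The echo variant above is what the new echo test drives: the client whispers to itself ("setwhispertarget" with type 0x10) and should then hear its own voice looped back. A hedged usage sketch from a caller's perspective, assuming the echo variant of WhisperTarget carries no fields besides the discriminant:

/* sketch: running a short echo probe against the VoiceConnection above (illustrative only) */
async function runEchoProbe(voiceConnection: VoiceConnection) {
    await voiceConnection.startWhisper({ target: "echo" } as WhisperTarget); /* the server loops our own voice back */
    /* ...speak, and verify that the own client's voice player starts playing... */
    voiceConnection.stopWhisper(); /* sends clearwhispertarget and stops the whisper bridge */
}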
|
||||
|
||||
getWhisperTarget(): WhisperTarget | undefined {
|
||||
return this.whisperTarget;
|
||||
}
|
||||
|
||||
stopWhisper() {
|
||||
if(this.whisperTarget) {
|
||||
this.whisperTarget.canceled = true;
|
||||
this.whisperTargetInitialize = undefined;
|
||||
this.connection.send_command("clearwhispertarget").catch(error => {
|
||||
logWarn(LogCategory.CLIENT, tr("Failed to clear the whisper target: %o"), error);
|
||||
});
|
||||
}
|
||||
this.voiceBridge.stopWhispering();
|
||||
}
|
||||
}
|
||||
|
||||
/* funny fact that typescript doesn't find this */
|
||||
|
|
|
@ -0,0 +1,290 @@
|
|||
import {
|
||||
VoicePlayer,
|
||||
VoicePlayerEvents,
|
||||
VoicePlayerLatencySettings,
|
||||
VoicePlayerState
|
||||
} from "tc-shared/voice/VoicePlayer";
|
||||
import {AudioClient} from "tc-backend/web/audio-lib/AudioClient";
|
||||
import {AudioResampler} from "./AudioResampler";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import * as aplayer from "tc-backend/web/audio/player";
|
||||
import {getAudioLibrary} from "tc-backend/web/audio-lib";
|
||||
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
|
||||
|
||||
const kDefaultLatencySettings = {
|
||||
minBufferTime: 60,
|
||||
maxBufferTime: 400
|
||||
} as VoicePlayerLatencySettings;
|
||||
|
||||
export class WebVoicePlayer implements VoicePlayer {
|
||||
public readonly events: Registry<VoicePlayerEvents>;
|
||||
|
||||
private speakerContext: AudioContext;
|
||||
private gainNode: GainNode;
|
||||
|
||||
private playerState = VoicePlayerState.STOPPED;
|
||||
|
||||
private currentPlaybackTime: number = 0;
|
||||
private bufferTimeout: number;
|
||||
|
||||
private bufferQueueTime: number = 0;
|
||||
private bufferQueue: AudioBuffer[] = [];
|
||||
private playingNodes: AudioBufferSourceNode[] = [];
|
||||
|
||||
private currentVolume: number = 1;
|
||||
private latencySettings: VoicePlayerLatencySettings;
|
||||
|
||||
private audioInitializePromise: Promise<void>;
|
||||
private audioClient: AudioClient;
|
||||
private resampler: AudioResampler;
|
||||
|
||||
constructor() {
|
||||
this.events = new Registry<VoicePlayerEvents>();
|
||||
|
||||
this.resampler = new AudioResampler(48000);
|
||||
aplayer.on_ready(() => {
|
||||
this.speakerContext = aplayer.context();
|
||||
this.gainNode = aplayer.context().createGain();
|
||||
this.gainNode.connect(this.speakerContext.destination);
|
||||
this.gainNode.gain.value = this.currentVolume;
|
||||
this.initializeAudio();
|
||||
});
|
||||
|
||||
this.resetLatencySettings();
|
||||
this.setPlayerState(VoicePlayerState.STOPPED);
|
||||
}
|
||||
|
||||
abortReplay() {
|
||||
this.stopAudio(true);
|
||||
}
|
||||
|
||||
flushBuffer() {
|
||||
this.bufferQueue = [];
|
||||
this.bufferQueueTime = 0;
|
||||
|
||||
for(const entry of this.playingNodes) {
|
||||
entry.stop(0);
|
||||
}
|
||||
this.playingNodes = [];
|
||||
}
|
||||
|
||||
getState(): VoicePlayerState {
return this.playerState;
}
|
||||
|
||||
getVolume(): number {
|
||||
return this.currentVolume;
|
||||
}
|
||||
|
||||
setVolume(volume: number) {
|
||||
if(this.currentVolume == volume) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.currentVolume = volume;
|
||||
if(this.gainNode) {
|
||||
this.gainNode.gain.value = volume;
|
||||
}
|
||||
}
|
||||
|
||||
getLatencySettings(): Readonly<VoicePlayerLatencySettings> {
|
||||
return this.latencySettings;
|
||||
}
|
||||
|
||||
setLatencySettings(settings: VoicePlayerLatencySettings) {
|
||||
this.latencySettings = settings
|
||||
}
|
||||
|
||||
resetLatencySettings() {
|
||||
this.latencySettings = kDefaultLatencySettings;
|
||||
}
|
||||
|
||||
enqueueAudioPacket(packetId: number, codec: number, head: boolean, buffer: Uint8Array) {
|
||||
if(!this.audioClient) {
|
||||
return;
|
||||
} else {
|
||||
|
||||
this.initializeAudio().then(() => {
|
||||
if(!this.audioClient) {
|
||||
/* we've already been destroyed */
|
||||
return;
|
||||
}
|
||||
|
||||
this.audioClient.enqueueBuffer(buffer, packetId, codec, head);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.audioClient?.destroy();
|
||||
this.audioClient = undefined;
|
||||
}
|
||||
|
||||
private initializeAudio() : Promise<void> {
|
||||
if(this.audioInitializePromise) {
|
||||
return this.audioInitializePromise;
|
||||
}
|
||||
|
||||
this.audioInitializePromise = (async () => {
|
||||
this.audioClient = await getAudioLibrary().createClient();
|
||||
this.audioClient.callback_decoded = buffer => {
|
||||
this.resampler.resample(buffer).then(buffer => {
|
||||
this.playbackAudioBuffer(buffer);
|
||||
});
|
||||
}
|
||||
this.audioClient.callback_ended = () => {
|
||||
this.stopAudio(false);
|
||||
};
|
||||
})();
|
||||
return this.audioInitializePromise;
|
||||
}
|
||||
|
||||
playbackAudioBuffer(buffer: AudioBuffer) {
|
||||
if(!buffer) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Got empty or undefined buffer! Dropping it"));
|
||||
return;
|
||||
}
|
||||
|
||||
if(!this.speakerContext) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Failed to replay audio. Global audio context not initialized yet!"));
|
||||
return;
|
||||
}
|
||||
|
||||
if (buffer.sampleRate != this.speakerContext.sampleRate) {
|
||||
logWarn(LogCategory.VOICE, tr("[AudioController] Source sample rate isn't equal to playback sample rate! (%o | %o)"), buffer.sampleRate, this.speakerContext.sampleRate);
|
||||
}
|
||||
|
||||
if(this.playerState == VoicePlayerState.STOPPED || this.playerState == VoicePlayerState.STOPPING) {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Starting new playback"));
|
||||
this.setPlayerState(VoicePlayerState.PREBUFFERING);
|
||||
}
|
||||
|
||||
if(this.playerState === VoicePlayerState.PREBUFFERING || this.playerState === VoicePlayerState.BUFFERING) {
|
||||
this.resetBufferTimeout(true);
|
||||
this.bufferQueue.push(buffer);
|
||||
this.bufferQueueTime += buffer.duration;
|
||||
if(this.bufferQueueTime <= this.latencySettings.minBufferTime / 1000) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* finished buffering */
|
||||
if(this.playerState == VoicePlayerState.PREBUFFERING) {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Prebuffering succeeded (Replaying now)"));
|
||||
} else {
|
||||
logDebug(LogCategory.VOICE, tr("[Audio] Buffering succeeded (Replaying now)"));
|
||||
}
|
||||
|
||||
this.gainNode.gain.value = 0;
|
||||
this.gainNode.gain.linearRampToValueAtTime(this.currentVolume, this.speakerContext.currentTime + .1);
|
||||
|
||||
this.replayBufferQueue();
|
||||
this.setPlayerState(VoicePlayerState.PLAYING);
|
||||
} else if(this.playerState === VoicePlayerState.PLAYING) {
|
||||
const latency = this.getCurrentPlaybackLatency();
|
||||
if(latency > (this.latencySettings.maxBufferTime / 1000)) {
|
||||
logWarn(LogCategory.VOICE, tr("Dropping replay buffer because of too high replay latency. (Current: %f, Max: %f)"),
|
||||
latency.toFixed(3), (this.latencySettings.maxBufferTime / 1000).toFixed(3));
|
||||
return;
|
||||
}
|
||||
this.enqueueBufferForPayback(buffer);
|
||||
} else {
|
||||
logError(LogCategory.AUDIO, tr("This block should be unreachable!"));
|
||||
return;
|
||||
}
|
||||
}
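With the default latency settings above, prebuffering keeps collecting audio until more than 60 ms is queued, and during playback any buffer arriving more than 400 ms ahead of the playback clock is dropped. A small worked check of those two thresholds, reduced to plain milliseconds and independent of the Web Audio objects:

/* sketch: the two latency thresholds from playbackAudioBuffer, in milliseconds */
const settings = { minBufferTime: 60, maxBufferTime: 400 }; /* defaults from kDefaultLatencySettings above */

function shouldKeepPrebuffering(queuedMs: number): boolean {
    return queuedMs <= settings.minBufferTime;   /* keep collecting until more than 60 ms is queued */
}

function shouldDropBuffer(currentLatencyMs: number): boolean {
    return currentLatencyMs > settings.maxBufferTime; /* drop once playback lags more than 400 ms behind */
}

console.log(shouldKeepPrebuffering(40), shouldKeepPrebuffering(80)); /* true false */
console.log(shouldDropBuffer(350), shouldDropBuffer(450));           /* false true */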
|
||||
|
||||
getCurrentPlaybackLatency() {
|
||||
return Math.max(this.currentPlaybackTime - this.speakerContext.currentTime, 0);
|
||||
}
|
||||
|
||||
stopAudio(abortPlayback: boolean) {
|
||||
if(abortPlayback) {
|
||||
this.setPlayerState(VoicePlayerState.STOPPED);
|
||||
this.flushBuffer();
|
||||
} else {
|
||||
this.setPlayerState(VoicePlayerState.STOPPING);
|
||||
|
||||
/* replay all pending buffers */
|
||||
this.replayBufferQueue();
|
||||
|
||||
/* test if there are any buffers which are currently played, if not the state will change to stopped */
|
||||
this.testReplayState();
|
||||
}
|
||||
}
|
||||
|
||||
private replayBufferQueue() {
|
||||
for(const buffer of this.bufferQueue)
|
||||
this.enqueueBufferForPayback(buffer);
|
||||
this.bufferQueue = [];
|
||||
this.bufferQueueTime = 0;
|
||||
}
|
||||
|
||||
private enqueueBufferForPayback(buffer: AudioBuffer) {
|
||||
/* advance the playback time index, we seem to be behind a bit */
|
||||
if(this.currentPlaybackTime < this.speakerContext.currentTime)
|
||||
this.currentPlaybackTime = this.speakerContext.currentTime;
|
||||
|
||||
const player = this.speakerContext.createBufferSource();
|
||||
player.buffer = buffer;
|
||||
|
||||
player.onended = () => this.handleBufferPlaybackEnded(player);
|
||||
this.playingNodes.push(player);
|
||||
|
||||
player.connect(this.gainNode);
|
||||
player.start(this.currentPlaybackTime);
|
||||
|
||||
this.currentPlaybackTime += buffer.duration;
|
||||
}
|
||||
|
||||
private handleBufferPlaybackEnded(node: AudioBufferSourceNode) {
|
||||
this.playingNodes.remove(node);
|
||||
this.testReplayState();
|
||||
}
|
||||
|
||||
private testReplayState() {
|
||||
if(this.bufferQueue.length > 0 || this.playingNodes.length > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(this.playerState === VoicePlayerState.STOPPING) {
|
||||
/* All buffers have been replayed successfully */
|
||||
this.setPlayerState(VoicePlayerState.STOPPED);
|
||||
} else if(this.playerState === VoicePlayerState.PLAYING) {
|
||||
logDebug(LogCategory.VOICE, tr("Voice player has a buffer underflow. Changing state to buffering."));
|
||||
this.setPlayerState(VoicePlayerState.BUFFERING);
|
||||
}
|
||||
}
|
||||
|
||||
/***
|
||||
* Schedule a new buffer timeout.
|
||||
* The buffer timeout is used to play back even small amounts of audio which are less than the min. buffer size.
|
||||
* @param scheduleNewTimeout
|
||||
* @private
|
||||
*/
|
||||
private resetBufferTimeout(scheduleNewTimeout: boolean) {
|
||||
clearTimeout(this.bufferTimeout);
|
||||
|
||||
if(scheduleNewTimeout) {
|
||||
this.bufferTimeout = setTimeout(() => {
|
||||
if(this.playerState == VoicePlayerState.PREBUFFERING || this.playerState == VoicePlayerState.BUFFERING) {
|
||||
logWarn(LogCategory.VOICE, tr("[Audio] Buffering exceeded timeout. Flushing and stopping replay."));
|
||||
this.stopAudio(false);
|
||||
}
|
||||
this.bufferTimeout = undefined;
|
||||
}, 1000);
|
||||
}
|
||||
}
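The buffer timeout above guards the prebuffering path: if less than the minimum amount of audio arrives and nothing follows within a second, the queued buffers are replayed and playback stops instead of waiting forever. A minimal standalone sketch of the same idea (class and names here are illustrative, not part of the client):

/* sketch: flush whatever has been queued if no further audio arrives in time */
class PrebufferGuard {
    private timeoutId: ReturnType<typeof setTimeout> | undefined;

    constructor(private readonly onTimeout: () => void, private readonly timeoutMs = 1000) { }

    /* call whenever a new buffer is queued while still (pre)buffering */
    rearm() {
        this.cancel();
        this.timeoutId = setTimeout(() => {
            this.timeoutId = undefined;
            this.onTimeout(); /* e.g. stopAudio(false): replay what we have, then stop */
        }, this.timeoutMs);
    }

    /* call once enough audio has been buffered and playback has started */
    cancel() {
        if(this.timeoutId !== undefined) {
            clearTimeout(this.timeoutId);
            this.timeoutId = undefined;
        }
    }
}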
|
||||
|
||||
private setPlayerState(state: VoicePlayerState) {
|
||||
if(this.playerState === state) {
|
||||
return;
|
||||
}
|
||||
|
||||
const oldState = this.playerState;
|
||||
this.playerState = state;
|
||||
this.events.fire("notify_state_changed", {
|
||||
oldState: oldState,
|
||||
newState: state
|
||||
});
|
||||
}
|
||||
}
|
|
@ -0,0 +1,158 @@
|
|||
import {WhisperSession, WhisperSessionEvents, WhisperSessionState} from "tc-shared/voice/VoiceWhisper";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {VoicePlayer, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
|
||||
import {WhisperSessionInitializeData} from "tc-shared/connection/VoiceConnection";
|
||||
import {VoiceWhisperPacket} from "tc-backend/web/voice/bridge/VoiceBridge";
|
||||
import {WebVoicePlayer} from "tc-backend/web/voice/VoicePlayer";
|
||||
|
||||
const kMaxUninitializedBuffers = 10;
|
||||
export class WebWhisperSession implements WhisperSession {
|
||||
readonly events: Registry<WhisperSessionEvents>;
|
||||
private readonly clientId: number;
|
||||
|
||||
private clientName: string;
|
||||
private clientUniqueId: string;
|
||||
|
||||
private sessionState: WhisperSessionState;
|
||||
private sessionBlocked: boolean;
|
||||
|
||||
private sessionTimeout: number;
|
||||
private sessionTimeoutId: number;
|
||||
|
||||
private lastWhisperTimestamp: number;
|
||||
private packetBuffer: VoiceWhisperPacket[] = [];
|
||||
|
||||
private voicePlayer: WebVoicePlayer;
|
||||
|
||||
constructor(initialPacket: VoiceWhisperPacket) {
|
||||
this.events = new Registry<WhisperSessionEvents>();
|
||||
this.clientId = initialPacket.clientId;
|
||||
this.clientName = initialPacket.clientNickname;
|
||||
this.clientUniqueId = initialPacket.clientUniqueId;
|
||||
this.sessionState = WhisperSessionState.INITIALIZING;
|
||||
}
|
||||
|
||||
getClientId(): number {
|
||||
return this.clientId;
|
||||
}
|
||||
|
||||
getClientName(): string | undefined {
|
||||
return this.clientName;
|
||||
}
|
||||
|
||||
getClientUniqueId(): string | undefined {
|
||||
return this.clientUniqueId;
|
||||
}
|
||||
|
||||
getLastWhisperTimestamp(): number {
|
||||
return this.lastWhisperTimestamp;
|
||||
}
|
||||
|
||||
getSessionState(): WhisperSessionState {
|
||||
return this.sessionState;
|
||||
}
|
||||
|
||||
getSessionTimeout(): number {
|
||||
return this.sessionTimeout;
|
||||
}
|
||||
|
||||
getVoicePlayer(): VoicePlayer | undefined {
|
||||
return this.voicePlayer;
|
||||
}
|
||||
|
||||
setSessionTimeout(timeout: number) {
|
||||
this.sessionTimeout = timeout;
|
||||
this.resetSessionTimeout();
|
||||
}
|
||||
|
||||
isBlocked(): boolean {
|
||||
return this.sessionBlocked;
|
||||
}
|
||||
|
||||
setBlocked(blocked: boolean) {
|
||||
this.sessionBlocked = blocked;
|
||||
}
|
||||
|
||||
async initializeFromData(data: WhisperSessionInitializeData) {
|
||||
this.clientName = data.clientName;
|
||||
this.clientUniqueId = data.clientUniqueId;
|
||||
|
||||
this.sessionBlocked = data.blocked;
|
||||
this.sessionTimeout = data.sessionTimeout;
|
||||
|
||||
this.voicePlayer = new WebVoicePlayer();
|
||||
this.voicePlayer.events.on("notify_state_changed", event => {
|
||||
if(event.newState === VoicePlayerState.BUFFERING) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.resetSessionTimeout();
|
||||
if(event.newState === VoicePlayerState.PLAYING || event.newState === VoicePlayerState.STOPPING) {
|
||||
this.setSessionState(WhisperSessionState.PLAYING);
|
||||
} else {
|
||||
this.setSessionState(WhisperSessionState.PAUSED);
|
||||
}
|
||||
});
|
||||
this.setSessionState(WhisperSessionState.PAUSED);
|
||||
}
|
||||
|
||||
initializeFailed() {
|
||||
this.setSessionState(WhisperSessionState.INITIALIZE_FAILED);
|
||||
|
||||
/* if we're receiving nothing for more than 5 seconds we can try it again */
|
||||
this.sessionTimeout = 5000;
|
||||
this.resetSessionTimeout();
|
||||
}
|
||||
|
||||
destroy() {
|
||||
clearTimeout(this.sessionTimeoutId);
|
||||
this.events.destroy();
|
||||
this.voicePlayer?.destroy();
|
||||
this.voicePlayer = undefined;
|
||||
}
|
||||
|
||||
enqueueWhisperPacket(packet: VoiceWhisperPacket) {
|
||||
this.resetSessionTimeout();
|
||||
if(this.sessionBlocked) {
|
||||
/* do nothing, the session has been blocked */
|
||||
return;
|
||||
}
|
||||
|
||||
if(this.sessionState === WhisperSessionState.INITIALIZE_FAILED) {
|
||||
return;
|
||||
} else if(this.sessionState === WhisperSessionState.INITIALIZING) {
|
||||
this.packetBuffer.push(packet);
|
||||
|
||||
while(this.packetBuffer.length > kMaxUninitializedBuffers) {
|
||||
this.packetBuffer.pop_front();
|
||||
}
|
||||
} else {
|
||||
this.voicePlayer?.enqueueAudioPacket(packet.voiceId, packet.codec, packet.head, packet.payload);
|
||||
}
|
||||
}
|
||||
|
||||
setSessionState(state: WhisperSessionState) {
|
||||
if(this.sessionState === state) {
|
||||
return;
|
||||
}
|
||||
|
||||
const oldState = this.sessionState;
|
||||
this.sessionState = state;
|
||||
this.events.fire("notify_state_changed", { oldState: oldState, newState: state });
|
||||
}
|
||||
|
||||
private resetSessionTimeout() {
|
||||
clearTimeout(this.sessionTimeoutId);
|
||||
if(this.sessionState === WhisperSessionState.PLAYING) {
|
||||
/* no need to reschedule a session timeout if we're currently playing */
|
||||
return;
|
||||
} else if(this.sessionState === WhisperSessionState.INITIALIZING) {
|
||||
/* we're still initializing; a session timeout hasn't been set */
|
||||
return;
|
||||
}
|
||||
|
||||
this.sessionTimeoutId = setTimeout(() => {
|
||||
this.events.fire("notify_timed_out");
|
||||
}, Math.max(this.sessionTimeout, 1000));
|
||||
}
|
||||
}
|
|
@ -20,15 +20,18 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
return true;
}

private readonly localAudioDestinationNode: MediaStreamAudioDestinationNode;
private readonly localVoiceDestinationNode: MediaStreamAudioDestinationNode;
private readonly localWhisperDestinationNode: MediaStreamAudioDestinationNode;
private currentInputNode: AudioNode;
private currentInput: AbstractInput;
private voicePacketId: number;
private whispering: boolean;

constructor() {
super();

this.voicePacketId = 0;
this.localAudioDestinationNode = aplayer.context().createMediaStreamDestination();
this.whispering = false;
this.localVoiceDestinationNode = aplayer.context().createMediaStreamDestination();
this.localWhisperDestinationNode = aplayer.context().createMediaStreamDestination();
}

protected generateRtpOfferOptions(): RTCOfferOptions {
@ -40,7 +43,8 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
}
|
||||
|
||||
protected initializeRtpConnection(connection: RTCPeerConnection) {
|
||||
connection.addStream(this.localAudioDestinationNode.stream);
|
||||
connection.addStream(this.localVoiceDestinationNode.stream);
|
||||
connection.addStream(this.localWhisperDestinationNode.stream);
|
||||
}
|
||||
|
||||
protected handleVoiceDataChannelMessage(message: MessageEvent) {
|
||||
|
@ -55,6 +59,7 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
clientId: clientId,
|
||||
voiceId: packetId,
|
||||
codec: codec,
|
||||
head: false,
|
||||
payload: new Uint8Array(message.data, 5)
|
||||
});
|
||||
}
|
||||
|
@ -67,8 +72,11 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
|
||||
const flags = payload[payload_offset++];
|
||||
|
||||
let packet = {} as VoiceWhisperPacket;
|
||||
if((flags & 0x01) === 1) {
|
||||
let packet = {
|
||||
head: (flags & 0x01) === 1
|
||||
} as VoiceWhisperPacket;
|
||||
|
||||
if(packet.head) {
|
||||
packet.clientUniqueId = arraybuffer_to_string(payload.subarray(payload_offset, payload_offset + 28));
|
||||
payload_offset += 28;
|
||||
|
||||
|
@ -81,8 +89,8 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
packet.clientId = payload[payload_offset] << 8 | payload[payload_offset + 1];
|
||||
payload_offset += 2;
|
||||
|
||||
packet.codec = payload[payload_offset];
|
||||
|
||||
packet.codec = payload[payload_offset++];
|
||||
packet.payload = new Uint8Array(message.data, payload_offset);
|
||||
this.callback_incoming_whisper(packet);
|
||||
}
|
||||
|
||||
|
@ -105,8 +113,14 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
try {
|
||||
await this.currentInput.setConsumer({
|
||||
type: InputConsumerType.NODE,
|
||||
callback_node: node => node.connect(this.localAudioDestinationNode),
|
||||
callback_disconnect: node => node.disconnect(this.localAudioDestinationNode)
|
||||
callbackNode: node => {
|
||||
this.currentInputNode = node;
|
||||
node.connect(this.whispering ? this.localWhisperDestinationNode : this.localVoiceDestinationNode);
|
||||
},
|
||||
callbackDisconnect: node => {
|
||||
this.currentInputNode = undefined;
|
||||
node.disconnect(this.whispering ? this.localWhisperDestinationNode : this.localVoiceDestinationNode);
|
||||
}
|
||||
} as NodeInputConsumer);
|
||||
log.debug(LogCategory.VOICE, tr("Successfully set/updated to the new input for the recorder"));
|
||||
} catch (e) {
|
||||
|
@ -115,29 +129,34 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
|
|||
}
|
||||
}
|
||||
|
||||
private fillVoicePacketHeader(packet: Uint8Array, codec: number) {
|
||||
packet[0] = 0; //Flag header
|
||||
packet[1] = 0; //Flag fragmented
|
||||
packet[2] = (this.voicePacketId >> 8) & 0xFF; //HIGHT (voiceID)
|
||||
packet[3] = (this.voicePacketId >> 0) & 0xFF; //LOW (voiceID)
|
||||
packet[4] = codec; //Codec
|
||||
this.voicePacketId++;
|
||||
}
|
||||
|
||||
sendStopSignal(codec: number) {
|
||||
const packet = new Uint8Array(5);
|
||||
this.fillVoicePacketHeader(packet, codec);
|
||||
/*
|
||||
* No stop signal needs to be sent.
* The server will automatically send one when the stream contains silence.
|
||||
*/
|
||||
}
|
||||
|
||||
const channel = this.getMainDataChannel();
|
||||
if (!channel || channel.readyState !== "open")
|
||||
startWhispering() {
|
||||
if(this.whispering) {
|
||||
return;
|
||||
}
|
||||
|
||||
channel.send(packet);
|
||||
this.whispering = true;
|
||||
if(this.currentInputNode) {
|
||||
this.currentInputNode.disconnect(this.localVoiceDestinationNode);
|
||||
this.currentInputNode.connect(this.localWhisperDestinationNode);
|
||||
}
|
||||
}
|
||||
|
||||
startWhisper() {
|
||||
}
|
||||
stopWhispering() {
|
||||
if(!this.whispering) {
|
||||
return;
|
||||
}
|
||||
|
||||
stopWhisper() {
|
||||
this.whispering = false;
|
||||
if(this.currentInputNode) {
|
||||
this.currentInputNode.connect(this.localVoiceDestinationNode);
|
||||
this.currentInputNode.disconnect(this.localWhisperDestinationNode);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -14,6 +14,8 @@ export interface VoicePacket {
voiceId: number;
clientId: number;
codec: number;

head: boolean;
payload: Uint8Array;
}

@ -48,4 +50,7 @@ export abstract class VoiceBridge {
abstract setInput(input: AbstractInput | undefined): Promise<void>;

abstract sendStopSignal(codec: number);

abstract startWhispering();
abstract stopWhispering();
}
@ -63,7 +63,7 @@ impl Add<u16> for PacketId {
type Output = PacketId;

fn add(self, rhs: u16) -> Self::Output {
PacketId{ packet_id: self.packet_id.wrapping_add(rhs) }
PacketId::new(self.packet_id.wrapping_add(rhs))
}
}

@ -71,7 +71,7 @@ impl Sub<u16> for PacketId {
type Output = PacketId;

fn sub(self, rhs: u16) -> Self::Output {
PacketId{ packet_id: self.packet_id.wrapping_sub(rhs) }
PacketId::new(self.packet_id.wrapping_sub(rhs))
}
}
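As a worked example of the wrapping id arithmetic: packet ids are 16-bit, so PacketId(65535) + 2 yields PacketId(1) and PacketId(0) - 1 yields PacketId(65535), which is what lets the queue compare and order ids across the wrap-around point.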
@ -1,6 +1,5 @@
|
|||
use crate::audio::{AudioPacket, Codec};
|
||||
use crate::audio::codec::opus::{Application, Decoder, Channels};
|
||||
use std::cell::Cell;
|
||||
use crate::audio::codec::opus::{Channels};
|
||||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
use std::fmt::Formatter;
|
||||
|
@ -72,7 +71,7 @@ impl AudioDecoder {
|
|||
}
|
||||
|
||||
fn get_decoder(&mut self, codec: Codec, initialize: bool) -> Result<Rc<RefCell<dyn AudioCodecDecoder>>, AudioDecodeError> {
|
||||
let mut decoder_state = self.decoder_state(codec)?;
|
||||
let decoder_state = self.decoder_state(codec)?;
|
||||
|
||||
match decoder_state {
|
||||
DecoderState::Initialized(decoder) => {
|
||||
|
@ -86,7 +85,7 @@ impl AudioDecoder {
|
|||
return Err(AudioDecodeError::DecoderUninitialized);
|
||||
}
|
||||
|
||||
let mut decoder: Option<Rc<RefCell<dyn AudioCodecDecoder>>> = None;
|
||||
let decoder: Option<Rc<RefCell<dyn AudioCodecDecoder>>>;
|
||||
match codec {
|
||||
Codec::Opus => {
|
||||
decoder = Some(Rc::new(RefCell::new(decoder::AudioOpusDecoder::new(Channels::Mono))));
|
||||
|
@ -99,7 +98,7 @@ impl AudioDecoder {
|
|||
}
|
||||
}
|
||||
|
||||
let mut decoder = decoder.unwrap();
|
||||
let decoder = decoder.unwrap();
|
||||
if let Err(error) = decoder.borrow_mut().initialize() {
|
||||
*decoder_state = DecoderState::InitializeFailed(error.clone());
|
||||
return Err(AudioDecodeError::DecoderInitializeFailed(error, true));
|
||||
|
@ -111,13 +110,8 @@ impl AudioDecoder {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn initialize_codec(&mut self, codec: Codec) -> Result<(), AudioDecodeError> {
|
||||
let _ = self.get_decoder(codec, true)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn decode(&mut self, packet: &AudioPacket, dest: &mut Vec<f32>) -> Result<(usize /* samples */, u8 /* channels */), AudioDecodeError> {
|
||||
let mut audio_decoder = self.get_decoder(packet.codec, true)?;
|
||||
let audio_decoder = self.get_decoder(packet.codec, true)?;
|
||||
let mut audio_decoder = audio_decoder.borrow_mut();
|
||||
|
||||
let result = audio_decoder.decode(&packet.payload, dest)?;
|
||||
|
@ -149,7 +143,7 @@ trait AudioCodecDecoder {
|
|||
|
||||
mod decoder {
|
||||
/* the opus implementation */
|
||||
use crate::audio::codec::opus::{Application, Decoder, Channels, ErrorCode};
|
||||
use crate::audio::codec::opus::{Decoder, Channels, ErrorCode};
|
||||
use crate::audio::decoder::{AudioCodecDecoder, AudioDecodeError};
|
||||
use log::warn;
|
||||
|
||||
|
@ -234,6 +228,7 @@ mod decoder {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::audio::decoder::{AudioDecoder, AudioDecodeError};
|
||||
use crate::audio::{AudioPacket, PacketId, Codec};
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::collections::VecDeque;
|
|||
use std::ops::{ Deref };
|
||||
use std::time::{SystemTime, Duration, UNIX_EPOCH};
|
||||
use futures::{FutureExt};
|
||||
use crate::audio::{AudioPacket, Codec, PacketId};
|
||||
use crate::audio::{AudioPacket, PacketId};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum AudioPacketQueueEvent {
|
||||
|
@ -127,21 +127,40 @@ impl AudioPacketQueue {
|
|||
instance
|
||||
}
|
||||
|
||||
fn test_sequence(&self, packet: &Box<AudioPacket>) -> Result<(), EnqueueError> {
|
||||
if !self.last_packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
|
||||
return Err(EnqueueError::PacketTooOld);
|
||||
} else if self.last_packet_id.difference(&packet.packet_id, Some(self.clipping_window)) > 20 {
|
||||
return Err(EnqueueError::PacketSequenceMismatch(self.last_packet_id.clone()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn initialize_sequence(&mut self, packet: &Box<AudioPacket>) {
|
||||
self.reset_sequence(false);
|
||||
self.last_packet_timestamp = current_time_millis();
|
||||
self.last_packet_id = packet.packet_id - 1; /* reduce the last packet id by one so this packet is the next packet */
|
||||
}
|
||||
|
||||
/// Enqueue a new audio packet
|
||||
pub fn enqueue_packet(&mut self, packet: Box<AudioPacket>) -> Result<(), EnqueueError> {
|
||||
pub fn enqueue_packet(&mut self, packet: Box<AudioPacket>, is_head_packet: bool) -> Result<(), EnqueueError> {
|
||||
let current_time = current_time_millis();
|
||||
|
||||
/* check if we're expecting a sequence */
|
||||
if current_time - self.last_packet_timestamp < 1000 {
|
||||
if !self.last_packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
|
||||
return Err(EnqueueError::PacketTooOld);
|
||||
} else if self.last_packet_id.difference(&packet.packet_id, Some(self.clipping_window)) > 20 {
|
||||
return Err(EnqueueError::PacketSequenceMismatch(self.last_packet_id.clone()));
|
||||
let sequence_result = self.test_sequence(&packet);
|
||||
if let Err(error) = sequence_result {
|
||||
if !is_head_packet {
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
/* enforce a new sequence */
|
||||
self.initialize_sequence(&packet);
|
||||
}
|
||||
} else {
|
||||
/* we've a new sequence */
|
||||
self.last_packet_timestamp = current_time;
|
||||
self.last_packet_id = packet.packet_id - 1; /* reduce the last packet id by one so this packet is the next packet */
|
||||
self.initialize_sequence(&packet);
|
||||
}
|
||||
|
||||
let mut index = 0;
|
||||
|
@ -380,7 +399,7 @@ mod tests {
|
|||
client_id: 0,
|
||||
codec: Codec::Opus,
|
||||
payload: vec![]
|
||||
}))
|
||||
}), false)
|
||||
}
|
||||
|
||||
fn darin_queued_events(queue: &mut AudioPacketQueue, _expect_events: bool) {
|
||||
|
|
|
@ -1,18 +1,14 @@
|
|||
use wasm_bindgen::prelude::*;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{ Arc, Mutex, MutexGuard };
|
||||
use std::sync::{ Arc, Mutex };
|
||||
use std::sync::atomic::{ AtomicU32, Ordering };
|
||||
use std::cell::RefCell;
|
||||
use once_cell::sync::Lazy;
|
||||
use crate::audio::packet_queue::{AudioPacketQueue, AudioPacketQueueEvent, EnqueueError};
|
||||
use futures::task::Context;
|
||||
use futures;
|
||||
use crate::audio::decoder::{AudioDecoder, AudioDecodeError};
|
||||
use crate::audio::decoder::{AudioDecoder};
|
||||
use wasm_bindgen_futures::spawn_local;
|
||||
use futures::future::{ poll_fn };
|
||||
use crate::audio::{AudioPacket, Codec};
|
||||
use crate::audio::{AudioPacket};
|
||||
use log::*;
|
||||
use crate::audio::converter::interleaved2sequenced;
|
||||
|
||||
pub type AudioClientId = u32;
|
||||
|
||||
|
@ -24,11 +20,6 @@ pub trait AudioCallback {
|
|||
fn handle_stop(&mut self);
|
||||
}
|
||||
|
||||
struct CallbackData {
|
||||
callback: Option<js_sys::Function>,
|
||||
buffer: Vec<f32>
|
||||
}
|
||||
|
||||
pub struct AudioClient {
|
||||
pub client_id: AudioClientId,
|
||||
|
||||
|
@ -68,12 +59,8 @@ impl AudioClient {
|
|||
self.abort_audio_processing();
|
||||
}
|
||||
|
||||
pub fn client_id(&self) -> AudioClientId {
|
||||
self.client_id
|
||||
}
|
||||
|
||||
pub fn enqueue_audio_packet(&self, packet: Box<AudioPacket>) -> Result<(), EnqueueError> {
|
||||
self.packet_queue.lock().unwrap().enqueue_packet(packet)?;
|
||||
pub fn enqueue_audio_packet(&self, packet: Box<AudioPacket>, is_head_packet: bool) -> Result<(), EnqueueError> {
|
||||
self.packet_queue.lock().unwrap().enqueue_packet(packet, is_head_packet)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -82,17 +69,13 @@ impl AudioClient {
|
|||
}
|
||||
|
||||
pub fn abort_audio_processing(&self) {
|
||||
let mut handle = &mut *self.audio_process_abort_handle.lock().unwrap();
|
||||
let handle = &mut *self.audio_process_abort_handle.lock().unwrap();
|
||||
if let Some(ref abort_handle) = handle {
|
||||
abort_handle.abort()
|
||||
}
|
||||
*handle = None;
|
||||
}
|
||||
|
||||
pub fn is_audio_processing(&self) -> bool {
|
||||
self.audio_process_abort_handle.lock().unwrap().is_some()
|
||||
}
|
||||
|
||||
pub fn dispatch_processing_in_this_thread(client: Arc<AudioClient>) {
|
||||
let client_copy = client.clone();
|
||||
let (future, abort_handle) = futures::future::abortable(async move {
|
||||
|
@ -119,7 +102,7 @@ impl AudioClient {
|
|||
break;
|
||||
}
|
||||
|
||||
let mut callback = callback.as_mut().unwrap();
|
||||
let callback = callback.as_mut().unwrap();
|
||||
let callback_buffer = callback.callback_buffer();
|
||||
|
||||
let decode_result = client.decoder.lock().unwrap().decode(&*packet, callback_buffer);
|
||||
|
|
|
@ -8,21 +8,16 @@ mod audio;
|
|||
mod audio_client;
|
||||
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen_futures::{ spawn_local };
|
||||
|
||||
use js_sys;
|
||||
use wasm_timer;
|
||||
|
||||
use std::time::Duration;
|
||||
use log::*;
|
||||
use audio::packet_queue::AudioPacketQueue;
|
||||
use crate::audio::codec::opus;
|
||||
use crate::audio_client::{AudioClientId, AudioClient, AudioCallback};
|
||||
use crate::audio::{AudioPacket, Codec, PacketId};
|
||||
use crate::audio::packet_queue::EnqueueError;
|
||||
use crate::audio::converter::interleaved2sequenced;
|
||||
use once_cell::unsync::Lazy;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
extern crate simple_logger;
|
||||
|
@ -60,14 +55,14 @@ pub fn audio_client_create() -> AudioClientId {
|
|||
/// Let the audio client say hi (mutable).
|
||||
/// If an error occurs or the client isn't known an exception will be thrown.
|
||||
#[wasm_bindgen]
|
||||
pub fn audio_client_enqueue_buffer(client_id: AudioClientId, buffer: &[u8], packet_id: u16, codec: u8) -> Result<(), JsValue> {
|
||||
pub fn audio_client_enqueue_buffer(client_id: AudioClientId, buffer: &[u8], packet_id: u16, codec: u8, is_head_packet: bool) -> Result<(), JsValue> {
|
||||
let client = AudioClient::find_client(client_id).ok_or_else(|| JsValue::from_str("missing audio client"))?;
|
||||
let result = client.enqueue_audio_packet(Box::new(AudioPacket{
|
||||
client_id: 0,
|
||||
codec: Codec::from_u8(codec),
|
||||
packet_id: PacketId{ packet_id },
|
||||
payload: buffer.to_vec()
|
||||
}));
|
||||
}), is_head_packet);
|
||||
if let Err(error) = result {
|
||||
return Err(match error {
|
||||
EnqueueError::PacketAlreadyExists => JsValue::from_str("packet already exists"),
|
||||
|
@ -94,7 +89,7 @@ impl AudioCallback for JsAudioCallback {
|
|||
|
||||
fn handle_audio(&mut self, sample_count: usize, channel_count: u8) {
|
||||
if channel_count > 1 {
|
||||
let mut sequenced_buffer = unsafe { &mut *AUDIO_SEQUENCED_BUFFER };
|
||||
let sequenced_buffer = unsafe { &mut *AUDIO_SEQUENCED_BUFFER };
|
||||
sequenced_buffer.resize(sample_count * channel_count as usize, 0f32);
|
||||
interleaved2sequenced(
|
||||
unsafe { &mut *AUDIO_BUFFER }.as_slice(),