Starting with the whisper system

WolverinDEV 2020-08-26 12:33:53 +02:00
parent f80c7c4e50
commit a98503285f
12 changed files with 306 additions and 115 deletions

View file

@ -8,7 +8,7 @@ import {LocalClientEntry} from "tc-shared/ui/client";
import {ConnectionProfile} from "tc-shared/profiles/ConnectionProfile";
import {ServerAddress} from "tc-shared/ui/server";
import * as log from "tc-shared/log";
import {LogCategory, logError} from "tc-shared/log";
import {LogCategory, logError, logInfo} from "tc-shared/log";
import {createErrorModal, createInfoModal, createInputModal, Modal} from "tc-shared/ui/elements/Modal";
import {hashPassword} from "tc-shared/utils/helpers";
import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
@ -35,8 +35,9 @@ import {ServerEventLog} from "tc-shared/ui/frames/log/ServerEventLog";
import {EventType} from "tc-shared/ui/frames/log/Definitions";
import {PluginCmdRegistry} from "tc-shared/connection/PluginCmdHandler";
import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
import {VoiceConnectionStatus, WhisperSessionInitializeData} from "tc-shared/connection/VoiceConnection";
import {getServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
import {WhisperSession} from "tc-shared/voice/Whisper";
export enum InputHardwareState {
MISSING,
@ -199,6 +200,8 @@ export class ConnectionHandler {
});
this.serverConnection.getVoiceConnection().events.on("notify_connection_status_changed", () => this.update_voice_status());
this.serverConnection.getVoiceConnection().setWhisperSessionInitializer(this.initializeWhisperSession.bind(this));
this.channelTree = new ChannelTree(this);
this.fileManager = new FileManager(this);
this.permissions = new PermissionManager(this);
@ -701,8 +704,8 @@ export class ConnectionHandler {
const vconnection = this.serverConnection.getVoiceConnection();
const codecEncodeSupported = !targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec);
const codecDecodeSupported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);
const codecEncodeSupported = !targetChannel || vconnection.encodingSupported(targetChannel.properties.channel_codec);
const codecDecodeSupported = !targetChannel || vconnection.decodingSupported(targetChannel.properties.channel_codec);
const property_update = {
client_input_muted: this.client_status.input_muted,
@ -711,7 +714,7 @@ export class ConnectionHandler {
/* update the encoding codec */
if(codecEncodeSupported && targetChannel) {
vconnection.set_encoder_codec(targetChannel.properties.channel_codec);
vconnection.setEncoderCodec(targetChannel.properties.channel_codec);
}
if(!this.serverConnection.connected() || vconnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
@ -720,10 +723,10 @@ export class ConnectionHandler {
} else {
const recording_supported =
this.getInputHardwareState() === InputHardwareState.VALID &&
(!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec)) &&
(!targetChannel || vconnection.encodingSupported(targetChannel.properties.channel_codec)) &&
vconnection.getConnectionState() === VoiceConnectionStatus.Connected;
const playback_supported = this.hasOutputHardware() && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));
const playback_supported = this.hasOutputHardware() && (!targetChannel || vconnection.decodingSupported(targetChannel.properties.channel_codec));
property_update["client_input_hardware"] = recording_supported;
property_update["client_output_hardware"] = playback_supported;
@ -778,7 +781,7 @@ export class ConnectionHandler {
const enableRecording = !this.client_status.input_muted && !this.client_status.output_muted;
/* No need to start the microphone when we're not even connected */
const input = vconnection.voice_recorder()?.input;
const input = vconnection.voiceRecorder()?.input;
if(input) {
if(enableRecording && this.serverConnection.connected()) {
if(this.getInputHardwareState() !== InputHardwareState.START_FAILED)
@ -821,7 +824,7 @@ export class ConnectionHandler {
let recorder: RecorderProfile = default_recorder;
try {
await this.serverConnection.getVoiceConnection().acquire_voice_recorder(recorder);
await this.serverConnection.getVoiceConnection().acquireVoiceRecorder(recorder);
} catch (error) {
logError(LogCategory.AUDIO, tr("Failed to acquire recorder: %o"), error);
createErrorModal(tr("Failed to acquire recorder"), tr("Failed to acquire recorder.\nLookup the console for more details.")).open();
@ -879,7 +882,7 @@ export class ConnectionHandler {
}
}
getVoiceRecorder() : RecorderProfile | undefined { return this.serverConnection.getVoiceConnection().voice_recorder(); }
getVoiceRecorder() : RecorderProfile | undefined { return this.serverConnection.getVoiceConnection().voiceRecorder(); }
reconnect_properties(profile?: ConnectionProfile) : ConnectParameters {
const name = (this.getClient() ? this.getClient().clientNickName() : "") ||
@ -972,6 +975,23 @@ export class ConnectionHandler {
});
}
private async initializeWhisperSession(session: WhisperSession) : Promise<WhisperSessionInitializeData> {
/* TODO: Try to load the client's unique id via a clientgetuidfromclid command */
if(!session.getClientUniqueId())
throw "missing clients unique id";
logInfo(LogCategory.CLIENT, tr("Initializing a whisper session for client %d (%s | %s)"), session.getClientId(), session.getClientUniqueId(), session.getClientName());
return {
clientName: session.getClientName(),
clientUniqueId: session.getClientUniqueId(),
blocked: false,
volume: 1,
sessionTimeout: 60 * 1000
}
}
destroy() {
this.event_registry.unregister_handler(this);
this.cancel_reconnect(true);

View file

@ -66,7 +66,7 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
super(connection);
}
async acquire_voice_recorder(recorder: RecorderProfile | undefined): Promise<void> {
async acquireVoiceRecorder(recorder: RecorderProfile | undefined): Promise<void> {
if(this.recorder === recorder)
return;
@ -88,15 +88,15 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
this.events.fire("notify_recorder_changed", {});
}
available_clients(): VoiceClient[] {
availableClients(): VoiceClient[] {
return this.voiceClients;
}
decoding_supported(codec: number): boolean {
decodingSupported(codec: number): boolean {
return false;
}
encoding_supported(codec: number): boolean {
encodingSupported(codec: number): boolean {
return false;
}
@ -104,23 +104,23 @@ export class DummyVoiceConnection extends AbstractVoiceConnection {
return VoiceConnectionStatus.ClientUnsupported;
}
get_encoder_codec(): number {
getEncoderCodec(): number {
return 0;
}
register_client(clientId: number): VoiceClient {
registerClient(clientId: number): VoiceClient {
const client = new DummyVoiceClient(clientId);
this.voiceClients.push(client);
return client;
}
set_encoder_codec(codec: number) {}
setEncoderCodec(codec: number) {}
async unregister_client(client: VoiceClient): Promise<void> {
this.voiceClients.remove(client as any);
}
voice_recorder(): RecorderProfile {
voiceRecorder(): RecorderProfile {
return this.recorder;
}

View file

@ -1,6 +1,7 @@
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
import {Registry} from "tc-shared/events";
import {WhisperSession} from "tc-shared/voice/Whisper";
export enum PlayerState {
PREBUFFERING,
@ -55,9 +56,31 @@ export interface VoiceConnectionEvents {
newStatus: VoiceConnectionStatus
},
"notify_recorder_changed": {}
"notify_recorder_changed": {},
"notify_whisper_created": {
session: WhisperSession
},
"notify_whisper_initialized": {
session: WhisperSession
},
"notify_whisper_destroyed": {
session: WhisperSession
}
}
export type WhisperSessionInitializeData = {
clientName: string,
clientUniqueId: string,
sessionTimeout: number,
blocked: boolean,
volume: number
};
export type WhisperSessionInitializer = (session: WhisperSession) => Promise<WhisperSessionInitializeData>;
export abstract class AbstractVoiceConnection {
readonly events: Registry<VoiceConnectionEvents>;
readonly connection: AbstractServerConnection;
@ -69,16 +92,23 @@ export abstract class AbstractVoiceConnection {
abstract getConnectionState() : VoiceConnectionStatus;
abstract encoding_supported(codec: number) : boolean;
abstract decoding_supported(codec: number) : boolean;
abstract encodingSupported(codec: number) : boolean;
abstract decodingSupported(codec: number) : boolean;
abstract register_client(client_id: number) : VoiceClient;
abstract available_clients() : VoiceClient[];
abstract registerClient(client_id: number) : VoiceClient;
abstract availableClients() : VoiceClient[];
abstract unregister_client(client: VoiceClient) : Promise<void>;
abstract voice_recorder() : RecorderProfile;
abstract acquire_voice_recorder(recorder: RecorderProfile | undefined) : Promise<void>;
abstract voiceRecorder() : RecorderProfile;
abstract acquireVoiceRecorder(recorder: RecorderProfile | undefined) : Promise<void>;
abstract get_encoder_codec() : number;
abstract set_encoder_codec(codec: number);
abstract getEncoderCodec() : number;
abstract setEncoderCodec(codec: number);
/* the whisper API */
abstract getWhisperSessions() : WhisperSession[];
abstract dropWhisperSession(session: WhisperSession);
abstract setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined);
abstract getWhisperSessionInitializer() : WhisperSessionInitializer | undefined;
}
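
A rough consumer sketch of the whisper API declared above; the setupWhispers helper is hypothetical, while the method names, event names and payloads are taken from this file:

import {AbstractVoiceConnection, WhisperSessionInitializeData} from "tc-shared/connection/VoiceConnection";
import {WhisperSession} from "tc-shared/voice/Whisper";

/* hypothetical helper showing how a caller might wire up the new whisper API */
function setupWhispers(voiceConnection: AbstractVoiceConnection) {
    /* supply the data the connection needs to finish initializing a freshly created session */
    voiceConnection.setWhisperSessionInitializer(async (session: WhisperSession): Promise<WhisperSessionInitializeData> => ({
        clientName: session.getClientName() || "Unknown client",
        clientUniqueId: session.getClientUniqueId() || "unknown",
        blocked: false,
        volume: 1,
        sessionTimeout: 60 * 1000
    }));

    /* react to the session lifecycle events declared in VoiceConnectionEvents */
    voiceConnection.events.on("notify_whisper_initialized", event =>
        console.log("Whisper session for client %d initialized", event.session.getClientId()));
    voiceConnection.events.on("notify_whisper_destroyed", event =>
        console.log("Whisper session for client %d destroyed", event.session.getClientId()));
}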

View file

@ -25,7 +25,7 @@ export namespace CryptoHelper {
return str.replace(/-/g, '+').replace(/_/g, '/');
}
export function arraybuffer_to_string(buf) {
export function arraybuffer_to_string(buf) : string {
return String.fromCharCode.apply(null, new Uint16Array(buf));
}

View file

@ -2,7 +2,7 @@ import * as contextmenu from "tc-shared/ui/elements/ContextMenu";
import {Registry} from "tc-shared/events";
import {ChannelTree} from "tc-shared/ui/view";
import * as log from "tc-shared/log";
import {LogCategory, LogType} from "tc-shared/log";
import {LogCategory, logInfo, LogType} from "tc-shared/log";
import {Settings, settings} from "tc-shared/settings";
import {Sound} from "tc-shared/sound/Sounds";
import {Group, GroupManager, GroupTarget, GroupType} from "tc-shared/permission/GroupManager";
@ -19,7 +19,7 @@ import {spawnChangeLatency} from "tc-shared/ui/modal/ModalChangeLatency";
import {formatMessage} from "tc-shared/ui/frames/chat";
import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
import * as hex from "tc-shared/crypto/hex";
import { ClientEntry as ClientEntryView } from "./tree/Client";
import {ClientEntry as ClientEntryView} from "./tree/Client";
import * as React from "react";
import {ChannelTreeEntry, ChannelTreeEntryEvents} from "tc-shared/ui/TreeEntry";
import {spawnClientVolumeChange, spawnMusicBotVolumeChange} from "tc-shared/ui/modal/ModalChangeVolumeNew";
@ -27,7 +27,7 @@ import {spawnPermissionEditorModal} from "tc-shared/ui/modal/permission/ModalPer
import {EventClient, EventType} from "tc-shared/ui/frames/log/Definitions";
import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
import {global_client_actions} from "tc-shared/events/GlobalEvents";
import { ClientIcon } from "svg-sprites/client-icons";
import {ClientIcon} from "svg-sprites/client-icons";
import {VoiceClient} from "tc-shared/connection/VoiceConnection";
export enum ClientType {
@ -279,11 +279,11 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
if(flag) {
this.channelTree.client.serverConnection.send_command('clientmute', {
clid: this.clientId()
});
}).then(() => {});
} else if(this._audio_muted) {
this.channelTree.client.serverConnection.send_command('clientunmute', {
clid: this.clientId()
});
}).then(() => {});
}
this._audio_muted = flag;
@ -393,7 +393,7 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
this.channelTree.client.serverConnection.send_command("servergroupdelclient", {
sgid: group.id,
cldbid: this.properties.client_database_id
});
}).then(() => {});
};
entry.disabled = !this.channelTree.client.permissions.neededPermission(PermissionType.I_GROUP_MEMBER_ADD_POWER).granted(group.requiredMemberRemovePower);
} else {
@ -401,7 +401,7 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
this.channelTree.client.serverConnection.send_command("servergroupaddclient", {
sgid: group.id,
cldbid: this.properties.client_database_id
});
}).then(() => {});
};
entry.disabled = !this.channelTree.client.permissions.neededPermission(PermissionType.I_GROUP_MEMBER_REMOVE_POWER).granted(group.requiredMemberAddPower);
}
@ -424,7 +424,7 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
cldbid: this.properties.client_database_id,
cgid: group.id,
cid: this.currentChannel().channelId
});
}).then(() => {});
};
entry.disabled = !this.channelTree.client.permissions.neededPermission(PermissionType.I_GROUP_MEMBER_ADD_POWER).granted(group.requiredMemberRemovePower);
entry.type = contextmenu.MenuEntryType.CHECKBOX;
@ -482,20 +482,20 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
return this.channelTree.client.serverConnection.send_command("servergroupaddclient", {
sgid: groups[0],
cldbid: this.properties.client_database_id
}).then(result => true);
}).then(() => true);
} else
return this.channelTree.client.serverConnection.send_command("servergroupdelclient", {
sgid: groups[0],
cldbid: this.properties.client_database_id
}).then(result => true);
}).then(() => true);
} else {
const data = groups.map(e => { return {sgid: e}; });
data[0]["cldbid"] = this.properties.client_database_id;
if(flag) {
return this.channelTree.client.serverConnection.send_command("clientaddservergroup", data, {flagset: ["continueonerror"]}).then(result => true);
return this.channelTree.client.serverConnection.send_command("clientaddservergroup", data, {flagset: ["continueonerror"]}).then(() => true);
} else
return this.channelTree.client.serverConnection.send_command("clientdelservergroup", data, {flagset: ["continueonerror"]}).then(result => true);
return this.channelTree.client.serverConnection.send_command("clientdelservergroup", data, {flagset: ["continueonerror"]}).then(() => true);
}
});
}
@ -548,7 +548,7 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
icon_class: ClientIcon.Poke,
name: tr("Poke client"),
callback: () => {
createInputModal(tr("Poke client"), tr("Poke message:<br>"), text => true, result => {
createInputModal(tr("Poke client"), tr("Poke message:<br>"), () => true, result => {
if(typeof(result) === "string") {
this.channelTree.client.serverConnection.send_command("clientpoke", {
clid: this.clientId(),
@ -567,14 +567,14 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
icon_class: ClientIcon.Edit,
name: tr("Change description"),
callback: () => {
createInputModal(tr("Change client description"), tr("New description:<br>"), text => true, result => {
createInputModal(tr("Change client description"), tr("New description:<br>"), () => true, result => {
if(typeof(result) === "string") {
//TODO tr
console.log("Changing " + this.clientNickName() + "'s description to " + result);
this.channelTree.client.serverConnection.send_command("clientedit", {
clid: this.clientId(),
client_description: result
});
}).then(() => {});
}
}, { width: 400, maxLength: 1024 }).open();
@ -590,22 +590,21 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
this.channelTree.client.serverConnection.send_command("clientmove", {
clid: this.clientId(),
cid: this.channelTree.client.getClient().currentChannel().getChannelId()
});
}).then(() => {});
}
}, {
type: contextmenu.MenuEntryType.ENTRY,
icon_class: ClientIcon.KickChannel,
name: tr("Kick client from channel"),
callback: () => {
createInputModal(tr("Kick client from channel"), tr("Kick reason:<br>"), text => true, result => {
createInputModal(tr("Kick client from channel"), tr("Kick reason:<br>"), () => true, result => {
if(typeof(result) !== 'boolean' || result) {
//TODO tr
console.log("Kicking client " + this.clientNickName() + " from channel with reason " + result);
logInfo(LogCategory.CLIENT, tr("Kicking client %s from channel with reason %s"), this.clientNickName(), result);
this.channelTree.client.serverConnection.send_command("clientkick", {
clid: this.clientId(),
reasonid: ViewReasonId.VREASON_CHANNEL_KICK,
reasonmsg: result
});
}).then(() => {});
}
}, { width: 400, maxLength: 255 }).open();
@ -615,16 +614,14 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
icon_class: ClientIcon.KickServer,
name: tr("Kick client fom server"),
callback: () => {
createInputModal(tr("Kick client from server"), tr("Kick reason:<br>"), text => true, result => {
createInputModal(tr("Kick client from server"), tr("Kick reason:<br>"), () => true, result => {
if(typeof(result) !== 'boolean' || result) {
//TODO tr
console.log("Kicking client " + this.clientNickName() + " from server with reason " + result);
logInfo(LogCategory.CLIENT, tr("Kicking client %s from server with reason %s"), this.clientNickName(), result);
this.channelTree.client.serverConnection.send_command("clientkick", {
clid: this.clientId(),
reasonid: ViewReasonId.VREASON_SERVER_KICK,
reasonmsg: result
});
}).then(() => {});
}
}, { width: 400, maxLength: 255 }).open();
}
@ -945,16 +942,12 @@ export class ClientEntry extends ChannelTreeEntry<ClientEvents> {
export class LocalClientEntry extends ClientEntry {
handle: ConnectionHandler;
private renaming: boolean;
constructor(handle: ConnectionHandler) {
super(0, "local client");
this.handle = handle;
}
showContextMenu(x: number, y: number, on_close: () => void = undefined): void {
const _self = this;
contextmenu.spawn_context_menu(x, y,
...this.contextmenu_info(), {
@ -962,19 +955,19 @@ export class LocalClientEntry extends ClientEntry {
tr("Change name") +
(contextmenu.get_provider().html_format_enabled() ? "</b>" : ""),
icon_class: "client-change_nickname",
callback: () =>_self.openRename(),
callback: () => this.openRename(),
type: contextmenu.MenuEntryType.ENTRY
}, {
name: tr("Change description"),
icon_class: "client-edit",
callback: () => {
createInputModal(tr("Change own description"), tr("New description:<br>"), text => true, result => {
createInputModal(tr("Change own description"), tr("New description:<br>"), () => true, result => {
if(result) {
console.log(tr("Changing own description to %s"), result);
_self.channelTree.client.serverConnection.send_command("clientedit", {
clid: _self.clientId(),
logInfo(LogCategory.CLIENT, tr("Changing own description to %s"), result);
this.channelTree.client.serverConnection.send_command("clientedit", {
clid: this.clientId(),
client_description: result
});
}).then(() => {});
}
}, { width: 400, maxLength: 1024 }).open();
@ -994,7 +987,7 @@ export class LocalClientEntry extends ClientEntry {
renameSelf(new_name: string) : Promise<boolean> {
const old_name = this.properties.client_nickname;
this.updateVariables({ key: "client_nickname", value: new_name }); /* change it locally */
return this.handle.serverConnection.send_command("clientupdate", { client_nickname: new_name }).then((e) => {
return this.handle.serverConnection.send_command("clientupdate", { client_nickname: new_name }).then(() => {
settings.changeGlobal(Settings.KEY_CONNECT_USERNAME, new_name);
this.channelTree.client.log.log(EventType.CLIENT_NICKNAME_CHANGED_OWN, {
client: this.log_data(),
@ -1122,8 +1115,7 @@ export class MusicClientEntry extends ClientEntry {
this.channelTree.client.serverConnection.send_command("clientedit", {
clid: this.clientId(),
client_nickname: result
});
}).then(() => {});
}
}, { width: "40em", min_width: "10em", maxLength: 255 }).open();
},
@ -1133,13 +1125,12 @@ export class MusicClientEntry extends ClientEntry {
icon_class: "client-edit",
disabled: false,
callback: () => {
createInputModal(tr("Change music bots description"), tr("New description:<br>"), text => true, result => {
createInputModal(tr("Change music bots description"), tr("New description:<br>"), () => true, result => {
if(typeof(result) === 'string') {
this.channelTree.client.serverConnection.send_command("clientedit", {
clid: this.clientId(),
client_description: result
});
}).then(() => {});
}
}, { width: "60em", min_width: "10em", maxLength: 255 }).open();
},
@ -1159,7 +1150,7 @@ export class MusicClientEntry extends ClientEntry {
icon_class: "client-edit",
disabled: false,
callback: () => {
createInputModal(tr("Please enter the URL"), tr("URL:"), text => true, result => {
createInputModal(tr("Please enter the URL"), tr("URL:"), () => true, result => {
if(result) {
this.channelTree.client.serverConnection.send_command("musicbotqueueadd", {
bot_id: this.properties.client_database_id,
@ -1187,21 +1178,21 @@ export class MusicClientEntry extends ClientEntry {
this.channelTree.client.serverConnection.send_command("clientmove", {
clid: this.clientId(),
cid: this.channelTree.client.getClient().currentChannel().getChannelId()
});
}).then(() => {});
}
}, {
type: contextmenu.MenuEntryType.ENTRY,
icon_class: "client-kick_channel",
name: tr("Kick client from channel"),
callback: () => {
createInputModal(tr("Kick client from channel"), tr("Kick reason:<br>"), text => true, result => {
createInputModal(tr("Kick client from channel"), tr("Kick reason:<br>"), () => true, result => {
if(typeof(result) !== 'boolean' || result) {
console.log(tr("Kicking client %o from channel with reason %o"), this.clientNickName(), result);
logInfo(LogCategory.CLIENT, tr("Kicking client %o from channel with reason %o"), this.clientNickName(), result);
this.channelTree.client.serverConnection.send_command("clientkick", {
clid: this.clientId(),
reasonid: ViewReasonId.VREASON_CHANNEL_KICK,
reasonmsg: result
});
}).then(() => {});
}
}, { width: 400, maxLength: 255 }).open();
}
@ -1249,7 +1240,7 @@ export class MusicClientEntry extends ClientEntry {
if(result) {
this.channelTree.client.serverConnection.send_command("musicbotdelete", {
bot_id: this.properties.client_database_id
});
}).then(() => {});
}
});
},
@ -1282,7 +1273,7 @@ export class MusicClientEntry extends ClientEntry {
this._info_promise_resolve = resolve;
});
this.channelTree.client.serverConnection.send_command("musicbotplayerinfo", {bot_id: this.properties.client_database_id });
this.channelTree.client.serverConnection.send_command("musicbotplayerinfo", {bot_id: this.properties.client_database_id }).then(() => {});
return this._info_promise;
}
}

View file

@ -147,7 +147,7 @@ class ChannelEntryIcons extends ReactComponentBase<ChannelEntryIconsProperties,
switch (voiceState) {
case VoiceConnectionStatus.Connected:
state.is_codec_supported = voiceConnection.decoding_supported(currentCodec);
state.is_codec_supported = voiceConnection.decodingSupported(currentCodec);
break;
default:

View file

@ -505,7 +505,7 @@ export class ChannelTree {
const voice_connection = this.client.serverConnection.getVoiceConnection();
if(voice_connection)
client.set_audio_handle(voice_connection.register_client(client.clientId()));
client.set_audio_handle(voice_connection.registerClient(client.clientId()));
}
unregisterClient(client: ClientEntry) {

View file

@ -0,0 +1,48 @@
import {Registry} from "tc-shared/events";
export interface WhisperSessionEvents {
notify_state_changed: { oldState: WhisperSessionState, newState: WhisperSessionState }
}
export enum WhisperSessionState {
/* the session is being initialized; not all variables may be set yet */
INITIALIZING,
/* there is currently no whispering */
PAUSED,
/* we're currently buffering */
BUFFERING,
/* we're replaying some whisper */
PLAYING,
/* we're currently receiving a whisper, but it has been blocked */
BLOCKED
}
export const kUnknownWhisperClientUniqueId = "unknown";
export interface WhisperSession {
readonly events: Registry<WhisperSessionEvents>;
/* get information about the whisperer */
getClientId() : number;
/* only guaranteed to be valid once the session has been initialized */
getClientName() : string | undefined;
/* only guaranteed to be valid once the session has been initialized */
getClientUniqueId() : string | undefined;
isBlocked() : boolean;
setBlocked(flag: boolean);
getSessionTimeout() : number;
setSessionTimeout(timeout: number);
getLastWhisperTimestamp() : number;
setVolume(volume: number);
getVolume() : number;
}
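
A minimal, purely illustrative in-memory sketch of the interface above (in practice sessions are created and driven by the voice connection backend):

import {Registry} from "tc-shared/events";
import {
    kUnknownWhisperClientUniqueId,
    WhisperSession,
    WhisperSessionEvents,
    WhisperSessionState
} from "tc-shared/voice/Whisper";

/* illustrative only: a plain session object holding the fields the interface exposes */
class SimpleWhisperSession implements WhisperSession {
    readonly events = new Registry<WhisperSessionEvents>();

    private state = WhisperSessionState.INITIALIZING;
    private blocked = false;
    private volume = 1;
    private sessionTimeout = 60 * 1000;
    private lastWhisperTimestamp = 0;

    constructor(private readonly clientId: number,
                private clientName?: string,
                private clientUniqueId?: string) { }

    getClientId() { return this.clientId; }
    getClientName() { return this.clientName; }
    getClientUniqueId() { return this.clientUniqueId || kUnknownWhisperClientUniqueId; }

    isBlocked() { return this.blocked; }
    setBlocked(flag: boolean) { this.blocked = flag; }

    getSessionTimeout() { return this.sessionTimeout; }
    setSessionTimeout(timeout: number) { this.sessionTimeout = timeout; }

    getLastWhisperTimestamp() { return this.lastWhisperTimestamp; }

    getVolume() { return this.volume; }
    setVolume(volume: number) { this.volume = volume; }

    /* state transitions fire the notify_state_changed event declared above */
    protected setState(newState: WhisperSessionState) {
        if(this.state === newState) { return; }
        const oldState = this.state;
        this.state = newState;
        this.events.fire("notify_state_changed", { oldState, newState });
    }
}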

View file

@ -6,14 +6,20 @@ import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {VoiceClientController} from "./VoiceClient";
import {settings, ValuedSettingsKey} from "tc-shared/settings";
import {tr} from "tc-shared/i18n/localize";
import {AbstractVoiceConnection, VoiceClient, VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
import {
AbstractVoiceConnection,
VoiceClient,
VoiceConnectionStatus,
WhisperSessionInitializer
} from "tc-shared/connection/VoiceConnection";
import {codecPool} from "./CodecConverter";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
import {ServerConnectionEvents} from "tc-shared/connection/ConnectionBase";
import {ConnectionState} from "tc-shared/ConnectionHandler";
import {VoiceBridge, VoicePacket} from "./bridge/VoiceBridge";
import {VoiceBridge, VoicePacket, VoiceWhisperPacket} from "./bridge/VoiceBridge";
import {NativeWebRTCVoiceBridge} from "./bridge/NativeWebRTCVoiceBridge";
import {EventType} from "tc-shared/ui/frames/log/Definitions";
import {kUnknownWhisperClientUniqueId, WhisperSession} from "tc-shared/voice/Whisper";
export enum VoiceEncodeType {
JS_ENCODE,
@ -46,6 +52,9 @@ export class VoiceConnection extends AbstractVoiceConnection {
private currentAudioSource: RecorderProfile;
private voiceClients: VoiceClientController[] = [];
private whisperSessionInitializer: WhisperSessionInitializer;
private whisperSessions: {[key: number]: WhisperSession} = {};
private voiceBridge: VoiceBridge;
private encoderCodec: number = 5;
@ -53,6 +62,8 @@ export class VoiceConnection extends AbstractVoiceConnection {
constructor(connection: ServerConnection) {
super(connection);
this.setWhisperSessionInitializer(undefined);
this.connectionState = VoiceConnectionStatus.Disconnected;
this.connection = connection;
@ -69,7 +80,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
destroy() {
this.connection.events.off(this.serverConnectionStateListener);
this.dropVoiceBridge();
this.acquire_voice_recorder(undefined, true).catch(error => {
this.acquireVoiceRecorder(undefined, true).catch(error => {
log.warn(LogCategory.VOICE, tr("Failed to release voice recorder: %o"), error);
}).then(() => {
for(const client of this.voiceClients) {
@ -84,7 +95,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.events.destroy();
}
async acquire_voice_recorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
async acquireVoiceRecorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
if(this.currentAudioSource === recorder && !enforce)
return;
@ -151,6 +162,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.voiceBridge = new NativeWebRTCVoiceBridge();
this.voiceBridge.callback_incoming_voice = packet => this.handleVoicePacket(packet);
this.voiceBridge.callback_incoming_whisper = packet => this.handleWhisperPacket(packet);
this.voiceBridge.callback_send_control_data = (request, payload) => {
this.connection.sendData(JSON.stringify(Object.assign({
type: "WebRTC",
@ -176,7 +188,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.connectAttemptCounter = 0;
this.connection.client.log.log(EventType.CONNECTION_VOICE_CONNECT_SUCCEEDED, { });
const currentInput = this.voice_recorder()?.input;
const currentInput = this.voiceRecorder()?.input;
if(currentInput) {
this.voiceBridge.setInput(currentInput).catch(error => {
createErrorModal(tr("Input recorder attechment failed"), tr("Failed to apply the current microphone recorder to the voice sender.")).open();
@ -284,7 +296,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
private handleRecorderUnmount() {
log.info(LogCategory.VOICE, "Lost recorder!");
this.currentAudioSource = undefined;
this.acquire_voice_recorder(undefined, true); /* we can ignore the promise because we should finish this directly */
this.acquireVoiceRecorder(undefined, true); /* we can ignore the promise because we should finish this directly */
}
private setConnectionState(state: VoiceConnectionStatus) {
@ -304,11 +316,11 @@ export class VoiceConnection extends AbstractVoiceConnection {
}
}
voice_recorder(): RecorderProfile {
voiceRecorder(): RecorderProfile {
return this.currentAudioSource;
}
available_clients(): VoiceClient[] {
availableClients(): VoiceClient[] {
return this.voiceClients;
}
@ -327,27 +339,61 @@ export class VoiceConnection extends AbstractVoiceConnection {
return Promise.resolve();
}
register_client(client_id: number): VoiceClient {
registerClient(client_id: number): VoiceClient {
const client = new VoiceClientController(client_id);
this.voiceClients.push(client);
return client;
}
decoding_supported(codec: number): boolean {
decodingSupported(codec: number): boolean {
return VoiceConnection.codecSupported(codec);
}
encoding_supported(codec: number): boolean {
encodingSupported(codec: number): boolean {
return VoiceConnection.codecSupported(codec);
}
get_encoder_codec(): number {
getEncoderCodec(): number {
return this.encoderCodec;
}
set_encoder_codec(codec: number) {
setEncoderCodec(codec: number) {
this.encoderCodec = codec;
}
protected handleWhisperPacket(packet: VoiceWhisperPacket) {
console.error("Received voice whisper packet: %o", packet);
}
getWhisperSessions(): WhisperSession[] {
return Object.values(this.whisperSessions);
}
dropWhisperSession(session: WhisperSession) {
throw "this is currently not supported";
}
setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined) {
this.whisperSessionInitializer = initializer;
if(!this.whisperSessionInitializer) {
this.whisperSessionInitializer = async session => {
logWarn(LogCategory.VOICE, tr("Missing whisper session initializer. Blocking whisper from %d (%s)"), session.getClientId(), session.getClientUniqueId());
return {
clientName: session.getClientName() || tr("Unknown client"),
clientUniqueId: session.getClientUniqueId() || kUnknownWhisperClientUniqueId,
blocked: true,
volume: 1,
sessionTimeout: 60 * 1000
}
}
}
}
getWhisperSessionInitializer(): WhisperSessionInitializer | undefined {
return this.whisperSessionInitializer;
}
}
/* funny fact that TypeScript doesn't find this */

View file

@ -4,6 +4,9 @@ import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import {WebRTCVoiceBridge} from "./WebRTCVoiceBridge";
import {VoiceWhisperPacket} from "tc-backend/web/voice/bridge/VoiceBridge";
import {CryptoHelper} from "tc-shared/profiles/identities/TeamSpeakIdentity";
import arraybuffer_to_string = CryptoHelper.arraybuffer_to_string;
export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
static isSupported(): boolean {
@ -40,8 +43,8 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
connection.addStream(this.localAudioDestinationNode.stream);
}
protected handleMainDataChannelMessage(message: MessageEvent) {
super.handleMainDataChannelMessage(message);
protected handleVoiceDataChannelMessage(message: MessageEvent) {
super.handleVoiceDataChannelMessage(message);
let bin = new Uint8Array(message.data);
let clientId = bin[2] << 8 | bin[3];
@ -56,6 +59,33 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
});
}
protected handleWhisperDataChannelMessage(message: MessageEvent) {
super.handleWhisperDataChannelMessage(message);
let payload = new Uint8Array(message.data);
let payload_offset = 0;
const flags = payload[payload_offset++];
let packet = {} as VoiceWhisperPacket;
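/* flag bit 0 set: the header carries the sender's identity, a 28 byte client unique id followed by a length prefixed (one byte) nickname */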
if((flags & 0x01) === 1) {
packet.clientUniqueId = arraybuffer_to_string(payload.subarray(payload_offset, payload_offset + 28));
payload_offset += 28;
packet.clientNickname = arraybuffer_to_string(payload.subarray(payload_offset + 1, payload_offset + 1 + payload[payload_offset]));
payload_offset += payload[payload_offset] + 1;
}
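/* fixed part of the header: 16 bit voice id and 16 bit client id (big endian), followed by the codec byte */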
packet.voiceId = payload[payload_offset] << 8 | payload[payload_offset + 1];
payload_offset += 2;
packet.clientId = payload[payload_offset] << 8 | payload[payload_offset + 1];
payload_offset += 2;
packet.codec = payload[payload_offset];
this.callback_incoming_whisper(packet);
}
getInput(): AbstractInput | undefined {
return this.currentInput;
}
@ -104,4 +134,10 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
channel.send(packet);
}
startWhisper() {
}
stopWhisper() {
}
}

View file

@ -17,11 +17,17 @@ export interface VoicePacket {
payload: Uint8Array;
}
export interface VoiceWhisperPacket extends VoicePacket {
clientUniqueId?: string;
clientNickname?: string;
}
export abstract class VoiceBridge {
protected muted: boolean;
callback_send_control_data: (request: string, payload: any) => void;
callback_incoming_voice: (packet: VoicePacket) => void;
callback_incoming_whisper: (packet: VoiceWhisperPacket) => void;
callback_disconnect: () => void;
@ -36,11 +42,9 @@ export abstract class VoiceBridge {
handleControlData(request: string, payload: any) { }
abstract connect(): Promise<VoiceBridgeConnectResult>;
abstract disconnect();
abstract getInput(): AbstractInput | undefined;
abstract setInput(input: AbstractInput | undefined): Promise<void>;
abstract sendStopSignal(codec: number);

View file

@ -10,7 +10,9 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
private connectionState: "unconnected" | "connecting" | "connected";
private rtcConnection: RTCPeerConnection;
private mainDataChannel: RTCDataChannel;
private voiceDataChannel: RTCDataChannel;
private whisperDataChannel: RTCDataChannel;
private cachedIceCandidates: RTCIceCandidateInit[];
private callbackRtcAnswer: (answer: any) => void;
@ -18,7 +20,7 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
private callbackConnectCanceled: (() => void)[] = [];
private callbackRtcConnected: () => void;
private callbackRtcConnectFailed: (error: any) => void;
private callbackMainDatachannelOpened: (() => void)[] = [];
private callbackVoiceDataChannelOpened: (() => void)[] = [];
private allowReconnect: boolean;
@ -90,15 +92,23 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
this.initializeRtpConnection(this.rtcConnection);
}
(window as any).dropVoice = () => this.callback_disconnect();
{
const dataChannelConfig = { ordered: false, maxRetransmits: 0 };
this.mainDataChannel = this.rtcConnection.createDataChannel('main', dataChannelConfig);
this.mainDataChannel.onmessage = this.handleMainDataChannelMessage.bind(this);
this.mainDataChannel.onopen = this.handleMainDataChannelOpen.bind(this);
this.mainDataChannel.binaryType = "arraybuffer";
this.voiceDataChannel = this.rtcConnection.createDataChannel('main', dataChannelConfig);
this.voiceDataChannel.onmessage = this.handleVoiceDataChannelMessage.bind(this);
this.voiceDataChannel.onopen = this.handleVoiceDataChannelOpen.bind(this);
this.voiceDataChannel.binaryType = "arraybuffer";
}
{
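/* like the voice channel, the whisper channel is lossy: unordered and without retransmits, so late frames are dropped rather than delayed */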
const dataChannelConfig = { ordered: false, maxRetransmits: 0 };
this.whisperDataChannel = this.rtcConnection.createDataChannel('voice-whisper', dataChannelConfig);
this.whisperDataChannel.onmessage = this.handleWhisperDataChannelMessage.bind(this);
this.whisperDataChannel.onopen = this.handleWhisperDataChannelOpen.bind(this);
this.whisperDataChannel.binaryType = "arraybuffer";
}
let offer: RTCSessionDescriptionInit;
@ -218,10 +228,10 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
}
private cleanupRtcResources() {
if(this.mainDataChannel) {
this.mainDataChannel.onclose = undefined;
this.mainDataChannel.close();
this.mainDataChannel = undefined;
if(this.voiceDataChannel) {
this.voiceDataChannel.onclose = undefined;
this.voiceDataChannel.close();
this.voiceDataChannel = undefined;
}
if(this.rtcConnection) {
@ -240,15 +250,15 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
}
protected async awaitMainChannelOpened(timeout: number) {
if(typeof this.mainDataChannel === "undefined")
if(typeof this.voiceDataChannel === "undefined")
throw tr("missing main data channel");
if(this.mainDataChannel.readyState === "open")
if(this.voiceDataChannel.readyState === "open")
return;
await new Promise((resolve, reject) => {
const id = setTimeout(reject, timeout);
this.callbackMainDatachannelOpened.push(() => {
this.callbackVoiceDataChannelOpened.push(() => {
clearTimeout(id);
resolve();
});
@ -332,13 +342,19 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
}
}
protected handleMainDataChannelOpen() {
logDebug(LogCategory.WEBRTC, tr("Main data channel is open now"));
while(this.callbackMainDatachannelOpened.length > 0)
this.callbackMainDatachannelOpened.pop()();
protected handleVoiceDataChannelOpen() {
logDebug(LogCategory.WEBRTC, tr("Voice data channel is open now"));
while(this.callbackVoiceDataChannelOpened.length > 0)
this.callbackVoiceDataChannelOpened.pop()();
}
protected handleMainDataChannelMessage(message: MessageEvent) { }
protected handleVoiceDataChannelMessage(message: MessageEvent) { }
protected handleWhisperDataChannelOpen() {
logDebug(LogCategory.WEBRTC, tr("Whisper data channel is open now"));
}
protected handleWhisperDataChannelMessage(message: MessageEvent) { }
handleControlData(request: string, payload: any) {
super.handleControlData(request, payload);
@ -368,7 +384,7 @@ export abstract class WebRTCVoiceBridge extends VoiceBridge {
}
public getMainDataChannel() : RTCDataChannel {
return this.mainDataChannel;
return this.voiceDataChannel;
}
protected abstract initializeRtpConnection(connection: RTCPeerConnection);