Some restructuring for the voice connection part

WolverinDEV 2020-08-10 19:39:28 +02:00
parent 94212d3a6d
commit 405bc7512d
13 changed files with 763 additions and 488 deletions

View file

@ -9,15 +9,13 @@ import {
QueryListEntry, ServerGroupClient
} from "tc-shared/connection/ServerConnectionDeclaration";
import {ChannelEntry} from "tc-shared/ui/channel";
import {ClientEntry} from "tc-shared/ui/client";
import {ChatType} from "tc-shared/ui/frames/chat";
import {AbstractCommandHandler} from "tc-shared/connection/AbstractCommandHandler";
import {tr} from "tc-shared/i18n/localize";
export class CommandHelper extends AbstractCommandHandler {
private _who_am_i: any;
private _awaiters_unique_ids: {[unique_id: string]:((resolved: ClientNameInfo) => any)[]} = {};
private _awaiters_unique_dbid: {[database_id: number]:((resolved: ClientNameInfo) => any)[]} = {};
private infoByUniqueIdRequest: {[unique_id: string]:((resolved: ClientNameInfo) => any)[]} = {};
private infoByDatabaseIdRequest: {[database_id: number]:((resolved: ClientNameInfo) => any)[]} = {};
constructor(connection) {
super(connection);
@ -35,7 +33,7 @@ export class CommandHelper extends AbstractCommandHandler {
const hboss = this.connection.command_handler_boss();
hboss && hboss.unregister_handler(this);
}
this._awaiters_unique_ids = undefined;
this.infoByUniqueIdRequest = undefined;
}
handle_command(command: ServerCommand): boolean {
@ -56,21 +54,6 @@ export class CommandHelper extends AbstractCommandHandler {
});
}
sendMessage(message: string, type: ChatType, target?: ChannelEntry | ClientEntry) : Promise<CommandResult> {
if(type == ChatType.SERVER)
return this.connection.send_command("sendtextmessage", {"targetmode": 3, "target": 0, "msg": message});
else if(type == ChatType.CHANNEL)
return this.connection.send_command("sendtextmessage", {"targetmode": 2, "target": (target as ChannelEntry).getChannelId(), "msg": message});
else if(type == ChatType.CLIENT)
return this.connection.send_command("sendtextmessage", {"targetmode": 1, "target": (target as ClientEntry).clientId(), "msg": message});
}
updateClient(key: string, value: string) : Promise<CommandResult> {
let data = {};
data[key] = value;
return this.connection.send_command("clientupdate", data);
}
async info_from_uid(..._unique_ids: string[]) : Promise<ClientNameInfo[]> {
const response: ClientNameInfo[] = [];
const request = [];
@ -82,7 +65,7 @@ export class CommandHelper extends AbstractCommandHandler {
for(const unique_id of unique_ids) {
request.push({'cluid': unique_id});
(this._awaiters_unique_ids[unique_id] || (this._awaiters_unique_ids[unique_id] = []))
(this.infoByUniqueIdRequest[unique_id] || (this.infoByUniqueIdRequest[unique_id] = []))
.push(unique_id_resolvers[unique_id] = info => response.push(info));
}
@ -97,7 +80,7 @@ export class CommandHelper extends AbstractCommandHandler {
} finally {
/* cleanup */
for(const unique_id of Object.keys(unique_id_resolvers))
(this._awaiters_unique_ids[unique_id] || []).remove(unique_id_resolvers[unique_id]);
(this.infoByUniqueIdRequest[unique_id] || []).remove(unique_id_resolvers[unique_id]);
}
return response;
@ -111,8 +94,8 @@ export class CommandHelper extends AbstractCommandHandler {
client_database_id: parseInt(entry["cldbid"])
};
const functions = this._awaiters_unique_dbid[info.client_database_id] || [];
delete this._awaiters_unique_dbid[info.client_database_id];
const functions = this.infoByDatabaseIdRequest[info.client_database_id] || [];
delete this.infoByDatabaseIdRequest[info.client_database_id];
for(const fn of functions)
fn(info);
@ -130,7 +113,7 @@ export class CommandHelper extends AbstractCommandHandler {
for(const cldbid of unique_cldbid) {
request.push({'cldbid': cldbid});
(this._awaiters_unique_dbid[cldbid] || (this._awaiters_unique_dbid[cldbid] = []))
(this.infoByDatabaseIdRequest[cldbid] || (this.infoByDatabaseIdRequest[cldbid] = []))
.push(unique_cldbid_resolvers[cldbid] = info => response.push(info));
}
@ -145,7 +128,7 @@ export class CommandHelper extends AbstractCommandHandler {
} finally {
/* cleanup */
for(const cldbid of Object.keys(unique_cldbid_resolvers))
(this._awaiters_unique_dbid[cldbid] || []).remove(unique_cldbid_resolvers[cldbid]);
(this.infoByDatabaseIdRequest[cldbid] || []).remove(unique_cldbid_resolvers[cldbid]);
}
return response;
@ -159,8 +142,8 @@ export class CommandHelper extends AbstractCommandHandler {
client_database_id: parseInt(entry["cldbid"])
};
const functions = this._awaiters_unique_ids[entry["cluid"]] || [];
delete this._awaiters_unique_ids[entry["cluid"]];
const functions = this.infoByUniqueIdRequest[entry["cluid"]] || [];
delete this.infoByUniqueIdRequest[entry["cluid"]];
for(const fn of functions)
fn(info);
@ -362,7 +345,7 @@ export class CommandHelper extends AbstractCommandHandler {
});
}
async request_clients_by_server_group(group_id: number) : Promise<ServerGroupClient[]> {
request_clients_by_server_group(group_id: number) : Promise<ServerGroupClient[]> {
//servergroupclientlist sgid=2
//notifyservergroupclientlist sgid=6 cldbid=2 client_nickname=WolverinDEV client_unique_identifier=xxjnc14LmvTk+Lyrm8OOeo4tOqw=
return new Promise<ServerGroupClient[]>((resolve, reject) => {
@ -452,7 +435,7 @@ export class CommandHelper extends AbstractCommandHandler {
/**
* @deprecated
It's just a workaround for the query management.
* There is no garante that the whoami trick will work forever
There is no guarantee that the whoami trick will work forever
*/
current_virtual_server_id() : Promise<number> {
if(this._who_am_i)
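
The renamed infoByUniqueIdRequest and infoByDatabaseIdRequest maps above follow a pending-request pattern: a resolver is queued per key before the command is sent, the matching notify handler drains and answers the queue, and the finally block removes resolvers whose request was aborted. Below is a minimal, self-contained sketch of that pattern; the class and type names are illustrative and not part of the commit.

type NameInfo = { clientUniqueId: string, clientNickname: string };
type NameInfoResolver = (info: NameInfo) => void;

class PendingRequestMap {
    private pending: { [key: string]: NameInfoResolver[] } = {};

    /* request side: register a resolver before the command is sent */
    enqueue(key: string, resolver: NameInfoResolver) {
        (this.pending[key] || (this.pending[key] = [])).push(resolver);
    }

    /* notify handler: answer every resolver waiting on this key */
    resolve(key: string, info: NameInfo) {
        const resolvers = this.pending[key] || [];
        delete this.pending[key];
        for(const resolver of resolvers) {
            resolver(info);
        }
    }

    /* cleanup in the finally block: drop a resolver whose request was aborted */
    remove(key: string, resolver: NameInfoResolver) {
        const resolvers = this.pending[key];
        if(!resolvers) {
            return;
        }
        const index = resolvers.indexOf(resolver);
        if(index !== -1) {
            resolvers.splice(index, 1);
        }
    }
}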

View file

@ -66,6 +66,8 @@ export abstract class AbstractServerConnection {
this.events.fire("notify_connection_state_changed", { oldState: oldState, newState: state });
}
getConnectionState() { return this.connectionState; }
abstract ping() : {
native: number,
javascript?: number

View file

@ -20,6 +20,7 @@ export enum LogCategory {
DNS,
FILE_TRANSFER,
EVENT_REGISTRY,
WEBRTC
}
export enum LogType {
@ -49,6 +50,7 @@ let category_mapping = new Map<number, string>([
[LogCategory.DNS, "DNS "],
[LogCategory.FILE_TRANSFER, "File transfer "],
[LogCategory.EVENT_REGISTRY, "Event registry"],
[LogCategory.WEBRTC, "WebRTC "],
]);
export let enabled_mapping = new Map<number, boolean>([
@ -70,6 +72,7 @@ export let enabled_mapping = new Map<number, boolean>([
[LogCategory.DNS, true],
[LogCategory.FILE_TRANSFER, true],
[LogCategory.EVENT_REGISTRY, true],
[LogCategory.WEBRTC, true],
]);
//Values will be overridden by initialize()

View file

@ -14,7 +14,11 @@ export enum EventType {
DISCONNECTED = "disconnected",
CONNECTION_VOICE_SETUP_FAILED = "connection.voice.setup.failed",
CONNECTION_VOICE_CONNECT = "connection.voice.connect",
CONNECTION_VOICE_CONNECT_FAILED = "connection.voice.connect.failed",
CONNECTION_VOICE_CONNECT_SUCCEEDED = "connection.voice.connect.succeeded",
CONNECTION_VOICE_DROPPED = "connection.voice.dropped",
CONNECTION_COMMAND_ERROR = "connection.command.error",
GLOBAL_MESSAGE = "global.message",
@ -185,11 +189,19 @@ export namespace event {
message: string;
}
export type EventConnectionVoiceSetupFailed = {
export type EventConnectionVoiceConnectFailed = {
reason: string;
reconnect_delay: number; /* if less than or equal to 0, reconnecting is prohibited */
}
export type EventConnectionVoiceConnectSucceeded = {}
export type EventConnectionVoiceConnect = {
attemptCount: number
}
export type EventConnectionVoiceDropped = {}
export type EventConnectionCommandError = {
error: any;
}
@ -259,7 +271,10 @@ export interface TypeInfo {
"connection.failed": event.EventConnectionFailed;
"connection.login": event.EventConnectionLogin;
"connection.connected": event.EventConnectionConnected;
"connection.voice.setup.failed": event.EventConnectionVoiceSetupFailed;
"connection.voice.dropped": event.EventConnectionVoiceDropped;
"connection.voice.connect": event.EventConnectionVoiceConnect;
"connection.voice.connect.failed": event.EventConnectionVoiceConnectFailed;
"connection.voice.connect.succeeded": event.EventConnectionVoiceConnectSucceeded;
"connection.command.error": event.EventConnectionCommandError;
"reconnect.scheduled": event.EventReconnectScheduled;

View file

@ -80,13 +80,25 @@ registerDispatcher(EventType.CONNECTION_CONNECTED, (data,handlerId) => (
</VariadicTranslatable>
));
registerDispatcher(EventType.CONNECTION_VOICE_SETUP_FAILED, (data) => (
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT, () => (
<Translatable>Connecting voice bridge.</Translatable>
));
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT_SUCCEEDED, () => (
<Translatable>Voice bridge successfully connected.</Translatable>
));
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT_FAILED, (data) => (
<VariadicTranslatable text={"Failed to setup voice bridge: {0}. Allow reconnect: {1}"}>
<>{data.reason}</>
{data.reconnect_delay > 0 ? <Translatable>Yes</Translatable> : <Translatable>No</Translatable>}
</VariadicTranslatable>
));
registerDispatcher(EventType.CONNECTION_VOICE_DROPPED, () => (
<Translatable>Voice bridge has been dropped. Trying to reconnect.</Translatable>
));
registerDispatcher(EventType.ERROR_PERMISSION, data => (
<div className={cssStyleRenderer.errorMessage}>
<VariadicTranslatable text={"Insufficient client permissions. Failed on permission {0}"}>

View file

@ -21,6 +21,7 @@ notificationDefaultStatus[EventType.SERVER_HOST_MESSAGE_DISCONNECT] = true;
notificationDefaultStatus[EventType.GLOBAL_MESSAGE] = true;
notificationDefaultStatus[EventType.CONNECTION_FAILED] = true;
notificationDefaultStatus[EventType.PRIVATE_MESSAGE_RECEIVED] = true;
notificationDefaultStatus[EventType.CONNECTION_VOICE_DROPPED] = true;
let windowFocused = false;
@ -143,12 +144,30 @@ registerDispatcher(EventType.DISCONNECTED, () => {
/* snipped RECONNECT_EXECUTE */
/* snipped RECONNECT_CANCELED */
registerDispatcher(EventType.CONNECTION_VOICE_SETUP_FAILED, (data, handlerId) => {
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT, (data, handlerId) => {
spawnServerNotification(handlerId, {
body: tr("Connecting voice bridge.")
});
});
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT_SUCCEEDED, (data, handlerId) => {
spawnServerNotification(handlerId, {
body: tr("Voice bridge successfully connected.")
});
});
registerDispatcher(EventType.CONNECTION_VOICE_CONNECT_FAILED, (data, handlerId) => {
spawnServerNotification(handlerId, {
body: tra("Failed to setup voice bridge: {0}. Allow reconnect: {1}", data.reason, data.reconnect_delay > 0 ? tr("Yes") : tr("No"))
});
});
registerDispatcher(EventType.CONNECTION_VOICE_DROPPED, (data, handlerId) => {
spawnServerNotification(handlerId, {
body: tr("Voice bridge has been dropped. Trying to reconnect.")
});
});
registerDispatcher(EventType.CONNECTION_COMMAND_ERROR, (data, handlerId) => {
spawnServerNotification(handlerId, {
body: tra("Command execution resulted in an error.")

View file

@ -13,6 +13,7 @@ import {spawnFileTransferModal} from "tc-shared/ui/modal/transfer/ModalFileTrans
import {ClientIconRenderer} from "tc-shared/ui/react-elements/Icons";
import {ClientIcon} from "svg-sprites/client-icons";
import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
const channelStyle = require("./Channel.scss");
const viewStyle = require("./View.scss");
@ -37,6 +38,7 @@ interface ChannelEntryIconsState {
@BatchUpdateAssignment(BatchUpdateType.CHANNEL_TREE)
class ChannelEntryIcons extends ReactComponentBase<ChannelEntryIconsProperties, ChannelEntryIconsState> {
private readonly listenerVoiceStatusChange;
private serverConnection: AbstractServerConnection;
constructor(props) {
super(props);
@ -48,23 +50,20 @@ class ChannelEntryIcons extends ReactComponentBase<ChannelEntryIconsProperties,
}
}
private serverConnection() {
return this.props.channel.channelTree.client.serverConnection;
}
componentDidMount() {
const voiceConnection = this.serverConnection().getVoiceConnection();
const voiceConnection = this.serverConnection.getVoiceConnection();
voiceConnection.events.on("notify_connection_status_changed", this.listenerVoiceStatusChange);
}
componentWillUnmount() {
const voiceConnection = this.serverConnection().getVoiceConnection();
const voiceConnection = this.serverConnection.getVoiceConnection();
voiceConnection.events.off("notify_connection_status_changed", this.listenerVoiceStatusChange);
}
protected defaultState(): ChannelEntryIconsState {
const properties = this.props.channel.properties;
this.serverConnection = this.props.channel.channelTree.client.serverConnection;
const properties = this.props.channel.properties;
const status = {
icons_shown: this.props.channel.parsed_channel_name.alignment === "normal",
custom_icon_id: properties.channel_icon_id,
@ -143,7 +142,7 @@ class ChannelEntryIcons extends ReactComponentBase<ChannelEntryIconsProperties,
}
private updateVoiceStatus(state: ChannelEntryIconsState, currentCodec: number) {
const voiceConnection = this.serverConnection().getVoiceConnection();
const voiceConnection = this.serverConnection.getVoiceConnection();
const voiceState = voiceConnection.getConnectionState();
switch (voiceState) {

View file

@ -192,8 +192,10 @@ export class RecorderProfile {
}
async unmount() : Promise<void> {
if(this.callback_unmount)
if(this.callback_unmount) {
this.callback_unmount();
}
if(this.input) {
try {
await this.input.set_consumer(undefined);

View file

@ -284,11 +284,6 @@ export class ServerConnection extends AbstractServerConnection {
//TODO send disconnect reason
}
if(this.voiceConnection)
this.voiceConnection.drop_rtp_session();
if(this.socket) {
this.socket.callbackMessage = undefined;
this.socket.callbackDisconnect = undefined;
@ -335,17 +330,12 @@ export class ServerConnection extends AbstractServerConnection {
this.pingStatistics.thread_id = setInterval(() => this.doNextPing(), this.pingStatistics.interval) as any;
this.doNextPing();
this.updateConnectionState(ConnectionState.CONNECTED);
if(this.voiceConnection)
this.voiceConnection.start_rtc_session(); /* FIXME: Move it to a handler boss and not here! */
}
/* devel-block(log-networking-commands) */
group.end();
/* devel-block-end */
} else if(json["type"] === "WebRTC") {
if(this.voiceConnection)
this.voiceConnection.handleControlPacket(json);
else
log.warn(LogCategory.NETWORKING, tr("Dropping WebRTC command packet, because we haven't a bridge."))
this.voiceConnection?.handleControlPacket(json);
} else if(json["type"] === "ping") {
this.sendData(JSON.stringify({
type: 'pong',

View file

@ -1,15 +1,19 @@
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {LogCategory, logDebug, logInfo, logWarn} from "tc-shared/log";
import * as aplayer from "../audio/player";
import {ServerConnection} from "../connection/ServerConnection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {VoiceClientController} from "./VoiceClient";
import {settings, ValuedSettingsKey} from "tc-shared/settings";
import {CallbackInputConsumer, InputConsumerType, NodeInputConsumer} from "tc-shared/voice/RecorderBase";
import {tr} from "tc-shared/i18n/localize";
import {EventType} from "tc-shared/ui/frames/log/Definitions";
import {AbstractVoiceConnection, VoiceClient, VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
import {codecPool, CodecPool} from "tc-backend/web/voice/CodecConverter";
import {codecPool} from "./CodecConverter";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
import {ServerConnectionEvents} from "tc-shared/connection/ConnectionBase";
import {ConnectionState} from "tc-shared/ConnectionHandler";
import {VoiceBridge, VoicePacket} from "./bridge/VoiceBridge";
import {NativeWebRTCVoiceBridge} from "./bridge/NativeWebRTCVoiceBridge";
import {EventType} from "tc-shared/ui/frames/log/Definitions";
export enum VoiceEncodeType {
JS_ENCODE,
@ -23,34 +27,28 @@ const KEY_VOICE_CONNECTION_TYPE: ValuedSettingsKey<number> = {
};
export class VoiceConnection extends AbstractVoiceConnection {
readonly connection: ServerConnection;
connectionState: VoiceConnectionStatus;
rtcPeerConnection: RTCPeerConnection;
dataChannel: RTCDataChannel;
private connectionType: VoiceEncodeType = VoiceEncodeType.NATIVE_ENCODE;
private localAudioStarted = false;
/*
* To ensure we're not sending any audio because the settings activates the input,
* we self mute the audio stream
*/
local_audio_mute: GainNode;
local_audio_stream: MediaStreamAudioDestinationNode;
static codecSupported(type: number) : boolean {
return !!codecPool && codecPool.length > type && codecPool[type].supported();
}
private voice_packet_id: number = 0;
private chunkVPacketId: number = 0;
private send_task: number;
readonly connection: ServerConnection;
private _audio_source: RecorderProfile;
private _audio_clients: VoiceClientController[] = [];
private readonly serverConnectionStateListener;
private connectionType: VoiceEncodeType = VoiceEncodeType.NATIVE_ENCODE;
private connectionState: VoiceConnectionStatus;
private _encoder_codec: number = 5;
private localAudioStarted = false;
private connectionLostModalOpen = false;
private connectAttemptCounter = 0;
private awaitingAudioInitialize = false;
private currentAudioSource: RecorderProfile;
private voiceClients: VoiceClientController[] = [];
private voiceBridge: VoiceBridge;
private encoderCodec: number = 5;
constructor(connection: ServerConnection) {
super(connection);
@ -59,6 +57,9 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.connection = connection;
this.connectionType = settings.static_global(KEY_VOICE_CONNECTION_TYPE, this.connectionType);
this.connection.events.on("notify_connection_state_changed",
this.serverConnectionStateListener = this.handleServerConnectionStateChanged.bind(this));
}
getConnectionState(): VoiceConnectionStatus {
@ -66,489 +67,207 @@ export class VoiceConnection extends AbstractVoiceConnection {
}
destroy() {
clearInterval(this.send_task);
this.drop_rtp_session();
this.connection.events.off(this.serverConnectionStateListener);
this.dropVoiceBridge();
this.acquire_voice_recorder(undefined, true).catch(error => {
log.warn(LogCategory.VOICE, tr("Failed to release voice recorder: %o"), error);
}).then(() => {
for(const client of this._audio_clients) {
for(const client of this.voiceClients) {
client.abort_replay();
client.callback_playback = undefined;
client.callback_state_changed = undefined;
client.callback_stopped = undefined;
}
this._audio_clients = undefined;
this._audio_source = undefined;
this.voiceClients = undefined;
this.currentAudioSource = undefined;
});
this.events.destroy();
}
static native_encoding_supported() : boolean {
const context = window.webkitAudioContext || window.AudioContext;
if(!context)
return false;
if(!context.prototype.createMediaStreamDestination)
return false; /* Required, but not available within edge */
return true;
}
static javascript_encoding_supported() : boolean {
return typeof window.RTCPeerConnection !== "undefined" && typeof window.RTCPeerConnection.prototype.createDataChannel === "function";
}
current_encoding_supported() : boolean {
switch (this.connectionType) {
case VoiceEncodeType.JS_ENCODE:
return VoiceConnection.javascript_encoding_supported();
case VoiceEncodeType.NATIVE_ENCODE:
return VoiceConnection.native_encoding_supported();
}
return false;
}
private setup_native() {
log.info(LogCategory.VOICE, tr("Setting up native voice stream!"));
if(!VoiceConnection.native_encoding_supported()) {
log.warn(LogCategory.VOICE, tr("Native codec isn't supported!"));
return;
}
if(!this.local_audio_stream) {
this.local_audio_stream = aplayer.context().createMediaStreamDestination();
}
if(!this.local_audio_mute) {
this.local_audio_mute = aplayer.context().createGain();
this.local_audio_mute.connect(this.local_audio_stream);
this.local_audio_mute.gain.value = 1;
}
}
private setup_js() {
if(!VoiceConnection.javascript_encoding_supported()) return;
if(!this.send_task)
this.send_task = setInterval(this.send_next_voice_packet.bind(this), 20); /* send all 20ms out voice packets */
}
async acquire_voice_recorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
if(this._audio_source === recorder && !enforce)
if(this.currentAudioSource === recorder && !enforce)
return;
if(recorder) {
await recorder.unmount();
}
if(this._audio_source) {
await this._audio_source.unmount();
if(this.currentAudioSource) {
await this.voiceBridge?.setInput(undefined);
this.currentAudioSource.callback_unmount = undefined;
await this.currentAudioSource.unmount();
}
this.handleLocalVoiceEnded();
this._audio_source = recorder;
this.handleRecorderStop();
this.currentAudioSource = recorder;
if(recorder) {
recorder.current_handler = this.connection.client;
recorder.callback_unmount = this.on_recorder_yield.bind(this);
recorder.callback_start = this.handleLocalVoiceStarted.bind(this);
recorder.callback_stop = this.handleLocalVoiceEnded.bind(this);
recorder.callback_unmount = this.handleRecorderUnmount.bind(this);
recorder.callback_start = this.handleRecorderStart.bind(this);
recorder.callback_stop = this.handleRecorderStop.bind(this);
recorder.callback_input_change = async (old_input, new_input) => {
if(old_input) {
try {
await old_input.set_consumer(undefined);
} catch(error) {
log.warn(LogCategory.VOICE, tr("Failed to release own consumer from old input: %o"), error);
}
recorder.callback_input_change = async (oldInput, newInput) => {
if(!this.voiceBridge)
return;
if(this.voiceBridge.getInput() && this.voiceBridge.getInput() !== oldInput) {
logWarn(LogCategory.VOICE,
tr("Having a recorder input change, but our voice bridge still has another input (Having: %o, Expecting: %o)!"),
this.voiceBridge.getInput(), oldInput);
}
if(new_input) {
if(this.connectionType == VoiceEncodeType.NATIVE_ENCODE) {
if(!this.local_audio_stream)
this.setup_native(); /* requires initialized audio */
try {
await new_input.set_consumer({
type: InputConsumerType.NODE,
callback_node: node => {
if(!this.local_audio_stream || !this.local_audio_mute)
return;
node.connect(this.local_audio_mute);
},
callback_disconnect: node => {
if(!this.local_audio_mute)
return;
node.disconnect(this.local_audio_mute);
}
} as NodeInputConsumer);
log.debug(LogCategory.VOICE, tr("Successfully set/updated to the new input for the recorder"));
} catch (e) {
log.warn(LogCategory.VOICE, tr("Failed to set consumer to the new recorder input: %o"), e);
}
} else {
try {
await recorder.input.set_consumer({
type: InputConsumerType.CALLBACK,
callback_audio: buffer => this.handleLocalVoiceBuffer(buffer, false)
} as CallbackInputConsumer);
log.debug(LogCategory.VOICE, tr("Successfully set/updated to the new input for the recorder"));
} catch (e) {
log.warn(LogCategory.VOICE, tr("Failed to set consumer to the new recorder input: %o"), e);
}
}
}
await this.voiceBridge.setInput(newInput);
};
}
this.events.fire("notify_recorder_changed");
}
get_encoder_type() : VoiceEncodeType { return this.connectionType; }
set_encoder_type(target: VoiceEncodeType) {
if(target == this.connectionType) return;
this.connectionType = target;
if(this.connectionType == VoiceEncodeType.NATIVE_ENCODE)
this.setup_native();
else
this.setup_js();
this.start_rtc_session();
}
voice_playback_support() : boolean {
return this.dataChannel && this.dataChannel.readyState == "open";
}
voice_send_support() : boolean {
if(this.connectionType == VoiceEncodeType.NATIVE_ENCODE)
return VoiceConnection.native_encoding_supported() && this.rtcPeerConnection.getLocalStreams().length > 0;
else
return this.voice_playback_support();
}
private voice_send_queue: {data: Uint8Array, codec: number}[] = [];
handleEncodedVoicePacket(data: Uint8Array, codec: number){
this.voice_send_queue.push({data: data, codec: codec});
}
private send_next_voice_packet() {
const buffer = this.voice_send_queue.pop_front();
if(!buffer)
return;
this.sendVoicePacket(buffer.data, buffer.codec);
}
private fillVoicePacketHeader(packet: Uint8Array, codec: number) {
packet[0] = this.chunkVPacketId++ < 5 ? 1 : 0; //Flag header
packet[1] = 0; //Flag fragmented
packet[2] = (this.voice_packet_id >> 8) & 0xFF; //HIGHT (voiceID)
packet[3] = (this.voice_packet_id >> 0) & 0xFF; //LOW (voiceID)
packet[4] = codec; //Codec
}
sendVoicePacket(encoded_data: Uint8Array, codec: number) {
if(this.dataChannel) {
this.voice_packet_id++;
if(this.voice_packet_id > 65535)
this.voice_packet_id = 0;
let packet = new Uint8Array(encoded_data.byteLength + 5);
this.fillVoicePacketHeader(packet, codec);
packet.set(encoded_data, 5);
try {
this.dataChannel.send(packet);
} catch (error) {
log.warn(LogCategory.VOICE, tr("Failed to send voice packet. Error: %o"), error);
}
} else {
log.warn(LogCategory.VOICE, tr("Could not transfer audio (not connected)"));
}
}
sendVoiceStopPacket(codec: number) {
if(!this.dataChannel)
return;
const packet = new Uint8Array(5);
this.fillVoicePacketHeader(packet, codec);
try {
this.dataChannel.send(packet);
} catch (error) {
log.warn(LogCategory.VOICE, tr("Failed to send voice packet. Error: %o"), error);
}
}
private _audio_player_waiting = false;
start_rtc_session() {
private startVoiceBridge() {
if(!aplayer.initialized()) {
log.info(LogCategory.VOICE, tr("Audio player isn't initialized yet. Waiting for gesture."));
if(!this._audio_player_waiting) {
this._audio_player_waiting = true;
aplayer.on_ready(() => this.start_rtc_session());
logDebug(LogCategory.VOICE, tr("Audio player isn't initialized yet. Waiting for it to initialize."));
if(!this.awaitingAudioInitialize) {
this.awaitingAudioInitialize = true;
aplayer.on_ready(() => this.startVoiceBridge());
}
return;
}
if(!this.current_encoding_supported())
return false;
if(this.connection.getConnectionState() !== ConnectionState.CONNECTED)
return;
if(this.connectionType == VoiceEncodeType.NATIVE_ENCODE)
this.setup_native();
else
this.setup_js();
this.drop_rtp_session();
this._ice_use_cache = true;
this.setConnectionState(VoiceConnectionStatus.Connecting);
let config: RTCConfiguration = {};
config.iceServers = [];
config.iceServers.push({ urls: 'stun:stun.l.google.com:19302' });
//config.iceServers.push({ urls: "stun:stun.teaspeak.de:3478" });
this.rtcPeerConnection = new RTCPeerConnection(config);
const dataChannelConfig = { ordered: false, maxRetransmits: 0 };
this.dataChannel = this.rtcPeerConnection.createDataChannel('main', dataChannelConfig);
this.dataChannel.onmessage = this.onMainDataChannelMessage.bind(this);
this.dataChannel.onopen = this.onMainDataChannelOpen.bind(this);
this.dataChannel.binaryType = "arraybuffer";
let sdpConstraints : RTCOfferOptions = {};
sdpConstraints.offerToReceiveAudio = this.connectionType == VoiceEncodeType.NATIVE_ENCODE;
sdpConstraints.offerToReceiveVideo = false;
sdpConstraints.voiceActivityDetection = true;
this.rtcPeerConnection.onicegatheringstatechange = () => console.log("ICE gathering state changed to %s", this.rtcPeerConnection.iceGatheringState);
this.rtcPeerConnection.oniceconnectionstatechange = () => console.log("ICE connection state changed to %s", this.rtcPeerConnection.iceConnectionState);
this.rtcPeerConnection.onicecandidate = this.on_local_ice_candidate.bind(this);
if(this.local_audio_stream) { //May a typecheck?
this.rtcPeerConnection.addStream(this.local_audio_stream.stream);
log.info(LogCategory.VOICE, tr("Adding native audio stream (%o)!"), this.local_audio_stream.stream);
this.connectAttemptCounter++;
if(this.voiceBridge) {
this.voiceBridge.callback_disconnect = undefined;
this.voiceBridge.disconnect();
}
this.rtcPeerConnection.createOffer(sdpConstraints)
.then(offer => this.on_local_offer_created(offer))
.catch(error => {
log.error(LogCategory.VOICE, tr("Could not create ice offer! error: %o"), error);
});
this.voiceBridge = new NativeWebRTCVoiceBridge();
this.voiceBridge.callback_incoming_voice = packet => this.handleVoicePacket(packet);
this.voiceBridge.callback_send_control_data = (request, payload) => {
this.connection.sendData(JSON.stringify(Object.assign({
type: "WebRTC",
request: request
}, payload)))
};
this.voiceBridge.callback_disconnect = () => {
this.connection.client.log.log(EventType.CONNECTION_VOICE_DROPPED, { });
if(!this.connectionLostModalOpen) {
this.connectionLostModalOpen = true;
const modal = createErrorModal(tr("Voice connection lost"), tr("Lost voice connection to the target server. Trying to reconnect..."));
modal.close_listener.push(() => this.connectionLostModalOpen = false);
modal.open();
}
logInfo(LogCategory.WEBRTC, tr("Lost voice connection to the target server. Trying to reconnect."));
this.startVoiceBridge();
}
this.connection.client.log.log(EventType.CONNECTION_VOICE_CONNECT, { attemptCount: this.connectAttemptCounter });
this.setConnectionState(VoiceConnectionStatus.Connecting);
this.voiceBridge.connect().then(result => {
if(result.type === "success") {
this.connectAttemptCounter = 0;
this.connection.client.log.log(EventType.CONNECTION_VOICE_CONNECT_SUCCEEDED, { });
const currentInput = this.voice_recorder()?.input;
if(currentInput) {
this.voiceBridge.setInput(currentInput).catch(error => {
createErrorModal(tr("Input recorder attechment failed"), tr("Failed to apply the current microphone recorder to the voice sender.")).open();
logWarn(LogCategory.VOICE, tr("Failed to apply the input to the voice bridge: %o"), error);
this.handleRecorderUnmount();
});
}
this.setConnectionState(VoiceConnectionStatus.Connected);
} else if(result.type === "canceled") {
/* nothing to do here */
} else if(result.type === "failed") {
logWarn(LogCategory.VOICE, tr("Failed to setup voice bridge: %s. Reconnect: %o"), result.message, result.allowReconnect);
this.connection.client.log.log(EventType.CONNECTION_VOICE_CONNECT_FAILED, {
reason: result.message,
reconnect_delay: result.allowReconnect ? 1 : 0
});
if(result.allowReconnect) {
this.startVoiceBridge();
}
}
});
}
drop_rtp_session() {
if(this.dataChannel) {
this.dataChannel.close();
this.dataChannel = undefined;
private dropVoiceBridge() {
if(this.voiceBridge) {
this.voiceBridge.callback_disconnect = undefined;
this.voiceBridge.disconnect();
this.voiceBridge = undefined;
}
if(this.rtcPeerConnection) {
this.rtcPeerConnection.close();
this.rtcPeerConnection = undefined;
}
this._ice_use_cache = true;
this._ice_cache = [];
this.setConnectionState(VoiceConnectionStatus.Disconnected);
}
private registerRemoteICECandidate(candidate: RTCIceCandidate) {
if(candidate.candidate === "") {
console.log("Adding end candidate");
this.rtcPeerConnection.addIceCandidate(null).catch(error => {
log.info(LogCategory.VOICE, tr("Failed to add remote cached ice candidate finish: %o"), error);
});
return;
}
const pcandidate = new RTCIceCandidate(candidate);
if(pcandidate.protocol !== "tcp") return; /* UDP does not work currently */
log.info(LogCategory.VOICE, tr("Add remote ice! (%o)"), pcandidate);
this.rtcPeerConnection.addIceCandidate(pcandidate).catch(error => {
log.info(LogCategory.VOICE, tr("Failed to add remote cached ice candidate %o: %o"), candidate, error);
});
}
private _ice_use_cache: boolean = true;
private _ice_cache: RTCIceCandidate[] = [];
handleControlPacket(json) {
if(json["request"] === "answer") {
const session_description = new RTCSessionDescription(json["msg"]);
log.info(LogCategory.VOICE, tr("Received answer to our offer. Answer: %o"), session_description);
this.rtcPeerConnection.setRemoteDescription(session_description).then(() => {
log.info(LogCategory.VOICE, tr("Answer applied successfully. Applying ICE candidates (%d)."), this._ice_cache.length);
this._ice_use_cache = false;
for(let candidate of this._ice_cache)
this.registerRemoteICECandidate(candidate);
this._ice_cache = [];
}).catch(error => {
log.info(LogCategory.VOICE, tr("Failed to apply remote description: %o"), error); //FIXME error handling!
});
} else if(json["request"] === "ice" || json["request"] === "ice_finish") {
const candidate = new RTCIceCandidate(json["msg"]);
if(!this._ice_use_cache) {
this.registerRemoteICECandidate(candidate);
} else {
log.info(LogCategory.VOICE, tr("Cache remote ice! (%o)"), json["msg"]);
this._ice_cache.push(candidate);
}
} else if(json["request"] == "status") {
if(json["state"] == "failed") {
const chandler = this.connection.client;
chandler.log.log(EventType.CONNECTION_VOICE_SETUP_FAILED, {
reason: json["reason"],
reconnect_delay: json["allow_reconnect"] ? 1 : 0
});
log.error(LogCategory.NETWORKING, tr("Failed to setup voice bridge (%s). Allow reconnect: %s"), json["reason"], json["allow_reconnect"]);
if(json["allow_reconnect"] == true) {
this.start_rtc_session();
}
//TODO handle fail specially when its not allowed to reconnect
}
} else {
log.warn(LogCategory.NETWORKING, tr("Received unknown web client control packet: %s"), json["request"]);
}
this.voiceBridge.handleControlData(json["request"], json);
return;
}
private on_local_ice_candidate(event: RTCPeerConnectionIceEvent) {
if (event) {
if(event.candidate && event.candidate.protocol !== "tcp")
return;
if(event.candidate) {
log.info(LogCategory.VOICE, tr("Gathered local ice candidate for stream %d: %s"), event.candidate.sdpMLineIndex, event.candidate.candidate);
this.connection.sendData(JSON.stringify({
type: 'WebRTC',
request: "ice",
msg: event.candidate,
}));
} else {
log.info(LogCategory.VOICE, tr("Local ICE candidate gathering finish."));
this.connection.sendData(JSON.stringify({
type: 'WebRTC',
request: "ice_finish"
}));
}
}
}
private on_local_offer_created(localSession) {
log.info(LogCategory.VOICE, tr("Local offer created. Setting up local description. (%o)"), localSession);
this.rtcPeerConnection.setLocalDescription(localSession).then(() => {
log.info(LogCategory.VOICE, tr("Offer applied successfully. Sending offer to server."));
this.connection.sendData(JSON.stringify({type: 'WebRTC', request: "create", msg: localSession}));
}).catch(error => {
log.info(LogCategory.VOICE, tr("Failed to apply local description: %o"), error);
//FIXME error handling
});
}
private onMainDataChannelOpen(channel) {
log.info(LogCategory.VOICE, tr("Got new data channel! (%s)"), this.dataChannel.readyState);
this.setConnectionState(VoiceConnectionStatus.Connected);
}
private onMainDataChannelMessage(message: MessageEvent) {
protected handleVoicePacket(packet: VoicePacket) {
const chandler = this.connection.client;
if(chandler.isSpeakerMuted() || chandler.isSpeakerDisabled()) /* we don't need to do anything with sound playback when we're not listening to it */
return;
let bin = new Uint8Array(message.data);
let clientId = bin[2] << 8 | bin[3];
let packetId = bin[0] << 8 | bin[1];
let codec = bin[4];
//log.info(LogCategory.VOICE, "Client id " + clientId + " PacketID " + packetId + " Codec: " + codec);
let client = this.find_client(clientId);
let client = this.find_client(packet.clientId);
if(!client) {
log.error(LogCategory.VOICE, tr("Having voice from unknown audio client? (ClientID: %o)"), clientId);
log.error(LogCategory.VOICE, tr("Having voice from unknown audio client? (ClientID: %o)"), packet.clientId);
return;
}
let codec_pool = codecPool[codec];
let codec_pool = codecPool[packet.codec];
if(!codec_pool) {
log.error(LogCategory.VOICE, tr("Could not playback codec %o"), codec);
log.error(LogCategory.VOICE, tr("Could not playback codec %o"), packet.codec);
return;
}
let encodedData;
if(message.data.subarray)
encodedData = message.data.subarray(5);
else encodedData = new Uint8Array(message.data, 5);
if(encodedData.length == 0) {
if(packet.payload.length == 0) {
client.stopAudio();
codec_pool.releaseCodec(clientId);
codec_pool.releaseCodec(packet.clientId);
} else {
codec_pool.ownCodec(clientId, e => this.handleEncodedVoicePacket(e, codec), true)
.then(decoder => decoder.decodeSamples(client.get_codec_cache(codec), encodedData))
codec_pool.ownCodec(packet.clientId, () => {
logWarn(LogCategory.VOICE, tr("Received an encoded voice packet even thou we're only decoding!"));
}, true)
.then(decoder => decoder.decodeSamples(client.get_codec_cache(packet.codec), packet.payload))
.then(buffer => client.playback_buffer(buffer)).catch(error => {
log.error(LogCategory.VOICE, tr("Could not playback client's (%o) audio (%o)"), clientId, error);
log.error(LogCategory.VOICE, tr("Could not playback client's (%o) audio (%o)"), packet.clientId, error);
if(error instanceof Error)
log.error(LogCategory.VOICE, error.stack);
});
}
}
private handleLocalVoiceBuffer(data: AudioBuffer, head: boolean) {
const chandler = this.connection.client;
if(!this.localAudioStarted || !chandler.connected)
return false;
if(chandler.isMicrophoneMuted())
return false;
if(head)
this.chunkVPacketId = 0;
let client = this.find_client(chandler.getClientId());
if(!client) {
log.error(LogCategory.VOICE, tr("Tried to send voice data, but local client hasn't a voice client handle"));
return;
}
const codec = this._encoder_codec;
codecPool[codec]
.ownCodec(chandler.getClientId(), e => this.handleEncodedVoicePacket(e, codec), true)
.then(encoder => encoder.encodeSamples(client.get_codec_cache(codec), data));
}
private handleLocalVoiceEnded() {
private handleRecorderStop() {
const chandler = this.connection.client;
const ch = chandler.getClient();
if(ch) ch.speaking = false;
if(!chandler.connected)
return false;
if(chandler.isMicrophoneMuted())
return false;
log.info(LogCategory.VOICE, tr("Local voice ended"));
this.localAudioStarted = false;
if(this.connectionType === VoiceEncodeType.NATIVE_ENCODE) {
setTimeout(() => {
/* first send all data, than send the stop signal */
this.sendVoiceStopPacket(this._encoder_codec);
}, 150);
} else {
this.sendVoiceStopPacket(this._encoder_codec);
}
this.voiceBridge?.sendStopSignal(this.encoderCodec);
}
private handleLocalVoiceStarted() {
private handleRecorderStart() {
const chandler = this.connection.client;
if(chandler.isMicrophoneMuted()) {
log.warn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted! Do not send any voice."));
if(this.local_audio_mute)
this.local_audio_mute.gain.value = 0;
log.warn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted!"));
return;
}
if(this.local_audio_mute)
this.local_audio_mute.gain.value = 1;
this.localAudioStarted = true;
log.info(LogCategory.VOICE, tr("Local voice started"));
@ -557,9 +276,9 @@ export class VoiceConnection extends AbstractVoiceConnection {
if(ch) ch.speaking = true;
}
private on_recorder_yield() {
private handleRecorderUnmount() {
log.info(LogCategory.VOICE, "Lost recorder!");
this._audio_source = undefined;
this.currentAudioSource = undefined;
this.acquire_voice_recorder(undefined, true); /* we can ignore the promise because we should finish this directly */
}
@ -572,20 +291,24 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.events.fire("notify_connection_status_changed", { newStatus: state, oldStatus: oldState });
}
connected(): boolean {
return typeof(this.dataChannel) !== "undefined" && this.dataChannel.readyState === "open";
private handleServerConnectionStateChanged(event: ServerConnectionEvents["notify_connection_state_changed"]) {
if(event.newState === ConnectionState.CONNECTED) {
this.startVoiceBridge();
} else {
this.dropVoiceBridge();
}
}
voice_recorder(): RecorderProfile {
return this._audio_source;
return this.currentAudioSource;
}
available_clients(): VoiceClient[] {
return this._audio_clients;
return this.voiceClients;
}
find_client(client_id: number) : VoiceClientController | undefined {
for(const client of this._audio_clients)
for(const client of this.voiceClients)
if(client.client_id === client_id)
return client;
return undefined;
@ -595,13 +318,13 @@ export class VoiceConnection extends AbstractVoiceConnection {
if(!(client instanceof VoiceClientController))
throw "Invalid client type";
this._audio_clients.remove(client);
this.voiceClients.remove(client);
return Promise.resolve();
}
register_client(client_id: number): VoiceClient {
const client = new VoiceClientController(client_id);
this._audio_clients.push(client);
this.voiceClients.push(client);
return client;
}
@ -614,15 +337,14 @@ export class VoiceConnection extends AbstractVoiceConnection {
}
get_encoder_codec(): number {
return this._encoder_codec;
return this.encoderCodec;
}
set_encoder_codec(codec: number) {
this._encoder_codec = codec;
this.encoderCodec = codec;
}
}
/* funny fact that TypeScript doesn't find this */
declare global {
interface RTCPeerConnection {
@ -630,6 +352,5 @@ declare global {
getLocalStreams(): MediaStream[];
getStreamById(streamId: string): MediaStream | null;
removeStream(stream: MediaStream): void;
createOffer(successCallback?: RTCSessionDescriptionCallback, failureCallback?: RTCPeerConnectionErrorCallback, options?: RTCOfferOptions): Promise<RTCSessionDescription>;
}
}

View file

@ -0,0 +1,106 @@
import {AbstractInput, InputConsumerType, NodeInputConsumer} from "tc-shared/voice/RecorderBase";
import * as aplayer from "tc-backend/web/audio/player";
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import {WebRTCVoiceBridge} from "./WebRTCVoiceBridge";
export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
static isSupported(): boolean {
const context = window.webkitAudioContext || window.AudioContext;
if (!context)
return false;
if (!context.prototype.createMediaStreamDestination)
return false; /* Required, but not available within Edge */
return true;
}
private readonly localAudioDestinationNode: MediaStreamAudioDestinationNode;
private currentInput: AbstractInput;
private voicePacketId: number;
constructor() {
super();
this.voicePacketId = 0;
this.localAudioDestinationNode = aplayer.context().createMediaStreamDestination();
}
protected generateRtpOfferOptions(): RTCOfferOptions {
let options: RTCOfferOptions = {};
options.offerToReceiveAudio = false;
options.offerToReceiveVideo = false;
options.voiceActivityDetection = true;
return options;
}
protected initializeRtpConnection(connection: RTCPeerConnection) {
connection.addStream(this.localAudioDestinationNode.stream);
}
protected handleMainDataChannelMessage(message: MessageEvent) {
super.handleMainDataChannelMessage(message);
let bin = new Uint8Array(message.data);
let clientId = bin[2] << 8 | bin[3];
let packetId = bin[0] << 8 | bin[1];
let codec = bin[4];
this.callback_incoming_voice({
clientId: clientId,
voiceId: packetId,
codec: codec,
payload: new Uint8Array(message.data, 5)
});
}
getInput(): AbstractInput | undefined {
return this.currentInput;
}
async setInput(input: AbstractInput | undefined) {
if (this.currentInput === input)
return;
if (this.currentInput) {
await this.currentInput.set_consumer(undefined);
this.currentInput = undefined;
}
this.currentInput = input;
if (this.currentInput) {
try {
await this.currentInput.set_consumer({
type: InputConsumerType.NODE,
callback_node: node => node.connect(this.localAudioDestinationNode),
callback_disconnect: node => node.disconnect(this.localAudioDestinationNode)
} as NodeInputConsumer);
log.debug(LogCategory.VOICE, tr("Successfully set/updated to the new input for the recorder"));
} catch (e) {
log.warn(LogCategory.VOICE, tr("Failed to set consumer to the new recorder input: %o"), e);
}
}
}
private fillVoicePacketHeader(packet: Uint8Array, codec: number) {
packet[0] = 0; //Flag header
packet[1] = 0; //Flag fragmented
packet[2] = (this.voicePacketId >> 8) & 0xFF; //HIGH (voiceID)
packet[3] = (this.voicePacketId >> 0) & 0xFF; //LOW (voiceID)
packet[4] = codec; //Codec
}
sendStopSignal(codec: number) {
const packet = new Uint8Array(5);
this.fillVoicePacketHeader(packet, codec);
const channel = this.getMainDataChannel();
if (!channel || channel.readyState !== "open")
return;
channel.send(packet);
}
}
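
Both fillVoicePacketHeader and handleMainDataChannelMessage above operate on the same 5 byte frame prefix followed by the encoded audio. The sketch below restates that layout as standalone helpers; the function names are illustrative, while the byte offsets and the big endian 16 bit ids are taken directly from the code in this file.

/* Incoming frame, as parsed by handleMainDataChannelMessage:
 *   bytes 0..1  voice packet id
 *   bytes 2..3  talking client id
 *   byte  4     codec index
 *   bytes 5..   encoded audio payload
 */
function decodeIncomingVoiceFrame(data: ArrayBuffer) {
    const bytes = new Uint8Array(data);
    return {
        voiceId: (bytes[0] << 8) | bytes[1],
        clientId: (bytes[2] << 8) | bytes[3],
        codec: bytes[4],
        payload: new Uint8Array(data, 5)
    };
}

/* Outgoing frame, as built by fillVoicePacketHeader and sendStopSignal:
 * bytes 0/1 are flag bytes and bytes 2/3 carry the local voice packet id. */
function encodeOutgoingVoiceFrame(voicePacketId: number, codec: number, payload: Uint8Array): Uint8Array {
    const packet = new Uint8Array(payload.byteLength + 5);
    packet[0] = 0;                           // flag: header
    packet[1] = 0;                           // flag: fragmented
    packet[2] = (voicePacketId >> 8) & 0xFF; // voice packet id, high byte
    packet[3] = (voicePacketId >> 0) & 0xFF; // voice packet id, low byte
    packet[4] = codec;                       // codec index
    packet.set(payload, 5);                  // encoded audio after the header
    return packet;
}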

View file

@ -0,0 +1,47 @@
import {AbstractInput} from "tc-shared/voice/RecorderBase";
export type VoiceBridgeConnectResult = {
type: "success"
} | {
type: "canceled"
} | {
type: "failed",
message: string,
allowReconnect: boolean
};
export interface VoicePacket {
voiceId: number;
clientId: number;
codec: number;
payload: Uint8Array;
}
export abstract class VoiceBridge {
protected muted: boolean;
callback_send_control_data: (request: string, payload: any) => void;
callback_incoming_voice: (packet: VoicePacket) => void;
callback_disconnect: () => void;
setMuted(flag: boolean) {
this.muted = flag;
}
isMuted(): boolean {
return this.muted;
}
handleControlData(request: string, payload: any) { }
abstract connect(): Promise<VoiceBridgeConnectResult>;
abstract disconnect();
abstract getInput(): AbstractInput | undefined;
abstract setInput(input: AbstractInput | undefined): Promise<void>;
abstract sendStopSignal(codec: number);
}
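
For orientation, here is a condensed sketch of how this contract is driven, based on VoiceConnection.startVoiceBridge earlier in this commit; the function name is illustrative, and logging, the error modal and the reconnect counter are left out.

async function connectVoiceBridge(bridge: VoiceBridge, input: AbstractInput | undefined): Promise<void> {
    /* wire the callbacks before connecting */
    bridge.callback_incoming_voice = packet => { /* hand the packet to the matching voice client */ };
    bridge.callback_send_control_data = (request, payload) => { /* wrap as {type: "WebRTC", request: request, ...payload} and send it over the server connection */ };
    bridge.callback_disconnect = () => { /* log the drop and start a new connect attempt */ };

    const result = await bridge.connect();
    if(result.type === "success") {
        if(input) {
            await bridge.setInput(input); /* attach the currently acquired recorder input */
        }
    } else if(result.type === "failed" && result.allowReconnect) {
        /* schedule another attempt, e.g. by calling connectVoiceBridge again */
    }
}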

View file

@ -0,0 +1,376 @@
import * as aplayer from "tc-backend/web/audio/player";
import {LogCategory, logDebug, logError, logInfo, logTrace, logWarn} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import * as log from "tc-shared/log";
import {VoiceBridge, VoiceBridgeConnectResult} from "./VoiceBridge";
export abstract class WebRTCVoiceBridge extends VoiceBridge {
private readonly muteAudioNode: GainNode;
private connectionState: "unconnected" | "connecting" | "connected";
private rtcConnection: RTCPeerConnection;
private mainDataChannel: RTCDataChannel;
private cachedIceCandidates: RTCIceCandidateInit[];
private callbackRtcAnswer: (answer: any) => void;
private callbackConnectCanceled: (() => void)[] = [];
private callbackRtcConnected: () => void;
private callbackRtcConnectFailed: (error: any) => void;
private callbackMainDatachannelOpened: (() => void)[] = [];
private allowReconnect: boolean;
protected constructor() {
super();
this.connectionState = "unconnected";
const audioContext = aplayer.context();
this.muteAudioNode = audioContext.createGain();
}
connect(): Promise<VoiceBridgeConnectResult> {
this.disconnect(); /* just to ensure */
this.connectionState = "connecting";
this.allowReconnect = true;
return new Promise<VoiceBridgeConnectResult>(resolve => {
let cancelState = { value: false };
const cancelHandler = () => {
cancelState.value = true;
resolve({ type: "canceled" });
}
this.callbackConnectCanceled.push(cancelHandler);
this.doConnect(cancelState).then(() => {
if(cancelState.value) return;
this.callbackConnectCanceled.remove(cancelHandler);
this.connectionState = "connected";
resolve({ type: "success" });
}).catch(error => {
if(cancelState.value) return;
this.callbackConnectCanceled.remove(cancelHandler);
this.connectionState = "unconnected";
this.cleanupRtcResources();
resolve({ type: "failed", message: error, allowReconnect: this.allowReconnect === true });
})
});
}
disconnect() {
switch (this.connectionState) {
case "connecting":
this.abortConnectionAttempt();
break;
case "connected":
this.doDisconnect();
break;
}
}
private async doConnect(canceled: { value: boolean }) {
{
let rtcConfig: RTCConfiguration = {};
rtcConfig.iceServers = [];
rtcConfig.iceServers.push({ urls: 'stun:stun.l.google.com:19302' });
//rtcConfig.iceServers.push({ urls: "stun:stun.teaspeak.de:3478" });
this.rtcConnection = new RTCPeerConnection(rtcConfig);
this.rtcConnection.onicegatheringstatechange = this.handleIceGatheringStateChange.bind(this);
this.rtcConnection.oniceconnectionstatechange = this.handleIceConnectionStateChange.bind(this);
this.rtcConnection.onicecandidate = this.handleIceCandidate.bind(this);
this.rtcConnection.onicecandidateerror = this.handleIceCandidateError.bind(this);
this.rtcConnection.onconnectionstatechange = this.handleRtcConnectionStateChange.bind(this);
this.initializeRtpConnection(this.rtcConnection);
}
(window as any).dropVoice = () => this.callback_disconnect();
{
const dataChannelConfig = { ordered: false, maxRetransmits: 0 };
this.mainDataChannel = this.rtcConnection.createDataChannel('main', dataChannelConfig);
this.mainDataChannel.onmessage = this.handleMainDataChannelMessage.bind(this);
this.mainDataChannel.onopen = this.handleMainDataChannelOpen.bind(this);
this.mainDataChannel.binaryType = "arraybuffer";
}
let offer: RTCSessionDescriptionInit;
try {
offer = await this.rtcConnection.createOffer(this.generateRtpOfferOptions());
if(canceled.value) return;
} catch (error) {
logError(LogCategory.VOICE, tr("Failed to generate RTC offer: %o"), error);
throw tr("failed to generate local offer");
}
try {
await this.rtcConnection.setLocalDescription(offer);
if(canceled.value) return;
} catch (error) {
logError(LogCategory.VOICE, tr("Failed to apply local description: %o"), error);
throw tr("failed to apply local description");
}
/* cache all ICE candidates until we've received our answer */
this.cachedIceCandidates = [];
/* exchange the offer and answer */
let answer;
{
this.callback_send_control_data("create", {
msg: {
type: offer.type,
sdp: offer.sdp
}
});
answer = await new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
if(canceled.value) {
resolve();
return;
}
this.callbackRtcAnswer = undefined;
reject(tr("failed to received a WebRTC answer (timeout)"));
}, 5000);
this.callbackRtcAnswer = answer => {
this.callbackRtcAnswer = undefined;
clearTimeout(timeout);
resolve(answer);
};
});
if(canceled.value) return;
}
if(!('msg' in answer)) {
throw tr("Missing msg in servers answer");
}
try {
await this.rtcConnection.setRemoteDescription(new RTCSessionDescription(answer.msg));
if(canceled.value) return;
} catch (error) {
const kParseErrorPrefix = "Failed to execute 'setRemoteDescription' on 'RTCPeerConnection': ";
if(error instanceof DOMException && error.message.startsWith(kParseErrorPrefix))
throw error.message.substring(kParseErrorPrefix.length);
logError(LogCategory.VOICE, tr("Failed to apply remotes description: %o"), error);
throw tr("failed to apply remotes description");
}
while(this.cachedIceCandidates.length > 0)
this.registerRemoteIceCandidate(this.cachedIceCandidates.pop_front());
await new Promise((resolve, reject) => {
if(this.rtcConnection.connectionState === "connected") {
resolve();
return;
}
const timeout = setTimeout(() => {
reject(tr("failed to establish a connection"));
}, 20 * 1000);
this.callbackRtcConnected = () => {
clearTimeout(timeout);
resolve();
};
this.callbackRtcConnectFailed = error => {
clearTimeout(timeout);
reject(error);
};
});
if(canceled.value) return;
logDebug(LogCategory.WEBRTC, tr("Successfully connected to server. Awaiting main data channel to open."));
try {
await this.awaitMainChannelOpened(10 * 1000);
} catch {
throw tr("failed to open the main data channel");
}
logInfo(LogCategory.WEBRTC, tr("Successfully initialized session with server."));
}
private doDisconnect() {
this.cleanupRtcResources();
this.connectionState = "unconnected";
if(this.callback_disconnect)
this.callback_disconnect();
}
private abortConnectionAttempt() {
while(this.callbackConnectCanceled.length > 0)
this.callbackConnectCanceled.pop()();
this.cleanupRtcResources();
this.connectionState = "unconnected";
}
private cleanupRtcResources() {
if(this.mainDataChannel) {
this.mainDataChannel.onclose = undefined;
this.mainDataChannel.close();
this.mainDataChannel = undefined;
}
if(this.rtcConnection) {
this.rtcConnection.onicegatheringstatechange = undefined;
this.rtcConnection.oniceconnectionstatechange = undefined;
this.rtcConnection.onicecandidate = undefined;
this.rtcConnection.onicecandidateerror = undefined;
this.rtcConnection.onconnectionstatechange = undefined;
this.rtcConnection.close();
this.rtcConnection = undefined;
}
this.cachedIceCandidates = undefined;
}
protected async awaitMainChannelOpened(timeout: number) {
if(typeof this.mainDataChannel === "undefined")
throw tr("missing main data channel");
if(this.mainDataChannel.readyState === "open")
return;
await new Promise((resolve, reject) => {
const id = setTimeout(reject, timeout);
this.callbackMainDatachannelOpened.push(() => {
clearTimeout(id);
resolve();
});
})
}
private registerRemoteIceCandidate(candidate: RTCIceCandidateInit) {
if(!this.rtcConnection) {
logDebug(LogCategory.WEBRTC, tr("Tried to register a remote ICE candidate without a RTC connection. Dropping candidate."));
return;
}
if(candidate.candidate === "") {
logDebug(LogCategory.WEBRTC, tr("Remote send candidate finish for channel %d."), candidate.sdpMLineIndex);
this.rtcConnection.addIceCandidate(candidate).catch(error => {
logWarn(LogCategory.WEBRTC, tr("Failed to add remote ICE end candidate to local rtc connection: %o"), error);
});
} else {
const pcandidate = new RTCIceCandidate(candidate);
if(pcandidate.protocol !== "tcp") return; /* UDP does not work currently */
logTrace(LogCategory.WEBRTC, tr("Adding remote ICE candidate %s for media line %d: %s"), pcandidate.foundation, candidate.sdpMLineIndex, candidate.candidate);
this.rtcConnection.addIceCandidate(pcandidate).catch(error => {
logWarn(LogCategory.WEBRTC, tr("Failed to add remote ICE candidate %s: %o"), pcandidate.foundation, error);
})
}
}
private handleRtcConnectionStateChange() {
log.debug(LogCategory.WEBRTC, tr("Connection state changed to %s"), this.rtcConnection.connectionState);
switch (this.rtcConnection.connectionState) {
case "connected":
if(this.callbackRtcConnected)
this.callbackRtcConnected();
break;
case "failed":
if(this.callbackRtcConnectFailed)
this.callbackRtcConnectFailed(tr("connect attempt failed"));
else if(this.callback_disconnect)
this.callback_disconnect();
break;
case "disconnected":
case "closed":
if(this.callback_disconnect)
this.callback_disconnect();
break;
}
}
private handleIceGatheringStateChange() {
log.trace(LogCategory.WEBRTC, tr("ICE gathering state changed to %s"), this.rtcConnection.iceGatheringState);
}
private handleIceConnectionStateChange() {
log.trace(LogCategory.WEBRTC, tr("ICE connection state changed to %s"), this.rtcConnection.iceConnectionState);
}
private handleIceCandidate(event: RTCPeerConnectionIceEvent) {
if(event.candidate && event.candidate.protocol !== "tcp")
return;
if(event.candidate) {
log.debug(LogCategory.WEBRTC, tr("Gathered local ice candidate for stream %d: %s"), event.candidate.sdpMLineIndex, event.candidate.candidate);
this.callback_send_control_data("ice", { msg: event.candidate.toJSON() });
} else {
log.debug(LogCategory.WEBRTC, tr("Local ICE candidate gathering finish."));
this.callback_send_control_data("ice_finish", {});
}
}
private handleIceCandidateError(event: RTCPeerConnectionIceErrorEvent) {
if(this.rtcConnection.iceGatheringState === "gathering") {
log.warn(LogCategory.WEBRTC, tr("Received error while gathering the ice candidates: %d/%s for %s (url: %s)"),
event.errorCode, event.errorText, event.hostCandidate, event.url);
} else {
log.trace(LogCategory.WEBRTC, tr("Ice candidate %s (%s) errored: %d/%s"),
event.url, event.hostCandidate, event.errorCode, event.errorText);
}
}
protected handleMainDataChannelOpen() {
logDebug(LogCategory.WEBRTC, tr("Main data channel is open now"));
while(this.callbackMainDatachannelOpened.length > 0)
this.callbackMainDatachannelOpened.pop()();
}
protected handleMainDataChannelMessage(message: MessageEvent) { }
handleControlData(request: string, payload: any) {
super.handleControlData(request, payload);
if(request === "answer") {
if(typeof this.callbackRtcAnswer === "function") {
this.callbackRtcAnswer(payload);
} else {
logWarn(LogCategory.WEBRTC, tr("Received answer, but we're not expecting one. Dropping it."));
}
return;
} else if(request === "ice" || request === "ice_finish") {
if(this.cachedIceCandidates) {
this.cachedIceCandidates.push(payload["msg"]);
} else {
this.registerRemoteIceCandidate(payload["msg"]);
}
} else if(request === "status") {
if(request["state"] === "failed") {
if(this.callbackRtcConnectFailed) {
this.allowReconnect = request["allow_reconnect"];
this.callbackRtcConnectFailed(payload["reason"]);
}
return;
}
}
}
public getMainDataChannel() : RTCDataChannel {
return this.mainDataChannel;
}
protected abstract initializeRtpConnection(connection: RTCPeerConnection);
protected abstract generateRtpOfferOptions() : RTCOfferOptions;
}
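
connect() and doConnect() above coordinate through a shared cancellation flag that is re-checked after every await, so an aborted attempt ends silently instead of resolving the caller's promise. The sketch below reduces that pattern to its core; the names are illustrative.

/* The cancel handler flips the shared flag and resolves the outer promise,
 * while the async worker checks the flag after every await. */
function runCancelable(worker: (canceled: { value: boolean }) => Promise<void>) {
    let cancel: () => void = () => {};
    const result = new Promise<"success" | "canceled" | "failed">(resolve => {
        const cancelState = { value: false };
        cancel = () => {
            cancelState.value = true;
            resolve("canceled");
        };
        worker(cancelState)
            .then(() => { if(!cancelState.value) resolve("success"); })
            .catch(() => { if(!cancelState.value) resolve("failed"); });
    });
    return { result, cancel };
}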