Improved the audio API, especially for the web client

canary
WolverinDEV 2020-08-19 19:33:57 +02:00
parent 0d7a34c31a
commit 400b4f4293
20 changed files with 906 additions and 800 deletions

View File

@ -5,10 +5,10 @@ import {GroupManager} from "tc-shared/permission/GroupManager";
import {ServerSettings, Settings, settings, StaticSettings} from "tc-shared/settings";
import {Sound, SoundManager} from "tc-shared/sound/Sounds";
import {LocalClientEntry} from "tc-shared/ui/client";
import {ConnectionProfile, default_profile, find_profile} from "tc-shared/profiles/ConnectionProfile";
import {ConnectionProfile} from "tc-shared/profiles/ConnectionProfile";
import {ServerAddress} from "tc-shared/ui/server";
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {LogCategory, logError} from "tc-shared/log";
import {createErrorModal, createInfoModal, createInputModal, Modal} from "tc-shared/ui/elements/Modal";
import {hashPassword} from "tc-shared/utils/helpers";
import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
@ -16,8 +16,7 @@ import * as htmltags from "./ui/htmltags";
import {ChannelEntry} from "tc-shared/ui/channel";
import {InputStartResult, InputState} from "tc-shared/voice/RecorderBase";
import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
import * as bipc from "./ipc/BrowserIPC";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {default_recorder, RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {Frame} from "tc-shared/ui/frames/chat_frame";
import {Hostbanner} from "tc-shared/ui/frames/hostbanner";
import {server_connections} from "tc-shared/ui/frames/connection_handlers";
@ -38,7 +37,12 @@ import {PluginCmdRegistry} from "tc-shared/connection/PluginCmdHandler";
import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
import {getServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
import {getRecorderBackend} from "tc-shared/audio/recorder";
/* Availability state of the client's audio input (recorder/microphone). */
export enum InputHardwareState {
    /* No voice recorder has been acquired for this connection handler. */
    MISSING,
    /* A recorder is present but starting its input failed (see startVoiceRecorder). */
    START_FAILED,
    /* A recorder has been acquired and its input started (or is ready to start). */
    VALID
}
export enum DisconnectReason {
HANDLER_DESTROYED,
@ -102,7 +106,6 @@ export enum ViewReasonId {
}
export interface LocalClientStatus {
input_hardware: boolean;
input_muted: boolean;
output_muted: boolean;
@ -129,7 +132,6 @@ export interface ConnectParameters {
auto_reconnect_attempt?: boolean;
}
declare const native_client;
export class ConnectionHandler {
readonly handlerId: string;
@ -164,8 +166,8 @@ export class ConnectionHandler {
private pluginCmdRegistry: PluginCmdRegistry;
private client_status: LocalClientStatus = {
input_hardware: false,
input_muted: false,
output_muted: false,
away: false,
channel_subscribe_all: true,
@ -177,7 +179,8 @@ export class ConnectionHandler {
channel_codec_decoding_supported: undefined
};
invoke_resized_on_activate: boolean = false;
private inputHardwareState: InputHardwareState = InputHardwareState.MISSING;
log: ServerEventLog;
constructor() {
@ -190,7 +193,10 @@ export class ConnectionHandler {
this.serverConnection = getServerConnectionFactory().create(this);
this.serverConnection.events.on("notify_connection_state_changed", event => this.on_connection_state_changed(event.oldState, event.newState));
this.serverConnection.getVoiceConnection().events.on("notify_recorder_changed", () => this.update_voice_status());
this.serverConnection.getVoiceConnection().events.on("notify_recorder_changed", () => {
this.setInputHardwareState(this.getVoiceRecorder() ? InputHardwareState.VALID : InputHardwareState.MISSING);
this.update_voice_status();
});
this.serverConnection.getVoiceConnection().events.on("notify_connection_status_changed", () => this.update_voice_status());
this.channelTree = new ChannelTree(this);
@ -252,11 +258,10 @@ export class ConnectionHandler {
}
async startConnection(addr: string, profile: ConnectionProfile, user_action: boolean, parameters: ConnectParameters) {
this.tab_set_name(tr("Connecting"));
this.cancel_reconnect(false);
this._reconnect_attempt = parameters.auto_reconnect_attempt || false;
if(this.serverConnection)
this.handleDisconnect(DisconnectReason.REQUESTED);
this.handleDisconnect(DisconnectReason.REQUESTED);
this.tab_set_name(tr("Connecting"));
let server_address: ServerAddress = {
host: "",
@ -345,7 +350,7 @@ export class ConnectionHandler {
this.cancel_reconnect(true);
if(!this.connected) return;
this.handleDisconnect(DisconnectReason.REQUESTED); //TODO message?
this.handleDisconnect(DisconnectReason.REQUESTED);
try {
await this.serverConnection.disconnect();
} catch (error) {
@ -370,42 +375,44 @@ export class ConnectionHandler {
@EventHandler<ConnectionEvents>("notify_connection_state_changed")
private handleConnectionConnected(event: ConnectionEvents["notify_connection_state_changed"]) {
if(event.new_state !== ConnectionState.CONNECTED) return;
private handleConnectionStateChanged(event: ConnectionEvents["notify_connection_state_changed"]) {
this.connection_state = event.new_state;
if(event.new_state === ConnectionState.CONNECTED) {
log.info(LogCategory.CLIENT, tr("Client connected"));
this.log.log(EventType.CONNECTION_CONNECTED, {
serverAddress: {
server_port: this.channelTree.server.remote_address.port,
server_hostname: this.channelTree.server.remote_address.host
},
serverName: this.channelTree.server.properties.virtualserver_name,
own_client: this.getClient().log_data()
});
this.sound.play(Sound.CONNECTION_CONNECTED);
log.info(LogCategory.CLIENT, tr("Client connected"));
this.log.log(EventType.CONNECTION_CONNECTED, {
serverAddress: {
server_port: this.channelTree.server.remote_address.port,
server_hostname: this.channelTree.server.remote_address.host
},
serverName: this.channelTree.server.properties.virtualserver_name,
own_client: this.getClient().log_data()
});
this.sound.play(Sound.CONNECTION_CONNECTED);
this.permissions.requestPermissionList();
if(this.groups.serverGroups.length == 0)
this.groups.requestGroups();
this.permissions.requestPermissionList();
if(this.groups.serverGroups.length == 0)
this.groups.requestGroups();
this.settings.setServer(this.channelTree.server.properties.virtualserver_unique_identifier);
this.settings.setServer(this.channelTree.server.properties.virtualserver_unique_identifier);
/* apply the server settings */
if(this.client_status.channel_subscribe_all)
this.channelTree.subscribe_all_channels();
else
this.channelTree.unsubscribe_all_channels();
this.channelTree.toggle_server_queries(this.client_status.queries_visible);
/* apply the server settings */
if(this.client_status.channel_subscribe_all)
this.channelTree.subscribe_all_channels();
else
this.channelTree.unsubscribe_all_channels();
this.channelTree.toggle_server_queries(this.client_status.queries_visible);
this.sync_status_with_server();
this.channelTree.server.updateProperties();
/*
No need to update the voice stuff because as soon we see ourself we're doing it
this.update_voice_status();
if(control_bar.current_connection_handler() === this)
control_bar.apply_server_voice_state();
*/
this.sync_status_with_server();
this.channelTree.server.updateProperties();
/*
No need to update the voice stuff because as soon we see ourself we're doing it
this.update_voice_status();
if(control_bar.current_connection_handler() === this)
control_bar.apply_server_voice_state();
*/
} else {
this.setInputHardwareState(this.getVoiceRecorder() ? InputHardwareState.VALID : InputHardwareState.MISSING);
}
}
get connected() : boolean {
@ -440,52 +447,7 @@ export class ConnectionHandler {
if(pathname.endsWith(".php"))
pathname = pathname.substring(0, pathname.lastIndexOf("/"));
/* certaccept is currently not working! */
if(bipc.supported() && false) {
tag.attr('href', "#");
let popup: Window;
tag.on('click', event => {
const features = {
status: "no",
location: "no",
toolbar: "no",
menubar: "no",
width: 600,
height: 400
};
if(popup)
popup.close();
properties["certificate_callback"] = bipc.getInstance().register_certificate_accept_callback(() => {
log.info(LogCategory.GENERAL, tr("Received notification that the certificate has been accepted! Attempting reconnect!"));
if(this._certificate_modal)
this._certificate_modal.close();
popup.close(); /* no need, but nicer */
const profile = find_profile(properties.connect_profile) || default_profile();
const cprops = this.reconnect_properties(profile);
this.startConnection(properties.connect_address, profile, true, cprops);
});
const url = build_url(document.location.origin + pathname + "/popup/certaccept/", "", properties);
const features_string = Object.keys(features).map(e => e + "=" + features[e]).join(",");
popup = window.open(url, "TeaWeb certificate accept", features_string);
try {
popup.focus();
} catch(e) {
log.warn(LogCategory.GENERAL, tr("Certificate accept popup has been blocked. Trying a blank page and replacing href"));
window.open(url, "TeaWeb certificate accept"); /* trying without features */
tag.attr("target", "_blank");
tag.attr("href", url);
tag.unbind('click');
}
});
} else {
tag.attr('href', build_url(document.location.origin + pathname, document.location.search, properties));
}
tag.attr('href', build_url(document.location.origin + pathname, document.location.search, properties));
return tag;
}
@ -527,7 +489,7 @@ export class ConnectionHandler {
else
log.error(LogCategory.CLIENT, tr("Could not connect to remote host!"), data);
if(native_client || !dns.resolve_address_ipv4) {
if(__build.target === "client" || !dns.resolve_address_ipv4) {
createErrorModal(
tr("Could not connect"),
tr("Could not connect to remote host (Connection refused)")
@ -727,43 +689,47 @@ export class ConnectionHandler {
});
}
private _last_record_error_popup: number;
private _last_record_error_popup: number = 0;
update_voice_status(targetChannel?: ChannelEntry) {
//TODO: Simplify this
if(!this._local_client) return; /* we've been destroyed */
if(!this._local_client) {
/* we've been destroyed */
return;
}
targetChannel = targetChannel || this.getClient().currentChannel();
if(typeof targetChannel === "undefined")
targetChannel = this.getClient().currentChannel();
const vconnection = this.serverConnection.getVoiceConnection();
const basic_voice_support = vconnection.getConnectionState() === VoiceConnectionStatus.Connected && targetChannel;
const support_record = basic_voice_support && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
const support_playback = basic_voice_support && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));
const hasInputDevice = getRecorderBackend().getDeviceList().getPermissionState() === "granted" && !!vconnection.voice_recorder();
const codecEncodeSupported = !targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec);
const codecDecodeSupported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);
const property_update = {
client_input_muted: this.client_status.input_muted,
client_output_muted: this.client_status.output_muted
};
if(support_record && basic_voice_support)
/* update the encoding codec */
if(codecEncodeSupported && targetChannel) {
vconnection.set_encoder_codec(targetChannel.properties.channel_codec);
}
if(!this.serverConnection.connected() || vconnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
property_update["client_input_hardware"] = false;
property_update["client_output_hardware"] = false;
this.client_status.input_hardware = hasInputDevice;
/* no icons are shown so no update at all */
} else {
const recording_supported = hasInputDevice && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
const playback_supported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);
const recording_supported =
this.getInputHardwareState() === InputHardwareState.VALID &&
(!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec)) &&
vconnection.getConnectionState() === VoiceConnectionStatus.Connected;
const playback_supported = this.hasOutputHardware() && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));
property_update["client_input_hardware"] = recording_supported;
property_update["client_output_hardware"] = playback_supported;
this.client_status.input_hardware = recording_supported;
}
/* update icons */
{
const client_properties = this.getClient().properties;
for(const key of Object.keys(property_update)) {
if(client_properties[key] === property_update[key])
@ -773,7 +739,7 @@ export class ConnectionHandler {
if(Object.keys(property_update).length > 0) {
this.serverConnection.send_command("clientupdate", property_update).catch(error => {
log.warn(LogCategory.GENERAL, tr("Failed to update client audio hardware properties. Error: %o"), error);
this.log.log(EventType.ERROR_CUSTOM, {message: tr("Failed to update audio hardware properties.")});
this.log.log(EventType.ERROR_CUSTOM, { message: tr("Failed to update audio hardware properties.") });
/* Update these properties anyways (for case the server fails to handle the command) */
const updates = [];
@ -784,50 +750,39 @@ export class ConnectionHandler {
}
}
if(targetChannel && basic_voice_support) {
const encoding_supported = vconnection && vconnection.encoding_supported(targetChannel.properties.channel_codec);
const decoding_supported = vconnection && vconnection.decoding_supported(targetChannel.properties.channel_codec);
if(this.client_status.channel_codec_decoding_supported !== decoding_supported || this.client_status.channel_codec_encoding_supported !== encoding_supported) {
this.client_status.channel_codec_decoding_supported = decoding_supported;
this.client_status.channel_codec_encoding_supported = encoding_supported;
if(targetChannel) {
if(this.client_status.channel_codec_decoding_supported !== codecDecodeSupported || this.client_status.channel_codec_encoding_supported !== codecEncodeSupported) {
this.client_status.channel_codec_decoding_supported = codecDecodeSupported;
this.client_status.channel_codec_encoding_supported = codecEncodeSupported;
let message;
if(!encoding_supported && !decoding_supported)
if(!codecEncodeSupported && !codecDecodeSupported) {
message = tr("This channel has an unsupported codec.<br>You cant speak or listen to anybody within this channel!");
else if(!encoding_supported)
} else if(!codecEncodeSupported) {
message = tr("This channel has an unsupported codec.<br>You cant speak within this channel!");
else if(!decoding_supported)
message = tr("This channel has an unsupported codec.<br>You listen to anybody within this channel!"); /* implies speaking does not work as well */
if(message)
} else if(!codecDecodeSupported) {
message = tr("This channel has an unsupported codec.<br>You cant listen to anybody within this channel!");
}
if(message) {
createErrorModal(tr("Channel codec unsupported"), message).open();
}
}
}
this.client_status = this.client_status || {} as any;
this.client_status.sound_record_supported = support_record;
this.client_status.sound_playback_supported = support_playback;
this.client_status.sound_record_supported = codecEncodeSupported;
this.client_status.sound_playback_supported = codecDecodeSupported;
if(vconnection && vconnection.voice_recorder()) {
const active = !this.client_status.input_muted && !this.client_status.output_muted;
{
const enableRecording = !this.client_status.input_muted && !this.client_status.output_muted;
/* No need to start the microphone when we're not even connected */
const input = vconnection.voice_recorder().input;
const input = vconnection.voice_recorder()?.input;
if(input) {
if(active && this.serverConnection.connected()) {
if(input.current_state() === InputState.PAUSED) {
input.start().then(result => {
if(result != InputStartResult.EOK)
throw result;
}).catch(error => {
log.warn(LogCategory.VOICE, tr("Failed to start microphone input (%s)."), error);
if(Date.now() - (this._last_record_error_popup || 0) > 10 * 1000) {
this._last_record_error_popup = Date.now();
createErrorModal(tr("Failed to start recording"), formatMessage(tr("Microphone start failed.{:br:}Error: {}"), error)).open();
}
});
}
if(enableRecording && this.serverConnection.connected()) {
if(this.getInputHardwareState() !== InputHardwareState.START_FAILED)
this.startVoiceRecorder(Date.now() - this._last_record_error_popup > 10 * 1000);
} else {
input.stop();
}
@ -838,6 +793,7 @@ export class ConnectionHandler {
this.event_registry.fire("notify_state_updated", {
state: "microphone"
});
this.event_registry.fire("notify_state_updated", {
state: "speaker"
});
@ -851,7 +807,7 @@ export class ConnectionHandler {
client_output_muted: this.client_status.output_muted,
client_away: typeof(this.client_status.away) === "string" || this.client_status.away,
client_away_message: typeof(this.client_status.away) === "string" ? this.client_status.away : "",
client_input_hardware: this.client_status.sound_record_supported && this.client_status.input_hardware,
client_input_hardware: this.client_status.sound_record_supported && this.getInputHardwareState() === InputHardwareState.VALID,
client_output_hardware: this.client_status.sound_playback_supported
}).catch(error => {
log.warn(LogCategory.GENERAL, tr("Failed to sync handler state with server. Error: %o"), error);
@ -859,15 +815,67 @@ export class ConnectionHandler {
});
}
resize_elements() {
this.invoke_resized_on_activate = false;
/* can be called as much as you want, does nothing if nothing changed */
async acquireInputHardware() {
/* if we're having multiple recorders, try to get the right one */
let recorder: RecorderProfile = default_recorder;
try {
await this.serverConnection.getVoiceConnection().acquire_voice_recorder(recorder);
} catch (error) {
logError(LogCategory.AUDIO, tr("Failed to acquire recorder: %o"), error);
createErrorModal(tr("Failed to acquire recorder"), tr("Failed to acquire recorder.\nLookup the console for more details.")).open();
return;
}
if(this.connection_state === ConnectionState.CONNECTED) {
await this.startVoiceRecorder(true);
} else {
this.setInputHardwareState(InputHardwareState.VALID);
}
}
acquire_recorder(voice_recoder: RecorderProfile, update_control_bar: boolean) {
const vconnection = this.serverConnection.getVoiceConnection();
vconnection.acquire_voice_recorder(voice_recoder).catch(error => {
log.warn(LogCategory.VOICE, tr("Failed to acquire recorder (%o)"), error);
});
async startVoiceRecorder(notifyError: boolean) {
const input = this.getVoiceRecorder()?.input;
if(!input) return;
if(input.currentState() === InputState.PAUSED && this.connection_state === ConnectionState.CONNECTED) {
try {
const result = await input.start();
if(result !== InputStartResult.EOK) {
throw result;
}
this.setInputHardwareState(InputHardwareState.VALID);
this.update_voice_status();
} catch (error) {
this.setInputHardwareState(InputHardwareState.START_FAILED);
let errorMessage;
if(error === InputStartResult.ENOTSUPPORTED) {
errorMessage = tr("Your browser does not support voice recording");
} else if(error === InputStartResult.EBUSY) {
errorMessage = tr("The input device is busy");
} else if(error === InputStartResult.EDEVICEUNKNOWN) {
errorMessage = tr("Invalid input device");
} else if(error === InputStartResult.ENOTALLOWED) {
errorMessage = tr("No permissions");
} else if(error instanceof Error) {
errorMessage = error.message;
} else if(typeof error === "string") {
errorMessage = error;
} else {
errorMessage = tr("lookup the console");
}
log.warn(LogCategory.VOICE, tr("Failed to start microphone input (%s)."), error);
if(notifyError) {
this._last_record_error_popup = Date.now();
createErrorModal(tr("Failed to start recording"), tra("Microphone start failed.\nError: {}", errorMessage)).open();
}
}
} else {
this.setInputHardwareState(InputHardwareState.VALID);
}
}
getVoiceRecorder() : RecorderProfile | undefined { return this.serverConnection.getVoiceConnection().voice_recorder(); }
@ -1017,15 +1025,9 @@ export class ConnectionHandler {
this.update_voice_status();
}
toggleMicrophone() { this.setMicrophoneMuted(!this.isMicrophoneMuted()); }
isMicrophoneMuted() { return this.client_status.input_muted; }
/*
* Returns whatever the client is able to talk or not. Reasons for returning true could be:
* - Channel codec isn't supported
* - No recorder has been acquired
* - Voice bridge hasn't been set upped yet
*/
isMicrophoneDisabled() { return !this.client_status.input_hardware; }
isMicrophoneMuted() { return this.client_status.input_muted; }
isMicrophoneDisabled() { return this.inputHardwareState !== InputHardwareState.VALID; }
setSpeakerMuted(muted: boolean) {
if(this.client_status.output_muted === muted) return;
@ -1101,8 +1103,16 @@ export class ConnectionHandler {
return this.client_status.queries_visible;
}
hasInputHardware() : boolean { return this.client_status.input_hardware; }
hasOutputHardware() : boolean { return this.client_status.output_muted; }
getInputHardwareState() : InputHardwareState { return this.inputHardwareState; }
private setInputHardwareState(state: InputHardwareState) {
if(this.inputHardwareState === state)
return;
this.inputHardwareState = state;
this.event_registry.fire("notify_state_updated", { state: "microphone" });
}
hasOutputHardware() : boolean { return true; }
getPluginCmdRegistry() : PluginCmdRegistry { return this.pluginCmdRegistry; }
}

View File

@ -106,7 +106,7 @@ export abstract class AbstractDeviceList implements DeviceList {
return new Promise<void>(resolve => {
const callback = (event: DeviceListEvents["notify_state_changed"]) => {
if(event.newState !== "uninitialized")
if(event.newState === "uninitialized")
return;
this.events.off("notify_state_changed", callback);

View File

@ -576,7 +576,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
let self = client instanceof LocalClientEntry;
let channel_to = tree.findChannel(parseInt(json["ctid"]));
let channel_from = tree.findChannel(parseInt(json["cfid"]));
let channelFrom = tree.findChannel(parseInt(json["cfid"]));
if(!client) {
log.error(LogCategory.NETWORKING, tr("Unknown client move (Client)!"));
@ -589,17 +589,17 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
}
if(!self) {
if(!channel_from) {
if(!channelFrom) {
log.error(LogCategory.NETWORKING, tr("Unknown client move (Channel from)!"));
channel_from = client.currentChannel();
} else if(channel_from != client.currentChannel()) {
channelFrom = client.currentChannel();
} else if(channelFrom != client.currentChannel()) {
log.error(LogCategory.NETWORKING,
tr("Client move from invalid source channel! Local client registered in channel %d but server send %d."),
client.currentChannel().channelId, channel_from.channelId
client.currentChannel().channelId, channelFrom.channelId
);
}
} else {
channel_from = client.currentChannel();
channelFrom = client.currentChannel();
}
tree.moveClient(client, channel_to);
@ -607,7 +607,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
if(self) {
this.connection_handler.update_voice_status(channel_to);
for(const entry of client.channelTree.clientsByChannel(channel_from)) {
for(const entry of client.channelTree.clientsByChannel(channelFrom)) {
if(entry !== client && entry.get_audio_handle()) {
entry.get_audio_handle().abort_replay();
entry.speaking = false;
@ -616,16 +616,18 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
const side_bar = this.connection_handler.side_bar;
side_bar.info_frame().update_channel_talk();
} else {
client.speaking = false;
}
const own_channel = this.connection.client.getClient().currentChannel();
const event = self ? EventType.CLIENT_VIEW_MOVE_OWN : (channel_from == own_channel || channel_to == own_channel ? EventType.CLIENT_VIEW_MOVE_OWN_CHANNEL : EventType.CLIENT_VIEW_MOVE);
const event = self ? EventType.CLIENT_VIEW_MOVE_OWN : (channelFrom == own_channel || channel_to == own_channel ? EventType.CLIENT_VIEW_MOVE_OWN_CHANNEL : EventType.CLIENT_VIEW_MOVE);
this.connection_handler.log.log(event, {
channel_from: channel_from ? {
channel_id: channel_from.channelId,
channel_name: channel_from.channelName()
channel_from: channelFrom ? {
channel_id: channelFrom.channelId,
channel_name: channelFrom.channelName()
} : undefined,
channel_from_own: channel_from == own_channel,
channel_from_own: channelFrom == own_channel,
channel_to: channel_to ? {
channel_id: channel_to.channelId,
@ -650,20 +652,20 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
this.connection_handler.sound.play(Sound.USER_MOVED_SELF);
else if(own_channel == channel_to)
this.connection_handler.sound.play(Sound.USER_ENTERED_MOVED);
else if(own_channel == channel_from)
else if(own_channel == channelFrom)
this.connection_handler.sound.play(Sound.USER_LEFT_MOVED);
} else if(json["reasonid"] == ViewReasonId.VREASON_USER_ACTION) {
if(self) {} //If we do an action we wait for the error response
else if(own_channel == channel_to)
this.connection_handler.sound.play(Sound.USER_ENTERED);
else if(own_channel == channel_from)
else if(own_channel == channelFrom)
this.connection_handler.sound.play(Sound.USER_LEFT);
} else if(json["reasonid"] == ViewReasonId.VREASON_CHANNEL_KICK) {
if(self) {
this.connection_handler.sound.play(Sound.CHANNEL_KICKED);
} else if(own_channel == channel_to)
this.connection_handler.sound.play(Sound.USER_ENTERED_KICKED);
else if(own_channel == channel_from)
else if(own_channel == channelFrom)
this.connection_handler.sound.play(Sound.USER_LEFT_KICKED_CHANNEL);
} else {
console.warn(tr("Unknown reason id %o"), json["reasonid"]);

View File

@ -100,8 +100,8 @@ export class HandshakeHandler {
client_server_password: this.parameters.password ? this.parameters.password.password : undefined,
client_browser_engine: navigator.product,
client_input_hardware: this.connection.client.hasInputHardware(),
client_output_hardware: false,
client_input_hardware: this.connection.client.isMicrophoneDisabled(),
client_output_hardware: this.connection.client.hasOutputHardware(),
client_input_muted: this.connection.client.isMicrophoneMuted(),
client_output_muted: this.connection.client.isSpeakerMuted(),
};

View File

@ -336,27 +336,10 @@ function main() {
top_menu.initialize();
const initial_handler = server_connections.spawn_server_connection();
initial_handler.acquire_recorder(default_recorder, false);
initial_handler.acquireInputHardware().then(() => {});
cmanager.server_connections.set_active_connection(initial_handler);
/** Setup the XF forum identity **/
fidentity.update_forum();
let _resize_timeout;
$(window).on('resize', event => {
if(event.target !== window)
return;
if(_resize_timeout)
clearTimeout(_resize_timeout);
_resize_timeout = setTimeout(() => {
for(const connection of server_connections.all_connections())
connection.invoke_resized_on_activate = true;
const active_connection = server_connections.active_connection();
if(active_connection)
active_connection.resize_elements();
$(".window-resize-listener").trigger('resize');
}, 1000);
});
keycontrol.initialize();
stats.initialize({

View File

@ -34,7 +34,7 @@ export const ModalFunctions = {
case "string":
if(type == ElementType.HEADER)
return $.spawn("div").addClass("modal-title").text(val);
return $("<div>" + val + "</div>");
return $("<div>" + val.replace(/\n/g, "<br />") + "</div>");
case "object": return val as JQuery;
case "undefined":
return undefined;

View File

@ -113,9 +113,6 @@ export class ConnectionManager {
this._container_channel_tree.append(handler.channelTree.tag_tree());
this._container_chat.append(handler.side_bar.html_tag());
this._container_log_server.append(handler.log.getHTMLTag());
if(handler.invoke_resized_on_activate)
handler.resize_elements();
}
const old_handler = this.active_handler;
this.active_handler = handler;

View File

@ -3,7 +3,12 @@ import {Button} from "./button";
import {DropdownEntry} from "tc-shared/ui/frames/control-bar/dropdown";
import {Translatable} from "tc-shared/ui/react-elements/i18n";
import {ReactComponentBase} from "tc-shared/ui/react-elements/ReactComponentBase";
import {ConnectionEvents, ConnectionHandler, ConnectionStateUpdateType} from "tc-shared/ConnectionHandler";
import {
ConnectionEvents,
ConnectionHandler,
ConnectionState as CConnectionState,
ConnectionStateUpdateType
} from "tc-shared/ConnectionHandler";
import {Event, EventHandler, ReactEventHandler, Registry} from "tc-shared/events";
import {ConnectionManagerEvents, server_connections} from "tc-shared/ui/frames/connection_handlers";
import {Settings, settings} from "tc-shared/settings";
@ -21,6 +26,7 @@ import {createInputModal} from "tc-shared/ui/elements/Modal";
import {default_recorder} from "tc-shared/voice/RecorderProfile";
import {global_client_actions} from "tc-shared/events/GlobalEvents";
import {icon_cache_loader} from "tc-shared/file/Icons";
import {InputState} from "tc-shared/voice/RecorderBase";
const cssStyle = require("./index.scss");
const cssButtonStyle = require("./button.scss");
@ -704,8 +710,7 @@ function initialize(event_registry: Registry<InternalControlBarEvents>) {
if(current_connection_handler) {
current_connection_handler.setMicrophoneMuted(!state);
if(!current_connection_handler.getVoiceRecorder())
current_connection_handler.acquire_recorder(default_recorder, true); /* acquire_recorder already updates the voice status */
current_connection_handler.acquireInputHardware().then(() => {});
}
});

View File

@ -12,8 +12,6 @@ import {LogCategory} from "tc-shared/log";
import * as profiles from "tc-shared/profiles/ConnectionProfile";
import {RepositoryTranslation, TranslationRepository} from "tc-shared/i18n/localize";
import {Registry} from "tc-shared/events";
import {key_description} from "tc-shared/PPTListener";
import {default_recorder} from "tc-shared/voice/RecorderProfile";
import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
import * as i18n from "tc-shared/i18n/localize";
import * as i18nc from "tc-shared/i18n/country";
@ -22,12 +20,9 @@ import * as events from "tc-shared/events";
import * as sound from "tc-shared/sound/Sounds";
import * as forum from "tc-shared/profiles/identities/teaspeak-forum";
import {formatMessage, set_icon_size} from "tc-shared/ui/frames/chat";
import {spawnKeySelect} from "tc-shared/ui/modal/ModalKeySelect";
import {spawnTeamSpeakIdentityImport, spawnTeamSpeakIdentityImprove} from "tc-shared/ui/modal/ModalIdentity";
import {Device} from "tc-shared/audio/player";
import {LevelMeter} from "tc-shared/voice/RecorderBase";
import * as aplayer from "tc-backend/audio/player";
import * as arecorder from "tc-backend/audio/recorder";
import {KeyMapSettings} from "tc-shared/ui/modal/settings/Keymap";
import * as React from "react";
import * as ReactDOM from "react-dom";

View File

@ -542,13 +542,17 @@ export class ChannelTree {
client["_channel"] = targetChannel;
targetChannel?.registerClient(client);
if(oldChannel)
if(oldChannel) {
this.client.side_bar.info_frame().update_channel_client_count(oldChannel);
if(targetChannel)
}
if(targetChannel) {
this.client.side_bar.info_frame().update_channel_client_count(targetChannel);
if(oldChannel && targetChannel)
}
if(oldChannel && targetChannel) {
client.events.fire("notify_client_moved", { oldChannel: oldChannel, newChannel: targetChannel });
client.speaking = false;
}
} finally {
flush_batched_updates(BatchUpdateType.CHANNEL_TREE);
}

View File

@ -7,41 +7,42 @@ export enum FilterType {
export interface FilterBase {
readonly priority: number;
set_enabled(flag: boolean) : void;
is_enabled() : boolean;
setEnabled(flag: boolean) : void;
isEnabled() : boolean;
}
export interface MarginedFilter {
get_margin_frames() : number;
set_margin_frames(value: number);
getMarginFrames() : number;
setMarginFrames(value: number);
}
export interface ThresholdFilter extends FilterBase, MarginedFilter {
readonly type: FilterType.THRESHOLD;
get_threshold() : number;
set_threshold(value: number) : Promise<void>;
getThreshold() : number;
setThreshold(value: number);
get_attack_smooth() : number;
get_release_smooth() : number;
getAttackSmooth() : number;
getReleaseSmooth() : number;
set_attack_smooth(value: number);
set_release_smooth(value: number);
setAttackSmooth(value: number);
setReleaseSmooth(value: number);
callback_level?: (value: number) => any;
registerLevelCallback(callback: (value: number) => void);
removeLevelCallback(callback: (value: number) => void);
}
export interface VoiceLevelFilter extends FilterBase, MarginedFilter {
type: FilterType.VOICE_LEVEL;
get_level() : number;
getLevel() : number;
}
export interface StateFilter extends FilterBase {
type: FilterType.STATE;
set_state(state: boolean) : Promise<void>;
is_active() : boolean; /* if true the the filter allows data to pass */
setState(state: boolean);
isActive() : boolean; /* if true the the filter allows data to pass */
}
export type FilterTypeClass<T extends FilterType> =

View File

@ -7,32 +7,43 @@ export enum InputConsumerType {
NODE,
NATIVE
}
export interface InputConsumer {
type: InputConsumerType;
}
export interface CallbackInputConsumer extends InputConsumer {
export interface CallbackInputConsumer {
type: InputConsumerType.CALLBACK;
callback_audio?: (buffer: AudioBuffer) => any;
callback_buffer?: (buffer: Float32Array, samples: number, channels: number) => any;
}
export interface NodeInputConsumer extends InputConsumer {
export interface NodeInputConsumer {
type: InputConsumerType.NODE;
callback_node: (source_node: AudioNode) => any;
callback_disconnect: (source_node: AudioNode) => any;
}
export interface NativeInputConsumer {
type: InputConsumerType.NATIVE;
}
export type InputConsumer = CallbackInputConsumer | NodeInputConsumer | NativeInputConsumer;
export enum InputState {
/* Input recording has been paused */
PAUSED,
/*
* Recording has been requested, and is currently initializing.
* This state may persist, when the audio context hasn't been initialized yet
*/
INITIALIZING,
RECORDING,
DRY
/* we're currently recording the input */
RECORDING
}
export enum InputStartResult {
EOK = "eok",
EUNKNOWN = "eunknown",
EDEVICEUNKNOWN = "edeviceunknown",
EBUSY = "ebusy",
ENOTALLOWED = "enotallowed",
ENOTSUPPORTED = "enotsupported"
@ -51,17 +62,28 @@ export interface AbstractInput {
start() : Promise<InputStartResult>;
stop() : Promise<void>;
currentDevice() : IDevice | undefined;
setDevice(device: IDevice | undefined) : Promise<void>;
/*
* Returns true if the input is currently filtered.
* If the current state isn't recording, than it will return true.
*/
isFiltered() : boolean;
currentDeviceId() : string | undefined;
/*
* This method should not throw!
* If the target device is unknown than it should return EDEVICEUNKNOWN on start.
* After changing the device, the input state falls to InputState.PAUSED.
*/
setDeviceId(device: string | undefined) : Promise<void>;
currentConsumer() : InputConsumer | undefined;
setConsumer(consumer: InputConsumer) : Promise<void>;
supportsFilter(type: FilterType) : boolean;
createFilter<T extends FilterType>(type: T, priority: number) : FilterTypeClass<T>;
removeFilter(filter: Filter);
resetFilter();
/* resetFilter(); */
getVolume() : number;
setVolume(volume: number);

View File

@ -7,7 +7,7 @@ import {ConnectionHandler} from "tc-shared/ConnectionHandler";
import * as aplayer from "tc-backend/audio/player";
import * as ppt from "tc-backend/ppt";
import {getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
import {FilterType, StateFilter} from "tc-shared/voice/Filter";
import {FilterType, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";
export type VadType = "threshold" | "push_to_talk" | "active";
export interface RecorderProfileConfig {
@ -38,6 +38,7 @@ export let default_recorder: RecorderProfile; /* needs initialize */
export function set_default_recorder(recorder: RecorderProfile) {
default_recorder = recorder;
}
export class RecorderProfile {
readonly name;
readonly volatile; /* not saving profile */
@ -47,7 +48,8 @@ export class RecorderProfile {
current_handler: ConnectionHandler;
callback_input_change: (oldInput: AbstractInput | undefined, newInput: AbstractInput | undefined) => Promise<void>;
/* attention: this callback will only be called when the audio input hasn't been initialized! */
callback_input_initialized: (input: AbstractInput) => void;
callback_start: () => any;
callback_stop: () => any;
@ -58,7 +60,11 @@ export class RecorderProfile {
private pptHookRegistered: boolean;
private registeredFilter = {
"ppt-gate": undefined as StateFilter
"ppt-gate": undefined as StateFilter,
"threshold": undefined as ThresholdFilter,
/* disable voice transmission by default, e.g. when reinitializing filters etc. */
"default-disabled": undefined as StateFilter
}
constructor(name: string, volatile?: boolean) {
@ -71,7 +77,7 @@ export class RecorderProfile {
clearTimeout(this.pptTimeout);
this.pptTimeout = setTimeout(() => {
this.registeredFilter["ppt-gate"]?.set_state(true);
this.registeredFilter["ppt-gate"]?.setState(true);
}, Math.max(this.config.vad_push_to_talk.delay, 0));
},
@ -79,7 +85,7 @@ export class RecorderProfile {
if(this.pptTimeout)
clearTimeout(this.pptTimeout);
this.registeredFilter["ppt-gate"]?.set_state(false);
this.registeredFilter["ppt-gate"]?.setState(false);
},
cancel: false
@ -120,15 +126,16 @@ export class RecorderProfile {
}
aplayer.on_ready(async () => {
await getRecorderBackend().getDeviceList().awaitHealthy();
console.error("AWAITING DEVICE LIST");
await getRecorderBackend().getDeviceList().awaitInitialized();
console.error("AWAITING DEVICE LIST DONE");
this.initialize_input();
await this.load();
await this.initializeInput();
await this.reinitializeFilter();
});
}
private initialize_input() {
private async initializeInput() {
this.input = getRecorderBackend().createInput();
this.input.events.on("notify_voice_start", () => {
@ -143,28 +150,24 @@ export class RecorderProfile {
this.callback_stop();
});
//TODO: Await etc?
this.callback_input_change && this.callback_input_change(undefined, this.input);
}
this.registeredFilter["default-disabled"] = this.input.createFilter(FilterType.STATE, 20);
await this.registeredFilter["default-disabled"].setState(true); /* filter */
this.registeredFilter["default-disabled"].setEnabled(true);
private async load() {
this.input.setVolume(this.config.volume / 100);
this.registeredFilter["ppt-gate"] = this.input.createFilter(FilterType.STATE, 100);
this.registeredFilter["ppt-gate"].setEnabled(false);
{
const allDevices = getRecorderBackend().getDeviceList().getDevices();
const defaultDeviceId = getRecorderBackend().getDeviceList().getDefaultDeviceId();
console.error("Devices: %o | Searching: %s", allDevices, this.config.device_id);
this.registeredFilter["threshold"] = this.input.createFilter(FilterType.THRESHOLD, 100);
this.registeredFilter["threshold"].setEnabled(false);
const devices = allDevices.filter(e => e.deviceId === defaultDeviceId || e.deviceId === this.config.device_id);
const device = devices.find(e => e.deviceId === this.config.device_id) || devices[0];
log.info(LogCategory.VOICE, tr("Loaded record profile device %s | %o (%o)"), this.config.device_id, device, allDevices);
try {
await this.input.setDevice(device);
} catch(error) {
log.error(LogCategory.VOICE, tr("Failed to set input device (%o)"), error);
}
if(this.callback_input_initialized) {
this.callback_input_initialized(this.input);
}
/* apply initial config values */
this.input.setVolume(this.config.volume / 100);
await this.input.setDeviceId(this.config.device_id);
}
private save() {
@ -172,13 +175,33 @@ export class RecorderProfile {
settings.changeGlobal(Settings.FN_PROFILE_RECORD(this.name), this.config);
}
private reinitializePPTHook() {
if(this.config.vad_type !== "push_to_talk")
return;
if(this.pptHookRegistered) {
ppt.unregister_key_hook(this.pptHook);
this.pptHookRegistered = false;
}
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
this.pptHook[key] = this.config.vad_push_to_talk[key];
ppt.register_key_hook(this.pptHook);
this.pptHookRegistered = true;
this.registeredFilter["ppt-gate"]?.setState(true);
}
private async reinitializeFilter() {
if(!this.input) return;
/* TODO: Really required? If still same input we can just use the registered filters */
/* don't let any audio pass while we initialize the other filters */
this.registeredFilter["default-disabled"].setEnabled(true);
this.input.resetFilter();
delete this.registeredFilter["ppt-gate"];
/* disable all filter */
this.registeredFilter["threshold"].setEnabled(false);
this.registeredFilter["ppt-gate"].setEnabled(false);
if(this.pptHookRegistered) {
ppt.unregister_key_hook(this.pptHook);
@ -186,23 +209,29 @@ export class RecorderProfile {
}
if(this.config.vad_type === "threshold") {
const filter = this.input.createFilter(FilterType.THRESHOLD, 100);
await filter.set_threshold(this.config.vad_threshold.threshold);
const filter = this.registeredFilter["threshold"];
filter.setEnabled(true);
filter.setThreshold(this.config.vad_threshold.threshold);
filter.set_margin_frames(10); /* 500ms */
filter.set_attack_smooth(.25);
filter.set_release_smooth(.9);
filter.setMarginFrames(10); /* 500ms */
filter.setAttackSmooth(.25);
filter.setReleaseSmooth(.9);
} else if(this.config.vad_type === "push_to_talk") {
const filter = this.input.createFilter(FilterType.STATE, 100);
await filter.set_state(true);
this.registeredFilter["ppt-gate"] = filter;
const filter = this.registeredFilter["ppt-gate"];
filter.setEnabled(true);
filter.setState(true); /* by default set filtered */
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
this.pptHook[key] = this.config.vad_push_to_talk[key];
ppt.register_key_hook(this.pptHook);
this.pptHookRegistered = true;
} else if(this.config.vad_type === "active") {}
} else if(this.config.vad_type === "active") {
/* we don't have to initialize any filters */
}
this.registeredFilter["default-disabled"].setEnabled(false);
}
async unmount() : Promise<void> {
@ -218,7 +247,7 @@ export class RecorderProfile {
}
}
this.callback_input_change = undefined;
this.callback_input_initialized = undefined;
this.callback_start = undefined;
this.callback_stop = undefined;
this.callback_unmount = undefined;
@ -229,6 +258,7 @@ export class RecorderProfile {
set_vad_type(type: VadType) : boolean {
if(this.config.vad_type === type)
return true;
if(["push_to_talk", "threshold", "active"].findIndex(e => e === type) == -1)
return false;
@ -244,7 +274,7 @@ export class RecorderProfile {
return;
this.config.vad_threshold.threshold = value;
this.reinitializeFilter();
this.registeredFilter["threshold"]?.setThreshold(this.config.vad_threshold.threshold);
this.save();
}
@ -253,7 +283,7 @@ export class RecorderProfile {
for(const _key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
this.config.vad_push_to_talk[_key] = key[_key];
this.reinitializeFilter();
this.reinitializePPTHook();
this.save();
}
@ -263,7 +293,6 @@ export class RecorderProfile {
return;
this.config.vad_push_to_talk.delay = value;
this.reinitializeFilter();
this.save();
}
@ -280,7 +309,7 @@ export class RecorderProfile {
return;
this.config.volume = volume;
this.input && this.input.setVolume(volume / 100);
this.input?.setVolume(volume / 100);
this.save();
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,19 +1,10 @@
import {
AbstractDeviceList,
AudioRecorderBacked,
DeviceList,
DeviceListEvents,
DeviceListState,
IDevice,
PermissionState
} from "tc-shared/audio/recorder";
import {AudioRecorderBacked, DeviceList, IDevice,} from "tc-shared/audio/recorder";
import {Registry} from "tc-shared/events";
import * as rbase from "tc-shared/voice/RecorderBase";
import {
AbstractInput,
CallbackInputConsumer,
InputConsumer,
InputConsumerType, InputEvents,
InputConsumerType,
InputEvents,
InputStartResult,
InputState,
LevelMeter,
@ -23,8 +14,8 @@ import * as log from "tc-shared/log";
import {LogCategory, logWarn} from "tc-shared/log";
import * as aplayer from "./player";
import {JAbstractFilter, JStateFilter, JThresholdFilter} from "./RecorderFilter";
import * as loader from "tc-loader";
import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
import {inputDeviceList} from "tc-backend/web/audio/RecorderDeviceList";
declare global {
interface MediaStream {
@ -36,24 +27,10 @@ export interface WebIDevice extends IDevice {
groupId: string;
}
function getUserMediaFunctionPromise() : (constraints: MediaStreamConstraints) => Promise<MediaStream> {
if('mediaDevices' in navigator && 'getUserMedia' in navigator.mediaDevices)
return constraints => navigator.mediaDevices.getUserMedia(constraints);
const _callbacked_function = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
if(!_callbacked_function)
return undefined;
return constraints => new Promise<MediaStream>((resolve, reject) => _callbacked_function(constraints, resolve, reject));
}
async function requestMicrophoneMediaStream(constraints: MediaTrackConstraints, updateDeviceList: boolean) : Promise<InputStartResult | MediaStream> {
const mediaFunction = getUserMediaFunctionPromise();
if(!mediaFunction) return InputStartResult.ENOTSUPPORTED;
try {
log.info(LogCategory.AUDIO, tr("Requesting a microphone stream for device %s in group %s"), constraints.deviceId, constraints.groupId);
const stream = mediaFunction({ audio: constraints });
const stream = await navigator.mediaDevices.getUserMedia({ audio: constraints });
if(updateDeviceList && inputDeviceList.getStatus() === "no-permissions") {
inputDeviceList.refresh().then(() => {}); /* added the then body to avoid a inspection warning... */
@ -76,155 +53,37 @@ async function requestMicrophoneMediaStream(constraints: MediaTrackConstraints,
}
}
async function requestMicrophonePermissions() : Promise<PermissionState> {
const begin = Date.now();
/* request permission for devices only one per time! */
let currentMediaStreamRequest: Promise<MediaStream | InputStartResult>;
async function requestMediaStream(deviceId: string, groupId: string) : Promise<MediaStream | InputStartResult> {
/* wait for the current media stream requests to finish */
while(currentMediaStreamRequest) {
try {
await currentMediaStreamRequest;
} catch(error) { }
}
const audioConstrains: MediaTrackConstraints = {};
if(window.detectedBrowser?.name === "firefox") {
/*
* Firefox only allows to open one mic as well deciding whats the input device it.
* It does not respect the deviceId nor the groupId
*/
} else {
audioConstrains.deviceId = deviceId;
audioConstrains.groupId = groupId;
}
audioConstrains.echoCancellation = true;
audioConstrains.autoGainControl = true;
audioConstrains.noiseSuppression = true;
const promise = (currentMediaStreamRequest = requestMicrophoneMediaStream(audioConstrains, true));
try {
await getUserMediaFunctionPromise()({ audio: { deviceId: "default" }, video: false });
return "granted";
} catch (error) {
const end = Date.now();
const isSystem = (end - begin) < 250;
log.debug(LogCategory.AUDIO, tr("Microphone device request took %d milliseconds. System answered: %s"), end - begin, isSystem);
return "denied";
}
}
let inputDeviceList: WebInputDeviceList;
class WebInputDeviceList extends AbstractDeviceList {
private devices: WebIDevice[];
private deviceListQueryPromise: Promise<void>;
constructor() {
super();
this.devices = [];
}
getDefaultDeviceId(): string {
return "default";
}
getDevices(): IDevice[] {
return this.devices;
}
getEvents(): Registry<DeviceListEvents> {
return this.events;
}
getStatus(): DeviceListState {
return this.listState;
}
isRefreshAvailable(): boolean {
return true;
}
refresh(askPermissions?: boolean): Promise<void> {
return this.queryDevices(askPermissions === true);
}
async requestPermissions(): Promise<PermissionState> {
if(this.permissionState !== "unknown")
return this.permissionState;
let result = await requestMicrophonePermissions();
if(result === "granted" && this.listState === "no-permissions") {
/* if called within doQueryDevices, queryDevices will just return the promise */
this.queryDevices(false).then(() => {});
}
this.setPermissionState(result);
return result;
}
private queryDevices(askPermissions: boolean) : Promise<void> {
if(this.deviceListQueryPromise)
return this.deviceListQueryPromise;
this.deviceListQueryPromise = this.doQueryDevices(askPermissions).catch(error => {
log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);
if(this.listState !== "healthy")
this.listState = "error";
}).then(() => {
this.deviceListQueryPromise = undefined;
});
return this.deviceListQueryPromise || Promise.resolve();
}
private async doQueryDevices(askPermissions: boolean) {
let devices = await navigator.mediaDevices.enumerateDevices();
let hasPermissions = devices.findIndex(e => e.label !== "") !== -1;
if(!hasPermissions && askPermissions) {
this.setState("no-permissions");
let skipPermissionAsk = false;
if('permissions' in navigator && 'query' in navigator.permissions) {
try {
const result = await navigator.permissions.query({ name: "microphone" });
if(result.state === "denied") {
this.setPermissionState("denied");
skipPermissionAsk = true;
}
} catch (error) {
logWarn(LogCategory.GENERAL, tr("Failed to query for microphone permissions: %s"), error);
}
}
if(skipPermissionAsk) {
/* request permissions */
hasPermissions = await this.requestPermissions() === "granted";
if(hasPermissions) {
devices = await navigator.mediaDevices.enumerateDevices();
}
}
}
if(hasPermissions) {
this.setPermissionState("granted");
}
if(window.detectedBrowser?.name === "firefox") {
devices = [{
label: tr("Default Firefox device"),
groupId: "default",
deviceId: "default",
kind: "audioinput",
toJSON: undefined
}];
}
const inputDevices = devices.filter(e => e.kind === "audioinput");
const oldDeviceList = this.devices;
this.devices = [];
let devicesAdded = 0;
for(const device of inputDevices) {
const oldIndex = oldDeviceList.findIndex(e => e.deviceId === device.deviceId);
if(oldIndex === -1) {
devicesAdded++;
} else {
oldDeviceList.splice(oldIndex, 1);
}
this.devices.push({
deviceId: device.deviceId,
driver: "WebAudio",
groupId: device.groupId,
name: device.label
});
}
this.events.fire("notify_list_updated", { addedDeviceCount: devicesAdded, removedDeviceCount: oldDeviceList.length });
if(hasPermissions) {
this.setState("healthy");
} else {
this.setState("no-permissions");
}
return await currentMediaStreamRequest;
} finally {
if(currentMediaStreamRequest === promise)
currentMediaStreamRequest = undefined;
}
}
@ -234,7 +93,7 @@ export class WebAudioRecorder implements AudioRecorderBacked {
}
async createLevelMeter(device: IDevice): Promise<LevelMeter> {
const meter = new JavascriptLevelmeter(device as any);
const meter = new JavascriptLevelMeter(device as any);
await meter.initialize();
return meter;
}
@ -247,245 +106,203 @@ export class WebAudioRecorder implements AudioRecorderBacked {
class JavascriptInput implements AbstractInput {
public readonly events: Registry<InputEvents>;
private _state: InputState = InputState.PAUSED;
private _current_device: WebIDevice | undefined;
private _current_consumer: InputConsumer;
private state: InputState = InputState.PAUSED;
private deviceId: string | undefined;
private consumer: InputConsumer;
private _current_stream: MediaStream;
private _current_audio_stream: MediaStreamAudioSourceNode;
private currentStream: MediaStream;
private currentAudioStream: MediaStreamAudioSourceNode;
private _audio_context: AudioContext;
private _source_node: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
private _consumer_callback_node: ScriptProcessorNode;
private readonly _consumer_audio_callback;
private _volume_node: GainNode;
private _mute_node: GainNode;
private audioContext: AudioContext;
private sourceNode: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
private audioNodeCallbackConsumer: ScriptProcessorNode;
private readonly audioScriptProcessorCallback;
private audioNodeVolume: GainNode;
/* The node is connected to the audio context. Used for the script processor so it has a sink */
private audioNodeMute: GainNode;
private registeredFilters: (Filter & JAbstractFilter<AudioNode>)[] = [];
private _filter_active: boolean = false;
private inputFiltered: boolean = false;
private _volume: number = 1;
private startPromise: Promise<InputStartResult>;
callback_begin: () => any = undefined;
callback_end: () => any = undefined;
private volumeModifier: number = 1;
constructor() {
this.events = new Registry<InputEvents>();
aplayer.on_ready(() => this._audio_initialized());
this._consumer_audio_callback = this._audio_callback.bind(this);
aplayer.on_ready(() => this.handleAudioInitialized());
this.audioScriptProcessorCallback = this.handleAudio.bind(this);
}
private _audio_initialized() {
this._audio_context = aplayer.context();
if(!this._audio_context)
return;
private handleAudioInitialized() {
this.audioContext = aplayer.context();
this.audioNodeMute = this.audioContext.createGain();
this.audioNodeMute.gain.value = 0;
this.audioNodeMute.connect(this.audioContext.destination);
this._mute_node = this._audio_context.createGain();
this._mute_node.gain.value = 0;
this._mute_node.connect(this._audio_context.destination);
this.audioNodeCallbackConsumer = this.audioContext.createScriptProcessor(1024 * 4);
this.audioNodeCallbackConsumer.connect(this.audioNodeMute);
this._consumer_callback_node = this._audio_context.createScriptProcessor(1024 * 4);
this._consumer_callback_node.connect(this._mute_node);
this._volume_node = this._audio_context.createGain();
this._volume_node.gain.value = this._volume;
this.audioNodeVolume = this.audioContext.createGain();
this.audioNodeVolume.gain.value = this.volumeModifier;
this.initializeFilters();
if(this._state === InputState.INITIALIZING)
this.start();
if(this.state === InputState.INITIALIZING) {
this.start().catch(error => {
logWarn(LogCategory.AUDIO, tr("Failed to automatically start audio recording: %s"), error);
});
}
}
private initializeFilters() {
for(const filter of this.registeredFilters) {
if(filter.is_enabled())
filter.finalize();
}
this.registeredFilters.forEach(e => e.finalize());
this.registeredFilters.sort((a, b) => a.priority - b.priority);
if(this._audio_context && this._volume_node) {
const active_filter = this.registeredFilters.filter(e => e.is_enabled());
let stream: AudioNode = this._volume_node;
for(const f of active_filter) {
f.initialize(this._audio_context, stream);
stream = f.audio_node;
if(this.audioContext && this.audioNodeVolume) {
const activeFilters = this.registeredFilters.filter(e => e.isEnabled());
let chain = "output <- ";
let currentSource: AudioNode = this.audioNodeVolume;
for(const f of activeFilters) {
f.initialize(this.audioContext, currentSource);
f.setPaused(false);
currentSource = f.audioNode;
chain += FilterType[f.type] + " <- ";
}
this._switch_source_node(stream);
chain += "input";
console.error("Filter chain: %s", chain);
this.switchSourceNode(currentSource);
}
}
private _audio_callback(event: AudioProcessingEvent) {
if(!this._current_consumer || this._current_consumer.type !== InputConsumerType.CALLBACK)
private handleAudio(event: AudioProcessingEvent) {
if(this.consumer?.type !== InputConsumerType.CALLBACK) {
return;
}
const callback = this._current_consumer as CallbackInputConsumer;
if(callback.callback_audio)
callback.callback_audio(event.inputBuffer);
if(this.consumer.callback_audio) {
this.consumer.callback_audio(event.inputBuffer);
}
if(callback.callback_buffer) {
if(this.consumer.callback_buffer) {
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
}
}
current_state() : InputState { return this._state; };
private _start_promise: Promise<InputStartResult>;
async start() : Promise<InputStartResult> {
if(this._start_promise) {
while(this.startPromise) {
try {
await this._start_promise;
if(this._state != InputState.PAUSED)
return;
} catch(error) {
log.debug(LogCategory.AUDIO, tr("JavascriptInput:start() Start promise await resulted in an error: %o"), error);
}
await this.startPromise;
} catch {}
}
return await (this._start_promise = this._start());
if(this.state != InputState.PAUSED)
return;
return await (this.startPromise = this.doStart());
}
/* request permission for devices only one per time! */
private static _running_request: Promise<MediaStream | InputStartResult>;
static async request_media_stream(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
while(this._running_request) {
try {
await this._running_request;
} catch(error) { }
}
const audio_constrains: MediaTrackConstraints = {};
if(window.detectedBrowser?.name === "firefox") {
/*
* Firefox only allows to open one mic as well deciding whats the input device it.
* It does not respect the deviceId nor the groupId
*/
} else {
audio_constrains.deviceId = device_id;
audio_constrains.groupId = group_id;
}
audio_constrains.echoCancellation = true;
audio_constrains.autoGainControl = true;
audio_constrains.noiseSuppression = true;
const promise = (this._running_request = requestMicrophoneMediaStream(audio_constrains, true));
private async doStart() : Promise<InputStartResult> {
try {
return await this._running_request;
} finally {
if(this._running_request === promise)
this._running_request = undefined;
}
}
private async _start() : Promise<InputStartResult> {
try {
if(this._state != InputState.PAUSED)
if(this.state != InputState.PAUSED)
throw tr("recorder already started");
this._state = InputState.INITIALIZING;
if(!this._current_device)
this.state = InputState.INITIALIZING;
if(!this.deviceId) {
throw tr("invalid device");
if(!this._audio_context) {
debugger;
throw tr("missing audio context");
}
const _result = await JavascriptInput.request_media_stream(this._current_device.deviceId, this._current_device.groupId);
if(!(_result instanceof MediaStream)) {
this._state = InputState.PAUSED;
return _result;
if(!this.audioContext) {
/* Awaiting the audio context to be initialized */
return;
}
this._current_stream = _result;
for(const f of this.registeredFilters) {
if(f.is_enabled()) {
f.set_pause(false);
const requestResult = await requestMediaStream(this.deviceId, undefined);
if(!(requestResult instanceof MediaStream)) {
this.state = InputState.PAUSED;
return requestResult;
}
this.currentStream = requestResult;
for(const filter of this.registeredFilters) {
if(filter.isEnabled()) {
filter.setPaused(false);
}
}
this._consumer_callback_node.addEventListener('audioprocess', this._consumer_audio_callback);
/* TODO: Only add if we're really having a callback consumer */
this.audioNodeCallbackConsumer.addEventListener('audioprocess', this.audioScriptProcessorCallback);
this.currentAudioStream = this.audioContext.createMediaStreamSource(this.currentStream);
this.currentAudioStream.connect(this.audioNodeVolume);
this.state = InputState.RECORDING;
this.recalculateFilterStatus(true);
this._current_audio_stream = this._audio_context.createMediaStreamSource(this._current_stream);
this._current_audio_stream.connect(this._volume_node);
this._state = InputState.RECORDING;
return InputStartResult.EOK;
} catch(error) {
if(this._state == InputState.INITIALIZING) {
this._state = InputState.PAUSED;
if(this.state == InputState.INITIALIZING) {
this.state = InputState.PAUSED;
}
throw error;
} finally {
this._start_promise = undefined;
this.startPromise = undefined;
}
}
async stop() {
/* await all starts */
try {
if(this._start_promise)
await this._start_promise;
} catch(error) {}
this._state = InputState.PAUSED;
if(this._current_audio_stream) {
this._current_audio_stream.disconnect();
/* await the start */
if(this.startPromise) {
try {
await this.startPromise;
} catch {}
}
if(this._current_stream) {
if(this._current_stream.stop) {
this._current_stream.stop();
this.state = InputState.PAUSED;
if(this.currentAudioStream) {
this.currentAudioStream.disconnect();
}
if(this.currentStream) {
if(this.currentStream.stop) {
this.currentStream.stop();
} else {
this._current_stream.getTracks().forEach(value => {
this.currentStream.getTracks().forEach(value => {
value.stop();
});
}
}
this._current_stream = undefined;
this._current_audio_stream = undefined;
this.currentStream = undefined;
this.currentAudioStream = undefined;
for(const f of this.registeredFilters) {
if(f.is_enabled()) {
f.set_pause(true);
if(f.isEnabled()) {
f.setPaused(true);
}
}
if(this._consumer_callback_node) {
this._consumer_callback_node.removeEventListener('audioprocess', this._consumer_audio_callback);
if(this.audioNodeCallbackConsumer) {
this.audioNodeCallbackConsumer.removeEventListener('audioprocess', this.audioScriptProcessorCallback);
}
return undefined;
}
current_device(): IDevice | undefined {
return this._current_device;
}
async set_device(device: IDevice | undefined) {
if(this._current_device === device)
async setDeviceId(deviceId: string | undefined) {
if(this.deviceId === deviceId)
return;
const savedState = this._state;
try {
await this.stop();
} catch(error) {
log.warn(LogCategory.AUDIO, tr("Failed to stop previous record session (%o)"), error);
}
this._current_device = device as any;
if(!device) {
this._state = savedState === InputState.PAUSED ? InputState.PAUSED : InputState.DRY;
return;
}
if(savedState !== InputState.PAUSED) {
try {
await this.start()
} catch(error) {
log.warn(LogCategory.AUDIO, tr("Failed to start new recording stream (%o)"), error);
throw "failed to start record";
}
}
return;
this.deviceId = deviceId;
}
@ -507,10 +324,12 @@ class JavascriptInput implements AbstractInput {
throw tr("unknown filter type");
}
filter.callback_active_change = () => this._recalculate_filter_status();
filter.callback_active_change = () => this.recalculateFilterStatus(false);
filter.callback_enabled_change = () => this.initializeFilters();
this.registeredFilters.push(filter);
this.initializeFilters();
this._recalculate_filter_status();
this.recalculateFilterStatus(false);
return filter as any;
}
@ -532,7 +351,7 @@ class JavascriptInput implements AbstractInput {
this.registeredFilters = [];
this.initializeFilters();
this._recalculate_filter_status();
this.recalculateFilterStatus(false);
}
removeFilter(filterInstance: Filter) {
@ -544,85 +363,104 @@ class JavascriptInput implements AbstractInput {
filter.enabled = false;
this.initializeFilters();
this._recalculate_filter_status();
this.recalculateFilterStatus(false);
}
private _recalculate_filter_status() {
let filtered = this.registeredFilters.filter(e => e.is_enabled()).filter(e => (e as JAbstractFilter<AudioNode>).active).length > 0;
if(filtered === this._filter_active)
private recalculateFilterStatus(forceUpdate: boolean) {
let filtered = this.registeredFilters.filter(e => e.isEnabled()).filter(e => e.active).length > 0;
if(filtered === this.inputFiltered && !forceUpdate)
return;
this._filter_active = filtered;
this.inputFiltered = filtered;
if(filtered) {
if(this.callback_end)
this.callback_end();
this.events.fire("notify_voice_end");
} else {
if(this.callback_begin)
this.callback_begin();
this.events.fire("notify_voice_start");
}
}
current_consumer(): InputConsumer | undefined {
return this._current_consumer;
isRecording(): boolean {
return !this.inputFiltered;
}
async set_consumer(consumer: InputConsumer) {
if(this._current_consumer) {
if(this._current_consumer.type == InputConsumerType.NODE) {
if(this._source_node)
(this._current_consumer as NodeInputConsumer).callback_disconnect(this._source_node)
} else if(this._current_consumer.type === InputConsumerType.CALLBACK) {
if(this._source_node)
this._source_node.disconnect(this._consumer_callback_node);
async setConsumer(consumer: InputConsumer) {
if(this.consumer) {
if(this.consumer.type == InputConsumerType.NODE) {
if(this.sourceNode)
(this.consumer as NodeInputConsumer).callback_disconnect(this.sourceNode)
} else if(this.consumer.type === InputConsumerType.CALLBACK) {
if(this.sourceNode)
this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
}
}
if(consumer) {
if(consumer.type == InputConsumerType.CALLBACK) {
if(this._source_node)
this._source_node.connect(this._consumer_callback_node);
if(this.sourceNode)
this.sourceNode.connect(this.audioNodeCallbackConsumer);
} else if(consumer.type == InputConsumerType.NODE) {
if(this._source_node)
(consumer as NodeInputConsumer).callback_node(this._source_node);
if(this.sourceNode)
(consumer as NodeInputConsumer).callback_node(this.sourceNode);
} else {
throw "native callback consumers are not supported!";
}
}
this._current_consumer = consumer;
this.consumer = consumer;
}
private _switch_source_node(new_node: AudioNode) {
if(this._current_consumer) {
if(this._current_consumer.type == InputConsumerType.NODE) {
const node_consumer = this._current_consumer as NodeInputConsumer;
if(this._source_node)
node_consumer.callback_disconnect(this._source_node);
if(new_node)
node_consumer.callback_node(new_node);
} else if(this._current_consumer.type == InputConsumerType.CALLBACK) {
this._source_node.disconnect(this._consumer_callback_node);
if(new_node)
new_node.connect(this._consumer_callback_node);
private switchSourceNode(newNode: AudioNode) {
if(this.consumer) {
if(this.consumer.type == InputConsumerType.NODE) {
const node_consumer = this.consumer as NodeInputConsumer;
if(this.sourceNode) {
node_consumer.callback_disconnect(this.sourceNode);
}
if(newNode) {
node_consumer.callback_node(newNode);
}
} else if(this.consumer.type == InputConsumerType.CALLBACK) {
this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
if(newNode) {
newNode.connect(this.audioNodeCallbackConsumer);
}
}
}
this._source_node = new_node;
this.sourceNode = newNode;
}
get_volume(): number {
return this._volume;
currentConsumer(): InputConsumer | undefined {
return this.consumer;
}
set_volume(volume: number) {
if(volume === this._volume)
currentDeviceId(): string | undefined {
return this.deviceId;
}
currentState(): InputState {
return this.state;
}
getVolume(): number {
return this.volumeModifier;
}
setVolume(volume: number) {
if(volume === this.volumeModifier)
return;
this._volume = volume;
this._volume_node.gain.value = volume;
this.volumeModifier = volume;
this.audioNodeVolume.gain.value = volume;
}
isFiltered(): boolean {
return this.state === InputState.RECORDING ? this.inputFiltered : true;
}
}
class JavascriptLevelmeter implements LevelMeter {
private static _instances: JavascriptLevelmeter[] = [];
private static _update_task: number;
class JavascriptLevelMeter implements LevelMeter {
private static meterInstances: JavascriptLevelMeter[] = [];
private static meterUpdateTask: number;
readonly _device: WebIDevice;
@ -671,7 +509,7 @@ class JavascriptLevelmeter implements LevelMeter {
this._analyse_buffer = new Uint8Array(this._analyser_node.fftSize);
/* starting stream */
const _result = await JavascriptInput.request_media_stream(this._device.deviceId, this._device.groupId);
const _result = await requestMediaStream(this._device.deviceId, this._device.groupId);
if(!(_result instanceof MediaStream)){
if(_result === InputStartResult.ENOTALLOWED)
throw tr("No permissions");
@ -690,18 +528,18 @@ class JavascriptLevelmeter implements LevelMeter {
this._analyser_node.connect(this._gain_node);
this._gain_node.connect(this._context.destination);
JavascriptLevelmeter._instances.push(this);
if(JavascriptLevelmeter._instances.length == 1) {
clearInterval(JavascriptLevelmeter._update_task);
JavascriptLevelmeter._update_task = setInterval(() => JavascriptLevelmeter._analyse_all(), JThresholdFilter.update_task_interval) as any;
JavascriptLevelMeter.meterInstances.push(this);
if(JavascriptLevelMeter.meterInstances.length == 1) {
clearInterval(JavascriptLevelMeter.meterUpdateTask);
JavascriptLevelMeter.meterUpdateTask = setInterval(() => JavascriptLevelMeter._analyse_all(), JThresholdFilter.update_task_interval) as any;
}
}
destroy() {
JavascriptLevelmeter._instances.remove(this);
if(JavascriptLevelmeter._instances.length == 0) {
clearInterval(JavascriptLevelmeter._update_task);
JavascriptLevelmeter._update_task = 0;
JavascriptLevelMeter.meterInstances.remove(this);
if(JavascriptLevelMeter.meterInstances.length == 0) {
clearInterval(JavascriptLevelMeter.meterUpdateTask);
JavascriptLevelMeter.meterUpdateTask = 0;
}
if(this._source_node) {
@ -736,31 +574,15 @@ class JavascriptLevelmeter implements LevelMeter {
}
private static _analyse_all() {
for(const instance of [...this._instances])
for(const instance of [...this.meterInstances])
instance._analyse();
}
private _analyse() {
this._analyser_node.getByteTimeDomainData(this._analyse_buffer);
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
this._current_level = JThresholdFilter.calculateAudioLevel(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
if(this._callback)
this._callback(this._current_level);
}
}
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
function: async () => {
inputDeviceList = new WebInputDeviceList();
},
priority: 80,
name: "initialize media devices"
});
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
function: async () => {
inputDeviceList.refresh().then(() => {});
},
priority: 10,
name: "query media devices"
});
}

View File

@ -0,0 +1,190 @@
import {
AbstractDeviceList,
DeviceListEvents,
DeviceListState,
IDevice,
PermissionState
} from "tc-shared/audio/recorder";
import * as log from "tc-shared/log";
import {LogCategory, logWarn} from "tc-shared/log";
import {Registry} from "tc-shared/events";
import {WebIDevice} from "tc-backend/web/audio/Recorder";
import * as loader from "tc-loader";
async function requestMicrophonePermissions() : Promise<PermissionState> {
const begin = Date.now();
try {
await navigator.mediaDevices.getUserMedia({ audio: { deviceId: "default" }, video: false });
return "granted";
} catch (error) {
const end = Date.now();
const isSystem = (end - begin) < 250;
log.debug(LogCategory.AUDIO, tr("Microphone device request took %d milliseconds. System answered: %s"), end - begin, isSystem);
return "denied";
}
}
export let inputDeviceList: WebInputDeviceList;
class WebInputDeviceList extends AbstractDeviceList {
private devices: WebIDevice[];
private deviceListQueryPromise: Promise<void>;
constructor() {
super();
this.devices = [];
}
async initialize() {
if('permissions' in navigator && 'query' in navigator.permissions) {
try {
const result = await navigator.permissions.query({ name: "microphone" });
switch (result.state) {
case "denied":
this.setPermissionState("denied");
break;
case "granted":
this.setPermissionState("granted");
break;
default:
return "unknown";
}
} catch (error) {
logWarn(LogCategory.GENERAL, tr("Failed to query for microphone permissions: %s"), error);
}
}
}
getDefaultDeviceId(): string {
return "default";
}
getDevices(): IDevice[] {
return this.devices;
}
getEvents(): Registry<DeviceListEvents> {
return this.events;
}
getStatus(): DeviceListState {
return this.listState;
}
isRefreshAvailable(): boolean {
return true;
}
refresh(askPermissions?: boolean): Promise<void> {
return this.queryDevices(askPermissions === true);
}
async requestPermissions(): Promise<PermissionState> {
if(this.permissionState !== "unknown")
return this.permissionState;
let result = await requestMicrophonePermissions();
if(result === "granted" && this.listState === "no-permissions") {
/* if called within doQueryDevices, queryDevices will just return the promise */
this.queryDevices(false).then(() => {});
}
this.setPermissionState(result);
return result;
}
private queryDevices(askPermissions: boolean) : Promise<void> {
if(this.deviceListQueryPromise) {
return this.deviceListQueryPromise;
}
this.deviceListQueryPromise = this.doQueryDevices(askPermissions).catch(error => {
log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);
if(this.listState !== "healthy") {
this.setState("error");
}
}).then(() => {
this.deviceListQueryPromise = undefined;
});
return this.deviceListQueryPromise;
}
private async doQueryDevices(askPermissions: boolean) {
let devices = await navigator.mediaDevices.enumerateDevices();
let hasPermissions = devices.findIndex(e => e.label !== "") !== -1;
if(!hasPermissions && askPermissions) {
this.setState("no-permissions");
/* request permissions */
hasPermissions = await this.requestPermissions() === "granted";
if(hasPermissions) {
devices = await navigator.mediaDevices.enumerateDevices();
}
}
if(hasPermissions) {
this.setPermissionState("granted");
}
if(window.detectedBrowser?.name === "firefox") {
devices = [{
label: tr("Default Firefox device"),
groupId: "default",
deviceId: "default",
kind: "audioinput",
toJSON: undefined
}];
}
const inputDevices = devices.filter(e => e.kind === "audioinput");
const oldDeviceList = this.devices;
this.devices = [];
let devicesAdded = 0;
for(const device of inputDevices) {
const oldIndex = oldDeviceList.findIndex(e => e.deviceId === device.deviceId);
if(oldIndex === -1) {
devicesAdded++;
} else {
oldDeviceList.splice(oldIndex, 1);
}
this.devices.push({
deviceId: device.deviceId,
driver: "WebAudio",
groupId: device.groupId,
name: device.label
});
}
this.events.fire("notify_list_updated", { addedDeviceCount: devicesAdded, removedDeviceCount: oldDeviceList.length });
if(hasPermissions) {
this.setState("healthy");
} else {
this.setState("no-permissions");
}
}
}
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
function: async () => {
inputDeviceList = new WebInputDeviceList();
await inputDeviceList.initialize();
},
priority: 80,
name: "initialize media devices"
});
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
function: async () => {
inputDeviceList.refresh(false).then(() => {});
},
priority: 10,
name: "query media devices"
});

View File

@ -4,13 +4,15 @@ export abstract class JAbstractFilter<NodeType extends AudioNode> {
readonly priority: number;
source_node: AudioNode;
audio_node: NodeType;
audioNode: NodeType;
context: AudioContext;
enabled: boolean = false;
active: boolean = false; /* if true the filter filters! */
callback_active_change: (new_state: boolean) => any;
callback_enabled_change: () => any;
paused: boolean = true;
@ -18,18 +20,24 @@ export abstract class JAbstractFilter<NodeType extends AudioNode> {
this.priority = priority;
}
abstract initialize(context: AudioContext, source_node: AudioNode);
/* Attention: After initialized, paused is the default state */
abstract initialize(context: AudioContext, sourceNode: AudioNode);
abstract finalize();
/* whatever the input has been paused and we don't expect any input */
abstract set_pause(flag: boolean);
abstract setPaused(flag: boolean);
abstract isPaused() : boolean;
is_enabled(): boolean {
isEnabled(): boolean {
return this.enabled;
}
set_enabled(flag: boolean) {
setEnabled(flag: boolean) {
this.enabled = flag;
if(this.callback_enabled_change) {
this.callback_enabled_change();
}
}
}
@ -37,99 +45,100 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
public static update_task_interval = 20; /* 20ms */
readonly type = FilterType.THRESHOLD;
callback_level?: (value: number) => any;
private _threshold = 50;
private threshold = 50;
private _update_task: any;
private _analyser: AnalyserNode;
private _analyse_buffer: Uint8Array;
private analyzeTask: any;
private audioAnalyserNode: AnalyserNode;
private analyseBuffer: Uint8Array;
private _silence_count = 0;
private _margin_frames = 5;
private silenceCount = 0;
private marginFrames = 5;
private _current_level = 0;
private _smooth_release = 0;
private _smooth_attack = 0;
private currentLevel = 0;
private smoothRelease = 0;
private smoothAttack = 0;
private levelCallbacks: ((level: number) => void)[] = [];
finalize() {
this.set_pause(true);
this.paused = true;
this.shutdownAnalyzer();
if(this.source_node) {
try { this.source_node.disconnect(this._analyser) } catch (error) {}
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
try { this.source_node.disconnect(this.audioAnalyserNode) } catch (error) {}
try { this.source_node.disconnect(this.audioNode) } catch (error) {}
}
this._analyser = undefined;
this.audioAnalyserNode = undefined;
this.source_node = undefined;
this.audio_node = undefined;
this.audioNode = undefined;
this.context = undefined;
}
initialize(context: AudioContext, source_node: AudioNode) {
this.paused = true;
this.context = context;
this.source_node = source_node;
this.audio_node = context.createGain();
this._analyser = context.createAnalyser();
this.audioNode = context.createGain();
this.audioAnalyserNode = context.createAnalyser();
const optimal_ftt_size = Math.ceil((source_node.context || context).sampleRate * (JThresholdFilter.update_task_interval / 1000));
const base2_ftt = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
this._analyser.fftSize = base2_ftt;
this.audioAnalyserNode.fftSize = base2_ftt;
if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser.fftSize)
this._analyse_buffer = new Uint8Array(this._analyser.fftSize);
if(!this.analyseBuffer || this.analyseBuffer.length < this.audioAnalyserNode.fftSize)
this.analyseBuffer = new Uint8Array(this.audioAnalyserNode.fftSize);
this.active = false;
this.audio_node.gain.value = 1;
this.audioNode.gain.value = 0; /* silence by default */
this.source_node.connect(this.audio_node);
this.source_node.connect(this._analyser);
/* force update paused state */
this.set_pause(!(this.paused = !this.paused));
this.source_node.connect(this.audioNode);
this.source_node.connect(this.audioAnalyserNode);
}
get_margin_frames(): number { return this._margin_frames; }
set_margin_frames(value: number) {
this._margin_frames = value;
getMarginFrames(): number { return this.marginFrames; }
setMarginFrames(value: number) {
this.marginFrames = value;
}
get_attack_smooth(): number {
return this._smooth_attack;
getAttackSmooth(): number {
return this.smoothAttack;
}
get_release_smooth(): number {
return this._smooth_release;
getReleaseSmooth(): number {
return this.smoothRelease;
}
set_attack_smooth(value: number) {
this._smooth_attack = value;
setAttackSmooth(value: number) {
this.smoothAttack = value;
}
set_release_smooth(value: number) {
this._smooth_release = value;
setReleaseSmooth(value: number) {
this.smoothRelease = value;
}
get_threshold(): number {
return this._threshold;
getThreshold(): number {
return this.threshold;
}
set_threshold(value: number): Promise<void> {
this._threshold = value;
return Promise.resolve();
setThreshold(value: number) {
this.threshold = value;
this.updateGainNode(false);
}
public static process(buffer: Uint8Array, ftt_size: number, previous: number, smooth: number) {
public static calculateAudioLevel(buffer: Uint8Array, fttSize: number, previous: number, smooth: number) : number {
let level;
{
let total = 0, float, rms;
for(let index = 0; index < ftt_size; index++) {
for(let index = 0; index < fttSize; index++) {
float = ( buffer[index++] / 0x7f ) - 1;
total += (float * float);
}
rms = Math.sqrt(total / ftt_size);
rms = Math.sqrt(total / fttSize);
let db = 20 * ( Math.log(rms) / Math.log(10) );
// sanity check
@ -140,38 +149,44 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
return previous * smooth + level * (1 - smooth);
}
private _analyse() {
this._analyser.getByteTimeDomainData(this._analyse_buffer);
private analyzeAnalyseBuffer() {
if(!this.audioNode || !this.audioAnalyserNode)
return;
this.audioAnalyserNode.getByteTimeDomainData(this.analyseBuffer);
let smooth;
if(this._silence_count == 0)
smooth = this._smooth_release;
if(this.silenceCount == 0)
smooth = this.smoothRelease;
else
smooth = this._smooth_attack;
smooth = this.smoothAttack;
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser.fftSize, this._current_level, smooth);
this.currentLevel = JThresholdFilter.calculateAudioLevel(this.analyseBuffer, this.audioAnalyserNode.fftSize, this.currentLevel, smooth);
this._update_gain_node();
if(this.callback_level)
this.callback_level(this._current_level);
this.updateGainNode(true);
for(const callback of this.levelCallbacks)
callback(this.currentLevel);
}
private _update_gain_node() {
private updateGainNode(increaseSilenceCount: boolean) {
let state;
if(this._current_level > this._threshold) {
this._silence_count = 0;
if(this.currentLevel > this.threshold) {
this.silenceCount = 0;
state = true;
} else {
state = this._silence_count++ < this._margin_frames;
state = this.silenceCount < this.marginFrames;
if(increaseSilenceCount)
this.silenceCount++;
}
if(state) {
this.audio_node.gain.value = 1;
this.audioNode.gain.value = 1;
if(this.active) {
this.active = false;
this.callback_active_change(false);
}
} else {
this.audio_node.gain.value = 0;
this.audioNode.gain.value = 0;
if(!this.active) {
this.active = true;
this.callback_active_change(true);
@ -179,22 +194,42 @@ export class JThresholdFilter extends JAbstractFilter<GainNode> implements Thres
}
}
set_pause(flag: boolean) {
if(flag === this.paused) return;
this.paused = flag;
isPaused(): boolean {
return this.paused;
}
if(this.paused) {
clearInterval(this._update_task);
this._update_task = undefined;
if(this.active) {
this.active = false;
this.callback_active_change(false);
}
} else {
if(!this._update_task && this._analyser)
this._update_task = setInterval(() => this._analyse(), JThresholdFilter.update_task_interval);
setPaused(flag: boolean) {
if(flag === this.paused) {
return;
}
this.paused = flag;
this.initializeAnalyzer();
}
registerLevelCallback(callback: (value: number) => void) {
this.levelCallbacks.push(callback);
}
removeLevelCallback(callback: (value: number) => void) {
this.levelCallbacks.remove(callback);
}
private initializeAnalyzer() {
if(this.analyzeTask) {
return;
}
/* by default we're consuming the input */
this.active = true;
this.audioNode.gain.value = 0;
this.analyzeTask = setInterval(() => this.analyzeAnalyseBuffer(), JThresholdFilter.update_task_interval);
}
private shutdownAnalyzer() {
clearInterval(this.analyzeTask);
this.analyzeTask = undefined;
}
}
@ -203,11 +238,11 @@ export class JStateFilter extends JAbstractFilter<GainNode> implements StateFilt
finalize() {
if(this.source_node) {
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
try { this.source_node.disconnect(this.audioNode) } catch (error) {}
}
this.source_node = undefined;
this.audio_node = undefined;
this.audioNode = undefined;
this.context = undefined;
}
@ -215,28 +250,31 @@ export class JStateFilter extends JAbstractFilter<GainNode> implements StateFilt
this.context = context;
this.source_node = source_node;
this.audio_node = context.createGain();
this.audio_node.gain.value = this.active ? 0 : 1;
this.audioNode = context.createGain();
this.audioNode.gain.value = this.active ? 0 : 1;
this.source_node.connect(this.audio_node);
this.source_node.connect(this.audioNode);
}
is_active(): boolean {
isActive(): boolean {
return this.active;
}
set_state(state: boolean): Promise<void> {
setState(state: boolean) {
if(this.active === state)
return Promise.resolve();
return;
this.active = state;
if(this.audio_node)
this.audio_node.gain.value = state ? 0 : 1;
if(this.audioNode)
this.audioNode.gain.value = state ? 0 : 1;
this.callback_active_change(state);
return Promise.resolve();
}
set_pause(flag: boolean) {
isPaused(): boolean {
return this.paused;
}
setPaused(flag: boolean) {
this.paused = flag;
}
}

View File

@ -103,7 +103,7 @@ export class WrappedWebSocket {
try {
if(this.socket.readyState === WebSocket.OPEN) {
this.socket.close();
this.socket.close(3000);
} else if(this.socket.readyState === WebSocket.CONNECTING) {
if(kPreventOpeningWebSocketClosing) {
/* to prevent the "WebSocket is closed before the connection is established." warning in the console */

View File

@ -88,16 +88,15 @@ export class VoiceConnection extends AbstractVoiceConnection {
if(this.currentAudioSource === recorder && !enforce)
return;
if(recorder) {
await recorder.unmount();
}
if(this.currentAudioSource) {
await this.voiceBridge?.setInput(undefined);
this.currentAudioSource.callback_unmount = undefined;
await this.currentAudioSource.unmount();
}
/* unmount our target recorder */
await recorder?.unmount();
this.handleRecorderStop();
this.currentAudioSource = recorder;
@ -108,18 +107,24 @@ export class VoiceConnection extends AbstractVoiceConnection {
recorder.callback_start = this.handleRecorderStart.bind(this);
recorder.callback_stop = this.handleRecorderStop.bind(this);
recorder.callback_input_change = async (oldInput, newInput) => {
recorder.callback_input_initialized = async input => {
if(!this.voiceBridge)
return;
if(this.voiceBridge.getInput() && this.voiceBridge.getInput() !== oldInput) {
logWarn(LogCategory.VOICE,
tr("Having a recorder input change, but our voice bridge still has another input (Having: %o, Expecting: %o)!"),
this.voiceBridge.getInput(), oldInput);
}
await this.voiceBridge.setInput(newInput);
await this.voiceBridge.setInput(input);
};
if(recorder.input && this.voiceBridge) {
await this.voiceBridge.setInput(recorder.input);
}
if(!recorder.input || recorder.input.isFiltered()) {
this.handleRecorderStop();
} else {
this.handleRecorderStart();
}
} else {
await this.voiceBridge.setInput(undefined);
}
this.events.fire("notify_recorder_changed");

View File

@ -61,11 +61,12 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
}
async setInput(input: AbstractInput | undefined) {
console.error("SET INPUT: %o", input);
if (this.currentInput === input)
return;
if (this.currentInput) {
await this.currentInput.set_consumer(undefined);
await this.currentInput.setConsumer(undefined);
this.currentInput = undefined;
}
@ -73,7 +74,7 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
if (this.currentInput) {
try {
await this.currentInput.set_consumer({
await this.currentInput.setConsumer({
type: InputConsumerType.NODE,
callback_node: node => node.connect(this.localAudioDestinationNode),
callback_disconnect: node => node.disconnect(this.localAudioDestinationNode)
@ -91,6 +92,7 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
packet[2] = (this.voicePacketId >> 8) & 0xFF; //HIGHT (voiceID)
packet[3] = (this.voicePacketId >> 0) & 0xFF; //LOW (voiceID)
packet[4] = codec; //Codec
this.voicePacketId++;
}
sendStopSignal(codec: number) {