Committing changes before going on vacation (Still contains errors)
parent 56d9e9fa16
commit 0d7a34c31a
@ -1,7 +1,7 @@
# Changelog:
* **11.08.20**
    - Fixed the voice push to talk delay

    /* FIXME: Newcomer modal with the microphone */
* **09.08.20**
    - Added a "watch to gather" context menu entry for clients
    - Disassembled the current client icon sprite into its individual icons
@ -1,6 +1,15 @@
|
|||
import * as loader from "../loader/loader";
|
||||
import {Stage} from "../loader/loader";
|
||||
import {detect as detectBrowser} from "detect-browser";
|
||||
import {
|
||||
BrowserInfo,
|
||||
detect as detectBrowser,
|
||||
} from "detect-browser";
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
detectedBrowser: BrowserInfo
|
||||
}
|
||||
}
|
||||
|
||||
if(__build.target === "web") {
|
||||
loader.register_task(Stage.SETUP, {
|
||||
|
@ -13,14 +22,15 @@ if(__build.target === "web") {
|
|||
return;
|
||||
|
||||
console.log("Resolved browser manufacturer to \"%s\" version \"%s\" on %s", browser.name, browser.version, browser.os);
|
||||
if(browser.type && browser.type !== "browser") {
|
||||
if(browser.type !== "browser") {
|
||||
loader.critical_error("Your device isn't supported.", "User agent type " + browser.type + " isn't supported.");
|
||||
throw "unsupported user type";
|
||||
}
|
||||
|
||||
window.detectedBrowser = browser;
|
||||
|
||||
switch (browser?.name) {
|
||||
case "aol":
|
||||
case "bot":
|
||||
case "crios":
|
||||
case "ie":
|
||||
loader.critical_error("Browser not supported", "We're sorry, but your browser isn't supported.");
@ -1,9 +0,0 @@
|
|||
import {AbstractInput, InputDevice, LevelMeter} from "tc-shared/voice/RecorderBase";
|
||||
|
||||
export function devices() : InputDevice[];
|
||||
|
||||
export function device_refresh_available() : boolean;
|
||||
export function refresh_devices() : Promise<void>;
|
||||
|
||||
export function create_input() : AbstractInput;
|
||||
export function create_levelmeter(device: InputDevice) : Promise<LevelMeter>;
|
|
@ -0,0 +1,25 @@
|
|||
<svg version="1.1" viewBox="0 0 10.165 16" xmlns="http://www.w3.org/2000/svg">
|
||||
<defs>
|
||||
<clipPath>
|
||||
<path d="m1e4 0h-1e4v1e4h1e4v-1e4m-5943.8 8689.6c-213.71 0-405.54-93.27-537.3-241.21-113.23-127.12-182.11-294.58-182.11-478.21v-3963.2c0-397.32 322.08-719.41 719.41-719.41h1887.5c397.33 0 719.42 322.09 719.42 719.41v3963.2c0 187.84-72.06 358.81-189.95 486.93-131.49 142.89-319.99 232.49-529.47 232.49h-1887.5m2225-2462.5h-264.44c-23.1 48.63-36.03 103.04-36.03 160.47 0 181.5 129.1 332.79 300.47 367.21v-527.68m-494.82 0h-677.21c-23.1 48.64-36.02 103.04-36.02 160.47 0 206.89 167.73 374.62 374.62 374.62 206.91 0 374.62-167.73 374.62-374.62 0-57.43-12.92-111.83-36.01-160.47m-907.57 0h-677.21c-23.09 48.64-36.01 103.04-36.01 160.47 0 206.89 167.72 374.62 374.62 374.62 206.89 0 374.62-167.73 374.62-374.62 0-57.43-12.92-111.83-36.02-160.47m-907.64 0h-252.37v1743.1c0 53.67 12.93 104.25 35.31 149.35 47.97 96.64 140.65 167.31 250.94 184.22 1.46-13.35 2.21-26.91 2.21-40.65 0-177.2-123.17-325.29-288.46-364.27v-502.08c33.37 172.71 185.17 303.2 367.63 303.2 206.89 0 374.62-167.72 374.62-374.61 0-206.9-167.73-374.62-374.62-374.62-182.46 0-334.26 130.49-367.63 303.19v-502.08c165.29-38.96 288.46-187.06 288.46-364.26 0-57.46-12.95-111.86-36.09-160.47m1930.4 723.62c-206.9 0-374.61 167.72-374.61 374.62 0 206.89 167.71 374.61 374.61 374.61 206.89 0 374.61-167.72 374.61-374.61 0-206.9-167.72-374.62-374.61-374.62m-907.58 0c-206.88 0-374.61 167.72-374.61 374.62 0 206.89 167.73 374.61 374.61 374.61 206.9 0 374.63-167.72 374.63-374.61 0-206.9-167.73-374.62-374.63-374.62m453.79 937.76c-206.89 0-374.62 167.73-374.62 374.62 0 15.08.91 29.94 2.67 44.54h743.91c1.75-14.6 2.66-29.46 2.66-44.54 0-206.89-167.71-374.62-374.62-374.62m-907.56 0c-206.9 0-374.62 167.73-374.62 374.62 0 15.08.9 29.94 2.66 44.54h743.91c1.76-14.6 2.67-29.46 2.67-44.54 0-206.89-167.73-374.62-374.62-374.62m1741 7.4c-171.37 34.42-300.47 185.71-300.47 367.22 0 14.29.81 28.39 2.38 42.26 110.79-12.93 205.34-79.77 256.72-173.49 26.34-48.05 41.37-103.13 41.37-161.69v-74.3"/>
|
||||
</clipPath>
|
||||
<clipPath id="clipPath42">
|
||||
<path d="m6016.8 6227.1h-230.38c23.09 48.64 36.01 103.04 36.01 160.47 0 206.89-167.71 374.62-374.62 374.62-206.89 0-374.62-167.73-374.62-374.62 0-57.43 12.92-111.83 36.02-160.47h-230.36c23.1 48.64 36.02 103.04 36.02 160.47 0 206.89-167.73 374.62-374.62 374.62-206.9 0-374.62-167.73-374.62-374.62 0-57.43 12.92-111.83 36.01-160.47h-230.43c23.14 48.61 36.09 103.01 36.09 160.47 0 177.2-123.17 325.3-288.46 364.26v502.08c33.37-172.7 185.17-303.19 367.63-303.19 206.89 0 374.62 167.72 374.62 374.62 0 206.89-167.73 374.61-374.62 374.61-182.46 0-334.26-130.49-367.63-303.2v502.08c165.29 38.98 288.46 187.07 288.46 364.27 0 13.74-.75 27.3-2.21 40.65 16.7 2.56 33.8 3.89 51.21 3.89h111.99c-1.76-14.6-2.66-29.46-2.66-44.54 0-206.89 167.72-374.62 374.62-374.62 206.89 0 374.62 167.73 374.62 374.62 0 15.08-.91 29.94-2.67 44.54h163.66c-1.76-14.6-2.67-29.46-2.67-44.54 0-206.89 167.73-374.62 374.62-374.62 206.91 0 374.62 167.73 374.62 374.62 0 15.08-.91 29.94-2.66 44.54h124.03c13.31 0 26.44-.78 39.35-2.28-1.57-13.87-2.38-27.97-2.38-42.26 0-181.51 129.1-332.8 300.47-367.22v-1141.1c-171.37-34.42-300.47-185.71-300.47-367.21 0-57.43 12.93-111.84 36.03-160.47m-115.2 1472.8c-206.9 0-374.61-167.72-374.61-374.61 0-206.9 167.71-374.62 374.61-374.62 206.89 0 374.61 167.72 374.61 374.62 0 206.89-167.72 374.61-374.61 374.61m-907.58 0c-206.88 0-374.61-167.72-374.61-374.61 0-206.9 167.73-374.62 374.61-374.62 206.9 0 374.63 167.72 374.63 374.62 0 206.89-167.73 374.61-374.63 374.61"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
<g transform="matrix(1.3333 0 0 -1.3333 -661.58 674.67)" style="stroke-width:.99975">
|
||||
<g transform="matrix(.0016262 0 0 .0016262 491.87 491.87)" style="stroke-width:9.9975">
|
||||
<g style="stroke-width:9.9975"/>
|
||||
<path d="m6281.2 6563.5v-336.35h-2562.4v1743.1c0 53.67 12.93 104.25 35.31 149.35 55.23 111.27 169.75 188.11 302.15 188.11h1887.5c127.52 0 238.7-71.13 296.07-175.77 26.34-48.05 41.37-103.13 41.37-161.69v-1406.7m192.03 1893.7c-131.49 142.89-319.99 232.49-529.47 232.49h-1887.5c-213.71 0-405.54-93.27-537.3-241.21-113.23-127.12-182.11-294.58-182.11-478.21v-3963.2c0-397.32 322.08-719.41 719.41-719.41h1887.5c397.33 0 719.42 322.09 719.42 719.41v3963.2c0 187.84-72.06 358.81-189.95 486.93" style="fill:#7289da"/>
|
||||
<path d="m6888.8 6026.3h-4.6v-2389.2c0-450.14-364.91-815.04-815.05-815.04h-2138.4c-450.12 0-815.03 364.9-815.03 815.03v2389.2h-4.6c-251.43 0-455.26-203.82-455.26-455.26v-2091.9c0-619.63 502.3-1121.9 1121.9-1121.9h781.45v-478.83h-554.15c-310.59 0-562.35-251.77-562.35-562.36v-5.68h3114.5v5.68c0 310.59-251.79 562.36-562.37 562.36h-554.14v478.83h781.44c619.63 0 1121.9 502.3 1121.9 1121.9v2091.9c0 251.44-203.82 455.26-455.26 455.26" style="fill:#7289da"/>
|
||||
<g style="stroke-width:9.9975">
|
||||
<g clip-path="url(#clipPath42)" style="stroke-width:9.9975">
|
||||
<path d="m6016.8 6227.1h-230.38c23.09 48.64 36.01 103.04 36.01 160.47 0 206.89-167.71 374.62-374.62 374.62-206.89 0-374.62-167.73-374.62-374.62 0-57.43 12.92-111.83 36.02-160.47h-230.36c23.1 48.64 36.02 103.04 36.02 160.47 0 206.89-167.73 374.62-374.62 374.62-206.9 0-374.62-167.73-374.62-374.62 0-57.43 12.92-111.83 36.01-160.47h-230.43c23.14 48.61 36.09 103.01 36.09 160.47 0 177.2-123.17 325.3-288.46 364.26v502.08c33.37-172.7 185.17-303.19 367.63-303.19 206.89 0 374.62 167.72 374.62 374.62 0 206.89-167.73 374.61-374.62 374.61-182.46 0-334.26-130.49-367.63-303.2v502.08c165.29 38.98 288.46 187.07 288.46 364.27 0 13.74-.75 27.3-2.21 40.65 16.7 2.56 33.8 3.89 51.21 3.89h111.99c-1.76-14.6-2.66-29.46-2.66-44.54 0-206.89 167.72-374.62 374.62-374.62 206.89 0 374.62 167.73 374.62 374.62 0 15.08-.91 29.94-2.67 44.54h163.66c-1.76-14.6-2.67-29.46-2.67-44.54 0-206.89 167.73-374.62 374.62-374.62 206.91 0 374.62 167.73 374.62 374.62 0 15.08-.91 29.94-2.66 44.54h124.03c13.31 0 26.44-.78 39.35-2.28-1.57-13.87-2.38-27.97-2.38-42.26 0-181.51 129.1-332.8 300.47-367.22v-1141.1c-171.37-34.42-300.47-185.71-300.47-367.21 0-57.43 12.93-111.84 36.03-160.47m-115.2 1472.8c-206.9 0-374.61-167.72-374.61-374.61 0-206.9 167.71-374.62 374.61-374.62 206.89 0 374.61 167.72 374.61 374.62 0 206.89-167.72 374.61-374.61 374.61m-907.58 0c-206.88 0-374.61-167.72-374.61-374.61 0-206.9 167.73-374.62 374.61-374.62 206.9 0 374.63 167.72 374.63 374.62 0 206.89-167.73 374.61-374.63 374.61" style="fill:#ccc"/>
|
||||
</g>
|
||||
</g>
|
||||
<path d="m6281.2 5879.8h-2562.4v143.48c111.69 26.34 204.15 102.5 252.37 203.8h230.43c60.12-126.6 189.15-214.15 338.61-214.15s278.49 87.55 338.6 214.15h230.36c60.11-126.6 189.14-214.15 338.6-214.15 149.47 0 278.5 87.55 338.61 214.15h230.38c49.9-105.08 147.29-183.22 264.44-206.74v-140.54m0 874.96v1141.1-1141.1m-2276.2 1549c-6.08 55.61-24.47 107.47-52.18 153.05h267.13c-27.01-44.55-45.22-95.05-51.75-149.16h-111.99c-17.41 0-34.51-1.33-51.21-3.89m1978.1 1.61c-12.91 1.5-26.04 2.28-39.35 2.28h-124.03c-6.52 54.11-24.73 104.61-51.75 149.16h266.97c-27.37-45.16-45.61-96.47-51.84-151.44m-907.29 2.28h-163.66c-6.52 54.11-24.73 104.61-51.75 149.16h267.16c-27.02-44.55-45.23-95.05-51.75-149.16" style="fill:#7289da"/>
|
||||
<path d="m5043.9 3891-211.6 376.82 211.6 376.8-211.6 376.81 211.6 376.83-118.8 211.54c-443.67 334.93-1110 69.17-1145.4-542.46-34.61-597.4 711.68-1068.5 1189.8-1308.9l74.44 132.52" style="fill:#fff"/>
|
||||
<path d="m6220.3 5067.3c-35.91 620-720.11 884.62-1163.5 528.38l110.9-197.46-211.62-376.83 211.62-376.81-211.62-376.8 211.62-376.82-48.87-87.02c476.15 251.6 1134 701.64 1101.5 1263.4" style="fill:#fff"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After: Size 7.3 KiB
|
@ -38,6 +38,7 @@ import {PluginCmdRegistry} from "tc-shared/connection/PluginCmdHandler";
|
|||
import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
|
||||
import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
|
||||
import {getServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
|
||||
import {getRecorderBackend} from "tc-shared/audio/recorder";
|
||||
|
||||
export enum DisconnectReason {
|
||||
HANDLER_DESTROYED,
|
||||
|
@ -738,6 +739,8 @@ export class ConnectionHandler {
|
|||
const support_record = basic_voice_support && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
|
||||
const support_playback = basic_voice_support && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));
|
||||
|
||||
const hasInputDevice = getRecorderBackend().getDeviceList().getPermissionState() === "granted" && !!vconnection.voice_recorder();
|
||||
|
||||
const property_update = {
|
||||
client_input_muted: this.client_status.input_muted,
|
||||
client_output_muted: this.client_status.output_muted
|
||||
|
@ -749,12 +752,11 @@ export class ConnectionHandler {
|
|||
if(!this.serverConnection.connected() || vconnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
|
||||
property_update["client_input_hardware"] = false;
|
||||
property_update["client_output_hardware"] = false;
|
||||
this.client_status.input_hardware = true; /* IDK if we have input hardware or not, but it dosn't matter at all so */
|
||||
this.client_status.input_hardware = hasInputDevice;
|
||||
|
||||
/* no icons are shown so no update at all */
|
||||
} else {
|
||||
const audio_source = vconnection.voice_recorder();
|
||||
const recording_supported = typeof(audio_source) !== "undefined" && audio_source.record_supported && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
|
||||
const recording_supported = hasInputDevice && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
|
||||
const playback_supported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);
|
||||
|
||||
property_update["client_input_hardware"] = recording_supported;
|
||||
|
@ -807,7 +809,7 @@ export class ConnectionHandler {
|
|||
this.client_status.sound_record_supported = support_record;
|
||||
this.client_status.sound_playback_supported = support_playback;
|
||||
|
||||
if(vconnection && vconnection.voice_recorder() && vconnection.voice_recorder().record_supported) {
|
||||
if(vconnection && vconnection.voice_recorder()) {
|
||||
const active = !this.client_status.input_muted && !this.client_status.output_muted;
|
||||
/* No need to start the microphone when we're not even connected */
@ -0,0 +1,157 @@
|
|||
import {AbstractInput, LevelMeter} from "tc-shared/voice/RecorderBase";
|
||||
import {Registry} from "tc-shared/events";
|
||||
|
||||
export type DeviceQueryResult = {}
|
||||
|
||||
export interface AudioRecorderBacked {
|
||||
createInput() : AbstractInput;
|
||||
createLevelMeter(device: IDevice) : Promise<LevelMeter>;
|
||||
|
||||
getDeviceList() : DeviceList;
|
||||
}
|
||||
|
||||
export interface DeviceListEvents {
|
||||
/*
|
||||
* Should only trigger if the list really changed.
|
||||
*/
|
||||
notify_list_updated: {
|
||||
removedDeviceCount: number,
|
||||
addedDeviceCount: number
|
||||
},
|
||||
|
||||
notify_state_changed: {
|
||||
oldState: DeviceListState;
|
||||
newState: DeviceListState;
|
||||
},
|
||||
|
||||
notify_permissions_changed: {
|
||||
oldState: PermissionState,
|
||||
newState: PermissionState
|
||||
}
|
||||
}
|
||||
|
||||
export type DeviceListState = "healthy" | "uninitialized" | "no-permissions" | "error";
|
||||
|
||||
export interface IDevice {
|
||||
deviceId: string;
|
||||
|
||||
driver: string;
|
||||
name: string;
|
||||
}
|
||||
export namespace IDevice {
|
||||
export const NoDeviceId = "none";
|
||||
}
|
||||
|
||||
export type PermissionState = "granted" | "denied" | "unknown";
|
||||
|
||||
export interface DeviceList {
|
||||
getEvents() : Registry<DeviceListEvents>;
|
||||
|
||||
isRefreshAvailable() : boolean;
|
||||
refresh() : Promise<void>;
|
||||
|
||||
/* implicitly update our own permission state */
|
||||
requestPermissions() : Promise<PermissionState>;
|
||||
getPermissionState() : PermissionState;
|
||||
|
||||
getStatus() : DeviceListState;
|
||||
getDevices() : IDevice[];
|
||||
|
||||
getDefaultDeviceId() : string;
|
||||
|
||||
awaitHealthy(): Promise<void>;
|
||||
awaitInitialized() : Promise<void>;
|
||||
}
|
||||
|
||||
export abstract class AbstractDeviceList implements DeviceList {
|
||||
protected readonly events: Registry<DeviceListEvents>;
|
||||
protected listState: DeviceListState;
|
||||
protected permissionState: PermissionState;
|
||||
|
||||
protected constructor() {
|
||||
this.events = new Registry<DeviceListEvents>();
|
||||
this.permissionState = "unknown";
|
||||
this.listState = "uninitialized";
|
||||
}
|
||||
|
||||
getStatus(): DeviceListState {
|
||||
return this.listState;
|
||||
}
|
||||
|
||||
getPermissionState(): PermissionState {
|
||||
return this.permissionState;
|
||||
}
|
||||
|
||||
protected setState(state: DeviceListState) {
|
||||
if(this.listState === state)
|
||||
return;
|
||||
|
||||
const oldState = this.listState;
|
||||
this.listState = state;
|
||||
this.events.fire("notify_state_changed", { oldState: oldState, newState: state });
|
||||
}
|
||||
|
||||
protected setPermissionState(state: PermissionState) {
|
||||
if(this.permissionState === state)
|
||||
return;
|
||||
|
||||
const oldState = this.permissionState;
|
||||
this.permissionState = state;
|
||||
this.events.fire("notify_permissions_changed", { oldState: oldState, newState: state });
|
||||
}
|
||||
|
||||
awaitInitialized(): Promise<void> {
|
||||
if(this.listState !== "uninitialized")
|
||||
return Promise.resolve();
|
||||
|
||||
return new Promise<void>(resolve => {
|
||||
const callback = (event: DeviceListEvents["notify_state_changed"]) => {
|
||||
if(event.newState !== "uninitialized")
|
||||
return;
|
||||
|
||||
this.events.off("notify_state_changed", callback);
|
||||
resolve();
|
||||
};
|
||||
this.events.on("notify_state_changed", callback);
|
||||
});
|
||||
}
|
||||
|
||||
awaitHealthy(): Promise<void> {
|
||||
if(this.listState === "healthy")
|
||||
return Promise.resolve();
|
||||
|
||||
return new Promise<void>(resolve => {
|
||||
const callback = (event: DeviceListEvents["notify_state_changed"]) => {
|
||||
if(event.newState !== "healthy")
|
||||
return;
|
||||
|
||||
this.events.off("notify_state_changed", callback);
|
||||
resolve();
|
||||
};
|
||||
this.events.on("notify_state_changed", callback);
|
||||
});
|
||||
}
|
||||
|
||||
abstract getDefaultDeviceId(): string;
|
||||
abstract getDevices(): IDevice[];
|
||||
abstract getEvents(): Registry<DeviceListEvents>;
|
||||
abstract isRefreshAvailable(): boolean;
|
||||
abstract refresh(): Promise<void>;
|
||||
abstract requestPermissions(): Promise<PermissionState>;
|
||||
}
|
||||
|
||||
let recorderBackend: AudioRecorderBacked;
|
||||
|
||||
export function getRecorderBackend() : AudioRecorderBacked {
|
||||
if(typeof recorderBackend === "undefined")
|
||||
throw tr("the recorder backend hasn't been set yet");
|
||||
|
||||
return recorderBackend;
|
||||
}
|
||||
|
||||
export function setRecorderBackend(instance: AudioRecorderBacked) {
|
||||
if(typeof recorderBackend !== "undefined")
|
||||
throw tr("a recorder backend has already been initialized");
|
||||
|
||||
recorderBackend = instance;
|
||||
}
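The file above only declares the recorder backend contract; the concrete backend is registered elsewhere. A minimal consumer sketch (not part of this commit; the `audioBackend` instance below is assumed to exist) using only the interfaces declared above:

```typescript
import {
    AudioRecorderBacked,
    getRecorderBackend,
    setRecorderBackend,
    IDevice
} from "tc-shared/audio/recorder";

/* Assumed to be provided by the web/native audio implementation; only its type comes from the code above. */
declare const audioBackend: AudioRecorderBacked;

/* Exactly one backend may be registered; a second setRecorderBackend() call throws. */
setRecorderBackend(audioBackend);

async function firstAvailableDevice() : Promise<IDevice | undefined> {
    const deviceList = getRecorderBackend().getDeviceList();

    /* Ask for microphone permissions first; the list only becomes "healthy" afterwards. */
    if(deviceList.getPermissionState() !== "granted") {
        if(await deviceList.requestPermissions() !== "granted")
            return undefined;
    }

    await deviceList.awaitHealthy();
    return deviceList.getDevices()[0];
}
```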
@ -21,7 +21,6 @@ import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
|
|||
import {formatMessage} from "tc-shared/ui/frames/chat";
|
||||
import {openModalNewcomer} from "tc-shared/ui/modal/ModalNewcomer";
|
||||
import * as aplayer from "tc-backend/audio/player";
|
||||
import * as arecorder from "tc-backend/audio/recorder";
|
||||
import * as ppt from "tc-backend/ppt";
|
||||
import * as keycontrol from "./KeyControl";
|
||||
import * as React from "react";
|
||||
|
@ -182,8 +181,6 @@ async function initialize_app() {
|
|||
aplayer.on_ready(() => aplayer.set_master_volume(settings.global(Settings.KEY_SOUND_MASTER) / 100));
|
||||
else
|
||||
log.warn(LogCategory.GENERAL, tr("Client does not support aplayer.set_master_volume()... May client is too old?"));
|
||||
if(arecorder.device_refresh_available())
|
||||
arecorder.refresh_devices();
|
||||
});
|
||||
|
||||
set_default_recorder(new RecorderProfile("default"));
|
||||
|
@ -512,7 +509,7 @@ const task_teaweb_starter: loader.Task = {
|
|||
if(!aplayer.initializeFromGesture) {
|
||||
console.error(tr("Missing aplayer.initializeFromGesture"));
|
||||
} else
|
||||
$(document).one('click', event => aplayer.initializeFromGesture());
|
||||
$(document).one('click', () => aplayer.initializeFromGesture());
|
||||
}
|
||||
} catch (ex) {
|
||||
console.error(ex.stack);
@ -241,7 +241,7 @@
|
|||
}
|
||||
|
||||
.header {
|
||||
height: 2.6em;
|
||||
height: 3em;
|
||||
|
||||
flex-grow: 0;
|
||||
flex-shrink: 0;
|
||||
|
@ -266,7 +266,7 @@
|
|||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.btn {
|
||||
button {
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
@ -452,10 +452,24 @@
|
|||
text-align: center;
|
||||
}
|
||||
|
||||
button {
|
||||
width: 10em;
|
||||
align-self: center;
|
||||
margin-top: 2em;
|
||||
}
|
||||
|
||||
&.hidden {
|
||||
pointer-events: none;
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:global(.icon_em) {
|
||||
align-self: center;
|
||||
font-size: 10em;
|
||||
|
||||
margin-bottom: .25em;
|
||||
margin-top: -.25em;
|
||||
}
|
||||
}
|
||||
}
@ -2,7 +2,6 @@ import * as aplayer from "tc-backend/audio/player";
|
|||
import * as React from "react";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {LevelMeter} from "tc-shared/voice/RecorderBase";
|
||||
import * as arecorder from "tc-backend/audio/recorder";
|
||||
import * as log from "tc-shared/log";
|
||||
import {LogCategory, logWarn} from "tc-shared/log";
|
||||
import {default_recorder} from "tc-shared/voice/RecorderProfile";
|
||||
|
@ -12,6 +11,7 @@ import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
|
|||
import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller";
|
||||
import {Translatable} from "tc-shared/ui/react-elements/i18n";
|
||||
import {MicrophoneSettings} from "tc-shared/ui/modal/settings/MicrophoneRenderer";
|
||||
import {DeviceListState, getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
|
||||
|
||||
export type MicrophoneSetting = "volume" | "vad-type" | "ppt-key" | "ppt-release-delay" | "ppt-release-delay-active" | "threshold-threshold";
|
||||
|
||||
|
@ -29,6 +29,7 @@ export interface MicrophoneSettingsEvents {
|
|||
setting: MicrophoneSetting
|
||||
},
|
||||
|
||||
"action_request_permissions": {},
|
||||
"action_set_selected_device": { deviceId: string },
|
||||
"action_set_selected_device_result": {
|
||||
deviceId: string, /* on error it will contain the current selected device */
|
||||
|
@ -48,9 +49,11 @@ export interface MicrophoneSettingsEvents {
|
|||
}
|
||||
|
||||
"notify_devices": {
|
||||
status: "success" | "error" | "audio-not-initialized",
|
||||
status: "success" | "error" | "audio-not-initialized" | "no-permissions",
|
||||
|
||||
error?: string,
|
||||
shouldAsk?: boolean,
|
||||
|
||||
devices?: MicrophoneDevice[]
|
||||
selectedDevice?: string;
|
||||
},
|
||||
|
@ -62,13 +65,17 @@ export interface MicrophoneSettingsEvents {
|
|||
|
||||
level?: number,
|
||||
error?: string
|
||||
}}
|
||||
}},
|
||||
|
||||
status: Exclude<DeviceListState, "error">
|
||||
},
|
||||
|
||||
notify_destroy: {}
|
||||
}
|
||||
|
||||
export function initialize_audio_microphone_controller(events: Registry<MicrophoneSettingsEvents>) {
|
||||
const recorderBackend = getRecorderBackend();
|
||||
|
||||
/* level meters */
|
||||
{
|
||||
const level_meters: {[key: string]:Promise<LevelMeter>} = {};
|
||||
|
@ -80,7 +87,7 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
const meter = level_meters[e];
|
||||
delete level_meters[e];
|
||||
|
||||
meter.then(e => e.destory());
|
||||
meter.then(e => e.destroy());
|
||||
});
|
||||
Object.keys(level_info).forEach(e => delete level_info[e]);
|
||||
};
|
||||
|
@ -88,37 +95,42 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
const update_level_meter = () => {
|
||||
destroy_meters();
|
||||
|
||||
for(const device of arecorder.devices()) {
|
||||
let promise = arecorder.create_levelmeter(device).then(meter => {
|
||||
meter.set_observer(level => {
|
||||
if(level_meters[device.unique_id] !== promise) return; /* old level meter */
|
||||
level_info["none"] = { deviceId: "none", status: "success", level: 0 };
|
||||
|
||||
level_info[device.unique_id] = {
|
||||
deviceId: device.unique_id,
|
||||
for(const device of recorderBackend.getDeviceList().getDevices()) {
|
||||
let promise = recorderBackend.createLevelMeter(device).then(meter => {
|
||||
meter.set_observer(level => {
|
||||
if(level_meters[device.deviceId] !== promise) return; /* old level meter */
|
||||
|
||||
level_info[device.deviceId] = {
|
||||
deviceId: device.deviceId,
|
||||
status: "success",
|
||||
level: level
|
||||
};
|
||||
});
|
||||
return Promise.resolve(meter);
|
||||
}).catch(error => {
|
||||
if(level_meters[device.unique_id] !== promise) return; /* old level meter */
|
||||
level_info[device.unique_id] = {
|
||||
deviceId: device.unique_id,
|
||||
if(level_meters[device.deviceId] !== promise) return; /* old level meter */
|
||||
level_info[device.deviceId] = {
|
||||
deviceId: device.deviceId,
|
||||
status: "error",
|
||||
|
||||
error: error
|
||||
};
|
||||
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to initialize a level meter for device %s (%s): %o"), device.unique_id, device.driver + ":" + device.name, error);
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to initialize a level meter for device %s (%s): %o"), device.deviceId, device.driver + ":" + device.name, error);
|
||||
return Promise.reject(error);
|
||||
});
|
||||
level_meters[device.unique_id] = promise;
|
||||
level_meters[device.deviceId] = promise;
|
||||
}
|
||||
};
|
||||
|
||||
level_update_task = setInterval(() => {
|
||||
const deviceListStatus = recorderBackend.getDeviceList().getStatus();
|
||||
|
||||
events.fire("notify_device_level", {
|
||||
level: level_info
|
||||
level: level_info,
|
||||
status: deviceListStatus === "error" ? "uninitialized" : deviceListStatus
|
||||
});
|
||||
}, 50);
|
||||
|
||||
|
@ -142,34 +154,43 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
return;
|
||||
}
|
||||
|
||||
Promise.resolve().then(() => {
|
||||
return arecorder.device_refresh_available() && event.refresh_list ? arecorder.refresh_devices() : Promise.resolve();
|
||||
}).catch(error => {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to refresh device list: %o"), error);
|
||||
return Promise.resolve();
|
||||
}).then(() => {
|
||||
const devices = arecorder.devices();
|
||||
const deviceList = recorderBackend.getDeviceList();
|
||||
switch (deviceList.getStatus()) {
|
||||
case "no-permissions":
|
||||
events.fire_async("notify_devices", { status: "no-permissions", shouldAsk: deviceList.getPermissionState() === "denied" });
|
||||
return;
|
||||
|
||||
case "uninitialized":
|
||||
events.fire_async("notify_devices", { status: "audio-not-initialized" });
|
||||
return;
|
||||
}
|
||||
|
||||
if(event.refresh_list && deviceList.isRefreshAvailable()) {
|
||||
/* will automatically trigger a device list changed event if something has changed */
|
||||
deviceList.refresh().then(() => {});
|
||||
} else {
|
||||
const devices = deviceList.getDevices();
|
||||
|
||||
events.fire_async("notify_devices", {
|
||||
status: "success",
|
||||
selectedDevice: default_recorder.current_device() ? default_recorder.current_device().unique_id : "none",
|
||||
devices: devices.map(e => { return { id: e.unique_id, name: e.name, driver: e.driver }})
|
||||
selectedDevice: default_recorder.getDeviceId(),
|
||||
devices: devices.map(e => { return { id: e.deviceId, name: e.name, driver: e.driver }})
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
events.on("action_set_selected_device", event => {
|
||||
const device = arecorder.devices().find(e => e.unique_id === event.deviceId);
|
||||
if(!device && event.deviceId !== "none") {
|
||||
events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: default_recorder.current_device().unique_id });
|
||||
const device = recorderBackend.getDeviceList().getDevices().find(e => e.deviceId === event.deviceId);
|
||||
if(!device && event.deviceId !== IDevice.NoDeviceId) {
|
||||
events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: default_recorder.getDeviceId() });
|
||||
return;
|
||||
}
|
||||
|
||||
default_recorder.set_device(device).then(() => {
|
||||
console.debug(tr("Changed default microphone device"));
|
||||
console.debug(tr("Changed default microphone device to %s"), event.deviceId);
|
||||
events.fire_async("action_set_selected_device_result", { status: "success", deviceId: event.deviceId });
|
||||
}).catch((error) => {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to change microphone to device %s: %o"), device ? device.unique_id : "none", error);
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to change microphone to device %s: %o"), device ? device.deviceId : IDevice.NoDeviceId, error);
|
||||
events.fire_async("action_set_selected_device_result", { status: "success", deviceId: event.deviceId });
|
||||
});
|
||||
});
|
||||
|
@ -265,7 +286,59 @@ export function initialize_audio_microphone_controller(events: Registry<Micropho
|
|||
});
|
||||
}
|
||||
|
||||
events.on("action_request_permissions", () => recorderBackend.getDeviceList().requestPermissions().then(result => {
|
||||
console.error("Permission request result: %o", result);
|
||||
|
||||
if(result === "granted") {
|
||||
/* we've nothing to do, the device change event will already update our list */
|
||||
} else {
|
||||
events.fire_async("notify_devices", { status: "no-permissions", shouldAsk: result === "denied" });
|
||||
return;
|
||||
}
|
||||
}));
|
||||
|
||||
events.on("notify_destroy", recorderBackend.getDeviceList().getEvents().on("notify_list_updated", () => {
|
||||
events.fire("query_devices");
|
||||
}));
|
||||
|
||||
events.on("notify_destroy", recorderBackend.getDeviceList().getEvents().on("notify_state_changed", () => {
|
||||
events.fire("query_devices");
|
||||
}));
|
||||
|
||||
if(!aplayer.initialized()) {
|
||||
aplayer.on_ready(() => { events.fire_async("query_devices"); });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
loader.register_task(Stage.LOADED, {
|
||||
name: "test",
|
||||
function: async () => {
|
||||
aplayer.on_ready(() => {
|
||||
const modal = spawnReactModal(class extends InternalModal {
|
||||
settings = new Registry<MicrophoneSettingsEvents>();
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
initialize_audio_microphone_controller(this.settings);
|
||||
}
|
||||
|
||||
renderBody(): React.ReactElement {
|
||||
return <div style={{
|
||||
padding: "1em",
|
||||
backgroundColor: "#2f2f35"
|
||||
}}>
|
||||
<MicrophoneSettings events={this.settings} />
|
||||
</div>;
|
||||
}
|
||||
|
||||
title(): string | React.ReactElement<Translatable> {
|
||||
return "test";
|
||||
}
|
||||
});
|
||||
|
||||
modal.show();
|
||||
});
|
||||
},
|
||||
priority: -2
|
||||
})
@ -9,13 +9,13 @@ import {ClientIcon} from "svg-sprites/client-icons";
|
|||
import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
|
||||
import {createErrorModal} from "tc-shared/ui/elements/Modal";
|
||||
import {Slider} from "tc-shared/ui/react-elements/Slider";
|
||||
import MicrophoneSettings = modal.settings.MicrophoneSettings;
|
||||
import {RadioButton} from "tc-shared/ui/react-elements/RadioButton";
|
||||
import {VadType} from "tc-shared/voice/RecorderProfile";
|
||||
import {key_description, KeyDescriptor} from "tc-shared/PPTListener";
|
||||
import {spawnKeySelect} from "tc-shared/ui/modal/ModalKeySelect";
|
||||
import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
|
||||
import {BoxedInputField} from "tc-shared/ui/react-elements/InputField";
|
||||
import {IDevice} from "tc-shared/audio/recorder";
|
||||
|
||||
const cssStyle = require("./Microphone.scss");
|
||||
|
||||
|
@ -37,28 +37,41 @@ const MicrophoneStatus = (props: { state: MicrophoneSelectedState }) => {
|
|||
}
|
||||
}
|
||||
|
||||
type ActivityBarStatus = { mode: "success" } | { mode: "error", message: string } | { mode: "loading" };
|
||||
type ActivityBarStatus = { mode: "success" } | { mode: "error", message: string } | { mode: "loading" } | { mode: "uninitialized" };
|
||||
const ActivityBar = (props: { events: Registry<MicrophoneSettingsEvents>, deviceId: string, disabled?: boolean }) => {
|
||||
const refHider = useRef<HTMLDivElement>();
|
||||
const [ status, setStatus ] = useState<ActivityBarStatus>({ mode: "loading" });
|
||||
|
||||
props.events.reactUse("notify_device_level", event => {
|
||||
const device = event.level[props.deviceId];
|
||||
if(!device) {
|
||||
if(status.mode === "loading")
|
||||
if(event.status === "uninitialized") {
|
||||
if(status.mode === "uninitialized")
|
||||
return;
|
||||
|
||||
setStatus({ mode: "loading" });
|
||||
} else if(device.status === "success") {
|
||||
if(status.mode !== "success") {
|
||||
setStatus({ mode: "success" });
|
||||
}
|
||||
refHider.current.style.width = (100 - device.level) + "%";
|
||||
setStatus({ mode: "uninitialized" });
|
||||
} else if(event.status === "no-permissions") {
|
||||
const noPermissionsMessage = tr("no permissions");
|
||||
if(status.mode === "error" && status.message === noPermissionsMessage)
|
||||
return;
|
||||
|
||||
setStatus({ mode: "error", message: noPermissionsMessage });
|
||||
} else {
|
||||
if(status.mode === "error" && status.message === device.error)
|
||||
return;
|
||||
const device = event.level[props.deviceId];
|
||||
if(!device) {
|
||||
if(status.mode === "loading")
|
||||
return;
|
||||
|
||||
setStatus({ mode: "error", message: device.error });
|
||||
setStatus({ mode: "loading" });
|
||||
} else if(device.status === "success") {
|
||||
if(status.mode !== "success") {
|
||||
setStatus({ mode: "success" });
|
||||
}
|
||||
refHider.current.style.width = (100 - device.level) + "%";
|
||||
} else {
|
||||
if(status.mode === "error" && status.message === device.error)
|
||||
return;
|
||||
|
||||
setStatus({ mode: "error", message: device.error + "" });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -96,16 +109,51 @@ const Microphone = (props: { events: Registry<MicrophoneSettingsEvents>, device:
|
|||
<div className={cssStyle.name}>{props.device.name}</div>
|
||||
</div>
|
||||
<div className={cssStyle.containerActivity}>
|
||||
<ActivityBar events={props.events} deviceId={props.device.id} />
|
||||
{props.device.id === IDevice.NoDeviceId ? undefined :
|
||||
<ActivityBar key={"a"} events={props.events} deviceId={props.device.id} />
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
type MicrophoneListState = {
|
||||
type: "normal" | "loading" | "audio-not-initialized"
|
||||
} | {
|
||||
type: "error",
|
||||
message: string
|
||||
} | {
|
||||
type: "no-permissions",
|
||||
bySystem: boolean
|
||||
}
|
||||
|
||||
const PermissionDeniedOverlay = (props: { bySystem: boolean, shown: boolean, onRequestPermission: () => void }) => {
|
||||
if(props.bySystem) {
|
||||
return (
|
||||
<div key={"system"} className={cssStyle.overlay + " " + (props.shown ? undefined : cssStyle.hidden)}>
|
||||
<ClientIconRenderer icon={ClientIcon.MicrophoneBroken} />
|
||||
<a><Translatable>Microphone access has been blocked by your browser.</Translatable></a>
|
||||
</div>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<div key={"user"} className={cssStyle.overlay + " " + (props.shown ? undefined : cssStyle.hidden)}>
|
||||
<a><Translatable>Please grant access to your microphone.</Translatable></a>
|
||||
<Button
|
||||
key={"request"}
|
||||
color={"green"}
|
||||
type={"small"}
|
||||
onClick={props.onRequestPermission}
|
||||
><Translatable>Request access</Translatable></Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) => {
|
||||
const [ state, setState ] = useState<"normal" | "loading" | "error" | "audio-not-initialized">(() => {
|
||||
const [ state, setState ] = useState<MicrophoneListState>(() => {
|
||||
props.events.fire("query_devices");
|
||||
return "loading";
|
||||
return { type: "loading" };
|
||||
});
|
||||
const [ selectedDevice, setSelectedDevice ] = useState<{ deviceId: string, mode: "selected" | "selecting" }>();
|
||||
const [ deviceList, setDeviceList ] = useState<MicrophoneDevice[]>([]);
|
||||
|
@ -116,17 +164,20 @@ const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) =
|
|||
switch (event.status) {
|
||||
case "success":
|
||||
setDeviceList(event.devices.slice(0));
|
||||
setState("normal");
|
||||
setState({ type: "normal" });
|
||||
setSelectedDevice({ mode: "selected", deviceId: event.selectedDevice });
|
||||
break;
|
||||
|
||||
case "error":
|
||||
setError(event.error || tr("Unknown error"));
|
||||
setState("error");
|
||||
setState({ type: "error", message: event.error || tr("Unknown error") });
|
||||
break;
|
||||
|
||||
case "audio-not-initialized":
|
||||
setState("audio-not-initialized");
|
||||
setState({ type: "audio-not-initialized" });
|
||||
break;
|
||||
|
||||
case "no-permissions":
|
||||
setState({ type: "no-permissions", bySystem: event.shouldAsk });
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
@ -144,25 +195,50 @@ const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) =
|
|||
|
||||
return (
|
||||
<div className={cssStyle.body + " " + cssStyle.containerDevices}>
|
||||
<div className={cssStyle.overlay + " " + (state !== "audio-not-initialized" ? cssStyle.hidden : undefined)}>
|
||||
<div className={cssStyle.overlay + " " + (state.type !== "audio-not-initialized" ? cssStyle.hidden : undefined)}>
|
||||
<a>
|
||||
<Translatable>The web audio player hasn't been initialized yet.</Translatable>
|
||||
<Translatable>Click somewhere on the page to initialize it.</Translatable>
|
||||
</a>
|
||||
</div>
|
||||
<div className={cssStyle.overlay + " " + (state !== "error" ? cssStyle.hidden : undefined)}>
|
||||
<div className={cssStyle.overlay + " " + (state.type !== "error" ? cssStyle.hidden : undefined)}>
|
||||
<a>{error}</a>
|
||||
</div>
|
||||
<div className={cssStyle.overlay + " " + (state !== "loading" ? cssStyle.hidden : undefined)}>
|
||||
<div className={cssStyle.overlay + " " + (state.type !== "no-permissions" ? cssStyle.hidden : undefined)}>
|
||||
<a><Translatable>Please grant access to your microphone.</Translatable></a>
|
||||
<Button
|
||||
color={"green"}
|
||||
type={"small"}
|
||||
onClick={() => props.events.fire("action_request_permissions") }
|
||||
><Translatable>Request access</Translatable></Button>
|
||||
</div>
|
||||
<PermissionDeniedOverlay
|
||||
bySystem={state.type === "no-permissions" ? state.bySystem : false}
|
||||
shown={state.type === "no-permissions"}
|
||||
onRequestPermission={() => props.events.fire("action_request_permissions")}
|
||||
/>
|
||||
<div className={cssStyle.overlay + " " + (state.type !== "loading" ? cssStyle.hidden : undefined)}>
|
||||
<a><Translatable>Loading</Translatable> <LoadingDots/></a>
|
||||
</div>
|
||||
<Microphone key={"d-default"}
|
||||
device={{ id: IDevice.NoDeviceId, driver: tr("No device"), name: tr("No device") }}
|
||||
events={props.events}
|
||||
state={IDevice.NoDeviceId === selectedDevice?.deviceId ? selectedDevice.mode === "selecting" ? "applying" : "selected" : "unselected"}
|
||||
onClick={() => {
|
||||
if(state.type !== "normal" || selectedDevice?.mode === "selecting")
|
||||
return;
|
||||
|
||||
props.events.fire("action_set_selected_device", { deviceId: IDevice.NoDeviceId });
|
||||
}}
|
||||
/>
|
||||
|
||||
{deviceList.map(e => <Microphone
|
||||
key={"d-" + e.id}
|
||||
device={e}
|
||||
events={props.events}
|
||||
state={e.id === selectedDevice?.deviceId ? selectedDevice.mode === "selecting" ? "applying" : "selected" : "unselected"}
|
||||
onClick={() => {
|
||||
if(state !== "normal" || selectedDevice?.mode === "selecting")
|
||||
if(state.type !== "normal" || selectedDevice?.mode === "selecting")
|
||||
return;
|
||||
|
||||
props.events.fire("action_set_selected_device", { deviceId: e.id });
|
||||
|
@ -187,7 +263,7 @@ const ListRefreshButton = (props: { events: Registry<MicrophoneSettingsEvents> }
|
|||
|
||||
props.events.reactUse("query_devices", () => setUpdateTimeout(Date.now() + 2000));
|
||||
|
||||
return <Button disabled={updateTimeout > 0} type={"small"} color={"blue"} onClick={() => props.events.fire("query_devices", { refresh_list: true })}>
|
||||
return <Button disabled={updateTimeout > 0} color={"blue"} onClick={() => props.events.fire("query_devices", { refresh_list: true })}>
|
||||
<Translatable>Update</Translatable>
|
||||
</Button>;
|
||||
}
|
||||
|
@ -203,7 +279,6 @@ const VolumeSettings = (props: { events: Registry<MicrophoneSettingsEvents> }) =
|
|||
if(event.setting !== "volume")
|
||||
return;
|
||||
|
||||
console.error("Set value: %o", event.value);
|
||||
refSlider.current?.setState({ value: event.value });
|
||||
setValue(event.value);
|
||||
});
|
||||
|
@ -386,6 +461,7 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
|||
return "loading";
|
||||
});
|
||||
|
||||
const [ currentDevice, setCurrentDevice ] = useState(undefined);
|
||||
const [ isActive, setActive ] = useState(false);
|
||||
|
||||
props.events.reactUse("notify_setting", event => {
|
||||
|
@ -397,10 +473,18 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
|||
}
|
||||
});
|
||||
|
||||
props.events.reactUse("notify_devices", event => {
|
||||
setCurrentDevice(event.selectedDevice);
|
||||
});
|
||||
|
||||
props.events.reactUse("action_set_selected_device_result", event => {
|
||||
setCurrentDevice(event.deviceId);
|
||||
});
|
||||
|
||||
return (
|
||||
<div className={cssStyle.containerSensitivity}>
|
||||
<div className={cssStyle.containerBar}>
|
||||
<ActivityBar events={props.events} deviceId={"default"} disabled={!isActive} />
|
||||
<ActivityBar events={props.events} deviceId={currentDevice} disabled={!isActive || !currentDevice} />
|
||||
</div>
|
||||
<Slider
|
||||
ref={refSlider}
|
||||
|
@ -416,7 +500,7 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
|||
|
||||
disabled={value === "loading" || !isActive}
|
||||
|
||||
onChange={value => {}}
|
||||
onChange={value => { props.events.fire("action_set_setting", { setting: "threshold-threshold", value: value })}}
|
||||
/>
|
||||
</div>
|
||||
)
@ -0,0 +1,53 @@
|
|||
export enum FilterType {
|
||||
THRESHOLD,
|
||||
VOICE_LEVEL,
|
||||
STATE
|
||||
}
|
||||
|
||||
export interface FilterBase {
|
||||
readonly priority: number;
|
||||
|
||||
set_enabled(flag: boolean) : void;
|
||||
is_enabled() : boolean;
|
||||
}
|
||||
|
||||
export interface MarginedFilter {
|
||||
get_margin_frames() : number;
|
||||
set_margin_frames(value: number);
|
||||
}
|
||||
|
||||
export interface ThresholdFilter extends FilterBase, MarginedFilter {
|
||||
readonly type: FilterType.THRESHOLD;
|
||||
|
||||
get_threshold() : number;
|
||||
set_threshold(value: number) : Promise<void>;
|
||||
|
||||
get_attack_smooth() : number;
|
||||
get_release_smooth() : number;
|
||||
|
||||
set_attack_smooth(value: number);
|
||||
set_release_smooth(value: number);
|
||||
|
||||
callback_level?: (value: number) => any;
|
||||
}
|
||||
|
||||
export interface VoiceLevelFilter extends FilterBase, MarginedFilter {
|
||||
type: FilterType.VOICE_LEVEL;
|
||||
|
||||
get_level() : number;
|
||||
}
|
||||
|
||||
export interface StateFilter extends FilterBase {
|
||||
type: FilterType.STATE;
|
||||
|
||||
set_state(state: boolean) : Promise<void>;
|
||||
is_active() : boolean; /* if true, the filter allows data to pass */
|
||||
}
|
||||
|
||||
export type FilterTypeClass<T extends FilterType> =
|
||||
T extends FilterType.STATE ? StateFilter :
|
||||
T extends FilterType.VOICE_LEVEL ? VoiceLevelFilter :
|
||||
T extends FilterType.THRESHOLD ? ThresholdFilter :
|
||||
never;
|
||||
|
||||
export type Filter = ThresholdFilter | VoiceLevelFilter | StateFilter;
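A short usage sketch for the new filter API (not part of the commit; it assumes an existing `AbstractInput` instance and mirrors the `createFilter` call that `RecorderProfile` makes further down):

```typescript
import {FilterType, ThresholdFilter} from "tc-shared/voice/Filter";
import {AbstractInput} from "tc-shared/voice/RecorderBase";

async function setupThresholdGate(input: AbstractInput, threshold: number) : Promise<ThresholdFilter> {
    /* createFilter() returns the concrete filter type for the requested FilterType (FilterTypeClass<T>). */
    const filter = input.createFilter(FilterType.THRESHOLD, 100);

    await filter.set_threshold(threshold);
    filter.set_margin_frames(10);      /* keep the gate open for a short margin after the level drops */
    filter.set_attack_smooth(.25);
    filter.set_release_smooth(.9);

    /* Optional level callback, e.g. for driving a UI meter. */
    filter.callback_level = level => console.debug("Current input level: %o", level);
    return filter;
}
```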
@ -1,14 +1,6 @@
|
|||
export interface InputDevice {
|
||||
unique_id: string;
|
||||
driver: string;
|
||||
name: string;
|
||||
default_input: boolean;
|
||||
|
||||
supported: boolean;
|
||||
|
||||
sample_rate: number;
|
||||
channels: number;
|
||||
}
|
||||
import {IDevice} from "tc-shared/audio/recorder";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
|
||||
|
||||
export enum InputConsumerType {
|
||||
CALLBACK,
|
||||
|
@ -31,47 +23,6 @@ export interface NodeInputConsumer extends InputConsumer {
|
|||
}
|
||||
|
||||
|
||||
export namespace filter {
|
||||
export enum Type {
|
||||
THRESHOLD,
|
||||
VOICE_LEVEL,
|
||||
STATE
|
||||
}
|
||||
|
||||
export interface Filter {
|
||||
type: Type;
|
||||
|
||||
is_enabled() : boolean;
|
||||
}
|
||||
|
||||
export interface MarginedFilter {
|
||||
get_margin_frames() : number;
|
||||
set_margin_frames(value: number);
|
||||
}
|
||||
|
||||
export interface ThresholdFilter extends Filter, MarginedFilter {
|
||||
get_threshold() : number;
|
||||
set_threshold(value: number) : Promise<void>;
|
||||
|
||||
get_attack_smooth() : number;
|
||||
get_release_smooth() : number;
|
||||
|
||||
set_attack_smooth(value: number);
|
||||
set_release_smooth(value: number);
|
||||
|
||||
callback_level?: (value: number) => any;
|
||||
}
|
||||
|
||||
export interface VoiceLevelFilter extends Filter, MarginedFilter {
|
||||
get_level() : number;
|
||||
}
|
||||
|
||||
export interface StateFilter extends Filter {
|
||||
set_state(state: boolean) : Promise<void>;
|
||||
is_active() : boolean; /* if true the the filter allows data to pass */
|
||||
}
|
||||
}
|
||||
|
||||
export enum InputState {
|
||||
PAUSED,
|
||||
INITIALIZING,
|
||||
|
@ -87,36 +38,39 @@ export enum InputStartResult {
|
|||
ENOTSUPPORTED = "enotsupported"
|
||||
}
|
||||
|
||||
export interface AbstractInput {
|
||||
callback_begin: () => any;
|
||||
callback_end: () => any;
|
||||
export interface InputEvents {
|
||||
notify_voice_start: {},
|
||||
notify_voice_end: {}
|
||||
}
|
||||
|
||||
current_state() : InputState;
|
||||
export interface AbstractInput {
|
||||
readonly events: Registry<InputEvents>;
|
||||
|
||||
currentState() : InputState;
|
||||
|
||||
start() : Promise<InputStartResult>;
|
||||
stop() : Promise<void>;
|
||||
|
||||
current_device() : InputDevice | undefined;
|
||||
set_device(device: InputDevice | undefined) : Promise<void>;
|
||||
currentDevice() : IDevice | undefined;
|
||||
setDevice(device: IDevice | undefined) : Promise<void>;
|
||||
|
||||
current_consumer() : InputConsumer | undefined;
|
||||
set_consumer(consumer: InputConsumer) : Promise<void>;
|
||||
currentConsumer() : InputConsumer | undefined;
|
||||
setConsumer(consumer: InputConsumer) : Promise<void>;
|
||||
|
||||
get_filter(type: filter.Type) : filter.Filter | undefined;
|
||||
supports_filter(type: filter.Type) : boolean;
|
||||
supportsFilter(type: FilterType) : boolean;
|
||||
createFilter<T extends FilterType>(type: T, priority: number) : FilterTypeClass<T>;
|
||||
|
||||
clear_filter();
|
||||
disable_filter(type: filter.Type);
|
||||
enable_filter(type: filter.Type);
|
||||
removeFilter(filter: Filter);
|
||||
resetFilter();
|
||||
|
||||
get_volume() : number;
|
||||
set_volume(volume: number);
|
||||
getVolume() : number;
|
||||
setVolume(volume: number);
|
||||
}
|
||||
|
||||
export interface LevelMeter {
|
||||
device() : InputDevice;
|
||||
device() : IDevice;
|
||||
|
||||
set_observer(callback: (value: number) => any);
|
||||
|
||||
destory();
|
||||
destroy();
|
||||
}
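For the reworked `LevelMeter` interface, a brief sketch (again not part of the commit) of how a meter might be obtained, observed and disposed; `createLevelMeter()` is the factory declared on `AudioRecorderBacked` in `tc-shared/audio/recorder.ts`:

```typescript
import {getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
import {LevelMeter} from "tc-shared/voice/RecorderBase";

async function observeInputLevel(device: IDevice, onLevel: (value: number) => void) : Promise<() => void> {
    const meter: LevelMeter = await getRecorderBackend().createLevelMeter(device);
    meter.set_observer(onLevel);

    /* The caller is responsible for releasing the meter again. */
    return () => meter.destroy();
}
```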
@ -1,12 +1,13 @@
|
|||
import * as log from "tc-shared/log";
|
||||
import {AbstractInput, filter, InputDevice} from "tc-shared/voice/RecorderBase";
|
||||
import {LogCategory, logWarn} from "tc-shared/log";
|
||||
import {AbstractInput} from "tc-shared/voice/RecorderBase";
|
||||
import {KeyDescriptor, KeyHook} from "tc-shared/PPTListener";
|
||||
import {LogCategory} from "tc-shared/log";
|
||||
import {Settings, settings} from "tc-shared/settings";
|
||||
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
||||
import * as aplayer from "tc-backend/audio/player";
|
||||
import * as arecorder from "tc-backend/audio/recorder";
|
||||
import * as ppt from "tc-backend/ppt";
|
||||
import {getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
|
||||
import {FilterType, StateFilter} from "tc-shared/voice/Filter";
|
||||
|
||||
export type VadType = "threshold" | "push_to_talk" | "active";
|
||||
export interface RecorderProfileConfig {
|
||||
|
@ -46,18 +47,20 @@ export class RecorderProfile {
|
|||
|
||||
current_handler: ConnectionHandler;
|
||||
|
||||
callback_input_change: (old_input: AbstractInput, new_input: AbstractInput) => Promise<void>;
|
||||
callback_input_change: (oldInput: AbstractInput | undefined, newInput: AbstractInput | undefined) => Promise<void>;
|
||||
callback_start: () => any;
|
||||
callback_stop: () => any;
|
||||
|
||||
callback_unmount: () => any; /* called if somebody else takes the ownership */
|
||||
|
||||
record_supported: boolean;
|
||||
|
||||
private pptHook: KeyHook;
|
||||
private readonly pptHook: KeyHook;
|
||||
private pptTimeout: number;
|
||||
private pptHookRegistered: boolean;
|
||||
|
||||
private registeredFilter = {
|
||||
"ppt-gate": undefined as StateFilter
|
||||
}
|
||||
|
||||
constructor(name: string, volatile?: boolean) {
|
||||
this.name = name;
|
||||
this.volatile = typeof(volatile) === "boolean" ? volatile : false;
|
||||
|
@ -68,84 +71,96 @@ export class RecorderProfile {
|
|||
clearTimeout(this.pptTimeout);
|
||||
|
||||
this.pptTimeout = setTimeout(() => {
|
||||
const f = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
||||
if(f) f.set_state(true);
|
||||
this.registeredFilter["ppt-gate"]?.set_state(true);
|
||||
}, Math.max(this.config.vad_push_to_talk.delay, 0));
|
||||
},
|
||||
|
||||
callback_press: () => {
|
||||
if(this.pptTimeout)
|
||||
clearTimeout(this.pptTimeout);
|
||||
|
||||
const f = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
||||
if(f) f.set_state(false);
|
||||
this.registeredFilter["ppt-gate"]?.set_state(false);
|
||||
},
|
||||
|
||||
cancel: false
|
||||
} as KeyHook;
|
||||
this.pptHookRegistered = false;
|
||||
this.record_supported = true;
|
||||
}
|
||||
|
||||
async initialize() : Promise<void> {
|
||||
{
|
||||
let config = {};
|
||||
try {
|
||||
config = settings.static_global(Settings.FN_PROFILE_RECORD(this.name), {}) as RecorderProfileConfig;
|
||||
} catch (error) {
|
||||
logWarn(LogCategory.AUDIO, tr("Failed to load old recorder profile config for %s"), this.name);
|
||||
}
|
||||
|
||||
/* default values */
|
||||
this.config = {
|
||||
version: 1,
|
||||
device_id: undefined,
|
||||
volume: 100,
|
||||
|
||||
vad_threshold: {
|
||||
threshold: 25
|
||||
},
|
||||
vad_type: "threshold",
|
||||
vad_push_to_talk: {
|
||||
delay: 300,
|
||||
key_alt: false,
|
||||
key_ctrl: false,
|
||||
key_shift: false,
|
||||
key_windows: false,
|
||||
key_code: 't'
|
||||
}
|
||||
};
|
||||
|
||||
Object.assign(this.config, config || {});
|
||||
}
|
||||
|
||||
aplayer.on_ready(async () => {
|
||||
await getRecorderBackend().getDeviceList().awaitHealthy();
|
||||
|
||||
this.initialize_input();
|
||||
await this.load();
|
||||
await this.reinitialize_filter();
|
||||
await this.reinitializeFilter();
|
||||
});
|
||||
}
|
||||
|
||||
private initialize_input() {
|
||||
this.input = arecorder.create_input();
|
||||
this.input.callback_begin = () => {
|
||||
this.input = getRecorderBackend().createInput();
|
||||
|
||||
this.input.events.on("notify_voice_start", () => {
|
||||
log.debug(LogCategory.VOICE, "Voice start");
|
||||
if(this.callback_start)
|
||||
this.callback_start();
|
||||
};
|
||||
});
|
||||
|
||||
this.input.callback_end = () => {
|
||||
this.input.events.on("notify_voice_end", () => {
|
||||
log.debug(LogCategory.VOICE, "Voice end");
|
||||
if(this.callback_stop)
|
||||
this.callback_stop();
|
||||
};
|
||||
});
|
||||
|
||||
//TODO: Await etc?
|
||||
this.callback_input_change && this.callback_input_change(undefined, this.input);
|
||||
}
|
||||
|
||||
private async load() {
|
||||
const config = settings.static_global(Settings.FN_PROFILE_RECORD(this.name), {}) as RecorderProfileConfig;
|
||||
|
||||
/* default values */
|
||||
this.config = {
|
||||
version: 1,
|
||||
device_id: undefined,
|
||||
volume: 100,
|
||||
|
||||
vad_threshold: {
|
||||
threshold: 25
|
||||
},
|
||||
vad_type: "threshold",
|
||||
vad_push_to_talk: {
|
||||
delay: 300,
|
||||
key_alt: false,
|
||||
key_ctrl: false,
|
||||
key_shift: false,
|
||||
key_windows: false,
|
||||
key_code: 't'
|
||||
}
|
||||
};
|
||||
|
||||
Object.assign(this.config, config || {});
|
||||
this.input.set_volume(this.config.volume / 100);
|
||||
this.input.setVolume(this.config.volume / 100);
|
||||
|
||||
{
|
||||
const all_devices = arecorder.devices();
|
||||
const devices = all_devices.filter(e => e.default_input || e.unique_id === this.config.device_id);
|
||||
const device = devices.find(e => e.unique_id === this.config.device_id) || devices[0];
|
||||
const allDevices = getRecorderBackend().getDeviceList().getDevices();
|
||||
const defaultDeviceId = getRecorderBackend().getDeviceList().getDefaultDeviceId();
|
||||
console.error("Devices: %o | Searching: %s", allDevices, this.config.device_id);
|
||||
|
||||
log.info(LogCategory.VOICE, tr("Loaded record profile device %s | %o (%o)"), this.config.device_id, device, all_devices);
|
||||
const devices = allDevices.filter(e => e.deviceId === defaultDeviceId || e.deviceId === this.config.device_id);
|
||||
const device = devices.find(e => e.deviceId === this.config.device_id) || devices[0];
|
||||
|
||||
log.info(LogCategory.VOICE, tr("Loaded record profile device %s | %o (%o)"), this.config.device_id, device, allDevices);
|
||||
try {
|
||||
await this.input.set_device(device);
|
||||
await this.input.setDevice(device);
|
||||
} catch(error) {
|
||||
log.error(LogCategory.VOICE, tr("Failed to set input device (%o)"), error);
|
||||
}
|
||||
|
@ -157,38 +172,36 @@ export class RecorderProfile {
|
|||
settings.changeGlobal(Settings.FN_PROFILE_RECORD(this.name), this.config);
|
||||
}
|
||||
|
||||
private async reinitialize_filter() {
|
||||
private async reinitializeFilter() {
|
||||
if(!this.input) return;
|
||||
|
||||
this.input.clear_filter();
|
||||
/* TODO: Really required? If still same input we can just use the registered filters */
|
||||
|
||||
this.input.resetFilter();
|
||||
delete this.registeredFilter["ppt-gate"];
|
||||
|
||||
if(this.pptHookRegistered) {
|
||||
ppt.unregister_key_hook(this.pptHook);
|
||||
this.pptHookRegistered = false;
|
||||
}
|
||||
|
||||
if(this.config.vad_type === "threshold") {
|
||||
const filter_ = this.input.get_filter(filter.Type.THRESHOLD) as filter.ThresholdFilter;
|
||||
await filter_.set_threshold(this.config.vad_threshold.threshold);
|
||||
await filter_.set_margin_frames(10); /* 500ms */
|
||||
const filter = this.input.createFilter(FilterType.THRESHOLD, 100);
|
||||
await filter.set_threshold(this.config.vad_threshold.threshold);
|
||||
|
||||
/* legacy client support */
|
||||
if('set_attack_smooth' in filter_)
|
||||
filter_.set_attack_smooth(.25);
|
||||
|
||||
if('set_release_smooth' in filter_)
|
||||
filter_.set_release_smooth(.9);
|
||||
|
||||
this.input.enable_filter(filter.Type.THRESHOLD);
|
||||
filter.set_margin_frames(10); /* 500ms */
|
||||
filter.set_attack_smooth(.25);
|
||||
filter.set_release_smooth(.9);
|
||||
} else if(this.config.vad_type === "push_to_talk") {
|
||||
const filter_ = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
||||
await filter_.set_state(true);
|
||||
const filter = this.input.createFilter(FilterType.STATE, 100);
|
||||
await filter.set_state(true);
|
||||
this.registeredFilter["ppt-gate"] = filter;
|
||||
|
||||
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||
this.pptHook[key] = this.config.vad_push_to_talk[key];
|
||||
|
||||
ppt.register_key_hook(this.pptHook);
|
||||
this.pptHookRegistered = true;
|
||||
|
||||
this.input.enable_filter(filter.Type.STATE);
|
||||
} else if(this.config.vad_type === "active") {}
|
||||
}
|
||||
|
||||
|
@ -199,7 +212,7 @@ export class RecorderProfile {
|
|||
|
||||
if(this.input) {
|
||||
try {
|
||||
await this.input.set_consumer(undefined);
|
||||
await this.input.setConsumer(undefined);
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.VOICE, tr("Failed to unmount input consumer for profile (%o)"), error);
|
||||
}
|
||||
|
@ -220,7 +233,7 @@ export class RecorderProfile {
|
|||
return false;
|
||||
|
||||
this.config.vad_type = type;
|
||||
this.reinitialize_filter();
|
||||
this.reinitializeFilter();
|
||||
this.save();
|
||||
return true;
|
||||
}
|
||||
|
@ -231,7 +244,7 @@ export class RecorderProfile {
|
|||
return;
|
||||
|
||||
this.config.vad_threshold.threshold = value;
|
||||
this.reinitialize_filter();
|
||||
this.reinitializeFilter();
|
||||
this.save();
|
||||
}
|
||||
|
||||
|
@ -240,7 +253,7 @@ export class RecorderProfile {
|
|||
for(const _key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||
this.config.vad_push_to_talk[_key] = key[_key];
|
||||
|
||||
this.reinitialize_filter();
|
||||
this.reinitializeFilter();
|
||||
this.save();
|
||||
}
|
||||
|
||||
|
@ -250,25 +263,24 @@ export class RecorderProfile {
|
|||
return;
|
||||
|
||||
this.config.vad_push_to_talk.delay = value;
|
||||
this.reinitialize_filter();
|
||||
this.reinitializeFilter();
|
||||
this.save();
|
||||
}
|
||||
|
||||
|
||||
current_device() : InputDevice | undefined { return this.input?.current_device(); }
|
||||
set_device(device: InputDevice | undefined) : Promise<void> {
|
||||
this.config.device_id = device ? device.unique_id : undefined;
|
||||
getDeviceId() : string { return this.config.device_id; }
|
||||
set_device(device: IDevice | undefined) : Promise<void> {
|
||||
this.config.device_id = device ? device.deviceId : IDevice.NoDeviceId;
|
||||
this.save();
|
||||
return this.input.set_device(device);
|
||||
return this.input?.setDevice(device) || Promise.resolve();
|
||||
}
|
||||
|
||||
get_volume() : number { return this.input ? (this.input.get_volume() * 100) : this.config.volume; }
|
||||
get_volume() : number { return this.input ? (this.input.getVolume() * 100) : this.config.volume; }
|
||||
set_volume(volume: number) {
|
||||
if(this.config.volume === volume)
|
||||
return;
|
||||
|
||||
this.config.volume = volume;
|
||||
this.input && this.input.set_volume(volume / 100);
|
||||
this.input && this.input.setVolume(volume / 100);
|
||||
this.save();
|
||||
}
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@@ -0,0 +1,766 @@
|
|||
import {
|
||||
AbstractDeviceList,
|
||||
AudioRecorderBacked,
|
||||
DeviceList,
|
||||
DeviceListEvents,
|
||||
DeviceListState,
|
||||
IDevice,
|
||||
PermissionState
|
||||
} from "tc-shared/audio/recorder";
|
||||
import {Registry} from "tc-shared/events";
|
||||
import * as rbase from "tc-shared/voice/RecorderBase";
|
||||
import {
|
||||
AbstractInput,
|
||||
CallbackInputConsumer,
|
||||
InputConsumer,
|
||||
InputConsumerType, InputEvents,
|
||||
InputStartResult,
|
||||
InputState,
|
||||
LevelMeter,
|
||||
NodeInputConsumer
|
||||
} from "tc-shared/voice/RecorderBase";
|
||||
import * as log from "tc-shared/log";
|
||||
import {LogCategory, logWarn} from "tc-shared/log";
|
||||
import * as aplayer from "./player";
|
||||
import {JAbstractFilter, JStateFilter, JThresholdFilter} from "./RecorderFilter";
|
||||
import * as loader from "tc-loader";
|
||||
import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
|
||||
|
||||
declare global {
|
||||
interface MediaStream {
|
||||
stop();
|
||||
}
|
||||
}
|
||||
|
||||
export interface WebIDevice extends IDevice {
|
||||
groupId: string;
|
||||
}
|
||||
|
||||
function getUserMediaFunctionPromise() : (constraints: MediaStreamConstraints) => Promise<MediaStream> {
|
||||
if('mediaDevices' in navigator && 'getUserMedia' in navigator.mediaDevices)
|
||||
return constraints => navigator.mediaDevices.getUserMedia(constraints);
|
||||
|
||||
const _callbacked_function = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
|
||||
if(!_callbacked_function)
|
||||
return undefined;
|
||||
|
||||
return constraints => new Promise<MediaStream>((resolve, reject) => _callbacked_function(constraints, resolve, reject));
|
||||
}
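A minimal usage sketch for the wrapper above; the stream handling is only indicated:

const getUserMedia = getUserMediaFunctionPromise();
if(getUserMedia) {
    getUserMedia({ audio: true }).then(stream => {
        /* hand the stream to the recorder, e.g. via createMediaStreamSource() */
    });
} else {
    console.warn("getUserMedia isn't supported by this browser");
}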
|
||||
|
||||
async function requestMicrophoneMediaStream(constraints: MediaTrackConstraints, updateDeviceList: boolean) : Promise<InputStartResult | MediaStream> {
|
||||
const mediaFunction = getUserMediaFunctionPromise();
|
||||
if(!mediaFunction) return InputStartResult.ENOTSUPPORTED;
|
||||
|
||||
try {
|
||||
log.info(LogCategory.AUDIO, tr("Requesting a microphone stream for device %s in group %s"), constraints.deviceId, constraints.groupId);
|
||||
const stream = mediaFunction({ audio: constraints });
|
||||
|
||||
if(updateDeviceList && inputDeviceList.getStatus() === "no-permissions") {
|
||||
inputDeviceList.refresh().then(() => {}); /* added the then body to avoid an inspection warning... */
|
||||
}
|
||||
|
||||
return stream;
|
||||
} catch(error) {
|
||||
if('name' in error) {
|
||||
if(error.name === "NotAllowedError") {
|
||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed (No permissions). Browser message: %o"), error.message);
|
||||
return InputStartResult.ENOTALLOWED;
|
||||
} else {
|
||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed. Request resulted in error: %o: %o"), error.name, error);
|
||||
}
|
||||
} else {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to initialize recording stream (%o)"), error);
|
||||
}
|
||||
|
||||
return InputStartResult.EUNKNOWN;
|
||||
}
|
||||
}
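Callers have to distinguish a real stream from the InputStartResult error values; a minimal sketch, mirroring the checks used further down in this file:

const result = await requestMicrophoneMediaStream({ deviceId: "default" }, false);
if(result instanceof MediaStream) {
    /* recording can start */
} else if(result === InputStartResult.ENOTALLOWED) {
    /* ask the user to grant microphone permissions */
} else {
    /* ENOTSUPPORTED, EBUSY or EUNKNOWN */
}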
|
||||
|
||||
async function requestMicrophonePermissions() : Promise<PermissionState> {
|
||||
const begin = Date.now();
|
||||
try {
|
||||
await getUserMediaFunctionPromise()({ audio: { deviceId: "default" }, video: false });
|
||||
return "granted";
|
||||
} catch (error) {
|
||||
const end = Date.now();
|
||||
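/* Heuristic: if the request settles in under 250ms no prompt was shown, so the answer came from a remembered (system) decision rather than from the user. */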
const isSystem = (end - begin) < 250;
|
||||
log.debug(LogCategory.AUDIO, tr("Microphone device request took %d milliseconds. System answered: %s"), end - begin, isSystem);
|
||||
return "denied";
|
||||
}
|
||||
}
|
||||
|
||||
let inputDeviceList: WebInputDeviceList;
|
||||
class WebInputDeviceList extends AbstractDeviceList {
|
||||
private devices: WebIDevice[];
|
||||
|
||||
private deviceListQueryPromise: Promise<void>;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
this.devices = [];
|
||||
}
|
||||
|
||||
getDefaultDeviceId(): string {
|
||||
return "default";
|
||||
}
|
||||
|
||||
getDevices(): IDevice[] {
|
||||
return this.devices;
|
||||
}
|
||||
|
||||
getEvents(): Registry<DeviceListEvents> {
|
||||
return this.events;
|
||||
}
|
||||
|
||||
getStatus(): DeviceListState {
|
||||
return this.listState;
|
||||
}
|
||||
|
||||
isRefreshAvailable(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
refresh(askPermissions?: boolean): Promise<void> {
|
||||
return this.queryDevices(askPermissions === true);
|
||||
}
|
||||
|
||||
async requestPermissions(): Promise<PermissionState> {
|
||||
if(this.permissionState !== "unknown")
|
||||
return this.permissionState;
|
||||
|
||||
let result = await requestMicrophonePermissions();
|
||||
if(result === "granted" && this.listState === "no-permissions") {
|
||||
/* if called within doQueryDevices, queryDevices will just return the promise */
|
||||
this.queryDevices(false).then(() => {});
|
||||
}
|
||||
this.setPermissionState(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private queryDevices(askPermissions: boolean) : Promise<void> {
|
||||
if(this.deviceListQueryPromise)
|
||||
return this.deviceListQueryPromise;
|
||||
|
||||
this.deviceListQueryPromise = this.doQueryDevices(askPermissions).catch(error => {
|
||||
log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);
|
||||
|
||||
if(this.listState !== "healthy")
|
||||
this.listState = "error";
|
||||
}).then(() => {
|
||||
this.deviceListQueryPromise = undefined;
|
||||
});
|
||||
|
||||
return this.deviceListQueryPromise || Promise.resolve();
|
||||
}
|
||||
|
||||
private async doQueryDevices(askPermissions: boolean) {
|
||||
let devices = await navigator.mediaDevices.enumerateDevices();
|
||||
let hasPermissions = devices.findIndex(e => e.label !== "") !== -1;
|
||||
|
||||
if(!hasPermissions && askPermissions) {
|
||||
this.setState("no-permissions");
|
||||
|
||||
let skipPermissionAsk = false;
|
||||
if('permissions' in navigator && 'query' in navigator.permissions) {
|
||||
try {
|
||||
const result = await navigator.permissions.query({ name: "microphone" });
|
||||
if(result.state === "denied") {
|
||||
this.setPermissionState("denied");
|
||||
skipPermissionAsk = true;
|
||||
}
|
||||
} catch (error) {
|
||||
logWarn(LogCategory.GENERAL, tr("Failed to query for microphone permissions: %s"), error);
|
||||
}
|
||||
}
|
||||
|
||||
if(!skipPermissionAsk) {
|
||||
/* request permissions */
|
||||
hasPermissions = await this.requestPermissions() === "granted";
|
||||
if(hasPermissions) {
|
||||
devices = await navigator.mediaDevices.enumerateDevices();
|
||||
}
|
||||
}
|
||||
}
|
||||
if(hasPermissions) {
|
||||
this.setPermissionState("granted");
|
||||
}
|
||||
|
||||
if(window.detectedBrowser?.name === "firefox") {
|
||||
devices = [{
|
||||
label: tr("Default Firefox device"),
|
||||
groupId: "default",
|
||||
deviceId: "default",
|
||||
kind: "audioinput",
|
||||
|
||||
toJSON: undefined
|
||||
}];
|
||||
}
|
||||
|
||||
const inputDevices = devices.filter(e => e.kind === "audioinput");
|
||||
|
||||
const oldDeviceList = this.devices;
|
||||
this.devices = [];
|
||||
|
||||
let devicesAdded = 0;
|
||||
for(const device of inputDevices) {
|
||||
const oldIndex = oldDeviceList.findIndex(e => e.deviceId === device.deviceId);
|
||||
if(oldIndex === -1) {
|
||||
devicesAdded++;
|
||||
} else {
|
||||
oldDeviceList.splice(oldIndex, 1);
|
||||
}
|
||||
|
||||
this.devices.push({
|
||||
deviceId: device.deviceId,
|
||||
driver: "WebAudio",
|
||||
groupId: device.groupId,
|
||||
name: device.label
|
||||
});
|
||||
}
|
||||
|
||||
this.events.fire("notify_list_updated", { addedDeviceCount: devicesAdded, removedDeviceCount: oldDeviceList.length });
|
||||
if(hasPermissions) {
|
||||
this.setState("healthy");
|
||||
} else {
|
||||
this.setState("no-permissions");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class WebAudioRecorder implements AudioRecorderBacked {
|
||||
createInput(): AbstractInput {
|
||||
return new JavascriptInput();
|
||||
}
|
||||
|
||||
async createLevelMeter(device: IDevice): Promise<LevelMeter> {
|
||||
const meter = new JavascriptLevelmeter(device as any);
|
||||
await meter.initialize();
|
||||
return meter;
|
||||
}
|
||||
|
||||
getDeviceList(): DeviceList {
|
||||
return inputDeviceList;
|
||||
}
|
||||
}
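A minimal usage sketch for the backend above (it assumes the loader task further down has already created inputDeviceList):

const backend = new WebAudioRecorder();
const deviceList = backend.getDeviceList();
await deviceList.refresh(true); /* may trigger the permission prompt */
for(const device of deviceList.getDevices()) {
    console.log("Input device: %s (%s)", device.name, device.deviceId);
}

const meter = await backend.createLevelMeter(deviceList.getDevices()[0]);
meter.set_observer(level => console.log("Current input level: %d", level));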
|
||||
|
||||
class JavascriptInput implements AbstractInput {
|
||||
public readonly events: Registry<InputEvents>;
|
||||
|
||||
private _state: InputState = InputState.PAUSED;
|
||||
private _current_device: WebIDevice | undefined;
|
||||
private _current_consumer: InputConsumer;
|
||||
|
||||
private _current_stream: MediaStream;
|
||||
private _current_audio_stream: MediaStreamAudioSourceNode;
|
||||
|
||||
private _audio_context: AudioContext;
|
||||
private _source_node: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
|
||||
private _consumer_callback_node: ScriptProcessorNode;
|
||||
private readonly _consumer_audio_callback;
|
||||
private _volume_node: GainNode;
|
||||
private _mute_node: GainNode;
|
||||
|
||||
private registeredFilters: (Filter & JAbstractFilter<AudioNode>)[] = [];
|
||||
private _filter_active: boolean = false;
|
||||
|
||||
private _volume: number = 1;
|
||||
|
||||
callback_begin: () => any = undefined;
|
||||
callback_end: () => any = undefined;
|
||||
|
||||
constructor() {
|
||||
this.events = new Registry<InputEvents>();
|
||||
|
||||
aplayer.on_ready(() => this._audio_initialized());
|
||||
this._consumer_audio_callback = this._audio_callback.bind(this);
|
||||
}
|
||||
|
||||
private _audio_initialized() {
|
||||
this._audio_context = aplayer.context();
|
||||
if(!this._audio_context)
|
||||
return;
|
||||
|
||||
this._mute_node = this._audio_context.createGain();
|
||||
this._mute_node.gain.value = 0;
|
||||
this._mute_node.connect(this._audio_context.destination);
|
||||
|
||||
this._consumer_callback_node = this._audio_context.createScriptProcessor(1024 * 4);
|
||||
this._consumer_callback_node.connect(this._mute_node);
|
||||
|
||||
this._volume_node = this._audio_context.createGain();
|
||||
this._volume_node.gain.value = this._volume;
|
||||
|
||||
this.initializeFilters();
|
||||
if(this._state === InputState.INITIALIZING)
|
||||
this.start();
|
||||
}
|
||||
|
||||
private initializeFilters() {
|
||||
for(const filter of this.registeredFilters) {
|
||||
if(filter.is_enabled())
|
||||
filter.finalize();
|
||||
}
|
||||
|
||||
this.registeredFilters.sort((a, b) => a.priority - b.priority);
|
||||
if(this._audio_context && this._volume_node) {
|
||||
const active_filter = this.registeredFilters.filter(e => e.is_enabled());
|
||||
let stream: AudioNode = this._volume_node;
|
||||
for(const f of active_filter) {
|
||||
f.initialize(this._audio_context, stream);
|
||||
stream = f.audio_node;
|
||||
}
|
||||
this._switch_source_node(stream);
|
||||
}
|
||||
}
|
||||
|
||||
private _audio_callback(event: AudioProcessingEvent) {
|
||||
if(!this._current_consumer || this._current_consumer.type !== InputConsumerType.CALLBACK)
|
||||
return;
|
||||
|
||||
const callback = this._current_consumer as CallbackInputConsumer;
|
||||
if(callback.callback_audio)
|
||||
callback.callback_audio(event.inputBuffer);
|
||||
|
||||
if(callback.callback_buffer) {
|
||||
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
|
||||
}
|
||||
}
|
||||
|
||||
current_state() : InputState { return this._state; };
|
||||
|
||||
private _start_promise: Promise<InputStartResult>;
|
||||
async start() : Promise<InputStartResult> {
|
||||
if(this._start_promise) {
|
||||
try {
|
||||
await this._start_promise;
|
||||
if(this._state != InputState.PAUSED)
|
||||
return;
|
||||
} catch(error) {
|
||||
log.debug(LogCategory.AUDIO, tr("JavascriptInput:start() Start promise await resulted in an error: %o"), error);
|
||||
}
|
||||
}
|
||||
|
||||
return await (this._start_promise = this._start());
|
||||
}
|
||||
|
||||
/* request permission for devices only one at a time! */
|
||||
private static _running_request: Promise<MediaStream | InputStartResult>;
|
||||
static async request_media_stream(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
|
||||
while(this._running_request) {
|
||||
try {
|
||||
await this._running_request;
|
||||
} catch(error) { }
|
||||
}
|
||||
|
||||
const audio_constrains: MediaTrackConstraints = {};
|
||||
if(window.detectedBrowser?.name === "firefox") {
|
||||
/*
|
||||
* Firefox only allows one microphone to be opened at a time and decides itself which input device that is.
|
||||
* It respects neither the deviceId nor the groupId.
|
||||
*/
|
||||
} else {
|
||||
audio_constrains.deviceId = device_id;
|
||||
audio_constrains.groupId = group_id;
|
||||
}
|
||||
|
||||
audio_constrains.echoCancellation = true;
|
||||
audio_constrains.autoGainControl = true;
|
||||
audio_constrains.noiseSuppression = true;
|
||||
|
||||
const promise = (this._running_request = requestMicrophoneMediaStream(audio_constrains, true));
|
||||
try {
|
||||
return await this._running_request;
|
||||
} finally {
|
||||
if(this._running_request === promise)
|
||||
this._running_request = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
private async _start() : Promise<InputStartResult> {
|
||||
try {
|
||||
if(this._state != InputState.PAUSED)
|
||||
throw tr("recorder already started");
|
||||
|
||||
this._state = InputState.INITIALIZING;
|
||||
if(!this._current_device)
|
||||
throw tr("invalid device");
|
||||
|
||||
if(!this._audio_context) {
|
||||
debugger;
|
||||
throw tr("missing audio context");
|
||||
}
|
||||
|
||||
const _result = await JavascriptInput.request_media_stream(this._current_device.deviceId, this._current_device.groupId);
|
||||
if(!(_result instanceof MediaStream)) {
|
||||
this._state = InputState.PAUSED;
|
||||
return _result;
|
||||
}
|
||||
this._current_stream = _result;
|
||||
|
||||
for(const f of this.registeredFilters) {
|
||||
if(f.is_enabled()) {
|
||||
f.set_pause(false);
|
||||
}
|
||||
}
|
||||
this._consumer_callback_node.addEventListener('audioprocess', this._consumer_audio_callback);
|
||||
|
||||
this._current_audio_stream = this._audio_context.createMediaStreamSource(this._current_stream);
|
||||
this._current_audio_stream.connect(this._volume_node);
|
||||
this._state = InputState.RECORDING;
|
||||
return InputStartResult.EOK;
|
||||
} catch(error) {
|
||||
if(this._state == InputState.INITIALIZING) {
|
||||
this._state = InputState.PAUSED;
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
this._start_promise = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async stop() {
|
||||
/* await all starts */
|
||||
try {
|
||||
if(this._start_promise)
|
||||
await this._start_promise;
|
||||
} catch(error) {}
|
||||
|
||||
this._state = InputState.PAUSED;
|
||||
if(this._current_audio_stream) {
|
||||
this._current_audio_stream.disconnect();
|
||||
}
|
||||
|
||||
if(this._current_stream) {
|
||||
if(this._current_stream.stop) {
|
||||
this._current_stream.stop();
|
||||
} else {
|
||||
this._current_stream.getTracks().forEach(value => {
|
||||
value.stop();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this._current_stream = undefined;
|
||||
this._current_audio_stream = undefined;
|
||||
for(const f of this.registeredFilters) {
|
||||
if(f.is_enabled()) {
|
||||
f.set_pause(true);
|
||||
}
|
||||
}
|
||||
|
||||
if(this._consumer_callback_node) {
|
||||
this._consumer_callback_node.removeEventListener('audioprocess', this._consumer_audio_callback);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
current_device(): IDevice | undefined {
|
||||
return this._current_device;
|
||||
}
|
||||
|
||||
async set_device(device: IDevice | undefined) {
|
||||
if(this._current_device === device)
|
||||
return;
|
||||
|
||||
const savedState = this._state;
|
||||
try {
|
||||
await this.stop();
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to stop previous record session (%o)"), error);
|
||||
}
|
||||
|
||||
this._current_device = device as any;
|
||||
if(!device) {
|
||||
this._state = savedState === InputState.PAUSED ? InputState.PAUSED : InputState.DRY;
|
||||
return;
|
||||
}
|
||||
|
||||
if(savedState !== InputState.PAUSED) {
|
||||
try {
|
||||
await this.start()
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to start new recording stream (%o)"), error);
|
||||
throw "failed to start record";
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
createFilter<T extends FilterType>(type: T, priority: number): FilterTypeClass<T> {
|
||||
let filter: JAbstractFilter<AudioNode> & Filter;
|
||||
switch (type) {
|
||||
case FilterType.STATE:
|
||||
filter = new JStateFilter(priority);
|
||||
break;
|
||||
|
||||
case FilterType.THRESHOLD:
|
||||
filter = new JThresholdFilter(priority);
|
||||
break;
|
||||
|
||||
case FilterType.VOICE_LEVEL:
|
||||
throw tr("voice filter isn't supported!");
|
||||
|
||||
default:
|
||||
throw tr("unknown filter type");
|
||||
}
|
||||
|
||||
filter.callback_active_change = () => this._recalculate_filter_status();
|
||||
this.registeredFilters.push(filter);
|
||||
this.initializeFilters();
|
||||
this._recalculate_filter_status();
|
||||
return filter as any;
|
||||
}
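A minimal sketch of wiring a threshold gate onto an input via the factory above (the threshold and margin values are examples, not defaults from this file):

const input = new JavascriptInput();
const gate = input.createFilter(FilterType.THRESHOLD, 100);
await gate.set_threshold(40);   /* gate opens above a level of 40 */
gate.set_margin_frames(10);     /* keep it open for 10 more 20ms analyse frames */
gate.callback_level = level => console.log("Current level: %d", level);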
|
||||
|
||||
supportsFilter(type: FilterType): boolean {
|
||||
switch (type) {
|
||||
case FilterType.THRESHOLD:
|
||||
case FilterType.STATE:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
resetFilter() {
|
||||
for(const filter of this.registeredFilters) {
|
||||
filter.finalize();
|
||||
filter.enabled = false;
|
||||
}
|
||||
|
||||
this.registeredFilters = [];
|
||||
this.initializeFilters();
|
||||
this._recalculate_filter_status();
|
||||
}
|
||||
|
||||
removeFilter(filterInstance: Filter) {
|
||||
const index = this.registeredFilters.indexOf(filterInstance as any);
|
||||
if(index === -1) return;
|
||||
|
||||
const [ filter ] = this.registeredFilters.splice(index, 1);
|
||||
filter.finalize();
|
||||
filter.enabled = false;
|
||||
|
||||
this.initializeFilters();
|
||||
this._recalculate_filter_status();
|
||||
}
|
||||
|
||||
private _recalculate_filter_status() {
|
||||
let filtered = this.registeredFilters.filter(e => e.is_enabled()).filter(e => (e as JAbstractFilter<AudioNode>).active).length > 0;
|
||||
if(filtered === this._filter_active)
|
||||
return;
|
||||
|
||||
this._filter_active = filtered;
|
||||
if(filtered) {
|
||||
if(this.callback_end)
|
||||
this.callback_end();
|
||||
} else {
|
||||
if(this.callback_begin)
|
||||
this.callback_begin();
|
||||
}
|
||||
}
|
||||
|
||||
current_consumer(): InputConsumer | undefined {
|
||||
return this._current_consumer;
|
||||
}
|
||||
|
||||
async set_consumer(consumer: InputConsumer) {
|
||||
if(this._current_consumer) {
|
||||
if(this._current_consumer.type == InputConsumerType.NODE) {
|
||||
if(this._source_node)
|
||||
(this._current_consumer as NodeInputConsumer).callback_disconnect(this._source_node)
|
||||
} else if(this._current_consumer.type === InputConsumerType.CALLBACK) {
|
||||
if(this._source_node)
|
||||
this._source_node.disconnect(this._consumer_callback_node);
|
||||
}
|
||||
}
|
||||
|
||||
if(consumer) {
|
||||
if(consumer.type == InputConsumerType.CALLBACK) {
|
||||
if(this._source_node)
|
||||
this._source_node.connect(this._consumer_callback_node);
|
||||
} else if(consumer.type == InputConsumerType.NODE) {
|
||||
if(this._source_node)
|
||||
(consumer as NodeInputConsumer).callback_node(this._source_node);
|
||||
} else {
|
||||
throw "native callback consumers are not supported!";
|
||||
}
|
||||
}
|
||||
this._current_consumer = consumer;
|
||||
}
|
||||
|
||||
private _switch_source_node(new_node: AudioNode) {
|
||||
if(this._current_consumer) {
|
||||
if(this._current_consumer.type == InputConsumerType.NODE) {
|
||||
const node_consumer = this._current_consumer as NodeInputConsumer;
|
||||
if(this._source_node)
|
||||
node_consumer.callback_disconnect(this._source_node);
|
||||
if(new_node)
|
||||
node_consumer.callback_node(new_node);
|
||||
} else if(this._current_consumer.type == InputConsumerType.CALLBACK) {
|
||||
this._source_node.disconnect(this._consumer_callback_node);
|
||||
if(new_node)
|
||||
new_node.connect(this._consumer_callback_node);
|
||||
}
|
||||
}
|
||||
this._source_node = new_node;
|
||||
}
|
||||
|
||||
get_volume(): number {
|
||||
return this._volume;
|
||||
}
|
||||
|
||||
set_volume(volume: number) {
|
||||
if(volume === this._volume)
|
||||
return;
|
||||
this._volume = volume;
|
||||
this._volume_node.gain.value = volume;
|
||||
}
|
||||
}
|
||||
|
||||
class JavascriptLevelmeter implements LevelMeter {
|
||||
private static _instances: JavascriptLevelmeter[] = [];
|
||||
private static _update_task: number;
|
||||
|
||||
readonly _device: WebIDevice;
|
||||
|
||||
private _callback: (num: number) => any;
|
||||
|
||||
private _context: AudioContext;
|
||||
private _gain_node: GainNode;
|
||||
private _source_node: MediaStreamAudioSourceNode;
|
||||
private _analyser_node: AnalyserNode;
|
||||
|
||||
private _media_stream: MediaStream;
|
||||
|
||||
private _analyse_buffer: Uint8Array;
|
||||
|
||||
private _current_level = 0;
|
||||
|
||||
constructor(device: WebIDevice) {
|
||||
this._device = device;
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
const timeout = setTimeout(reject, 5000);
|
||||
aplayer.on_ready(() => {
|
||||
clearTimeout(timeout);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
} catch(error) {
|
||||
throw tr("audio context timeout");
|
||||
}
|
||||
this._context = aplayer.context();
|
||||
if(!this._context) throw tr("invalid context");
|
||||
|
||||
this._gain_node = this._context.createGain();
|
||||
this._gain_node.gain.setValueAtTime(0, 0);
|
||||
|
||||
/* analyser node */
|
||||
this._analyser_node = this._context.createAnalyser();
|
||||
|
||||
const optimal_ftt_size = Math.ceil(this._context.sampleRate * (JThresholdFilter.update_task_interval / 1000));
|
||||
this._analyser_node.fftSize = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
|
||||
|
||||
if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser_node.fftSize)
|
||||
this._analyse_buffer = new Uint8Array(this._analyser_node.fftSize);
|
||||
|
||||
/* starting stream */
|
||||
const _result = await JavascriptInput.request_media_stream(this._device.deviceId, this._device.groupId);
|
||||
if(!(_result instanceof MediaStream)){
|
||||
if(_result === InputStartResult.ENOTALLOWED)
|
||||
throw tr("No permissions");
|
||||
if(_result === InputStartResult.ENOTSUPPORTED)
|
||||
throw tr("Not supported");
|
||||
if(_result === InputStartResult.EBUSY)
|
||||
throw tr("Device busy");
|
||||
if(_result === InputStartResult.EUNKNOWN)
|
||||
throw tr("an error occurred");
|
||||
throw _result;
|
||||
}
|
||||
this._media_stream = _result;
|
||||
|
||||
this._source_node = this._context.createMediaStreamSource(this._media_stream);
|
||||
this._source_node.connect(this._analyser_node);
|
||||
this._analyser_node.connect(this._gain_node);
|
||||
this._gain_node.connect(this._context.destination);
|
||||
|
||||
JavascriptLevelmeter._instances.push(this);
|
||||
if(JavascriptLevelmeter._instances.length == 1) {
|
||||
clearInterval(JavascriptLevelmeter._update_task);
|
||||
JavascriptLevelmeter._update_task = setInterval(() => JavascriptLevelmeter._analyse_all(), JThresholdFilter.update_task_interval) as any;
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
JavascriptLevelmeter._instances.remove(this);
|
||||
if(JavascriptLevelmeter._instances.length == 0) {
|
||||
clearInterval(JavascriptLevelmeter._update_task);
|
||||
JavascriptLevelmeter._update_task = 0;
|
||||
}
|
||||
|
||||
if(this._source_node) {
|
||||
this._source_node.disconnect();
|
||||
this._source_node = undefined;
|
||||
}
|
||||
if(this._media_stream) {
|
||||
if(this._media_stream.stop)
|
||||
this._media_stream.stop();
|
||||
else
|
||||
this._media_stream.getTracks().forEach(value => {
|
||||
value.stop();
|
||||
});
|
||||
this._media_stream = undefined;
|
||||
}
|
||||
if(this._gain_node) {
|
||||
this._gain_node.disconnect();
|
||||
this._gain_node = undefined;
|
||||
}
|
||||
if(this._analyser_node) {
|
||||
this._analyser_node.disconnect();
|
||||
this._analyser_node = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
device(): IDevice {
|
||||
return this._device;
|
||||
}
|
||||
|
||||
set_observer(callback: (value: number) => any) {
|
||||
this._callback = callback;
|
||||
}
|
||||
|
||||
private static _analyse_all() {
|
||||
for(const instance of [...this._instances])
|
||||
instance._analyse();
|
||||
}
|
||||
|
||||
private _analyse() {
|
||||
this._analyser_node.getByteTimeDomainData(this._analyse_buffer);
|
||||
|
||||
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
|
||||
if(this._callback)
|
||||
this._callback(this._current_level);
|
||||
}
|
||||
}
|
||||
|
||||
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
|
||||
function: async () => {
|
||||
inputDeviceList = new WebInputDeviceList();
|
||||
},
|
||||
priority: 80,
|
||||
name: "initialize media devices"
|
||||
});
|
||||
|
||||
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
|
||||
function: async () => {
|
||||
inputDeviceList.refresh().then(() => {});
|
||||
},
|
||||
priority: 10,
|
||||
name: "query media devices"
|
||||
});
|
|
@@ -0,0 +1,242 @@
|
|||
import {FilterType, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";
|
||||
|
||||
export abstract class JAbstractFilter<NodeType extends AudioNode> {
|
||||
readonly priority: number;
|
||||
|
||||
source_node: AudioNode;
|
||||
audio_node: NodeType;
|
||||
|
||||
context: AudioContext;
|
||||
enabled: boolean = false;
|
||||
|
||||
active: boolean = false; /* if true the filter filters! */
|
||||
callback_active_change: (new_state: boolean) => any;
|
||||
|
||||
paused: boolean = true;
|
||||
|
||||
constructor(priority: number) {
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
abstract initialize(context: AudioContext, source_node: AudioNode);
|
||||
abstract finalize();
|
||||
|
||||
/* whether the input has been paused and we don't expect any input */
|
||||
abstract set_pause(flag: boolean);
|
||||
|
||||
is_enabled(): boolean {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
set_enabled(flag: boolean) {
|
||||
this.enabled = flag;
|
||||
}
|
||||
}
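The abstract class above implies a small lifecycle contract: the owning input calls initialize() with the current source node, toggles set_pause() with the recording state and calls finalize() before the node graph is rebuilt. A purely illustrative pass-through filter under those assumptions:

class JPassThroughFilter extends JAbstractFilter<GainNode> {
    initialize(context: AudioContext, source_node: AudioNode) {
        this.context = context;
        this.source_node = source_node;

        this.audio_node = context.createGain(); /* gain stays at 1, audio passes unchanged */
        this.source_node.connect(this.audio_node);
    }

    finalize() {
        if(this.source_node) {
            try { this.source_node.disconnect(this.audio_node) } catch (error) {}
        }

        this.source_node = undefined;
        this.audio_node = undefined;
        this.context = undefined;
    }

    set_pause(flag: boolean) {
        this.paused = flag; /* nothing to start or stop for a pass-through node */
    }
}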
|
||||
|
||||
export class JThresholdFilter extends JAbstractFilter<GainNode> implements ThresholdFilter {
|
||||
public static update_task_interval = 20; /* 20ms */
|
||||
|
||||
readonly type = FilterType.THRESHOLD;
|
||||
callback_level?: (value: number) => any;
|
||||
|
||||
private _threshold = 50;
|
||||
|
||||
private _update_task: any;
|
||||
private _analyser: AnalyserNode;
|
||||
private _analyse_buffer: Uint8Array;
|
||||
|
||||
private _silence_count = 0;
|
||||
private _margin_frames = 5;
|
||||
|
||||
private _current_level = 0;
|
||||
private _smooth_release = 0;
|
||||
private _smooth_attack = 0;
|
||||
|
||||
finalize() {
|
||||
this.set_pause(true);
|
||||
|
||||
if(this.source_node) {
|
||||
try { this.source_node.disconnect(this._analyser) } catch (error) {}
|
||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
||||
}
|
||||
|
||||
this._analyser = undefined;
|
||||
this.source_node = undefined;
|
||||
this.audio_node = undefined;
|
||||
this.context = undefined;
|
||||
}
|
||||
|
||||
initialize(context: AudioContext, source_node: AudioNode) {
|
||||
this.context = context;
|
||||
this.source_node = source_node;
|
||||
|
||||
this.audio_node = context.createGain();
|
||||
this._analyser = context.createAnalyser();
|
||||
|
||||
const optimal_ftt_size = Math.ceil((source_node.context || context).sampleRate * (JThresholdFilter.update_task_interval / 1000));
|
||||
const base2_ftt = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
|
||||
this._analyser.fftSize = base2_ftt;
|
||||
|
||||
if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser.fftSize)
|
||||
this._analyse_buffer = new Uint8Array(this._analyser.fftSize);
|
||||
|
||||
this.active = false;
|
||||
this.audio_node.gain.value = 1;
|
||||
|
||||
this.source_node.connect(this.audio_node);
|
||||
this.source_node.connect(this._analyser);
|
||||
|
||||
/* force update paused state */
|
||||
this.set_pause(!(this.paused = !this.paused));
|
||||
}
|
||||
|
||||
get_margin_frames(): number { return this._margin_frames; }
|
||||
set_margin_frames(value: number) {
|
||||
this._margin_frames = value;
|
||||
}
|
||||
|
||||
get_attack_smooth(): number {
|
||||
return this._smooth_attack;
|
||||
}
|
||||
|
||||
get_release_smooth(): number {
|
||||
return this._smooth_release;
|
||||
}
|
||||
|
||||
set_attack_smooth(value: number) {
|
||||
this._smooth_attack = value;
|
||||
}
|
||||
|
||||
set_release_smooth(value: number) {
|
||||
this._smooth_release = value;
|
||||
}
|
||||
|
||||
get_threshold(): number {
|
||||
return this._threshold;
|
||||
}
|
||||
|
||||
set_threshold(value: number): Promise<void> {
|
||||
this._threshold = value;
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
public static process(buffer: Uint8Array, ftt_size: number, previous: number, smooth: number) {
|
||||
let level;
|
||||
{
|
||||
let total = 0, float, rms;
|
||||
|
||||
for(let index = 0; index < ftt_size; index++) {
|
||||
float = ( buffer[index++] / 0x7f ) - 1;
|
||||
total += (float * float);
|
||||
}
|
||||
rms = Math.sqrt(total / ftt_size);
|
||||
let db = 20 * ( Math.log(rms) / Math.log(10) );
|
||||
// sanity check
|
||||
|
||||
db = Math.max(-192, Math.min(db, 0));
|
||||
level = 100 + ( db * 1.92 );
|
||||
}
|
||||
|
||||
return previous * smooth + level * (1 - smooth);
|
||||
}
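A worked example for the mapping above: a buffer that sits at the midpoint 0x7f yields float = 0 for every sample, so rms = 0, db clamps to -192 and level = 100 + (-192 * 1.92) ≈ -268.6. Note that the loop advances index twice per iteration, so only every other sample contributes to total.

const silence = new Uint8Array(1024).fill(0x7f);
console.log(JThresholdFilter.process(silence, silence.length, 0, 0)); /* prints roughly -268.64 */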
|
||||
|
||||
private _analyse() {
|
||||
this._analyser.getByteTimeDomainData(this._analyse_buffer);
|
||||
|
||||
let smooth;
|
||||
if(this._silence_count == 0)
|
||||
smooth = this._smooth_release;
|
||||
else
|
||||
smooth = this._smooth_attack;
|
||||
|
||||
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser.fftSize, this._current_level, smooth);
|
||||
|
||||
this._update_gain_node();
|
||||
if(this.callback_level)
|
||||
this.callback_level(this._current_level);
|
||||
}
|
||||
|
||||
private _update_gain_node() {
|
||||
let state;
|
||||
if(this._current_level > this._threshold) {
|
||||
this._silence_count = 0;
|
||||
state = true;
|
||||
} else {
|
||||
state = this._silence_count++ < this._margin_frames;
|
||||
}
|
||||
if(state) {
|
||||
this.audio_node.gain.value = 1;
|
||||
if(this.active) {
|
||||
this.active = false;
|
||||
this.callback_active_change(false);
|
||||
}
|
||||
} else {
|
||||
this.audio_node.gain.value = 0;
|
||||
if(!this.active) {
|
||||
this.active = true;
|
||||
this.callback_active_change(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set_pause(flag: boolean) {
|
||||
if(flag === this.paused) return;
|
||||
this.paused = flag;
|
||||
|
||||
if(this.paused) {
|
||||
clearInterval(this._update_task);
|
||||
this._update_task = undefined;
|
||||
|
||||
if(this.active) {
|
||||
this.active = false;
|
||||
this.callback_active_change(false);
|
||||
}
|
||||
} else {
|
||||
if(!this._update_task && this._analyser)
|
||||
this._update_task = setInterval(() => this._analyse(), JThresholdFilter.update_task_interval);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class JStateFilter extends JAbstractFilter<GainNode> implements StateFilter {
|
||||
public readonly type = FilterType.STATE;
|
||||
|
||||
finalize() {
|
||||
if(this.source_node) {
|
||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
||||
}
|
||||
|
||||
this.source_node = undefined;
|
||||
this.audio_node = undefined;
|
||||
this.context = undefined;
|
||||
}
|
||||
|
||||
initialize(context: AudioContext, source_node: AudioNode) {
|
||||
this.context = context;
|
||||
this.source_node = source_node;
|
||||
|
||||
this.audio_node = context.createGain();
|
||||
this.audio_node.gain.value = this.active ? 0 : 1;
|
||||
|
||||
this.source_node.connect(this.audio_node);
|
||||
}
|
||||
|
||||
is_active(): boolean {
|
||||
return this.active;
|
||||
}
|
||||
|
||||
set_state(state: boolean): Promise<void> {
|
||||
if(this.active === state)
|
||||
return Promise.resolve();
|
||||
|
||||
this.active = state;
|
||||
if(this.audio_node)
|
||||
this.audio_node.gain.value = state ? 0 : 1;
|
||||
this.callback_active_change(state);
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
set_pause(flag: boolean) {
|
||||
this.paused = flag;
|
||||
}
|
||||
}
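A minimal sketch of using the state filter above as a push-to-talk gate; the key-hook helpers are assumptions, only the filter calls come from this file:

const pptGate = new JStateFilter(100);
pptGate.callback_active_change = () => {};           /* normally wired up by the owning input */
await pptGate.set_state(true);                       /* gate closed: the input is filtered */

onPushToTalkPressed(() => pptGate.set_state(false)); /* key down: let audio through */
onPushToTalkReleased(() => pptGate.set_state(true)); /* key up: close the gate again */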
|
|
@@ -1,25 +1,3 @@
|
|||
/*
|
||||
import {Device} from "tc-shared/audio/player";
|
||||
|
||||
export function initialize() : boolean;
|
||||
export function initialized() : boolean;
|
||||
|
||||
export function context() : AudioContext;
|
||||
export function get_master_volume() : number;
|
||||
export function set_master_volume(volume: number);
|
||||
|
||||
export function destination() : AudioNode;
|
||||
|
||||
export function on_ready(cb: () => any);
|
||||
|
||||
export function available_devices() : Promise<Device[]>;
|
||||
export function set_device(device_id: string) : Promise<void>;
|
||||
|
||||
export function current_device() : Device;
|
||||
|
||||
export function initializeFromGesture();
|
||||
*/
|
||||
|
||||
import {Device} from "tc-shared/audio/player";
|
||||
import * as log from "tc-shared/log";
|
||||
import {LogCategory} from "tc-shared/log";
|
||||
|
@@ -52,6 +30,10 @@ function fire_initialized() {
|
|||
|
||||
function createNewContext() {
|
||||
audioContextInstance = new (window.webkitAudioContext || window.AudioContext)();
|
||||
audioContextInstance.onstatechange = () => {
|
||||
if(audioContextInstance.state === "running")
|
||||
fire_initialized();
|
||||
};
|
||||
|
||||
audioContextInitializeCallbacks.unshift(() => {
|
||||
globalAudioGainInstance = audioContextInstance.createGain();
|
||||
|
@@ -128,9 +110,7 @@ export function current_device() : Device {
|
|||
export function initializeFromGesture() {
|
||||
if(audioContextInstance) {
|
||||
if(audioContextInstance.state !== "running") {
|
||||
audioContextInstance.resume().then(() => {
|
||||
fire_initialized();
|
||||
}).catch(error => {
|
||||
audioContextInstance.resume().catch(error => {
|
||||
log.error(LogCategory.AUDIO, tr("Failed to initialize audio context instance from gesture: %o"), error);
|
||||
});
|
||||
}
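Browsers only let an audio context start after a user gesture, so a minimal sketch of a call site looks like this (the actual wiring is not shown here):

document.addEventListener("click", () => initializeFromGesture(), { once: true });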
|
||||
|
|
|
@@ -1,877 +0,0 @@
|
|||
import {
|
||||
AbstractInput, CallbackInputConsumer,
|
||||
InputConsumer,
|
||||
InputConsumerType,
|
||||
InputDevice, InputStartResult,
|
||||
InputState,
|
||||
LevelMeter, NodeInputConsumer
|
||||
} from "tc-shared/voice/RecorderBase";
|
||||
import * as log from "tc-shared/log";
|
||||
import * as loader from "tc-loader";
|
||||
import {LogCategory} from "tc-shared/log";
|
||||
import * as aplayer from "./player";
|
||||
import * as rbase from "tc-shared/voice/RecorderBase";
|
||||
|
||||
declare global {
|
||||
interface MediaStream {
|
||||
stop();
|
||||
}
|
||||
}
|
||||
|
||||
let _queried_devices: JavascriptInputDevice[];
|
||||
let _queried_permissioned: boolean = false;
|
||||
|
||||
export interface JavascriptInputDevice extends InputDevice {
|
||||
device_id: string;
|
||||
group_id: string;
|
||||
}
|
||||
|
||||
function getUserMediaFunctionPromise() : (constraints: MediaStreamConstraints) => Promise<MediaStream> {
|
||||
if('mediaDevices' in navigator && 'getUserMedia' in navigator.mediaDevices)
|
||||
return constraints => navigator.mediaDevices.getUserMedia(constraints);
|
||||
|
||||
const _callbacked_function = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
|
||||
if(!_callbacked_function)
|
||||
return undefined;
|
||||
|
||||
return constraints => new Promise<MediaStream>((resolve, reject) => _callbacked_function(constraints, resolve, reject));
|
||||
}
|
||||
|
||||
async function query_devices() {
|
||||
const general_supported = !!getUserMediaFunctionPromise();
|
||||
|
||||
try {
|
||||
const context = aplayer.context();
|
||||
const devices = await navigator.mediaDevices.enumerateDevices();
|
||||
|
||||
_queried_permissioned = false;
|
||||
if(devices.filter(e => !!e.label).length > 0)
|
||||
_queried_permissioned = true;
|
||||
|
||||
_queried_devices = devices.filter(e => e.kind === "audioinput").map((e: MediaDeviceInfo): JavascriptInputDevice => {
|
||||
return {
|
||||
channels: context ? context.destination.channelCount : 2,
|
||||
sample_rate: context ? context.sampleRate : 44100,
|
||||
|
||||
default_input: e.deviceId == "default",
|
||||
|
||||
driver: "WebAudio",
|
||||
name: e.label || "device-id{" + e.deviceId+ "}",
|
||||
|
||||
supported: general_supported,
|
||||
|
||||
device_id: e.deviceId,
|
||||
group_id: e.groupId,
|
||||
|
||||
unique_id: e.deviceId
|
||||
}
|
||||
});
|
||||
if(_queried_devices.length > 0 && _queried_devices.filter(e => e.default_input).length == 0)
|
||||
_queried_devices[0].default_input = true;
|
||||
} catch(error) {
|
||||
log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);
|
||||
_queried_devices = [];
|
||||
}
|
||||
}
|
||||
|
||||
export function devices() : InputDevice[] {
|
||||
if(typeof(_queried_devices) === "undefined")
|
||||
query_devices();
|
||||
|
||||
return _queried_devices || [];
|
||||
}
|
||||
|
||||
|
||||
export function device_refresh_available() : boolean { return true; }
|
||||
export function refresh_devices() : Promise<void> { return query_devices(); }
|
||||
|
||||
export function create_input() : AbstractInput { return new JavascriptInput(); }
|
||||
|
||||
export async function create_levelmeter(device: InputDevice) : Promise<LevelMeter> {
|
||||
const meter = new JavascriptLevelmeter(device as any);
|
||||
await meter.initialize();
|
||||
return meter;
|
||||
}
|
||||
|
||||
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
|
||||
function: async () => { query_devices(); }, /* May wait for it? */
|
||||
priority: 10,
|
||||
name: "query media devices"
|
||||
});
|
||||
|
||||
export namespace filter {
|
||||
export abstract class JAbstractFilter<NodeType extends AudioNode> implements rbase.filter.Filter {
|
||||
type;
|
||||
|
||||
source_node: AudioNode;
|
||||
audio_node: NodeType;
|
||||
|
||||
context: AudioContext;
|
||||
enabled: boolean = false;
|
||||
|
||||
active: boolean = false; /* if true the filter filters! */
|
||||
callback_active_change: (new_state: boolean) => any;
|
||||
|
||||
paused: boolean = true;
|
||||
|
||||
abstract initialize(context: AudioContext, source_node: AudioNode);
|
||||
abstract finalize();
|
||||
|
||||
/* whatever the input has been paused and we don't expect any input */
|
||||
abstract set_pause(flag: boolean);
|
||||
|
||||
is_enabled(): boolean {
|
||||
return this.enabled;
|
||||
}
|
||||
}
|
||||
|
||||
export class JThresholdFilter extends JAbstractFilter<GainNode> implements rbase.filter.ThresholdFilter {
|
||||
public static update_task_interval = 20; /* 20ms */
|
||||
|
||||
type = rbase.filter.Type.THRESHOLD;
|
||||
callback_level?: (value: number) => any;
|
||||
|
||||
private _threshold = 50;
|
||||
|
||||
private _update_task: any;
|
||||
private _analyser: AnalyserNode;
|
||||
private _analyse_buffer: Uint8Array;
|
||||
|
||||
private _silence_count = 0;
|
||||
private _margin_frames = 5;
|
||||
|
||||
private _current_level = 0;
|
||||
private _smooth_release = 0;
|
||||
private _smooth_attack = 0;
|
||||
|
||||
finalize() {
|
||||
this.set_pause(true);
|
||||
|
||||
if(this.source_node) {
|
||||
try { this.source_node.disconnect(this._analyser) } catch (error) {}
|
||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
||||
}
|
||||
|
||||
this._analyser = undefined;
|
||||
this.source_node = undefined;
|
||||
this.audio_node = undefined;
|
||||
this.context = undefined;
|
||||
}
|
||||
|
||||
initialize(context: AudioContext, source_node: AudioNode) {
|
||||
this.context = context;
|
||||
this.source_node = source_node;
|
||||
|
||||
this.audio_node = context.createGain();
|
||||
this._analyser = context.createAnalyser();
|
||||
|
||||
const optimal_ftt_size = Math.ceil((source_node.context || context).sampleRate * (JThresholdFilter.update_task_interval / 1000));
|
||||
const base2_ftt = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
|
||||
this._analyser.fftSize = base2_ftt;
|
||||
|
||||
if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser.fftSize)
|
||||
this._analyse_buffer = new Uint8Array(this._analyser.fftSize);
|
||||
|
||||
this.active = false;
|
||||
this.audio_node.gain.value = 1;
|
||||
|
||||
this.source_node.connect(this.audio_node);
|
||||
this.source_node.connect(this._analyser);
|
||||
|
||||
/* force update paused state */
|
||||
this.set_pause(!(this.paused = !this.paused));
|
||||
}
|
||||
|
||||
get_margin_frames(): number { return this._margin_frames; }
|
||||
set_margin_frames(value: number) {
|
||||
this._margin_frames = value;
|
||||
}
|
||||
|
||||
get_attack_smooth(): number {
|
||||
return this._smooth_attack;
|
||||
}
|
||||
|
||||
get_release_smooth(): number {
|
||||
return this._smooth_release;
|
||||
}
|
||||
|
||||
set_attack_smooth(value: number) {
|
||||
this._smooth_attack = value;
|
||||
}
|
||||
|
||||
set_release_smooth(value: number) {
|
||||
this._smooth_release = value;
|
||||
}
|
||||
|
||||
get_threshold(): number {
|
||||
return this._threshold;
|
||||
}
|
||||
|
||||
set_threshold(value: number): Promise<void> {
|
||||
this._threshold = value;
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
public static process(buffer: Uint8Array, ftt_size: number, previous: number, smooth: number) {
|
||||
let level;
|
||||
{
|
||||
let total = 0, float, rms;
|
||||
|
||||
for(let index = 0; index < ftt_size; index++) {
|
||||
float = ( buffer[index++] / 0x7f ) - 1;
|
||||
total += (float * float);
|
||||
}
|
||||
rms = Math.sqrt(total / ftt_size);
|
||||
let db = 20 * ( Math.log(rms) / Math.log(10) );
|
||||
// sanity check
|
||||
|
||||
db = Math.max(-192, Math.min(db, 0));
|
||||
level = 100 + ( db * 1.92 );
|
||||
}
|
||||
|
||||
return previous * smooth + level * (1 - smooth);
|
||||
}
|
||||
|
||||
private _analyse() {
|
||||
this._analyser.getByteTimeDomainData(this._analyse_buffer);
|
||||
|
||||
let smooth;
|
||||
if(this._silence_count == 0)
|
||||
smooth = this._smooth_release;
|
||||
else
|
||||
smooth = this._smooth_attack;
|
||||
|
||||
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser.fftSize, this._current_level, smooth);
|
||||
|
||||
this._update_gain_node();
|
||||
if(this.callback_level)
|
||||
this.callback_level(this._current_level);
|
||||
}
|
||||
|
||||
private _update_gain_node() {
|
||||
let state;
|
||||
if(this._current_level > this._threshold) {
|
||||
this._silence_count = 0;
|
||||
state = true;
|
||||
} else {
|
||||
state = this._silence_count++ < this._margin_frames;
|
||||
}
|
||||
if(state) {
|
||||
this.audio_node.gain.value = 1;
|
||||
if(this.active) {
|
||||
this.active = false;
|
||||
this.callback_active_change(false);
|
||||
}
|
||||
} else {
|
||||
this.audio_node.gain.value = 0;
|
||||
if(!this.active) {
|
||||
this.active = true;
|
||||
this.callback_active_change(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set_pause(flag: boolean) {
|
||||
if(flag === this.paused) return;
|
||||
this.paused = flag;
|
||||
|
||||
if(this.paused) {
|
||||
clearInterval(this._update_task);
|
||||
this._update_task = undefined;
|
||||
|
||||
if(this.active) {
|
||||
this.active = false;
|
||||
this.callback_active_change(false);
|
||||
}
|
||||
} else {
|
||||
if(!this._update_task && this._analyser)
|
||||
this._update_task = setInterval(() => this._analyse(), JThresholdFilter.update_task_interval);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class JStateFilter extends JAbstractFilter<GainNode> implements rbase.filter.StateFilter {
|
||||
type = rbase.filter.Type.STATE;
|
||||
|
||||
finalize() {
|
||||
if(this.source_node) {
|
||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
||||
}
|
||||
|
||||
this.source_node = undefined;
|
||||
this.audio_node = undefined;
|
||||
this.context = undefined;
|
||||
}
|
||||
|
||||
initialize(context: AudioContext, source_node: AudioNode) {
|
||||
this.context = context;
|
||||
this.source_node = source_node;
|
||||
|
||||
this.audio_node = context.createGain();
|
||||
this.audio_node.gain.value = this.active ? 0 : 1;
|
||||
|
||||
this.source_node.connect(this.audio_node);
|
||||
}
|
||||
|
||||
is_active(): boolean {
|
||||
return this.active;
|
||||
}
|
||||
|
||||
set_state(state: boolean): Promise<void> {
|
||||
if(this.active === state)
|
||||
return Promise.resolve();
|
||||
|
||||
this.active = state;
|
||||
if(this.audio_node)
|
||||
this.audio_node.gain.value = state ? 0 : 1;
|
||||
this.callback_active_change(state);
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
set_pause(flag: boolean) {
|
||||
this.paused = flag;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class JavascriptInput implements AbstractInput {
|
||||
private _state: InputState = InputState.PAUSED;
|
||||
private _current_device: JavascriptInputDevice | undefined;
|
||||
private _current_consumer: InputConsumer;
|
||||
|
||||
private _current_stream: MediaStream;
|
||||
private _current_audio_stream: MediaStreamAudioSourceNode;
|
||||
|
||||
private _audio_context: AudioContext;
|
||||
private _source_node: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
|
||||
private _consumer_callback_node: ScriptProcessorNode;
|
||||
private readonly _consumer_audio_callback;
|
||||
private _volume_node: GainNode;
|
||||
private _mute_node: GainNode;
|
||||
|
||||
private _filters: rbase.filter.Filter[] = [];
|
||||
private _filter_active: boolean = false;
|
||||
|
||||
private _volume: number = 1;
|
||||
|
||||
callback_begin: () => any = undefined;
|
||||
callback_end: () => any = undefined;
|
||||
|
||||
constructor() {
|
||||
aplayer.on_ready(() => this._audio_initialized());
|
||||
this._consumer_audio_callback = this._audio_callback.bind(this);
|
||||
}
|
||||
|
||||
private _audio_initialized() {
|
||||
this._audio_context = aplayer.context();
|
||||
if(!this._audio_context)
|
||||
return;
|
||||
|
||||
this._mute_node = this._audio_context.createGain();
|
||||
this._mute_node.gain.value = 0;
|
||||
this._mute_node.connect(this._audio_context.destination);
|
||||
|
||||
this._consumer_callback_node = this._audio_context.createScriptProcessor(1024 * 4);
|
||||
this._consumer_callback_node.connect(this._mute_node);
|
||||
|
||||
this._volume_node = this._audio_context.createGain();
|
||||
this._volume_node.gain.value = this._volume;
|
||||
|
||||
this._initialize_filters();
|
||||
if(this._state === InputState.INITIALIZING)
|
||||
this.start();
|
||||
}
|
||||
|
||||
private _initialize_filters() {
|
||||
const filters = this._filters as any as filter.JAbstractFilter<AudioNode>[];
|
||||
for(const filter of filters) {
|
||||
if(filter.is_enabled())
|
||||
filter.finalize();
|
||||
}
|
||||
|
||||
if(this._audio_context && this._volume_node) {
|
||||
const active_filter = filters.filter(e => e.is_enabled());
|
||||
let stream: AudioNode = this._volume_node;
|
||||
for(const f of active_filter) {
|
||||
f.initialize(this._audio_context, stream);
|
||||
stream = f.audio_node;
|
||||
}
|
||||
this._switch_source_node(stream);
|
||||
}
|
||||
}
|
||||
|
||||
private _audio_callback(event: AudioProcessingEvent) {
|
||||
if(!this._current_consumer || this._current_consumer.type !== InputConsumerType.CALLBACK)
|
||||
return;
|
||||
|
||||
const callback = this._current_consumer as CallbackInputConsumer;
|
||||
if(callback.callback_audio)
|
||||
callback.callback_audio(event.inputBuffer);
|
||||
|
||||
if(callback.callback_buffer) {
|
||||
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
|
||||
}
|
||||
}
|
||||
|
||||
current_state() : InputState { return this._state; };
|
||||
|
||||
private _start_promise: Promise<InputStartResult>;
|
||||
async start() : Promise<InputStartResult> {
|
||||
if(this._start_promise) {
|
||||
try {
|
||||
await this._start_promise;
|
||||
if(this._state != InputState.PAUSED)
|
||||
return;
|
||||
} catch(error) {
|
||||
log.debug(LogCategory.AUDIO, tr("JavascriptInput:start() Start promise await resulted in an error: %o"), error);
|
||||
}
|
||||
}
|
||||
|
||||
return await (this._start_promise = this._start());
|
||||
}
|
||||
|
||||
/* request permission for devices only one per time! */
|
||||
private static _running_request: Promise<MediaStream | InputStartResult>;
|
||||
static async request_media_stream(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
|
||||
while(this._running_request) {
|
||||
try {
|
||||
await this._running_request;
|
||||
} catch(error) { }
|
||||
}
|
||||
const promise = (this._running_request = this.request_media_stream0(device_id, group_id));
|
||||
try {
|
||||
return await this._running_request;
|
||||
} finally {
|
||||
if(this._running_request === promise)
|
||||
this._running_request = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
static async request_media_stream0(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
|
||||
const media_function = getUserMediaFunctionPromise();
|
||||
if(!media_function) return InputStartResult.ENOTSUPPORTED;
|
||||
|
||||
try {
|
||||
log.info(LogCategory.AUDIO, tr("Requesting a microphone stream for device %s in group %s"), device_id, group_id);
|
||||
|
||||
const audio_constrains: MediaTrackConstraints = {};
|
||||
audio_constrains.deviceId = device_id;
|
||||
audio_constrains.groupId = group_id;
|
||||
|
||||
audio_constrains.echoCancellation = true;
|
||||
audio_constrains.autoGainControl = true;
|
||||
audio_constrains.noiseSuppression = true;
|
||||
/* disabled because most the time we get a OverconstrainedError */ //audio_constrains.sampleSize = {min: 420, max: 960 * 10, ideal: 960};
|
||||
|
||||
const stream = await media_function({
|
||||
audio: audio_constrains,
|
||||
video: undefined
|
||||
});
|
||||
if(!_queried_permissioned) query_devices(); /* we now got permissions, requery devices */
|
||||
return stream;
|
||||
} catch(error) {
|
||||
if('name' in error) {
|
||||
if(error.name === "NotAllowedError") {
|
||||
//createErrorModal(tr("Failed to create microphone"), tr("Microphone recording failed. Please allow TeaWeb access to your microphone")).open();
|
||||
//FIXME: Move this to somewhere else!
|
||||
|
||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed (No permissions). Browser message: %o"), error.message);
|
||||
return InputStartResult.ENOTALLOWED;
|
||||
} else {
|
||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed. Request resulted in error: %o: %o"), error.name, error);
|
||||
}
|
||||
} else {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to initialize recording stream (%o)"), error);
|
||||
}
|
||||
return InputStartResult.EUNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
private async _start() : Promise<InputStartResult> {
|
||||
try {
|
||||
if(this._state != InputState.PAUSED)
|
||||
throw tr("recorder already started");
|
||||
|
||||
this._state = InputState.INITIALIZING;
|
||||
if(!this._current_device)
|
||||
throw tr("invalid device");
|
||||
|
||||
if(!this._audio_context) {
|
||||
debugger;
|
||||
throw tr("missing audio context");
|
||||
}
|
||||
|
||||
const _result = await JavascriptInput.request_media_stream(this._current_device.device_id, this._current_device.group_id);
|
||||
if(!(_result instanceof MediaStream)) {
|
||||
this._state = InputState.PAUSED;
|
||||
return _result;
|
||||
}
|
||||
this._current_stream = _result;
|
||||
|
||||
for(const f of this._filters)
|
||||
if(f.is_enabled() && f instanceof filter.JAbstractFilter)
|
||||
f.set_pause(false);
|
||||
this._consumer_callback_node.addEventListener('audioprocess', this._consumer_audio_callback);
|
||||
|
||||
this._current_audio_stream = this._audio_context.createMediaStreamSource(this._current_stream);
|
||||
this._current_audio_stream.connect(this._volume_node);
|
||||
this._state = InputState.RECORDING;
|
||||
return InputStartResult.EOK;
|
||||
} catch(error) {
|
||||
if(this._state == InputState.INITIALIZING) {
|
||||
this._state = InputState.PAUSED;
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
this._start_promise = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async stop() {
|
||||
/* await all starts */
|
||||
try {
|
||||
if(this._start_promise)
|
||||
await this._start_promise;
|
||||
} catch(error) {}
|
||||
|
||||
this._state = InputState.PAUSED;
|
||||
if(this._current_audio_stream)
|
||||
this._current_audio_stream.disconnect();
|
||||
|
||||
if(this._current_stream) {
|
||||
if(this._current_stream.stop)
|
||||
this._current_stream.stop();
|
||||
else
|
||||
this._current_stream.getTracks().forEach(value => {
|
||||
value.stop();
|
||||
});
|
||||
}
|
||||
|
||||
this._current_stream = undefined;
|
||||
this._current_audio_stream = undefined;
|
||||
for(const f of this._filters)
|
||||
if(f.is_enabled() && f instanceof filter.JAbstractFilter)
|
||||
f.set_pause(true);
|
||||
if(this._consumer_callback_node)
|
||||
this._consumer_callback_node.removeEventListener('audioprocess', this._consumer_audio_callback);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
current_device(): InputDevice | undefined {
|
||||
return this._current_device;
|
||||
}
|
||||
|
||||
async set_device(device: InputDevice | undefined) {
|
||||
if(this._current_device === device)
|
||||
return;
|
||||
|
||||
|
||||
const saved_state = this._state;
|
||||
try {
|
||||
await this.stop();
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to stop previous record session (%o)"), error);
|
||||
}
|
||||
|
||||
this._current_device = device as any; /* TODO: Test for device_id and device_group */
|
||||
if(!device) {
|
||||
this._state = saved_state === InputState.PAUSED ? InputState.PAUSED : InputState.DRY;
|
||||
return;
|
||||
}
|
||||
|
||||
if(saved_state !== InputState.PAUSED) {
|
||||
try {
|
||||
await this.start()
|
||||
} catch(error) {
|
||||
log.warn(LogCategory.AUDIO, tr("Failed to start new recording stream (%o)"), error);
|
||||
throw "failed to start record";
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
    get_filter(type: rbase.filter.Type): rbase.filter.Filter | undefined {
        for(const filter of this._filters)
            if(filter.type == type)
                return filter;

        let new_filter: filter.JAbstractFilter<AudioNode>;
        switch (type) {
            case rbase.filter.Type.STATE:
                new_filter = new filter.JStateFilter();
                break;
            case rbase.filter.Type.VOICE_LEVEL:
                throw "voice filter isn't supported!";
            case rbase.filter.Type.THRESHOLD:
                new_filter = new filter.JThresholdFilter();
                break;
            default:
                throw "invalid filter type, or type isn't implemented! (" + type + ")";
        }

        new_filter.callback_active_change = () => this._recalculate_filter_status();
        this._filters.push(new_filter as any);
        this.enable_filter(type);
        return new_filter as any;
    }

    supports_filter(type: rbase.filter.Type) : boolean {
        switch (type) {
            case rbase.filter.Type.THRESHOLD:
            case rbase.filter.Type.STATE:
                return true;
            default:
                return false;
        }
    }

    private find_filter(type: rbase.filter.Type) : filter.JAbstractFilter<AudioNode> | undefined {
        for(const filter of this._filters)
            if(filter.type == type)
                return filter as any;
        return undefined;
    }

    clear_filter() {
        for(const _filter of this._filters) {
            if(!_filter.is_enabled())
                continue;

            const c_filter = _filter as any as filter.JAbstractFilter<AudioNode>;
            c_filter.finalize();
            c_filter.enabled = false;
        }

        this._initialize_filters();
        this._recalculate_filter_status();
    }

    disable_filter(type: rbase.filter.Type) {
        const filter = this.find_filter(type);
        if(!filter) return;

        /* test if the filter is active */
        if(!filter.is_enabled())
            return;

        filter.enabled = false;
        filter.set_pause(true);
        filter.finalize();
        this._initialize_filters();
        this._recalculate_filter_status();
    }

    enable_filter(type: rbase.filter.Type) {
        const filter = this.get_filter(type) as any as filter.JAbstractFilter<AudioNode>;
        if(filter.is_enabled())
            return;

        filter.enabled = true;
        filter.set_pause(typeof this._current_audio_stream !== "object");
        this._initialize_filters();
        this._recalculate_filter_status();
    }

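    /*
     * The input counts as "filtered" as soon as at least one enabled filter is active.
     * On a transition the matching callback fires: callback_end when filtering kicks in,
     * callback_begin when all filters release the input again.
     */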
    private _recalculate_filter_status() {
        let filtered = this._filters.filter(e => e.is_enabled()).filter(e => (e as any as filter.JAbstractFilter<AudioNode>).active).length > 0;
        if(filtered === this._filter_active)
            return;

        this._filter_active = filtered;
        if(filtered) {
            if(this.callback_end)
                this.callback_end();
        } else {
            if(this.callback_begin)
                this.callback_begin();
        }
    }

    current_consumer(): InputConsumer | undefined {
        return this._current_consumer;
    }

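    /*
     * Attach a new consumer to the input. The previous consumer (node or callback based)
     * is detached from the source node first; native callback consumers aren't available
     * in the web backend.
     */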
    async set_consumer(consumer: InputConsumer) {
        if(this._current_consumer) {
            if(this._current_consumer.type == InputConsumerType.NODE) {
                if(this._source_node)
                    (this._current_consumer as NodeInputConsumer).callback_disconnect(this._source_node);
            } else if(this._current_consumer.type === InputConsumerType.CALLBACK) {
                if(this._source_node)
                    this._source_node.disconnect(this._consumer_callback_node);
            }
        }

        if(consumer) {
            if(consumer.type == InputConsumerType.CALLBACK) {
                if(this._source_node)
                    this._source_node.connect(this._consumer_callback_node);
            } else if(consumer.type == InputConsumerType.NODE) {
                if(this._source_node)
                    (consumer as NodeInputConsumer).callback_node(this._source_node);
            } else {
                throw "native callback consumers are not supported!";
            }
        }
        this._current_consumer = consumer;
    }

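    /*
     * Re-wire the currently attached consumer from the old source node to the new one.
     * Note that, unlike the node consumer path, the callback path assumes a source node
     * has already been set.
     */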
    private _switch_source_node(new_node: AudioNode) {
        if(this._current_consumer) {
            if(this._current_consumer.type == InputConsumerType.NODE) {
                const node_consumer = this._current_consumer as NodeInputConsumer;
                if(this._source_node)
                    node_consumer.callback_disconnect(this._source_node);
                if(new_node)
                    node_consumer.callback_node(new_node);
            } else if(this._current_consumer.type == InputConsumerType.CALLBACK) {
                this._source_node.disconnect(this._consumer_callback_node);
                if(new_node)
                    new_node.connect(this._consumer_callback_node);
            }
        }
        this._source_node = new_node;
    }

    get_volume(): number {
        return this._volume;
    }

    set_volume(volume: number) {
        if(volume === this._volume)
            return;
        this._volume = volume;
        this._volume_node.gain.value = volume;
    }
}

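/*
 * Level meter backed by a Web Audio AnalyserNode. All instances share a single
 * update interval which periodically polls the analyser data.
 *
 * Rough usage sketch (assuming a JavascriptInputDevice instance is at hand):
 *
 *   const meter = new JavascriptLevelmeter(device);
 *   await meter.initialize();
 *   meter.set_observer(level => console.log("Current input level: %o", level));
 *   // ... later, when the meter is no longer needed:
 *   meter.destory();
 */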
class JavascriptLevelmeter implements LevelMeter {
    private static _instances: JavascriptLevelmeter[] = [];
    private static _update_task: number;

    readonly _device: JavascriptInputDevice;

    private _callback: (num: number) => any;

    private _context: AudioContext;
    private _gain_node: GainNode;
    private _source_node: MediaStreamAudioSourceNode;
    private _analyser_node: AnalyserNode;

    private _media_stream: MediaStream;

    private _analyse_buffer: Uint8Array;

    private _current_level = 0;

    constructor(device: JavascriptInputDevice) {
        this._device = device;
    }

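    /*
     * Wait (up to five seconds) for the audio player context, build the
     * source -> analyser -> muted gain -> destination chain for the target device
     * and register this instance with the shared analyse interval.
     */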
    async initialize() {
        try {
            await new Promise((resolve, reject) => {
                const timeout = setTimeout(reject, 5000);
                aplayer.on_ready(() => {
                    clearTimeout(timeout);
                    resolve();
                });
            });
        } catch(error) {
            throw tr("audio context timeout");
        }
        this._context = aplayer.context();
        if(!this._context) throw tr("invalid context");

        this._gain_node = this._context.createGain();
        this._gain_node.gain.setValueAtTime(0, 0);

        /* analyser node */
        this._analyser_node = this._context.createAnalyser();

        const optimal_ftt_size = Math.ceil(this._context.sampleRate * (filter.JThresholdFilter.update_task_interval / 1000));
        this._analyser_node.fftSize = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));

        if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser_node.fftSize)
            this._analyse_buffer = new Uint8Array(this._analyser_node.fftSize);

        /* starting stream */
        const _result = await JavascriptInput.request_media_stream(this._device.device_id, this._device.group_id);
        if(!(_result instanceof MediaStream)) {
            if(_result === InputStartResult.ENOTALLOWED)
                throw tr("No permissions");
            if(_result === InputStartResult.ENOTSUPPORTED)
                throw tr("Not supported");
            if(_result === InputStartResult.EBUSY)
                throw tr("Device busy");
            if(_result === InputStartResult.EUNKNOWN)
                throw tr("an error occurred");
            throw _result;
        }
        this._media_stream = _result;

        this._source_node = this._context.createMediaStreamSource(this._media_stream);
        this._source_node.connect(this._analyser_node);
        this._analyser_node.connect(this._gain_node);
        this._gain_node.connect(this._context.destination);

        JavascriptLevelmeter._instances.push(this);
        if(JavascriptLevelmeter._instances.length == 1) {
            clearInterval(JavascriptLevelmeter._update_task);
            JavascriptLevelmeter._update_task = setInterval(() => JavascriptLevelmeter._analyse_all(), filter.JThresholdFilter.update_task_interval) as any;
        }
    }

    destory() {
        JavascriptLevelmeter._instances.remove(this);
        if(JavascriptLevelmeter._instances.length == 0) {
            clearInterval(JavascriptLevelmeter._update_task);
            JavascriptLevelmeter._update_task = 0;
        }

        if(this._source_node) {
            this._source_node.disconnect();
            this._source_node = undefined;
        }
        if(this._media_stream) {
            if(this._media_stream.stop)
                this._media_stream.stop();
            else
                this._media_stream.getTracks().forEach(value => {
                    value.stop();
                });
            this._media_stream = undefined;
        }
        if(this._gain_node) {
            this._gain_node.disconnect();
            this._gain_node = undefined;
        }
        if(this._analyser_node) {
            this._analyser_node.disconnect();
            this._analyser_node = undefined;
        }
    }

    device(): InputDevice {
        return this._device;
    }

    set_observer(callback: (value: number) => any) {
        this._callback = callback;
    }

    private static _analyse_all() {
        for(const instance of [...this._instances])
            instance._analyse();
    }

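    /*
     * Pull the current time-domain samples from the analyser and derive the level via
     * JThresholdFilter.process; the trailing .75 presumably acts as a smoothing factor
     * between the previous and the freshly measured level.
     */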
    private _analyse() {
        this._analyser_node.getByteTimeDomainData(this._analyse_buffer);

        this._current_level = filter.JThresholdFilter.process(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
        if(this._callback)
            this._callback(this._current_level);
    }
}

@@ -19,7 +19,6 @@ import {EventType} from "tc-shared/ui/frames/log/Definitions";
import {WrappedWebSocket} from "tc-backend/web/connection/WrappedWebSocket";
import {AbstractVoiceConnection} from "tc-shared/connection/VoiceConnection";
import {DummyVoiceConnection} from "tc-shared/connection/DummyVoiceConnection";
import {ServerConnectionFactory, setServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";

class ReturnListener<T> {
    resolve: (value?: T | PromiseLike<T>) => void;

@@ -0,0 +1,4 @@
import {setRecorderBackend} from "tc-shared/audio/recorder";
import {WebAudioRecorder} from "../audio/Recorder";

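/* Register the Web Audio based recorder implementation as the backend used by the shared recorder module. */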
setRecorderBackend(new WebAudioRecorder());

@@ -2,7 +2,8 @@ import "webrtc-adapter";
import "./index.scss";
import "./FileTransfer";

import "./factories/ServerConnection";
import "./factories/ExternalModal";
import "./hooks/ServerConnection";
import "./hooks/ExternalModal";
import "./hooks/AudioRecorder";

export = require("tc-shared/main");