Merge branch 'canary'

Commit 53210ec4e6
47 changed files with 2349 additions and 1621 deletions

@@ -1,6 +1,9 @@
 # Changelog:
 * **11.08.20**
     - Fixed the voice push to talk delay
+    - Improved the microphone setting controller
+    - Heavily reworked the input recorder API
+    - Improved denied audio permission handling

 * **09.08.20**
     - Added a "watch to gather" context menu entry for clients

@@ -8,7 +8,7 @@ function load_template_url(url: string) : Promise<void> {
         return _template_promises[url];

     return (_template_promises[url] = (async () => {
-        const response = await $.ajax(config.baseUrl + url);
+        const response = await (await fetch(config.baseUrl + url)).text();

         let node = document.createElement("html");
         node.innerHTML = response;
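
The change above swaps the jQuery `$.ajax` helper for the browser's Fetch API. A rough, self-contained sketch of the same pattern (the `response.ok` guard is an extra hardening step, not part of this commit, and `baseUrl` is a placeholder):

    // Sketch of the fetch-based template loader; the caching mirrors _template_promises above.
    const templatePromises: { [url: string]: Promise<void> } = {};

    function loadTemplateUrl(baseUrl: string, url: string): Promise<void> {
        if(templatePromises[url]) {
            return templatePromises[url];
        }

        return (templatePromises[url] = (async () => {
            const response = await fetch(baseUrl + url);
            if(!response.ok) {
                throw new Error("failed to load template " + url + " (" + response.status + ")");
            }

            const html = await response.text();
            const node = document.createElement("html");
            node.innerHTML = html;
            // ...the real loader now registers the template nodes found in "node"
        })());
    }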

@@ -1,6 +1,15 @@
 import * as loader from "../loader/loader";
 import {Stage} from "../loader/loader";
-import {detect as detectBrowser} from "detect-browser";
+import {
+    BrowserInfo,
+    detect as detectBrowser,
+} from "detect-browser";
+
+declare global {
+    interface Window {
+        detectedBrowser: BrowserInfo
+    }
+}

 if(__build.target === "web") {
     loader.register_task(Stage.SETUP, {
@@ -13,14 +22,15 @@ if(__build.target === "web") {
             return;

         console.log("Resolved browser manufacturer to \"%s\" version \"%s\" on %s", browser.name, browser.version, browser.os);
-        if(browser.type && browser.type !== "browser") {
+        if(browser.type !== "browser") {
             loader.critical_error("Your device isn't supported.", "User agent type " + browser.type + " isn't supported.");
             throw "unsupported user type";
         }

+        window.detectedBrowser = browser;
+
         switch (browser?.name) {
             case "aol":
-            case "bot":
             case "crios":
             case "ie":
                 loader.critical_error("Browser not supported", "We're sorry, but your browser isn't supported.");
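
The `declare global` block above is what lets any other module read the detection result through `window.detectedBrowser` with proper typing. A small illustrative consumer (the Safari version check is made up for the example and not part of the commit):

    import { BrowserInfo } from "detect-browser";

    /* With the Window augmentation from the hunk above in scope, plain
     * "window.detectedBrowser" is already typed; the cast keeps this sketch standalone. */
    function isLegacySafari(): boolean {
        const browser = (window as Window & { detectedBrowser?: BrowserInfo }).detectedBrowser;
        if(!browser) {
            return false; /* detection has not run (or hit a non-browser user agent) */
        }
        return browser.name === "safari" && parseInt(browser.version) < 13;
    }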

shared/backend.d/audio/recorder.d.ts (vendored, 9 lines deleted)
@@ -1,9 +0,0 @@
-import {AbstractInput, InputDevice, LevelMeter} from "tc-shared/voice/RecorderBase";
-
-export function devices() : InputDevice[];
-
-export function device_refresh_available() : boolean;
-export function refresh_devices() : Promise<void>;
-
-export function create_input() : AbstractInput;
-export function create_levelmeter(device: InputDevice) : Promise<LevelMeter>;

@@ -14,7 +14,7 @@ html:root {
         padding: 0!important;

         min-width: 20em;
-        width: 50em;
+        width: 60em;

         @include user-select(none);

@@ -70,6 +70,8 @@ html:root {
         @include chat-scrollbar-horizontal();
         @include chat-scrollbar-vertical();

+        background-color: #19191b;
+
         .body {
             display: flex;
             flex-direction: column;
@@ -86,7 +88,7 @@ html:root {
         &.step-welcome, &.step-finish {
             display: flex;
             flex-direction: row;
-            justify-content: stretch;
+            justify-content: center;

             .text {
                 align-self: center;
@@ -119,7 +121,7 @@ html:root {
         }

         /* for step-identity or step-microphone */
-        .container-settings-identity-profile, .container-settings-audio-microphone {
+        .container-settings-identity-profile {
             padding: .5em;

             .left .body {
@@ -136,8 +138,6 @@ html:root {

         &.step-identity { }

-        &.step-microphone { }
-
         &.hidden {
             display: none;
         }

@@ -37,9 +37,7 @@
             {{tr "It is save to exit this guide at any point and directly jump ahead using the client." /}}
         </div>
     </div>
-    <div class="step step-microphone">
-        {{include tmpl="tmpl_settings-microphone" /}}
-    </div>
+    <div class="step step-microphone"></div>
     <div class="step step-identity">
         {{include tmpl="tmpl_settings-profiles" /}}
     </div>

shared/img/client-icons/microphone_broken.svg (new file, 25 lines, 7.3 KiB)
@@ -0,0 +1,25 @@
New SVG asset: the "microphone broken" client icon (viewBox "0 0 10.165 16", drawn in the client-icon palette, fill #7289da with #ccc and #fff overlays). The raw vector path data of the 25 added lines is not reproduced here.

@@ -5,10 +5,10 @@ import {GroupManager} from "tc-shared/permission/GroupManager";
 import {ServerSettings, Settings, settings, StaticSettings} from "tc-shared/settings";
 import {Sound, SoundManager} from "tc-shared/sound/Sounds";
 import {LocalClientEntry} from "tc-shared/ui/client";
-import {ConnectionProfile, default_profile, find_profile} from "tc-shared/profiles/ConnectionProfile";
+import {ConnectionProfile} from "tc-shared/profiles/ConnectionProfile";
 import {ServerAddress} from "tc-shared/ui/server";
 import * as log from "tc-shared/log";
-import {LogCategory} from "tc-shared/log";
+import {LogCategory, logError} from "tc-shared/log";
 import {createErrorModal, createInfoModal, createInputModal, Modal} from "tc-shared/ui/elements/Modal";
 import {hashPassword} from "tc-shared/utils/helpers";
 import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
@@ -16,8 +16,7 @@ import * as htmltags from "./ui/htmltags";
 import {ChannelEntry} from "tc-shared/ui/channel";
 import {InputStartResult, InputState} from "tc-shared/voice/RecorderBase";
 import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
-import * as bipc from "./ipc/BrowserIPC";
-import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
+import {default_recorder, RecorderProfile} from "tc-shared/voice/RecorderProfile";
 import {Frame} from "tc-shared/ui/frames/chat_frame";
 import {Hostbanner} from "tc-shared/ui/frames/hostbanner";
 import {server_connections} from "tc-shared/ui/frames/connection_handlers";
@@ -39,6 +38,12 @@ import {W2GPluginCmdHandler} from "tc-shared/video-viewer/W2GPlugin";
 import {VoiceConnectionStatus} from "tc-shared/connection/VoiceConnection";
 import {getServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";

+export enum InputHardwareState {
+    MISSING,
+    START_FAILED,
+    VALID
+}
+
 export enum DisconnectReason {
     HANDLER_DESTROYED,
     REQUESTED,
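
The new InputHardwareState enum replaces the old boolean `input_hardware` flag in `LocalClientStatus` (removed in the next hunk) with a tri-state the rest of the handler can reason about. A tiny self-contained sketch of how such a state might be surfaced to the user; the `describeMicrophoneState` helper is hypothetical and not part of the commit:

    enum InputHardwareState {
        MISSING,      /* no recorder has been acquired yet */
        START_FAILED, /* a recorder exists, but starting the input failed */
        VALID         /* the recorder is acquired and ready */
    }

    /* hypothetical helper mapping the state to a UI hint */
    function describeMicrophoneState(state: InputHardwareState): string {
        switch(state) {
            case InputHardwareState.MISSING:
                return "No microphone configured";
            case InputHardwareState.START_FAILED:
                return "Microphone could not be started";
            case InputHardwareState.VALID:
                return "Microphone ready";
            default:
                return "Unknown microphone state";
        }
    }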

@@ -101,7 +106,6 @@ export enum ViewReasonId {
 }

 export interface LocalClientStatus {
-    input_hardware: boolean;
     input_muted: boolean;
     output_muted: boolean;

@@ -128,7 +132,6 @@ export interface ConnectParameters {
     auto_reconnect_attempt?: boolean;
 }

-declare const native_client;
 export class ConnectionHandler {
     readonly handlerId: string;

@@ -163,8 +166,8 @@ export class ConnectionHandler {
     private pluginCmdRegistry: PluginCmdRegistry;

     private client_status: LocalClientStatus = {
-        input_hardware: false,
         input_muted: false,
+
         output_muted: false,
         away: false,
         channel_subscribe_all: true,
@@ -176,7 +179,8 @@ export class ConnectionHandler {
         channel_codec_decoding_supported: undefined
     };

-    invoke_resized_on_activate: boolean = false;
+    private inputHardwareState: InputHardwareState = InputHardwareState.MISSING;
+
     log: ServerEventLog;

     constructor() {
@@ -189,7 +193,10 @@ export class ConnectionHandler {
         this.serverConnection = getServerConnectionFactory().create(this);
         this.serverConnection.events.on("notify_connection_state_changed", event => this.on_connection_state_changed(event.oldState, event.newState));

-        this.serverConnection.getVoiceConnection().events.on("notify_recorder_changed", () => this.update_voice_status());
+        this.serverConnection.getVoiceConnection().events.on("notify_recorder_changed", () => {
+            this.setInputHardwareState(this.getVoiceRecorder() ? InputHardwareState.VALID : InputHardwareState.MISSING);
+            this.update_voice_status();
+        });
         this.serverConnection.getVoiceConnection().events.on("notify_connection_status_changed", () => this.update_voice_status());

         this.channelTree = new ChannelTree(this);
@@ -237,7 +244,7 @@ export class ConnectionHandler {
         this.update_voice_status();

         this.setSubscribeToAllChannels(source ? source.client_status.channel_subscribe_all : settings.global(Settings.KEY_CLIENT_STATE_SUBSCRIBE_ALL_CHANNELS));
-        this.setAway_(source ? source.client_status.away : (settings.global(Settings.KEY_CLIENT_STATE_AWAY) ? settings.global(Settings.KEY_CLIENT_AWAY_MESSAGE) : false), false);
+        this.doSetAway(source ? source.client_status.away : (settings.global(Settings.KEY_CLIENT_STATE_AWAY) ? settings.global(Settings.KEY_CLIENT_AWAY_MESSAGE) : false), false);
         this.setQueriesShown(source ? source.client_status.queries_visible : settings.global(Settings.KEY_CLIENT_STATE_QUERY_SHOWN));
     }

@@ -251,11 +258,10 @@ export class ConnectionHandler {
     }

     async startConnection(addr: string, profile: ConnectionProfile, user_action: boolean, parameters: ConnectParameters) {
-        this.tab_set_name(tr("Connecting"));
         this.cancel_reconnect(false);
         this._reconnect_attempt = parameters.auto_reconnect_attempt || false;
-        if(this.serverConnection)
-            this.handleDisconnect(DisconnectReason.REQUESTED);
+        this.handleDisconnect(DisconnectReason.REQUESTED);
+        this.tab_set_name(tr("Connecting"));

         let server_address: ServerAddress = {
             host: "",
@@ -344,7 +350,7 @@ export class ConnectionHandler {
         this.cancel_reconnect(true);
         if(!this.connected) return;

-        this.handleDisconnect(DisconnectReason.REQUESTED); //TODO message?
+        this.handleDisconnect(DisconnectReason.REQUESTED);
         try {
             await this.serverConnection.disconnect();
         } catch (error) {
The existing connected-setup body moves, unchanged, into the new CONNECTED branch; only the genuinely added and removed lines are marked below.
@@ -369,42 +375,44 @@ export class ConnectionHandler {

     @EventHandler<ConnectionEvents>("notify_connection_state_changed")
-    private handleConnectionConnected(event: ConnectionEvents["notify_connection_state_changed"]) {
-        if(event.new_state !== ConnectionState.CONNECTED) return;
+    private handleConnectionStateChanged(event: ConnectionEvents["notify_connection_state_changed"]) {
         this.connection_state = event.new_state;
+        if(event.new_state === ConnectionState.CONNECTED) {
             log.info(LogCategory.CLIENT, tr("Client connected"));
             this.log.log(EventType.CONNECTION_CONNECTED, {
                 serverAddress: {
                     server_port: this.channelTree.server.remote_address.port,
                     server_hostname: this.channelTree.server.remote_address.host
                 },
                 serverName: this.channelTree.server.properties.virtualserver_name,
                 own_client: this.getClient().log_data()
             });
             this.sound.play(Sound.CONNECTION_CONNECTED);

             this.permissions.requestPermissionList();
             if(this.groups.serverGroups.length == 0)
                 this.groups.requestGroups();

             this.settings.setServer(this.channelTree.server.properties.virtualserver_unique_identifier);

             /* apply the server settings */
             if(this.client_status.channel_subscribe_all)
                 this.channelTree.subscribe_all_channels();
             else
                 this.channelTree.unsubscribe_all_channels();
             this.channelTree.toggle_server_queries(this.client_status.queries_visible);

             this.sync_status_with_server();
             this.channelTree.server.updateProperties();
             /*
             No need to update the voice stuff because as soon we see ourself we're doing it
             this.update_voice_status();
             if(control_bar.current_connection_handler() === this)
                 control_bar.apply_server_voice_state();
             */
+        } else {
+            this.setInputHardwareState(this.getVoiceRecorder() ? InputHardwareState.VALID : InputHardwareState.MISSING);
+        }
     }

     get connected() : boolean {
@@ -439,52 +447,7 @@ export class ConnectionHandler {
         if(pathname.endsWith(".php"))
             pathname = pathname.substring(0, pathname.lastIndexOf("/"));

-        /* certaccept is currently not working! */
-        if(bipc.supported() && false) {
-            tag.attr('href', "#");
-            let popup: Window;
-            tag.on('click', event => {
-                const features = {
-                    status: "no",
-                    location: "no",
-                    toolbar: "no",
-                    menubar: "no",
-                    width: 600,
-                    height: 400
-                };
-
-                if(popup)
-                    popup.close();
-
-                properties["certificate_callback"] = bipc.getInstance().register_certificate_accept_callback(() => {
-                    log.info(LogCategory.GENERAL, tr("Received notification that the certificate has been accepted! Attempting reconnect!"));
-                    if(this._certificate_modal)
-                        this._certificate_modal.close();
-
-                    popup.close(); /* no need, but nicer */
-
-                    const profile = find_profile(properties.connect_profile) || default_profile();
-                    const cprops = this.reconnect_properties(profile);
-                    this.startConnection(properties.connect_address, profile, true, cprops);
-                });
-
-                const url = build_url(document.location.origin + pathname + "/popup/certaccept/", "", properties);
-                const features_string = Object.keys(features).map(e => e + "=" + features[e]).join(",");
-                popup = window.open(url, "TeaWeb certificate accept", features_string);
-                try {
-                    popup.focus();
-                } catch(e) {
-                    log.warn(LogCategory.GENERAL, tr("Certificate accept popup has been blocked. Trying a blank page and replacing href"));
-
-                    window.open(url, "TeaWeb certificate accept"); /* trying without features */
-                    tag.attr("target", "_blank");
-                    tag.attr("href", url);
-                    tag.unbind('click');
-                }
-            });
-        } else {
-            tag.attr('href', build_url(document.location.origin + pathname, document.location.search, properties));
-        }
+        tag.attr('href', build_url(document.location.origin + pathname, document.location.search, properties));
         return tag;
     }

@@ -526,7 +489,7 @@ export class ConnectionHandler {
         else
             log.error(LogCategory.CLIENT, tr("Could not connect to remote host!"), data);

-        if(native_client || !dns.resolve_address_ipv4) {
+        if(__build.target === "client" || !dns.resolve_address_ipv4) {
             createErrorModal(
                 tr("Could not connect"),
                 tr("Could not connect to remote host (Connection refused)")
@@ -726,42 +689,47 @@ export class ConnectionHandler {
         });
     }

-    private _last_record_error_popup: number;
+    private _last_record_error_popup: number = 0;
     update_voice_status(targetChannel?: ChannelEntry) {
-        //TODO: Simplify this
-        if(!this._local_client) return; /* we've been destroyed */
+        if(!this._local_client) {
+            /* we've been destroyed */
+            return;
+        }

-        targetChannel = targetChannel || this.getClient().currentChannel();
+        if(typeof targetChannel === "undefined")
+            targetChannel = this.getClient().currentChannel();

         const vconnection = this.serverConnection.getVoiceConnection();
-        const basic_voice_support = vconnection.getConnectionState() === VoiceConnectionStatus.Connected && targetChannel;
-        const support_record = basic_voice_support && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
-        const support_playback = basic_voice_support && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));
+        const codecEncodeSupported = !targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec);
+        const codecDecodeSupported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);

         const property_update = {
             client_input_muted: this.client_status.input_muted,
             client_output_muted: this.client_status.output_muted
         };

-        if(support_record && basic_voice_support)
+        /* update the encoding codec */
+        if(codecEncodeSupported && targetChannel) {
             vconnection.set_encoder_codec(targetChannel.properties.channel_codec);
+        }

         if(!this.serverConnection.connected() || vconnection.getConnectionState() !== VoiceConnectionStatus.Connected) {
             property_update["client_input_hardware"] = false;
             property_update["client_output_hardware"] = false;
-            this.client_status.input_hardware = true; /* IDK if we have input hardware or not, but it dosn't matter at all so */
-
-            /* no icons are shown so no update at all */
         } else {
-            const audio_source = vconnection.voice_recorder();
-            const recording_supported = typeof(audio_source) !== "undefined" && audio_source.record_supported && (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec));
-            const playback_supported = !targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec);
+            const recording_supported =
+                this.getInputHardwareState() === InputHardwareState.VALID &&
+                (!targetChannel || vconnection.encoding_supported(targetChannel.properties.channel_codec)) &&
+                vconnection.getConnectionState() === VoiceConnectionStatus.Connected;
+
+            const playback_supported = this.hasOutputHardware() && (!targetChannel || vconnection.decoding_supported(targetChannel.properties.channel_codec));

             property_update["client_input_hardware"] = recording_supported;
             property_update["client_output_hardware"] = playback_supported;
-            this.client_status.input_hardware = recording_supported;
         }

-        /* update icons */
         {
             const client_properties = this.getClient().properties;
             for(const key of Object.keys(property_update)) {
                 if(client_properties[key] === property_update[key])
@@ -771,7 +739,7 @@ export class ConnectionHandler {
         if(Object.keys(property_update).length > 0) {
             this.serverConnection.send_command("clientupdate", property_update).catch(error => {
                 log.warn(LogCategory.GENERAL, tr("Failed to update client audio hardware properties. Error: %o"), error);
-                this.log.log(EventType.ERROR_CUSTOM, {message: tr("Failed to update audio hardware properties.")});
+                this.log.log(EventType.ERROR_CUSTOM, { message: tr("Failed to update audio hardware properties.") });

                 /* Update these properties anyways (for case the server fails to handle the command) */
                 const updates = [];
@@ -782,50 +750,39 @@ export class ConnectionHandler {
             }
         }

-        if(targetChannel && basic_voice_support) {
-            const encoding_supported = vconnection && vconnection.encoding_supported(targetChannel.properties.channel_codec);
-            const decoding_supported = vconnection && vconnection.decoding_supported(targetChannel.properties.channel_codec);
-
-            if(this.client_status.channel_codec_decoding_supported !== decoding_supported || this.client_status.channel_codec_encoding_supported !== encoding_supported) {
-                this.client_status.channel_codec_decoding_supported = decoding_supported;
-                this.client_status.channel_codec_encoding_supported = encoding_supported;
+        if(targetChannel) {
+            if(this.client_status.channel_codec_decoding_supported !== codecDecodeSupported || this.client_status.channel_codec_encoding_supported !== codecEncodeSupported) {
+                this.client_status.channel_codec_decoding_supported = codecDecodeSupported;
+                this.client_status.channel_codec_encoding_supported = codecEncodeSupported;

                 let message;
-                if(!encoding_supported && !decoding_supported)
+                if(!codecEncodeSupported && !codecDecodeSupported) {
                     message = tr("This channel has an unsupported codec.<br>You cant speak or listen to anybody within this channel!");
-                else if(!encoding_supported)
+                } else if(!codecEncodeSupported) {
                     message = tr("This channel has an unsupported codec.<br>You cant speak within this channel!");
-                else if(!decoding_supported)
-                    message = tr("This channel has an unsupported codec.<br>You listen to anybody within this channel!"); /* implies speaking does not work as well */
-                if(message)
+                } else if(!codecDecodeSupported) {
+                    message = tr("This channel has an unsupported codec.<br>You cant listen to anybody within this channel!");
+                }
+
+                if(message) {
                     createErrorModal(tr("Channel codec unsupported"), message).open();
+                }
             }
         }

         this.client_status = this.client_status || {} as any;
-        this.client_status.sound_record_supported = support_record;
-        this.client_status.sound_playback_supported = support_playback;
+        this.client_status.sound_record_supported = codecEncodeSupported;
+        this.client_status.sound_playback_supported = codecDecodeSupported;

-        if(vconnection && vconnection.voice_recorder() && vconnection.voice_recorder().record_supported) {
-            const active = !this.client_status.input_muted && !this.client_status.output_muted;
+        {
+            const enableRecording = !this.client_status.input_muted && !this.client_status.output_muted;
             /* No need to start the microphone when we're not even connected */

-            const input = vconnection.voice_recorder().input;
+            const input = vconnection.voice_recorder()?.input;
             if(input) {
-                if(active && this.serverConnection.connected()) {
-                    if(input.current_state() === InputState.PAUSED) {
-                        input.start().then(result => {
-                            if(result != InputStartResult.EOK)
-                                throw result;
-                        }).catch(error => {
-                            log.warn(LogCategory.VOICE, tr("Failed to start microphone input (%s)."), error);
-                            if(Date.now() - (this._last_record_error_popup || 0) > 10 * 1000) {
-                                this._last_record_error_popup = Date.now();
-                                createErrorModal(tr("Failed to start recording"), formatMessage(tr("Microphone start failed.{:br:}Error: {}"), error)).open();
-                            }
-                        });
-                    }
+                if(enableRecording && this.serverConnection.connected()) {
+                    if(this.getInputHardwareState() !== InputHardwareState.START_FAILED)
+                        this.startVoiceRecorder(Date.now() - this._last_record_error_popup > 10 * 1000);
                 } else {
                     input.stop();
                 }
@@ -836,6 +793,7 @@ export class ConnectionHandler {
         this.event_registry.fire("notify_state_updated", {
             state: "microphone"
         });
+
         this.event_registry.fire("notify_state_updated", {
             state: "speaker"
         });
@@ -849,7 +807,7 @@ export class ConnectionHandler {
             client_output_muted: this.client_status.output_muted,
             client_away: typeof(this.client_status.away) === "string" || this.client_status.away,
             client_away_message: typeof(this.client_status.away) === "string" ? this.client_status.away : "",
-            client_input_hardware: this.client_status.sound_record_supported && this.client_status.input_hardware,
+            client_input_hardware: this.client_status.sound_record_supported && this.getInputHardwareState() === InputHardwareState.VALID,
             client_output_hardware: this.client_status.sound_playback_supported
         }).catch(error => {
             log.warn(LogCategory.GENERAL, tr("Failed to sync handler state with server. Error: %o"), error);
@@ -857,15 +815,67 @@ export class ConnectionHandler {
         });
     }

-    resize_elements() {
-        this.invoke_resized_on_activate = false;
+    /* can be called as much as you want, does nothing if nothing changed */
+    async acquireInputHardware() {
+        /* if we're having multiple recorders, try to get the right one */
+        let recorder: RecorderProfile = default_recorder;
+
+        try {
+            await this.serverConnection.getVoiceConnection().acquire_voice_recorder(recorder);
+        } catch (error) {
+            logError(LogCategory.AUDIO, tr("Failed to acquire recorder: %o"), error);
+            createErrorModal(tr("Failed to acquire recorder"), tr("Failed to acquire recorder.\nLookup the console for more details.")).open();
+            return;
+        }
+
+        if(this.connection_state === ConnectionState.CONNECTED) {
+            await this.startVoiceRecorder(true);
+        } else {
+            this.setInputHardwareState(InputHardwareState.VALID);
+        }
     }

-    acquire_recorder(voice_recoder: RecorderProfile, update_control_bar: boolean) {
-        const vconnection = this.serverConnection.getVoiceConnection();
-        vconnection.acquire_voice_recorder(voice_recoder).catch(error => {
-            log.warn(LogCategory.VOICE, tr("Failed to acquire recorder (%o)"), error);
-        });
+    async startVoiceRecorder(notifyError: boolean) {
+        const input = this.getVoiceRecorder()?.input;
+        if(!input) return;
+
+        if(input.currentState() === InputState.PAUSED && this.connection_state === ConnectionState.CONNECTED) {
+            try {
+                const result = await input.start();
+                if(result !== InputStartResult.EOK) {
+                    throw result;
+                }
+
+                this.setInputHardwareState(InputHardwareState.VALID);
+                this.update_voice_status();
+            } catch (error) {
+                this.setInputHardwareState(InputHardwareState.START_FAILED);
+
+                let errorMessage;
+                if(error === InputStartResult.ENOTSUPPORTED) {
+                    errorMessage = tr("Your browser does not support voice recording");
+                } else if(error === InputStartResult.EBUSY) {
+                    errorMessage = tr("The input device is busy");
+                } else if(error === InputStartResult.EDEVICEUNKNOWN) {
+                    errorMessage = tr("Invalid input device");
+                } else if(error === InputStartResult.ENOTALLOWED) {
+                    errorMessage = tr("No permissions");
+                } else if(error instanceof Error) {
+                    errorMessage = error.message;
+                } else if(typeof error === "string") {
+                    errorMessage = error;
+                } else {
+                    errorMessage = tr("lookup the console");
+                }
+
+                log.warn(LogCategory.VOICE, tr("Failed to start microphone input (%s)."), error);
+                if(notifyError) {
+                    this._last_record_error_popup = Date.now();
+                    createErrorModal(tr("Failed to start recording"), tra("Microphone start failed.\nError: {}", errorMessage)).open();
+                }
+            }
+        } else {
+            this.setInputHardwareState(InputHardwareState.VALID);
+        }
     }

     getVoiceRecorder() : RecorderProfile | undefined { return this.serverConnection.getVoiceConnection().voice_recorder(); }
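
The old fire-and-forget acquire_recorder() helper is split in two: acquireInputHardware() obtains the default recorder, and startVoiceRecorder() actually starts it, mapping failures onto InputHardwareState.START_FAILED plus a translated error message. A rough sketch of how UI code might drive this; the two method names are taken from the hunk above, InputHardwareState is the enum added earlier in this commit, and everything else (the button handler, the structural interface) is purely illustrative:

    /* Illustrative only: an "enable microphone" action wired to the new flow. */
    interface MicrophoneCapableHandler {
        acquireInputHardware(): Promise<void>;
        getInputHardwareState(): InputHardwareState;
    }

    async function onEnableMicrophoneClicked(connection: MicrophoneCapableHandler) {
        /* acquires the default recorder and, when already connected, starts it */
        await connection.acquireInputHardware();

        if(connection.getInputHardwareState() !== InputHardwareState.VALID) {
            /* the handler has already shown an error modal; just reflect the state in the UI */
            console.warn("microphone could not be enabled");
        }
    }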
@@ -1015,15 +1025,9 @@ export class ConnectionHandler {
         this.update_voice_status();
     }
     toggleMicrophone() { this.setMicrophoneMuted(!this.isMicrophoneMuted()); }
-    isMicrophoneMuted() { return this.client_status.input_muted; }

-    /*
-     * Returns whatever the client is able to talk or not. Reasons for returning true could be:
-     * - Channel codec isn't supported
-     * - No recorder has been acquired
-     * - Voice bridge hasn't been set upped yet
-     */
-    isMicrophoneDisabled() { return !this.client_status.input_hardware; }
+    isMicrophoneMuted() { return this.client_status.input_muted; }
+    isMicrophoneDisabled() { return this.inputHardwareState !== InputHardwareState.VALID; }

     setSpeakerMuted(muted: boolean) {
         if(this.client_status.output_muted === muted) return;
@@ -1057,10 +1061,10 @@ export class ConnectionHandler {
     isSubscribeToAllChannels() : boolean { return this.client_status.channel_subscribe_all; }

     setAway(state: boolean | string) {
-        this.setAway_(state, true);
+        this.doSetAway(state, true);
     }

-    private setAway_(state: boolean | string, play_sound: boolean) {
+    private doSetAway(state: boolean | string, play_sound: boolean) {
         if(this.client_status.away === state)
             return;

@@ -1099,8 +1103,16 @@ export class ConnectionHandler {
         return this.client_status.queries_visible;
     }

-    hasInputHardware() : boolean { return this.client_status.input_hardware; }
-    hasOutputHardware() : boolean { return this.client_status.output_muted; }
+    getInputHardwareState() : InputHardwareState { return this.inputHardwareState; }
+    private setInputHardwareState(state: InputHardwareState) {
+        if(this.inputHardwareState === state)
+            return;
+
+        this.inputHardwareState = state;
+        this.event_registry.fire("notify_state_updated", { state: "microphone" });
+    }
+
+    hasOutputHardware() : boolean { return true; }

     getPluginCmdRegistry() : PluginCmdRegistry { return this.pluginCmdRegistry; }
 }

shared/js/audio/recorder.ts (new file, 157 lines)
@@ -0,0 +1,157 @@
+import {AbstractInput, LevelMeter} from "tc-shared/voice/RecorderBase";
+import {Registry} from "tc-shared/events";
+
+export type DeviceQueryResult = {}
+
+export interface AudioRecorderBacked {
+    createInput() : AbstractInput;
+    createLevelMeter(device: IDevice) : Promise<LevelMeter>;
+
+    getDeviceList() : DeviceList;
+}
+
+export interface DeviceListEvents {
+    /*
+     * Should only trigger if the list really changed.
+     */
+    notify_list_updated: {
+        removedDeviceCount: number,
+        addedDeviceCount: number
+    },
+
+    notify_state_changed: {
+        oldState: DeviceListState;
+        newState: DeviceListState;
+    },
+
+    notify_permissions_changed: {
+        oldState: PermissionState,
+        newState: PermissionState
+    }
+}
+
+export type DeviceListState = "healthy" | "uninitialized" | "no-permissions" | "error";
+
+export interface IDevice {
+    deviceId: string;
+
+    driver: string;
+    name: string;
+}
+export namespace IDevice {
+    export const NoDeviceId = "none";
+}
+
+export type PermissionState = "granted" | "denied" | "unknown";
+
+export interface DeviceList {
+    getEvents() : Registry<DeviceListEvents>;
+
+    isRefreshAvailable() : boolean;
+    refresh() : Promise<void>;
+
+    /* implicitly update our own permission state */
+    requestPermissions() : Promise<PermissionState>;
+    getPermissionState() : PermissionState;
+
+    getStatus() : DeviceListState;
+    getDevices() : IDevice[];
+
+    getDefaultDeviceId() : string;
+
+    awaitHealthy(): Promise<void>;
+    awaitInitialized() : Promise<void>;
+}
+
+export abstract class AbstractDeviceList implements DeviceList {
+    protected readonly events: Registry<DeviceListEvents>;
+    protected listState: DeviceListState;
+    protected permissionState: PermissionState;
+
+    protected constructor() {
+        this.events = new Registry<DeviceListEvents>();
+        this.permissionState = "unknown";
+        this.listState = "uninitialized";
+    }
+
+    getStatus(): DeviceListState {
+        return this.listState;
+    }
+
+    getPermissionState(): PermissionState {
+        return this.permissionState;
+    }
+
+    protected setState(state: DeviceListState) {
+        if(this.listState === state)
+            return;
+
+        const oldState = this.listState;
+        this.listState = state;
+        this.events.fire("notify_state_changed", { oldState: oldState, newState: state });
+    }
+
+    protected setPermissionState(state: PermissionState) {
+        if(this.permissionState === state)
+            return;
+
+        const oldState = this.permissionState;
+        this.permissionState = state;
+        this.events.fire("notify_permissions_changed", { oldState: oldState, newState: state });
+    }
+
+    awaitInitialized(): Promise<void> {
+        if(this.listState !== "uninitialized")
+            return Promise.resolve();
+
+        return new Promise<void>(resolve => {
+            const callback = (event: DeviceListEvents["notify_state_changed"]) => {
+                if(event.newState === "uninitialized")
+                    return;
+
+                this.events.off("notify_state_changed", callback);
+                resolve();
+            };
+            this.events.on("notify_state_changed", callback);
+        });
+    }
+
+    awaitHealthy(): Promise<void> {
+        if(this.listState === "healthy")
+            return Promise.resolve();
+
+        return new Promise<void>(resolve => {
+            const callback = (event: DeviceListEvents["notify_state_changed"]) => {
+                if(event.newState !== "healthy")
+                    return;
+
+                this.events.off("notify_state_changed", callback);
+                resolve();
+            };
+            this.events.on("notify_state_changed", callback);
+        });
+    }
+
+    abstract getDefaultDeviceId(): string;
+    abstract getDevices(): IDevice[];
+    abstract getEvents(): Registry<DeviceListEvents>;
+    abstract isRefreshAvailable(): boolean;
+    abstract refresh(): Promise<void>;
+    abstract requestPermissions(): Promise<PermissionState>;
+}
+
+let recorderBackend: AudioRecorderBacked;
+
+export function getRecorderBackend() : AudioRecorderBacked {
+    if(typeof recorderBackend === "undefined")
+        throw tr("the recorder backend hasn't been set yet");
+
+    return recorderBackend;
+}
+
+export function setRecorderBackend(instance: AudioRecorderBacked) {
+    if(typeof recorderBackend !== "undefined")
+        throw tr("a recorder backend has already been initialized");
+
+    recorderBackend = instance;
+}
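
This new module takes over the role of the deleted ambient declarations in shared/backend.d/audio/recorder.d.ts: instead of calling free functions, platform code now hands the shared code an AudioRecorderBacked via setRecorderBackend(). A deliberately minimal, hypothetical registration to illustrate the shape of that wiring (assuming the tc-shared alias maps onto shared/js; a real backend would wrap getUserMedia or the native client's audio layer instead of the stubs below):

    import {
        AbstractDeviceList, AudioRecorderBacked, DeviceList,
        IDevice, PermissionState, setRecorderBackend
    } from "tc-shared/audio/recorder";
    import {AbstractInput, LevelMeter} from "tc-shared/voice/RecorderBase";

    class StubDeviceList extends AbstractDeviceList {
        constructor() {
            super();
            /* pretend enumeration already succeeded */
            this.setPermissionState("granted");
            this.setState("healthy");
        }

        getEvents() { return this.events; }
        getDefaultDeviceId(): string { return IDevice.NoDeviceId; }
        getDevices(): IDevice[] { return []; } /* no real devices in this sketch */
        isRefreshAvailable(): boolean { return false; }
        refresh(): Promise<void> { return Promise.resolve(); }
        requestPermissions(): Promise<PermissionState> { return Promise.resolve("granted"); }
    }

    const stubDeviceList = new StubDeviceList();
    setRecorderBackend({
        createInput(): AbstractInput { throw "createInput is not implemented in this sketch"; },
        createLevelMeter(device: IDevice): Promise<LevelMeter> { return Promise.reject("not implemented in this sketch"); },
        getDeviceList(): DeviceList { return stubDeviceList; }
    });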

@@ -576,7 +576,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
         let self = client instanceof LocalClientEntry;

         let channel_to = tree.findChannel(parseInt(json["ctid"]));
-        let channel_from = tree.findChannel(parseInt(json["cfid"]));
+        let channelFrom = tree.findChannel(parseInt(json["cfid"]));

         if(!client) {
             log.error(LogCategory.NETWORKING, tr("Unknown client move (Client)!"));
@@ -589,17 +589,17 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
         }

         if(!self) {
-            if(!channel_from) {
+            if(!channelFrom) {
                 log.error(LogCategory.NETWORKING, tr("Unknown client move (Channel from)!"));
-                channel_from = client.currentChannel();
-            } else if(channel_from != client.currentChannel()) {
+                channelFrom = client.currentChannel();
+            } else if(channelFrom != client.currentChannel()) {
                 log.error(LogCategory.NETWORKING,
                     tr("Client move from invalid source channel! Local client registered in channel %d but server send %d."),
-                    client.currentChannel().channelId, channel_from.channelId
+                    client.currentChannel().channelId, channelFrom.channelId
                 );
             }
         } else {
-            channel_from = client.currentChannel();
+            channelFrom = client.currentChannel();
         }

         tree.moveClient(client, channel_to);
@@ -607,7 +607,7 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
         if(self) {
             this.connection_handler.update_voice_status(channel_to);

-            for(const entry of client.channelTree.clientsByChannel(channel_from)) {
+            for(const entry of client.channelTree.clientsByChannel(channelFrom)) {
                 if(entry !== client && entry.get_audio_handle()) {
                     entry.get_audio_handle().abort_replay();
                     entry.speaking = false;
@@ -616,16 +616,18 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {

             const side_bar = this.connection_handler.side_bar;
             side_bar.info_frame().update_channel_talk();
+        } else {
+            client.speaking = false;
         }

         const own_channel = this.connection.client.getClient().currentChannel();
-        const event = self ? EventType.CLIENT_VIEW_MOVE_OWN : (channel_from == own_channel || channel_to == own_channel ? EventType.CLIENT_VIEW_MOVE_OWN_CHANNEL : EventType.CLIENT_VIEW_MOVE);
+        const event = self ? EventType.CLIENT_VIEW_MOVE_OWN : (channelFrom == own_channel || channel_to == own_channel ? EventType.CLIENT_VIEW_MOVE_OWN_CHANNEL : EventType.CLIENT_VIEW_MOVE);
         this.connection_handler.log.log(event, {
-            channel_from: channel_from ? {
-                channel_id: channel_from.channelId,
-                channel_name: channel_from.channelName()
+            channel_from: channelFrom ? {
+                channel_id: channelFrom.channelId,
+                channel_name: channelFrom.channelName()
             } : undefined,
-            channel_from_own: channel_from == own_channel,
+            channel_from_own: channelFrom == own_channel,

             channel_to: channel_to ? {
                 channel_id: channel_to.channelId,
@@ -650,20 +652,20 @@ export class ConnectionCommandHandler extends AbstractCommandHandler {
                 this.connection_handler.sound.play(Sound.USER_MOVED_SELF);
             else if(own_channel == channel_to)
                 this.connection_handler.sound.play(Sound.USER_ENTERED_MOVED);
-            else if(own_channel == channel_from)
+            else if(own_channel == channelFrom)
                 this.connection_handler.sound.play(Sound.USER_LEFT_MOVED);
         } else if(json["reasonid"] == ViewReasonId.VREASON_USER_ACTION) {
             if(self) {} //If we do an action we wait for the error response
             else if(own_channel == channel_to)
                 this.connection_handler.sound.play(Sound.USER_ENTERED);
-            else if(own_channel == channel_from)
+            else if(own_channel == channelFrom)
                 this.connection_handler.sound.play(Sound.USER_LEFT);
         } else if(json["reasonid"] == ViewReasonId.VREASON_CHANNEL_KICK) {
             if(self) {
                 this.connection_handler.sound.play(Sound.CHANNEL_KICKED);
             } else if(own_channel == channel_to)
                 this.connection_handler.sound.play(Sound.USER_ENTERED_KICKED);
-            else if(own_channel == channel_from)
+            else if(own_channel == channelFrom)
                 this.connection_handler.sound.play(Sound.USER_LEFT_KICKED_CHANNEL);
         } else {
             console.warn(tr("Unknown reason id %o"), json["reasonid"]);
@@ -100,8 +100,8 @@ export class HandshakeHandler {
             client_server_password: this.parameters.password ? this.parameters.password.password : undefined,
             client_browser_engine: navigator.product,

-            client_input_hardware: this.connection.client.hasInputHardware(),
-            client_output_hardware: false,
+            client_input_hardware: this.connection.client.isMicrophoneDisabled(),
+            client_output_hardware: this.connection.client.hasOutputHardware(),
             client_input_muted: this.connection.client.isMicrophoneMuted(),
             client_output_muted: this.connection.client.isSpeakerMuted(),
         };
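Note: the handshake above now reports the live voice state instead of hard-coded values. A minimal sketch of how such a property set can be assembled — only the accessor names are taken from the diff above; the `VoiceStateSource` interface and the helper are illustrative assumptions, not project API:

```typescript
/* Illustrative sketch only. The accessors mirror the ones used in the diff
 * (isMicrophoneDisabled, hasOutputHardware, isMicrophoneMuted, isSpeakerMuted);
 * the surrounding types are assumptions. */
interface VoiceStateSource {
    isMicrophoneDisabled(): boolean;
    hasOutputHardware(): boolean;
    isMicrophoneMuted(): boolean;
    isSpeakerMuted(): boolean;
}

function buildVoiceStateProperties(client: VoiceStateSource) {
    /* mirrors the assignments in the hunk above */
    return {
        client_input_hardware: client.isMicrophoneDisabled(),
        client_output_hardware: client.hasOutputHardware(),
        client_input_muted: client.isMicrophoneMuted(),
        client_output_muted: client.isSpeakerMuted()
    };
}
```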
@@ -21,7 +21,6 @@ import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
 import {formatMessage} from "tc-shared/ui/frames/chat";
 import {openModalNewcomer} from "tc-shared/ui/modal/ModalNewcomer";
 import * as aplayer from "tc-backend/audio/player";
-import * as arecorder from "tc-backend/audio/recorder";
 import * as ppt from "tc-backend/ppt";
 import * as keycontrol from "./KeyControl";
 import * as React from "react";
@@ -182,8 +181,6 @@ async function initialize_app() {
             aplayer.on_ready(() => aplayer.set_master_volume(settings.global(Settings.KEY_SOUND_MASTER) / 100));
         else
             log.warn(LogCategory.GENERAL, tr("Client does not support aplayer.set_master_volume()... May client is too old?"));
-        if(arecorder.device_refresh_available())
-            arecorder.refresh_devices();
     });

     set_default_recorder(new RecorderProfile("default"));
@@ -339,27 +336,10 @@ function main() {
     top_menu.initialize();

     const initial_handler = server_connections.spawn_server_connection();
-    initial_handler.acquire_recorder(default_recorder, false);
+    initial_handler.acquireInputHardware().then(() => {});
     cmanager.server_connections.set_active_connection(initial_handler);
     /** Setup the XF forum identity **/
     fidentity.update_forum();

-    let _resize_timeout;
-    $(window).on('resize', event => {
-        if(event.target !== window)
-            return;
-
-        if(_resize_timeout)
-            clearTimeout(_resize_timeout);
-        _resize_timeout = setTimeout(() => {
-            for(const connection of server_connections.all_connections())
-                connection.invoke_resized_on_activate = true;
-            const active_connection = server_connections.active_connection();
-            if(active_connection)
-                active_connection.resize_elements();
-            $(".window-resize-listener").trigger('resize');
-        }, 1000);
-    });
     keycontrol.initialize();

     stats.initialize({
@@ -512,7 +492,7 @@ const task_teaweb_starter: loader.Task = {
             if(!aplayer.initializeFromGesture) {
                 console.error(tr("Missing aplayer.initializeFromGesture"));
             } else
-                $(document).one('click', event => aplayer.initializeFromGesture());
+                $(document).one('click', () => aplayer.initializeFromGesture());
         }
     } catch (ex) {
         console.error(ex.stack);
@@ -9,7 +9,6 @@ import {
     IdentitifyType,
     Identity
 } from "tc-shared/profiles/Identity";
-import {settings} from "tc-shared/settings";
 import {arrayBufferBase64, base64_encode_ab, str2ab8} from "tc-shared/utils/buffers";
 import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
 import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
@@ -259,7 +258,7 @@ export class TeaSpeakHandshakeHandler extends AbstractHandshakeIdentityHandler {
                 error = error.extra_message || error.message;
                 this.trigger_fail("failed to execute proof (" + error + ")");
             }).then(() => this.trigger_success());
-        }).catch(error => {
+        }).catch(() => {
             this.trigger_fail("failed to sign message");
         });
     }
@@ -702,10 +701,10 @@ export class TeaSpeakIdentity implements Identity {

         const exit = () => {
             const timeout = setTimeout(() => resolve(true), 1000);
-            Promise.all(worker_promise).then(result => {
+            Promise.all(worker_promise).then(() => {
                 clearTimeout(timeout);
                 resolve(true);
-            }).catch(error => resolve(true));
+            }).catch(() => resolve(true));
             active = false;
         };
@@ -34,7 +34,7 @@ export const ModalFunctions = {
         case "string":
             if(type == ElementType.HEADER)
                 return $.spawn("div").addClass("modal-title").text(val);
-            return $("<div>" + val + "</div>");
+            return $("<div>" + val.replace(/\n/g, "<br />") + "</div>");
         case "object": return val as JQuery;
         case "undefined":
             return undefined;
@@ -113,9 +113,6 @@ export class ConnectionManager {
             this._container_channel_tree.append(handler.channelTree.tag_tree());
             this._container_chat.append(handler.side_bar.html_tag());
             this._container_log_server.append(handler.log.getHTMLTag());

-            if(handler.invoke_resized_on_activate)
-                handler.resize_elements();
         }
         const old_handler = this.active_handler;
         this.active_handler = handler;
@@ -3,7 +3,12 @@ import {Button} from "./button";
 import {DropdownEntry} from "tc-shared/ui/frames/control-bar/dropdown";
 import {Translatable} from "tc-shared/ui/react-elements/i18n";
 import {ReactComponentBase} from "tc-shared/ui/react-elements/ReactComponentBase";
-import {ConnectionEvents, ConnectionHandler, ConnectionStateUpdateType} from "tc-shared/ConnectionHandler";
+import {
+    ConnectionEvents,
+    ConnectionHandler,
+    ConnectionState as CConnectionState,
+    ConnectionStateUpdateType
+} from "tc-shared/ConnectionHandler";
 import {Event, EventHandler, ReactEventHandler, Registry} from "tc-shared/events";
 import {ConnectionManagerEvents, server_connections} from "tc-shared/ui/frames/connection_handlers";
 import {Settings, settings} from "tc-shared/settings";
@@ -21,6 +26,7 @@ import {createInputModal} from "tc-shared/ui/elements/Modal";
 import {default_recorder} from "tc-shared/voice/RecorderProfile";
 import {global_client_actions} from "tc-shared/events/GlobalEvents";
 import {icon_cache_loader} from "tc-shared/file/Icons";
+import {InputState} from "tc-shared/voice/RecorderBase";

 const cssStyle = require("./index.scss");
 const cssButtonStyle = require("./button.scss");
@@ -704,8 +710,7 @@ function initialize(event_registry: Registry<InternalControlBarEvents>) {

         if(current_connection_handler) {
             current_connection_handler.setMicrophoneMuted(!state);
-            if(!current_connection_handler.getVoiceRecorder())
-                current_connection_handler.acquire_recorder(default_recorder, true); /* acquire_recorder already updates the voice status */
+            current_connection_handler.acquireInputHardware().then(() => {});
         }
     });
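Note: the control-bar button above now lazily acquires the input hardware instead of attaching a recorder itself. A hedged sketch of that toggle flow — only `setMicrophoneMuted()` and `acquireInputHardware()` are taken from the diff above, the rest of the wiring is illustrative:

```typescript
/* Sketch under assumptions: the ConnectionHandlerLike interface is made up for
 * the example; only the two method names come from the diff above. */
interface ConnectionHandlerLike {
    setMicrophoneMuted(muted: boolean): void;
    acquireInputHardware(): Promise<void>;
}

function toggleMicrophone(handler: ConnectionHandlerLike | undefined, enabled: boolean) {
    if(!handler) {
        return;
    }

    /* flip the mute flag first so the UI reacts immediately */
    handler.setMicrophoneMuted(!enabled);

    /* acquiring the hardware may prompt for permissions, so don't block on it */
    handler.acquireInputHardware().catch(error => {
        console.warn("Failed to acquire input hardware: %o", error);
    });
}
```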
@@ -5,6 +5,10 @@ import { modal as emodal } from "tc-shared/events";
 import {modal_settings} from "tc-shared/ui/modal/ModalSettings";
 import {profiles} from "tc-shared/profiles/ConnectionProfile";
 import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
+import {initialize_audio_microphone_controller, MicrophoneSettingsEvents} from "tc-shared/ui/modal/settings/Microphone";
+import {MicrophoneSettings} from "tc-shared/ui/modal/settings/MicrophoneRenderer";
+import * as React from "react";
+import * as ReactDOM from "react-dom";

 const next_step: {[key: string]:string} = {
     "welcome": "microphone",
@@ -69,7 +73,7 @@ function initializeBasicFunctionality
         tag_body.find(".step").addClass("hidden");
         container_header.find(".step").addClass("hidden");

         tag_body.find(".step.step-" + event.step).removeClass("hidden");
         container_header.find(".step.step-" + event.step).removeClass("hidden");
     });

@@ -79,7 +83,7 @@ function initializeBasicFunctionality
     const button_last_step = buttons.find(".button-last-step");
     const button_next_step = buttons.find(".button-next-step");

-    button_last_step.on('click', event => {
+    button_last_step.on('click', () => {
         if(last_step[current_step])
             event_registry.fire("show_step", { step: last_step[current_step] as any });
         else
@@ -100,8 +104,8 @@ function initializeBasicFunctionality
         button_last_step.text(last_step[current_step] ? tr("Last step") : tr("Skip guide"));
     });

-    event_registry.on("show_step", event => button_next_step.prop("disabled", true));
-    event_registry.on("show_step", event => button_last_step.prop("disabled", true));
+    event_registry.on("show_step", () => button_next_step.prop("disabled", true));
+    event_registry.on("show_step", () => button_last_step.prop("disabled", true));

     event_registry.on("step-status", event => button_next_step.prop("disabled", !event.next_button));
     event_registry.on("step-status", event => button_last_step.prop("disabled", !event.previous_button));
@@ -292,145 +296,28 @@ function initializeStepIdentity
 }

 function initializeStepMicrophone(tag: JQuery, event_registry: Registry<emodal.newcomer>, modal: Modal) {
-    const microphone_events = new Registry<emodal.settings.microphone>();
-    //microphone_events.enable_debug("settings-microphone");
-    //modal_settings.initialize_audio_microphone_controller(microphone_events);
-    //modal_settings.initialize_audio_microphone_view(tag, microphone_events);
-    modal.close_listener.push(() => microphone_events.fire_async("deinitialize"));
-
-    let help_animation_done = false;
-    const update_step_status = () => event_registry.fire_async("step-status", { next_button: help_animation_done, previous_button: help_animation_done });
-    event_registry.on("show_step", e => {
-        if(e.step !== "microphone") return;
-
-        update_step_status();
-    });
-
-    /* the help sequence */
-    {
-        const container = tag.find(".container-settings-audio-microphone");
-        const container_help_text = tag.find(".container-help-text");
-
-        const container_profile_list = tag.find(".highlight-microphone-list");
-        const container_profile_settings = tag.find(".highlight-microphone-settings");
-
-        let is_first_show = true;
-        event_registry.on("show_step", event => {
-            if(!is_first_show || event.step !== "microphone") return;
-            is_first_show = false;
-
-            container.addClass("help-shown");
-            const text = tr( /* @tr-ignore */
-                "Firstly we need to setup a microphone.\n" +
-                "Let me guide you thru the basic UI elements.\n" +
-                "\n" +
-                "To continue click anywhere on the screen."
-            );
-            set_help_text(text);
-            $("body").one('mousedown', event => show_microphone_list_help());
-        });
-
-        const set_help_text = text => {
-            container_help_text.empty();
-            text.split("\n").forEach(e => container_help_text.append(e == "" ? $.spawn("br") : $.spawn("a").text(e)));
-        };
-
-        const show_microphone_list_help = () => {
-            container.find(".highlighted").removeClass("highlighted");
-            container_profile_list.addClass("highlighted");
-
-            const update_position = () => {
-                const font_size = parseFloat(getComputedStyle(container_help_text[0]).fontSize);
-
-                const offset = container_profile_list.offset();
-                const abs = container.offset();
-
-                container_help_text.css({
-                    top: offset.top - abs.top,
-                    left: ((offset.left - abs.left) + container_profile_list.outerWidth() + font_size) + "px",
-                    right: "1em",
-                    bottom: "1em"
-                });
-            };
-            update_position();
-            container_help_text.off('resize').on('resize', update_position);
-
-            const text = tr( /* @tr-ignore */
-                "All your available microphones are listed within this box.\n" +
-                "\n" +
-                "The currently selected microphone\n" +
-                "is marked with a green checkmark. To change the selected microphone\n" +
-                "just click on the new one.\n" +
-                "\n" +
-                "To continue click anywhere on the screen."
-            );
-            set_help_text(text);
-            $("body").one('mousedown', event => show_microphone_settings_help());
-        };
-
-        const show_microphone_settings_help = () => {
-            container.find(".highlighted").removeClass("highlighted");
-            container_profile_settings.addClass("highlighted");
-
-            const update_position = () => {
-                const font_size = parseFloat(getComputedStyle(container_help_text[0]).fontSize);
-                const container_settings_offset = container_profile_settings.offset();
-                const right = container_profile_settings.outerWidth() + font_size * 2;
-                container_help_text.css({
-                    top: container_settings_offset.top - container.offset().top,
-                    left: "1em",
-                    right: right + "px",
-                    bottom: "1em"
-                });
-            };
-
-            container_help_text.empty();
-            container_help_text.append($.spawn("div").addClass("help-microphone-settings").append(
-                $.spawn("a").text(tr("On the right side you'll find all microphone settings.")),
-                $.spawn("br"),
-                $.spawn("a").text("TeaSpeak has three voice activity detection types:"),
-                $.spawn("ol").append(
-                    $.spawn("li").addClass("vad-type").append(
-                        $.spawn("a").addClass("title").text(tr("Push to Talk")),
-                        $.spawn("a").addClass("description").html(tr( /* @tr-ignore */
-                            "To transmit audio data you'll have to<br>" +
-                            "press a key. The key could be selected " +
-                            "via the button right to the radio button."
-                        ))
-                    ),
-                    $.spawn("li").addClass("vad-type").append(
-                        $.spawn("a").addClass("title").text(tr("Voice activity detection")),
-                        $.spawn("a").addClass("description").html(tr( /* @tr-ignore */
-                            "In this mode, TeaSpeak will continuously analyze your microphone input. " +
-                            "If the audio level is grater than a certain threshold, " +
-                            "the audio will be transmitted. " +
-                            "The threshold is changeable via the \"Sensitivity Settings\" slider."
-                        ))
-                    ),
-                    $.spawn("li").addClass("vad-type").append(
-                        $.spawn("a").addClass("title").html(tr("Always active")),
-                        $.spawn("a").addClass("description").text(tr( /* @tr-ignore */
-                            "Continuously transmit any audio data.\n"
-                        ))
-                    )
-                ),
-                $.spawn("br"),
-                $.spawn("a").text(tr("Now you're ready to configure your microphone. Just click anywhere on the screen."))
-            ));
-            update_position();
-            container_help_text.off('resize').on('resize', update_position);
-
-            $("body").one('mousedown', event => hide_help());
-        };
-
-        const hide_help = () => {
-            container.find(".highlighted").removeClass("highlighted");
-            container.addClass("hide-help");
-            setTimeout(() => container.removeClass("help-shown"), 1000);
-            container_help_text.off('resize');
-
-            help_animation_done = true;
-            update_step_status();
-        };
-    }
+    let helpStep = 0;
+
+    const settingEvents = new Registry<MicrophoneSettingsEvents>();
+    settingEvents.on("query_help", () => settingEvents.fire_async("notify_highlight", { field: helpStep <= 2 ? ("hs-" + helpStep) as any : undefined }));
+    settingEvents.on("action_help_click", () => {
+        helpStep++;
+        settingEvents.fire("query_help");
+
+        event_registry.fire_async("step-status", { next_button: helpStep > 2, previous_button: helpStep > 2 })
+    });
+
+    initialize_audio_microphone_controller(settingEvents);
+    ReactDOM.render(<MicrophoneSettings events={settingEvents} />, tag[0]);
+
+    modal.close_listener.push(() => {
+        settingEvents.fire("notify_destroy");
+        ReactDOM.unmountComponentAtNode(tag[0]);
+    });
+
+    event_registry.on("show_step", event => {
+        if(event.step !== "microphone") return;
+        event_registry.fire_async("step-status", { next_button: helpStep > 2, previous_button: helpStep > 2 });
+    });
 }
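Note: the newcomer microphone step above drives the guided help purely over events. A minimal sketch of that pattern with a stand-in typed emitter (the `TinyEmitter` helper is an assumption — the real code uses the project's `Registry`; the event names `query_help`, `action_help_click` and `notify_highlight` are the ones used in the diff):

```typescript
/* Sketch only: a tiny stand-in emitter instead of the project's Registry. */
type HelpEvents = {
    "query_help": {},
    "action_help_click": {},
    "notify_highlight": { field: "hs-0" | "hs-1" | "hs-2" | undefined },
};

class TinyEmitter<Events extends Record<string, object>> {
    private handlers: { [K in keyof Events]?: ((payload: Events[K]) => void)[] } = {};

    on<K extends keyof Events>(event: K, handler: (payload: Events[K]) => void) {
        (this.handlers[event] ??= []).push(handler);
    }

    fire<K extends keyof Events>(event: K, payload: Events[K]) {
        for(const handler of this.handlers[event] ?? []) {
            handler(payload);
        }
    }
}

/* Wire up the three-step help walkthrough the same way the modal does:
 * each click advances helpStep and re-queries which region to highlight. */
const events = new TinyEmitter<HelpEvents>();
let helpStep = 0;

events.on("query_help", () => {
    events.fire("notify_highlight", { field: helpStep <= 2 ? ("hs-" + helpStep) as any : undefined });
});

events.on("action_help_click", () => {
    helpStep++;
    events.fire("query_help", {});
});
```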
@@ -12,8 +12,6 @@ import {LogCategory} from "tc-shared/log";
 import * as profiles from "tc-shared/profiles/ConnectionProfile";
 import {RepositoryTranslation, TranslationRepository} from "tc-shared/i18n/localize";
 import {Registry} from "tc-shared/events";
-import {key_description} from "tc-shared/PPTListener";
-import {default_recorder} from "tc-shared/voice/RecorderProfile";
 import {spawnYesNo} from "tc-shared/ui/modal/ModalYesNo";
 import * as i18n from "tc-shared/i18n/localize";
 import * as i18nc from "tc-shared/i18n/country";
@@ -22,12 +20,9 @@ import * as events from "tc-shared/events";
 import * as sound from "tc-shared/sound/Sounds";
 import * as forum from "tc-shared/profiles/identities/teaspeak-forum";
 import {formatMessage, set_icon_size} from "tc-shared/ui/frames/chat";
-import {spawnKeySelect} from "tc-shared/ui/modal/ModalKeySelect";
 import {spawnTeamSpeakIdentityImport, spawnTeamSpeakIdentityImprove} from "tc-shared/ui/modal/ModalIdentity";
 import {Device} from "tc-shared/audio/player";
-import {LevelMeter} from "tc-shared/voice/RecorderBase";
 import * as aplayer from "tc-backend/audio/player";
-import * as arecorder from "tc-backend/audio/recorder";
 import {KeyMapSettings} from "tc-shared/ui/modal/settings/Keymap";
 import * as React from "react";
 import * as ReactDOM from "react-dom";
115 shared/js/ui/modal/settings/Heighlight.scss Normal file
@@ -0,0 +1,115 @@
+@import "../../../../css/static/mixin.scss";
+@import "../../../../css/static/properties.scss";
+
+.container {
+    $highlight-time: .5s;
+    $backdrop-color: rgba(0, 0, 0, .9);
+
+    display: flex;
+    position: relative;
+
+    padding: .5em;
+
+    background-color: inherit;
+
+    .background {
+        position: absolute;
+
+        top: 0;
+        left: 0;
+        right: 0;
+        bottom: 0;
+
+        display: none;
+        background-color: $backdrop-color;
+        border-radius: .15em;
+
+        padding: .5em;
+    }
+
+    /*
+    .highlightable {
+        display: flex;
+    }
+    */
+
+    .helpText {
+        opacity: 0;
+        z-index: 20;
+
+        pointer-events: none;
+
+        display: block;
+
+        overflow: auto;
+        @include chat-scrollbar();
+        @include transition($highlight-time ease-in-out);
+
+        a {
+            display: block;
+        }
+
+        ol {
+            margin-top: .5em;
+            margin-bottom: 0;
+        }
+
+        li {
+            margin-bottom: .5em;
+
+            .title {
+                font-weight: bold;
+            }
+        }
+
+        &.shown {
+            opacity: 1;
+            pointer-events: initial;
+
+            @include transition($highlight-time ease-in-out);
+        }
+    }
+
+    &.shown {
+        .background {
+            display: flex;
+            z-index: 1;
+
+            opacity: 1;
+        }
+
+        .highlightable {
+            border-radius: .1em;
+            position: relative;
+            z-index: 10;
+
+            background-color: inherit;
+
+            @include transition($highlight-time ease-in-out);
+
+            &::after {
+                content: ' ';
+
+                z-index: 5;
+                position: absolute;
+
+                top: 0;
+                left: 0;
+                right: 0;
+                bottom: 0;
+
+                background-color: $backdrop-color;
+
+                @include transition($highlight-time ease-in-out);
+            }
+
+            &.highlighted {
+                padding: .5em;
+
+                &::after {
+                    background-color: #00000000;
+                }
+            }
+        }
+    }
+}
38 shared/js/ui/modal/settings/Heighlight.tsx Normal file
@@ -0,0 +1,38 @@
+import * as React from "react";
+import {useContext} from "react";
+
+const cssStyle = require("./Heighlight.scss");
+
+const HighlightContext = React.createContext<string>(undefined);
+export const HighlightContainer = (props: { children: React.ReactNode | React.ReactNode[], classList?: string, highlightedId?: string, onClick?: () => void }) => {
+    return (
+        <HighlightContext.Provider value={props.highlightedId}>
+            <div className={cssStyle.container + " " + (props.highlightedId ? cssStyle.shown : "") + " " + props.classList} onClick={props.highlightedId ? props.onClick : undefined}>
+                {props.children}
+                <div className={cssStyle.background} />
+            </div>
+        </HighlightContext.Provider>
+    );
+};
+
+export const HighlightRegion = (props: React.HTMLProps<HTMLDivElement> & { highlightId: string } ) => {
+    const wProps = Object.assign({}, props);
+    delete wProps["highlightId"];
+
+    const highlightedId = useContext(HighlightContext);
+    const highlighted = highlightedId === props.highlightId;
+
+    wProps.className = (props.className || "") + " " + cssStyle.highlightable + " " + (highlighted ? cssStyle.highlighted : "");
+    return React.createElement("div", wProps);
+};
+
+export const HighlightText = (props: { highlightId: string, className?: string, children?: React.ReactNode | React.ReactNode[] } ) => {
+    const highlightedId = useContext(HighlightContext);
+    const highlighted = highlightedId === props.highlightId;
+
+    return (
+        <div className={cssStyle.helpText + " " + (highlighted ? cssStyle.shown : "") + " " + props.className}>
+            {props.children}
+        </div>
+    )
+};
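Note: the three components in the new file above compose via a React context keyed by the currently highlighted id. A hedged usage sketch (everything except the three Heighlight exports is made up for the example, including the highlight id `"hs-0"` and the `SettingsBody` component):

```tsx
/* Usage sketch for the components above; illustrative only. */
import * as React from "react";
import {useState} from "react";
import {HighlightContainer, HighlightRegion, HighlightText} from "./Heighlight";

const SettingsBody = () => {
    /* undefined means "no overlay shown"; any other id dims everything but that region */
    const [ highlightedId, setHighlightedId ] = useState<string | undefined>("hs-0");

    return (
        <HighlightContainer highlightedId={highlightedId} onClick={() => setHighlightedId(undefined)}>
            <HighlightRegion highlightId={"hs-0"}>
                {/* content that stays fully visible while "hs-0" is active */}
            </HighlightRegion>

            <HighlightText highlightId={"hs-0"}>
                Explanatory text rendered next to the highlighted region.
            </HighlightText>
        </HighlightContainer>
    );
};
```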
@@ -1,6 +1,11 @@
 @import "../../../../css/static/properties";
 @import "../../../../css/static/mixin";

+.highlightContainer {
+    height: 100%;
+    width: 100%;
+}
+
 .container {
     display: flex;
     flex-direction: row;
@@ -11,6 +16,7 @@
     min-width: 43em;
     min-height: 41em;

+    background-color: inherit;
     position: relative;

     .left, .right {
@@ -241,7 +247,7 @@
         }

         .header {
-            height: 2.6em;
+            height: 3em;

             flex-grow: 0;
             flex-shrink: 0;
@@ -266,7 +272,7 @@
                 white-space: nowrap;
             }

-            .btn {
+            button {
                 flex-shrink: 0;
                 flex-grow: 0;
@@ -452,10 +458,24 @@
                 text-align: center;
             }

+            button {
+                width: 10em;
+                align-self: center;
+                margin-top: 2em;
+            }
+
             &.hidden {
                 pointer-events: none;
                 opacity: 0;
             }
+
+            :global(.icon_em) {
+                align-self: center;
+                font-size: 10em;
+
+                margin-bottom: .25em;
+                margin-top: -.25em;
+            }
         }
     }
@@ -574,6 +594,20 @@
     filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#70407e', endColorstr='#45407e',GradientType=1 ); /* IE6-9 */
 }

+/* The help overlays */
+.help {
+    position: absolute;
+
+    top: 0;
+    left: 0;
+    right: 0;
+    bottom: 0;
+
+    &.paddingTop {
+        padding-top: 3.6em;
+    }
+}
+
 @-moz-keyframes spin { 100% { -moz-transform: rotate(360deg); } }
 @-webkit-keyframes spin { 100% { -webkit-transform: rotate(360deg); } }
 @keyframes spin { 100% { -webkit-transform: rotate(360deg); transform:rotate(360deg); } }
@@ -2,16 +2,10 @@ import * as aplayer from "tc-backend/audio/player";
 import * as React from "react";
 import {Registry} from "tc-shared/events";
 import {LevelMeter} from "tc-shared/voice/RecorderBase";
-import * as arecorder from "tc-backend/audio/recorder";
 import * as log from "tc-shared/log";
 import {LogCategory, logWarn} from "tc-shared/log";
 import {default_recorder} from "tc-shared/voice/RecorderProfile";
-import * as loader from "tc-loader";
-import {Stage} from "tc-loader";
-import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
-import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller";
-import {Translatable} from "tc-shared/ui/react-elements/i18n";
-import {MicrophoneSettings} from "tc-shared/ui/modal/settings/MicrophoneRenderer";
+import {DeviceListState, getRecorderBackend, IDevice} from "tc-shared/audio/recorder";

 export type MicrophoneSetting = "volume" | "vad-type" | "ppt-key" | "ppt-release-delay" | "ppt-release-delay-active" | "threshold-threshold";
@@ -24,11 +18,13 @@ export type MicrophoneDevice = {

 export interface MicrophoneSettingsEvents {
     "query_devices": { refresh_list: boolean },
+    "query_help": {},
     "query_setting": {
         setting: MicrophoneSetting
     },

+    "action_help_click": {},
+    "action_request_permissions": {},
     "action_set_selected_device": { deviceId: string },
     "action_set_selected_device_result": {
         deviceId: string, /* on error it will contain the current selected device */
@@ -48,9 +44,11 @@
     }

     "notify_devices": {
-        status: "success" | "error" | "audio-not-initialized",
+        status: "success" | "error" | "audio-not-initialized" | "no-permissions",

         error?: string,
+        shouldAsk?: boolean,

         devices?: MicrophoneDevice[]
         selectedDevice?: string;
     },
@@ -62,13 +60,21 @@

             level?: number,
             error?: string
-        }}
+        }},
+
+        status: Exclude<DeviceListState, "error">
     },

+    notify_highlight: {
+        field: "hs-0" | "hs-1" | "hs-2" | undefined
+    }
+
     notify_destroy: {}
 }

 export function initialize_audio_microphone_controller(events: Registry<MicrophoneSettingsEvents>) {
+    const recorderBackend = getRecorderBackend();
+
     /* level meters */
     {
         const level_meters: {[key: string]:Promise<LevelMeter>} = {};
@@ -80,7 +86,7 @@ export function initialize_audio_microphone_controller
             const meter = level_meters[e];
             delete level_meters[e];

-            meter.then(e => e.destory());
+            meter.then(e => e.destroy());
         });
         Object.keys(level_info).forEach(e => delete level_info[e]);
     };
@@ -88,37 +94,42 @@ export function initialize_audio_microphone_controller
     const update_level_meter = () => {
         destroy_meters();

-        for(const device of arecorder.devices()) {
-            let promise = arecorder.create_levelmeter(device).then(meter => {
-                meter.set_observer(level => {
-                    if(level_meters[device.unique_id] !== promise) return; /* old level meter */
-
-                    level_info[device.unique_id] = {
-                        deviceId: device.unique_id,
+        level_info["none"] = { deviceId: "none", status: "success", level: 0 };
+
+        for(const device of recorderBackend.getDeviceList().getDevices()) {
+            let promise = recorderBackend.createLevelMeter(device).then(meter => {
+                meter.set_observer(level => {
+                    if(level_meters[device.deviceId] !== promise) return; /* old level meter */
+
+                    level_info[device.deviceId] = {
+                        deviceId: device.deviceId,
                         status: "success",
                         level: level
                     };
                 });
                 return Promise.resolve(meter);
             }).catch(error => {
-                if(level_meters[device.unique_id] !== promise) return; /* old level meter */
-                level_info[device.unique_id] = {
-                    deviceId: device.unique_id,
+                if(level_meters[device.deviceId] !== promise) return; /* old level meter */
+                level_info[device.deviceId] = {
+                    deviceId: device.deviceId,
                     status: "error",

                     error: error
                 };

-                log.warn(LogCategory.AUDIO, tr("Failed to initialize a level meter for device %s (%s): %o"), device.unique_id, device.driver + ":" + device.name, error);
+                log.warn(LogCategory.AUDIO, tr("Failed to initialize a level meter for device %s (%s): %o"), device.deviceId, device.driver + ":" + device.name, error);
                 return Promise.reject(error);
             });
-            level_meters[device.unique_id] = promise;
+            level_meters[device.deviceId] = promise;
         }
     };

     level_update_task = setInterval(() => {
+        const deviceListStatus = recorderBackend.getDeviceList().getStatus();
+
         events.fire("notify_device_level", {
-            level: level_info
+            level: level_info,
+            status: deviceListStatus === "error" ? "uninitialized" : deviceListStatus
         });
     }, 50);
@@ -142,34 +153,43 @@ export function initialize_audio_microphone_controller
             return;
         }

-        Promise.resolve().then(() => {
-            return arecorder.device_refresh_available() && event.refresh_list ? arecorder.refresh_devices() : Promise.resolve();
-        }).catch(error => {
-            log.warn(LogCategory.AUDIO, tr("Failed to refresh device list: %o"), error);
-            return Promise.resolve();
-        }).then(() => {
-            const devices = arecorder.devices();
+        const deviceList = recorderBackend.getDeviceList();
+        switch (deviceList.getStatus()) {
+            case "no-permissions":
+                events.fire_async("notify_devices", { status: "no-permissions", shouldAsk: deviceList.getPermissionState() === "denied" });
+                return;
+
+            case "uninitialized":
+                events.fire_async("notify_devices", { status: "audio-not-initialized" });
+                return;
+        }
+
+        if(event.refresh_list && deviceList.isRefreshAvailable()) {
+            /* will automatically trigger a device list changed event if something has changed */
+            deviceList.refresh().then(() => {});
+        } else {
+            const devices = deviceList.getDevices();

             events.fire_async("notify_devices", {
                 status: "success",
-                selectedDevice: default_recorder.current_device() ? default_recorder.current_device().unique_id : "none",
-                devices: devices.map(e => { return { id: e.unique_id, name: e.name, driver: e.driver }})
+                selectedDevice: default_recorder.getDeviceId(),
+                devices: devices.map(e => { return { id: e.deviceId, name: e.name, driver: e.driver }})
            });
-        });
+        }
     });

     events.on("action_set_selected_device", event => {
-        const device = arecorder.devices().find(e => e.unique_id === event.deviceId);
-        if(!device && event.deviceId !== "none") {
-            events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: default_recorder.current_device().unique_id });
+        const device = recorderBackend.getDeviceList().getDevices().find(e => e.deviceId === event.deviceId);
+        if(!device && event.deviceId !== IDevice.NoDeviceId) {
+            events.fire_async("action_set_selected_device_result", { status: "error", error: tr("Invalid device id"), deviceId: default_recorder.getDeviceId() });
             return;
         }

         default_recorder.set_device(device).then(() => {
-            console.debug(tr("Changed default microphone device"));
+            console.debug(tr("Changed default microphone device to %s"), event.deviceId);
             events.fire_async("action_set_selected_device_result", { status: "success", deviceId: event.deviceId });
         }).catch((error) => {
-            log.warn(LogCategory.AUDIO, tr("Failed to change microphone to device %s: %o"), device ? device.unique_id : "none", error);
+            log.warn(LogCategory.AUDIO, tr("Failed to change microphone to device %s: %o"), device ? device.deviceId : IDevice.NoDeviceId, error);
             events.fire_async("action_set_selected_device_result", { status: "success", deviceId: event.deviceId });
         });
     });
@@ -265,7 +285,67 @@ export function initialize_audio_microphone_controller
         });
     }

+    events.on("action_request_permissions", () => recorderBackend.getDeviceList().requestPermissions().then(result => {
+        console.error("Permission request result: %o", result);
+
+        if(result === "granted") {
+            /* we've nothing to do, the device change event will already update out list */
+        } else {
+            events.fire_async("notify_devices", { status: "no-permissions", shouldAsk: result === "denied" });
+            return;
+        }
+    }));
+
+    events.on("notify_destroy", recorderBackend.getDeviceList().getEvents().on("notify_list_updated", () => {
+        events.fire("query_devices");
+    }));
+
+    events.on("notify_destroy", recorderBackend.getDeviceList().getEvents().on("notify_state_changed", () => {
+        events.fire("query_devices");
+    }));
+
     if(!aplayer.initialized()) {
         aplayer.on_ready(() => { events.fire_async("query_devices"); });
     }
 }
+
+/*
+import * as loader from "tc-loader";
+import {Stage} from "tc-loader";
+import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
+import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller";
+import {Translatable} from "tc-shared/ui/react-elements/i18n";
+import {MicrophoneSettings} from "tc-shared/ui/modal/settings/MicrophoneRenderer";
+
+loader.register_task(Stage.LOADED, {
+    name: "test",
+    function: async () => {
+        aplayer.on_ready(() => {
+            const modal = spawnReactModal(class extends InternalModal {
+                settings = new Registry<MicrophoneSettingsEvents>();
+                constructor() {
+                    super();
+
+                    initialize_audio_microphone_controller(this.settings);
+                }
+
+                renderBody(): React.ReactElement {
+                    return <div style={{
+                        padding: "1em",
+                        backgroundColor: "#2f2f35"
+                    }}>
+                        <MicrophoneSettings events={this.settings} />
+                    </div>;
+                }
+
+                title(): string | React.ReactElement<Translatable> {
+                    return "test";
+                }
+            });
+
+            modal.show();
+        });
+    },
+    priority: -2
+})
+*/
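Note: the controller above leans on the recorder backend's device list for status, permissions and change notifications. A condensed sketch of that flow — the method names (`getRecorderBackend`, `getDeviceList`, `getStatus`, `requestPermissions`, `isRefreshAvailable`, `refresh`, `getDevices`, `getEvents`) are the ones used in the diff, but the sketch itself is an assumption, not the project's actual API contract:

```typescript
/* Sketch under assumptions: method names taken from the diff above,
 * wiring and error handling are illustrative. */
import {getRecorderBackend} from "tc-shared/audio/recorder";

async function ensureDeviceList(onDevicesChanged: () => void) {
    const deviceList = getRecorderBackend().getDeviceList();

    /* re-query whenever the backend reports a new list or a state change */
    deviceList.getEvents().on("notify_list_updated", onDevicesChanged);
    deviceList.getEvents().on("notify_state_changed", onDevicesChanged);

    if(deviceList.getStatus() === "no-permissions") {
        /* may show a browser prompt; "granted" triggers a list update by itself */
        const result = await deviceList.requestPermissions();
        if(result !== "granted") {
            return [];
        }
    }

    if(deviceList.isRefreshAvailable()) {
        await deviceList.refresh();
    }

    return deviceList.getDevices();
}
```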
@ -1,7 +1,7 @@
|
||||||
import * as React from "react";
|
import * as React from "react";
|
||||||
import {Translatable} from "tc-shared/ui/react-elements/i18n";
|
import {Translatable} from "tc-shared/ui/react-elements/i18n";
|
||||||
import {Button} from "tc-shared/ui/react-elements/Button";
|
import {Button} from "tc-shared/ui/react-elements/Button";
|
||||||
import {modal, Registry} from "tc-shared/events";
|
import {Registry} from "tc-shared/events";
|
||||||
import {MicrophoneDevice, MicrophoneSettingsEvents} from "tc-shared/ui/modal/settings/Microphone";
|
import {MicrophoneDevice, MicrophoneSettingsEvents} from "tc-shared/ui/modal/settings/Microphone";
|
||||||
import {useEffect, useRef, useState} from "react";
|
import {useEffect, useRef, useState} from "react";
|
||||||
import {ClientIconRenderer} from "tc-shared/ui/react-elements/Icons";
|
import {ClientIconRenderer} from "tc-shared/ui/react-elements/Icons";
|
||||||
|
@ -9,13 +9,14 @@ import {ClientIcon} from "svg-sprites/client-icons";
|
||||||
import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
|
import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
|
||||||
import {createErrorModal} from "tc-shared/ui/elements/Modal";
|
import {createErrorModal} from "tc-shared/ui/elements/Modal";
|
||||||
import {Slider} from "tc-shared/ui/react-elements/Slider";
|
import {Slider} from "tc-shared/ui/react-elements/Slider";
|
||||||
import MicrophoneSettings = modal.settings.MicrophoneSettings;
|
|
||||||
import {RadioButton} from "tc-shared/ui/react-elements/RadioButton";
|
import {RadioButton} from "tc-shared/ui/react-elements/RadioButton";
|
||||||
import {VadType} from "tc-shared/voice/RecorderProfile";
|
import {VadType} from "tc-shared/voice/RecorderProfile";
|
||||||
import {key_description, KeyDescriptor} from "tc-shared/PPTListener";
|
import {key_description, KeyDescriptor} from "tc-shared/PPTListener";
|
||||||
import {spawnKeySelect} from "tc-shared/ui/modal/ModalKeySelect";
|
import {spawnKeySelect} from "tc-shared/ui/modal/ModalKeySelect";
|
||||||
import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
|
import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
|
||||||
import {BoxedInputField} from "tc-shared/ui/react-elements/InputField";
|
import {BoxedInputField} from "tc-shared/ui/react-elements/InputField";
|
||||||
|
import {IDevice} from "tc-shared/audio/recorder";
|
||||||
|
import {HighlightContainer, HighlightRegion, HighlightText} from "./Heighlight";
|
||||||
|
|
||||||
const cssStyle = require("./Microphone.scss");
|
const cssStyle = require("./Microphone.scss");
|
||||||
|
|
||||||
|
@ -37,28 +38,41 @@ const MicrophoneStatus = (props: { state: MicrophoneSelectedState }) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type ActivityBarStatus = { mode: "success" } | { mode: "error", message: string } | { mode: "loading" };
|
type ActivityBarStatus = { mode: "success" } | { mode: "error", message: string } | { mode: "loading" } | { mode: "uninitialized" };
|
||||||
const ActivityBar = (props: { events: Registry<MicrophoneSettingsEvents>, deviceId: string, disabled?: boolean }) => {
|
const ActivityBar = (props: { events: Registry<MicrophoneSettingsEvents>, deviceId: string, disabled?: boolean }) => {
|
||||||
const refHider = useRef<HTMLDivElement>();
|
const refHider = useRef<HTMLDivElement>();
|
||||||
const [ status, setStatus ] = useState<ActivityBarStatus>({ mode: "loading" });
|
const [ status, setStatus ] = useState<ActivityBarStatus>({ mode: "loading" });
|
||||||
|
|
||||||
props.events.reactUse("notify_device_level", event => {
|
props.events.reactUse("notify_device_level", event => {
|
||||||
const device = event.level[props.deviceId];
|
if(event.status === "uninitialized") {
|
||||||
if(!device) {
|
if(status.mode === "uninitialized")
|
||||||
if(status.mode === "loading")
|
|
||||||
return;
|
return;
|
||||||
|
|
||||||
setStatus({ mode: "loading" });
|
setStatus({ mode: "uninitialized" });
|
||||||
} else if(device.status === "success") {
|
} else if(event.status === "no-permissions") {
|
||||||
if(status.mode !== "success") {
|
const noPermissionsMessage = tr("no permissions");
|
||||||
setStatus({ mode: "success" });
|
if(status.mode === "error" && status.message === noPermissionsMessage)
|
||||||
}
|
return;
|
||||||
refHider.current.style.width = (100 - device.level) + "%";
|
|
||||||
|
setStatus({ mode: "error", message: noPermissionsMessage });
|
||||||
} else {
|
} else {
|
||||||
if(status.mode === "error" && status.message === device.error)
|
const device = event.level[props.deviceId];
|
||||||
return;
|
if(!device) {
|
||||||
|
if(status.mode === "loading")
|
||||||
|
return;
|
||||||
|
|
||||||
setStatus({ mode: "error", message: device.error });
|
setStatus({ mode: "loading" });
|
||||||
|
} else if(device.status === "success") {
|
||||||
|
if(status.mode !== "success") {
|
||||||
|
setStatus({ mode: "success" });
|
||||||
|
}
|
||||||
|
refHider.current.style.width = (100 - device.level) + "%";
|
||||||
|
} else {
|
||||||
|
if(status.mode === "error" && status.message === device.error)
|
||||||
|
return;
|
||||||
|
|
||||||
|
setStatus({ mode: "error", message: device.error + "" });
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -96,37 +110,74 @@ const Microphone = (props: { events: Registry<MicrophoneSettingsEvents>, device:
|
||||||
<div className={cssStyle.name}>{props.device.name}</div>
|
<div className={cssStyle.name}>{props.device.name}</div>
|
||||||
</div>
|
</div>
|
||||||
<div className={cssStyle.containerActivity}>
|
<div className={cssStyle.containerActivity}>
|
||||||
<ActivityBar events={props.events} deviceId={props.device.id} />
|
{props.device.id === IDevice.NoDeviceId ? undefined :
|
||||||
|
<ActivityBar key={"a"} events={props.events} deviceId={props.device.id} />
|
||||||
|
}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type MicrophoneListState = {
|
||||||
|
type: "normal" | "loading" | "audio-not-initialized"
|
||||||
|
} | {
|
||||||
|
type: "error",
|
||||||
|
message: string
|
||||||
|
} | {
|
||||||
|
type: "no-permissions",
|
||||||
|
bySystem: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
const PermissionDeniedOverlay = (props: { bySystem: boolean, shown: boolean, onRequestPermission: () => void }) => {
|
||||||
|
if(props.bySystem) {
|
||||||
|
return (
|
||||||
|
<div key={"system"} className={cssStyle.overlay + " " + (props.shown ? undefined : cssStyle.hidden)}>
|
||||||
|
<ClientIconRenderer icon={ClientIcon.MicrophoneBroken} />
|
||||||
|
<a><Translatable>Microphone access has been blocked by your browser.</Translatable></a>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
return (
|
||||||
|
<div key={"user"} className={cssStyle.overlay + " " + (props.shown ? undefined : cssStyle.hidden)}>
|
||||||
|
<a><Translatable>Please grant access to your microphone.</Translatable></a>
|
||||||
|
<Button
|
||||||
|
key={"request"}
|
||||||
|
color={"green"}
|
||||||
|
type={"small"}
|
||||||
|
onClick={props.onRequestPermission}
|
||||||
|
><Translatable>Request access</Translatable></Button>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) => {
|
const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) => {
|
||||||
const [ state, setState ] = useState<"normal" | "loading" | "error" | "audio-not-initialized">(() => {
|
const [ state, setState ] = useState<MicrophoneListState>(() => {
|
||||||
props.events.fire("query_devices");
|
props.events.fire("query_devices");
|
||||||
return "loading";
|
return { type: "loading" };
|
||||||
});
|
});
|
||||||
const [ selectedDevice, setSelectedDevice ] = useState<{ deviceId: string, mode: "selected" | "selecting" }>();
|
const [ selectedDevice, setSelectedDevice ] = useState<{ deviceId: string, mode: "selected" | "selecting" }>();
|
||||||
const [ deviceList, setDeviceList ] = useState<MicrophoneDevice[]>([]);
|
const [ deviceList, setDeviceList ] = useState<MicrophoneDevice[]>([]);
|
||||||
const [ error, setError ] = useState(undefined);
|
|
||||||
|
|
||||||
props.events.reactUse("notify_devices", event => {
|
props.events.reactUse("notify_devices", event => {
|
||||||
setSelectedDevice(undefined);
|
setSelectedDevice(undefined);
|
||||||
switch (event.status) {
|
switch (event.status) {
|
||||||
case "success":
|
case "success":
|
||||||
setDeviceList(event.devices.slice(0));
|
setDeviceList(event.devices.slice(0));
|
||||||
setState("normal");
|
setState({ type: "normal" });
|
||||||
setSelectedDevice({ mode: "selected", deviceId: event.selectedDevice });
|
setSelectedDevice({ mode: "selected", deviceId: event.selectedDevice });
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case "error":
|
case "error":
|
||||||
setError(event.error || tr("Unknown error"));
|
setState({ type: "error", message: event.error || tr("Unknown error") });
|
||||||
setState("error");
|
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case "audio-not-initialized":
|
case "audio-not-initialized":
|
||||||
setState("audio-not-initialized");
|
setState({ type: "audio-not-initialized" });
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "no-permissions":
|
||||||
|
setState({ type: "no-permissions", bySystem: event.shouldAsk });
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -144,25 +195,50 @@ const MicrophoneList = (props: { events: Registry<MicrophoneSettingsEvents> }) =
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className={cssStyle.body + " " + cssStyle.containerDevices}>
|
<div className={cssStyle.body + " " + cssStyle.containerDevices}>
|
||||||
<div className={cssStyle.overlay + " " + (state !== "audio-not-initialized" ? cssStyle.hidden : undefined)}>
|
<div className={cssStyle.overlay + " " + (state.type !== "audio-not-initialized" ? cssStyle.hidden : undefined)}>
|
||||||
<a>
|
<a>
|
||||||
<Translatable>The web audio player hasn't been initialized yet.</Translatable>
|
<Translatable>The web audio player hasn't been initialized yet.</Translatable>
|
||||||
<Translatable>Click somewhere on the page to initialize it.</Translatable>
|
<Translatable>Click somewhere on the page to initialize it.</Translatable>
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
<div className={cssStyle.overlay + " " + (state !== "error" ? cssStyle.hidden : undefined)}>
|
<div className={cssStyle.overlay + " " + (state.type !== "error" ? cssStyle.hidden : undefined)}>
|
||||||
<a>{error}</a>
|
<a>{state.type === "error" ? state.message : undefined}</a>
|
||||||
</div>
|
</div>
|
||||||
<div className={cssStyle.overlay + " " + (state !== "loading" ? cssStyle.hidden : undefined)}>
|
<div className={cssStyle.overlay + " " + (state.type !== "no-permissions" ? cssStyle.hidden : undefined)}>
|
||||||
|
<a><Translatable>Please grant access to your microphone.</Translatable></a>
|
||||||
|
<Button
|
||||||
|
color={"green"}
|
||||||
|
type={"small"}
|
||||||
|
onClick={() => props.events.fire("action_request_permissions") }
|
||||||
|
><Translatable>Request access</Translatable></Button>
|
||||||
|
</div>
|
||||||
|
<PermissionDeniedOverlay
|
||||||
|
bySystem={state.type === "no-permissions" ? state.bySystem : false}
|
||||||
|
shown={state.type === "no-permissions"}
|
||||||
|
onRequestPermission={() => props.events.fire("action_request_permissions")}
|
||||||
|
/>
|
||||||
|
<div className={cssStyle.overlay + " " + (state.type !== "loading" ? cssStyle.hidden : undefined)}>
|
||||||
<a><Translatable>Loading</Translatable> <LoadingDots/></a>
|
<a><Translatable>Loading</Translatable> <LoadingDots/></a>
|
||||||
</div>
|
</div>
|
||||||
|
<Microphone key={"d-default"}
|
||||||
|
device={{ id: IDevice.NoDeviceId, driver: tr("No device"), name: tr("No device") }}
|
||||||
|
events={props.events}
|
||||||
|
state={IDevice.NoDeviceId === selectedDevice?.deviceId ? selectedDevice.mode === "selecting" ? "applying" : "selected" : "unselected"}
|
||||||
|
onClick={() => {
|
||||||
|
if(state.type !== "normal" || selectedDevice?.mode === "selecting")
|
||||||
|
return;
|
||||||
|
|
||||||
|
props.events.fire("action_set_selected_device", { deviceId: IDevice.NoDeviceId });
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
|
||||||
{deviceList.map(e => <Microphone
|
{deviceList.map(e => <Microphone
|
||||||
key={"d-" + e.id}
|
key={"d-" + e.id}
|
||||||
device={e}
|
device={e}
|
||||||
events={props.events}
|
events={props.events}
|
||||||
state={e.id === selectedDevice?.deviceId ? selectedDevice.mode === "selecting" ? "applying" : "selected" : "unselected"}
|
state={e.id === selectedDevice?.deviceId ? selectedDevice.mode === "selecting" ? "applying" : "selected" : "unselected"}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
if(state !== "normal" || selectedDevice?.mode === "selecting")
|
if(state.type !== "normal" || selectedDevice?.mode === "selecting")
|
||||||
return;
|
return;
|
||||||
|
|
||||||
props.events.fire("action_set_selected_device", { deviceId: e.id });
|
props.events.fire("action_set_selected_device", { deviceId: e.id });
|
||||||
|
@ -187,7 +263,7 @@ const ListRefreshButton = (props: { events: Registry<MicrophoneSettingsEvents> }
|
||||||
|
|
||||||
props.events.reactUse("query_devices", () => setUpdateTimeout(Date.now() + 2000));
|
props.events.reactUse("query_devices", () => setUpdateTimeout(Date.now() + 2000));
|
||||||
|
|
||||||
return <Button disabled={updateTimeout > 0} type={"small"} color={"blue"} onClick={() => props.events.fire("query_devices", { refresh_list: true })}>
|
return <Button disabled={updateTimeout > 0} color={"blue"} onClick={() => props.events.fire("query_devices", { refresh_list: true })}>
|
||||||
<Translatable>Update</Translatable>
|
<Translatable>Update</Translatable>
|
||||||
</Button>;
|
</Button>;
|
||||||
}
|
}
|
||||||
|
@ -203,7 +279,6 @@ const VolumeSettings = (props: { events: Registry<MicrophoneSettingsEvents> }) =
|
||||||
if(event.setting !== "volume")
|
if(event.setting !== "volume")
|
||||||
return;
|
return;
|
||||||
|
|
||||||
console.error("Set value: %o", event.value);
|
|
||||||
refSlider.current?.setState({ value: event.value });
|
refSlider.current?.setState({ value: event.value });
|
||||||
setValue(event.value);
|
setValue(event.value);
|
||||||
});
|
});
|
||||||
|
@ -386,6 +461,7 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
||||||
return "loading";
|
return "loading";
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const [ currentDevice, setCurrentDevice ] = useState(undefined);
|
||||||
const [ isActive, setActive ] = useState(false);
|
const [ isActive, setActive ] = useState(false);
|
||||||
|
|
||||||
props.events.reactUse("notify_setting", event => {
|
props.events.reactUse("notify_setting", event => {
|
||||||
|
@ -397,10 +473,18 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
props.events.reactUse("notify_devices", event => {
|
||||||
|
setCurrentDevice(event.selectedDevice);
|
||||||
|
});
|
||||||
|
|
||||||
|
props.events.reactUse("action_set_selected_device_result", event => {
|
||||||
|
setCurrentDevice(event.deviceId);
|
||||||
|
});
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className={cssStyle.containerSensitivity}>
|
<div className={cssStyle.containerSensitivity}>
|
||||||
<div className={cssStyle.containerBar}>
|
<div className={cssStyle.containerBar}>
|
||||||
<ActivityBar events={props.events} deviceId={"default"} disabled={!isActive} />
|
<ActivityBar events={props.events} deviceId={currentDevice} disabled={!isActive || !currentDevice} />
|
||||||
</div>
|
</div>
|
||||||
<Slider
|
<Slider
|
||||||
ref={refSlider}
|
ref={refSlider}
|
||||||
|
@ -416,43 +500,115 @@ const ThresholdSelector = (props: { events: Registry<MicrophoneSettingsEvents> }
|
||||||
|
|
||||||
disabled={value === "loading" || !isActive}
|
disabled={value === "loading" || !isActive}
|
||||||
|
|
||||||
onChange={value => {}}
|
onChange={value => { props.events.fire("action_set_setting", { setting: "threshold-threshold", value: value })}}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
export const MicrophoneSettings = (props: { events: Registry<MicrophoneSettingsEvents> }) => (
|
const HelpText0 = () => (
|
||||||
<div className={cssStyle.container}>
|
<HighlightText highlightId={"hs-0"} className={cssStyle.help}>
|
||||||
<div className={cssStyle.left}>
|
<Translatable>
|
||||||
<div className={cssStyle.header}>
|
Firstly we need to set up a microphone.<br />
|
||||||
<a><Translatable>Select your Microphone Device</Translatable></a>
|
Let me guide you through the basic UI elements.<br />
|
||||||
<ListRefreshButton events={props.events} />
|
<br />
|
||||||
|
To continue click anywhere on the screen.
|
||||||
|
</Translatable>
|
||||||
|
</HighlightText>
|
||||||
|
);
|
||||||
|
|
||||||
|
const HelpText1 = () => (
|
||||||
|
<HighlightText highlightId={"hs-1"} className={cssStyle.help + " " + cssStyle.paddingTop}>
|
||||||
|
<Translatable>
|
||||||
|
All your available microphones are listed within this box.<br />
|
||||||
|
<br />
|
||||||
|
The currently selected microphone<br />
|
||||||
|
is marked with a green checkmark. To change the selected microphone<br />
|
||||||
|
just click on the new one.<br />
|
||||||
|
<br />
|
||||||
|
To continue click anywhere on the screen.
|
||||||
|
</Translatable>
|
||||||
|
</HighlightText>
|
||||||
|
);
|
||||||
|
|
||||||
|
const HelpText2 = () => (
|
||||||
|
<HighlightText highlightId={"hs-2"} className={cssStyle.help + " " + cssStyle.paddingTop}>
|
||||||
|
<a>
|
||||||
|
<Translatable>TeaSpeak has three voice activity detection types:</Translatable>
|
||||||
|
</a>
|
||||||
|
<ol>
|
||||||
|
<li>
|
||||||
|
<Translatable>
|
||||||
|
To transmit audio data you'll have to<br />
|
||||||
|
press a key. The key can be selected via the button to the right of the radio button
|
||||||
|
</Translatable>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<Translatable>
|
||||||
|
In this mode, TeaSpeak will continuously analyze your microphone input.
|
||||||
|
If the audio level is greater than a certain threshold,
|
||||||
|
the audio will be transmitted.
|
||||||
|
The threshold is changeable via the "Sensitivity Settings" slider
|
||||||
|
</Translatable>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<Translatable>Continuously transmit any audio data.</Translatable>
|
||||||
|
</li>
|
||||||
|
</ol>
|
||||||
|
<a>
|
||||||
|
<Translatable>
|
||||||
|
Now you're ready to configure your microphone.<br />
|
||||||
|
Just click anywhere on the screen.
|
||||||
|
</Translatable>
|
||||||
|
</a>
|
||||||
|
</HighlightText>
|
||||||
|
);
|
||||||
|
|
||||||
|
export const MicrophoneSettings = (props: { events: Registry<MicrophoneSettingsEvents> }) => {
|
||||||
|
const [ highlighted, setHighlighted ] = useState(() => {
|
||||||
|
props.events.fire("query_help");
|
||||||
|
return undefined;
|
||||||
|
});
|
||||||
|
|
||||||
|
props.events.reactUse("notify_highlight", event => setHighlighted(event.field));
|
||||||
|
|
||||||
|
return (
|
||||||
|
<HighlightContainer highlightedId={highlighted} onClick={() => props.events.fire("action_help_click")} classList={cssStyle.highlightContainer}>
|
||||||
|
<div className={cssStyle.container}>
|
||||||
|
<HelpText0/>
|
||||||
|
<HighlightRegion className={cssStyle.left} highlightId={"hs-1"}>
|
||||||
|
<div className={cssStyle.header}>
|
||||||
|
<a><Translatable>Select your Microphone Device</Translatable></a>
|
||||||
|
<ListRefreshButton events={props.events}/>
|
||||||
|
</div>
|
||||||
|
<MicrophoneList events={props.events}/>
|
||||||
|
<HelpText2/>
|
||||||
|
</HighlightRegion>
|
||||||
|
<HighlightRegion className={cssStyle.right} highlightId={"hs-2"}>
|
||||||
|
<HelpText1/>
|
||||||
|
<div className={cssStyle.header}>
|
||||||
|
<a><Translatable>Microphone Settings</Translatable></a>
|
||||||
|
</div>
|
||||||
|
<div className={cssStyle.body}>
|
||||||
|
<VolumeSettings events={props.events}/>
|
||||||
|
<VadSelector events={props.events}/>
|
||||||
|
</div>
|
||||||
|
<div className={cssStyle.header}>
|
||||||
|
<a><Translatable>Sensitivity Settings</Translatable></a>
|
||||||
|
</div>
|
||||||
|
<div className={cssStyle.body}>
|
||||||
|
<ThresholdSelector events={props.events}/>
|
||||||
|
</div>
|
||||||
|
<div className={cssStyle.header}>
|
||||||
|
<a><Translatable>Advanced Settings</Translatable></a>
|
||||||
|
</div>
|
||||||
|
<div className={cssStyle.body}>
|
||||||
|
<div className={cssStyle.containerAdvanced}>
|
||||||
|
<PPTDelaySettings events={props.events}/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</HighlightRegion>
|
||||||
</div>
|
</div>
|
||||||
<MicrophoneList events={props.events} />
|
</HighlightContainer>
|
||||||
</div>
|
);
|
||||||
<div className={cssStyle.right}>
|
}
|
||||||
<div className={cssStyle.header}>
|
|
||||||
<a><Translatable>Microphone Settings</Translatable></a>
|
|
||||||
</div>
|
|
||||||
<div className={cssStyle.body}>
|
|
||||||
<VolumeSettings events={props.events} />
|
|
||||||
<VadSelector events={props.events} />
|
|
||||||
</div>
|
|
||||||
<div className={cssStyle.header}>
|
|
||||||
<a><Translatable>Sensitivity Settings</Translatable></a>
|
|
||||||
</div>
|
|
||||||
<div className={cssStyle.body}>
|
|
||||||
<ThresholdSelector events={props.events} />
|
|
||||||
</div>
|
|
||||||
<div className={cssStyle.header}>
|
|
||||||
<a><Translatable>Advanced Settings</Translatable></a>
|
|
||||||
</div>
|
|
||||||
<div className={cssStyle.body}>
|
|
||||||
<div className={cssStyle.containerAdvanced}>
|
|
||||||
<PPTDelaySettings events={props.events} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)
|
|
|
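
For context, a hedged sketch of the guided-help wiring used by the MicrophoneSettings component above: the view fires query_help on mount, the controller answers with notify_highlight, and action_help_click advances to the next step. The event names and the "hs-0"/"hs-1"/"hs-2" ids come from the diff; the MicrophoneHelpEvents interface and the step-advancing controller are illustrative assumptions, not part of the change.

import {Registry} from "tc-shared/events";

/* Illustrative subset of the settings events used by the guided help. */
interface MicrophoneHelpEvents {
    query_help: {},
    action_help_click: {},
    notify_highlight: { field: "hs-0" | "hs-1" | "hs-2" | undefined }
}

/* Sketch: advance through the three help steps, one step per click. */
function initializeHelpController(events: Registry<MicrophoneHelpEvents>) {
    const steps: ("hs-0" | "hs-1" | "hs-2" | undefined)[] = ["hs-0", "hs-1", "hs-2", undefined];
    let index = 0;

    events.on("query_help", () => events.fire("notify_highlight", { field: steps[index] }));
    events.on("action_help_click", () => {
        index = Math.min(index + 1, steps.length - 1);
        events.fire("notify_highlight", { field: steps[index] });
    });
}
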
@ -77,7 +77,10 @@ export abstract class AbstractExternalModalController extends EventControllerBas
|
||||||
});
|
});
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
this.modalState = ModalState.DESTROYED;
|
this.modalState = ModalState.DESTROYED;
|
||||||
this.doDestroyWindow();
|
if(__build.mode !== "debug") {
|
||||||
|
/* do not destroy the window in debug mode in order to debug what happened */
|
||||||
|
this.doDestroyWindow();
|
||||||
|
}
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,6 @@ import * as loader from "tc-loader";
|
||||||
import * as ipc from "../../../ipc/BrowserIPC";
|
import * as ipc from "../../../ipc/BrowserIPC";
|
||||||
import * as i18n from "../../../i18n/localize";
|
import * as i18n from "../../../i18n/localize";
|
||||||
|
|
||||||
import "tc-shared/proto";
|
|
||||||
|
|
||||||
import {Stage} from "tc-loader";
|
import {Stage} from "tc-loader";
|
||||||
import {AbstractModal, ModalRenderer} from "tc-shared/ui/react-elements/ModalDefinitions";
|
import {AbstractModal, ModalRenderer} from "tc-shared/ui/react-elements/ModalDefinitions";
|
||||||
import {Settings, SettingsKey} from "tc-shared/settings";
|
import {Settings, SettingsKey} from "tc-shared/settings";
|
||||||
|
@ -26,6 +24,7 @@ loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
|
||||||
name: "setup",
|
name: "setup",
|
||||||
priority: 110,
|
priority: 110,
|
||||||
function: async () => {
|
function: async () => {
|
||||||
|
await import("tc-shared/proto");
|
||||||
await i18n.initialize();
|
await i18n.initialize();
|
||||||
ipc.setup();
|
ipc.setup();
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,19 +1,38 @@
|
||||||
import * as React from "react";
|
import * as React from "react";
|
||||||
import {parseMessageWithArguments} from "tc-shared/ui/frames/chat";
|
import {parseMessageWithArguments} from "tc-shared/ui/frames/chat";
|
||||||
import {cloneElement} from "react";
|
import {cloneElement, ReactNode} from "react";
|
||||||
|
|
||||||
let instances = [];
|
let instances = [];
|
||||||
export class Translatable extends React.Component<{ children: string, __cacheKey?: string, trIgnore?: boolean, enforceTextOnly?: boolean }, { translated: string }> {
|
export class Translatable extends React.Component<{
|
||||||
|
children: string | (string | React.ReactElement<HTMLBRElement>)[],
|
||||||
|
__cacheKey?: string,
|
||||||
|
trIgnore?: boolean,
|
||||||
|
enforceTextOnly?: boolean
|
||||||
|
}, { translated: string }> {
|
||||||
|
protected renderElementIndex = 0;
|
||||||
|
|
||||||
constructor(props) {
|
constructor(props) {
|
||||||
super(props);
|
super(props);
|
||||||
|
|
||||||
|
let text;
|
||||||
|
if(Array.isArray(this.props.children)) {
|
||||||
|
text = (this.props.children as any[]).map(e => typeof e === "string" ? e : "\n").join("");
|
||||||
|
} else {
|
||||||
|
text = this.props.children;
|
||||||
|
}
|
||||||
|
|
||||||
this.state = {
|
this.state = {
|
||||||
translated: /* @tr-ignore */ tr(typeof this.props.children === "string" ? this.props.children : (this.props as any).message)
|
translated: /* @tr-ignore */ tr(text)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
return this.state.translated;
|
return this.state.translated.split("\n").reduce((previousValue, currentValue, currentIndex, array) => {
|
||||||
|
previousValue.push(<React.Fragment key={++this.renderElementIndex}>{currentValue}</React.Fragment>);
|
||||||
|
if(currentIndex + 1 !== array.length)
|
||||||
|
previousValue.push(<br key={++this.renderElementIndex} />);
|
||||||
|
return previousValue;
|
||||||
|
}, []);
|
||||||
}
|
}
|
||||||
|
|
||||||
componentDidMount(): void {
|
componentDidMount(): void {
|
||||||
|
|
|
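
A small sketch of the render() change above: the translated string is split at "\n" and rejoined with <br /> elements so multi-line translations keep their line breaks. The helper mirrors the diff's reduce logic; only React is assumed.

import * as React from "react";

/* Sketch: turn "line one\nline two" into [<Fragment>line one</Fragment>, <br/>, <Fragment>line two</Fragment>]. */
function renderTranslated(translated: string): React.ReactNode[] {
    let elementIndex = 0;
    return translated.split("\n").reduce((nodes, line, index, lines) => {
        nodes.push(<React.Fragment key={++elementIndex}>{line}</React.Fragment>);
        if(index + 1 !== lines.length) {
            nodes.push(<br key={++elementIndex} />);
        }
        return nodes;
    }, [] as React.ReactNode[]);
}
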
@ -542,13 +542,17 @@ export class ChannelTree {
|
||||||
client["_channel"] = targetChannel;
|
client["_channel"] = targetChannel;
|
||||||
targetChannel?.registerClient(client);
|
targetChannel?.registerClient(client);
|
||||||
|
|
||||||
if(oldChannel)
|
if(oldChannel) {
|
||||||
this.client.side_bar.info_frame().update_channel_client_count(oldChannel);
|
this.client.side_bar.info_frame().update_channel_client_count(oldChannel);
|
||||||
if(targetChannel)
|
}
|
||||||
|
|
||||||
|
if(targetChannel) {
|
||||||
this.client.side_bar.info_frame().update_channel_client_count(targetChannel);
|
this.client.side_bar.info_frame().update_channel_client_count(targetChannel);
|
||||||
if(oldChannel && targetChannel)
|
}
|
||||||
|
|
||||||
|
if(oldChannel && targetChannel) {
|
||||||
client.events.fire("notify_client_moved", { oldChannel: oldChannel, newChannel: targetChannel });
|
client.events.fire("notify_client_moved", { oldChannel: oldChannel, newChannel: targetChannel });
|
||||||
client.speaking = false;
|
}
|
||||||
} finally {
|
} finally {
|
||||||
flush_batched_updates(BatchUpdateType.CHANNEL_TREE);
|
flush_batched_updates(BatchUpdateType.CHANNEL_TREE);
|
||||||
}
|
}
|
||||||
|
|
54
shared/js/voice/Filter.ts
Normal file
54
shared/js/voice/Filter.ts
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
export enum FilterType {
|
||||||
|
THRESHOLD,
|
||||||
|
VOICE_LEVEL,
|
||||||
|
STATE
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FilterBase {
|
||||||
|
readonly priority: number;
|
||||||
|
|
||||||
|
setEnabled(flag: boolean) : void;
|
||||||
|
isEnabled() : boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MarginedFilter {
|
||||||
|
getMarginFrames() : number;
|
||||||
|
setMarginFrames(value: number);
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ThresholdFilter extends FilterBase, MarginedFilter {
|
||||||
|
readonly type: FilterType.THRESHOLD;
|
||||||
|
|
||||||
|
getThreshold() : number;
|
||||||
|
setThreshold(value: number);
|
||||||
|
|
||||||
|
getAttackSmooth() : number;
|
||||||
|
getReleaseSmooth() : number;
|
||||||
|
|
||||||
|
setAttackSmooth(value: number);
|
||||||
|
setReleaseSmooth(value: number);
|
||||||
|
|
||||||
|
registerLevelCallback(callback: (value: number) => void);
|
||||||
|
removeLevelCallback(callback: (value: number) => void);
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface VoiceLevelFilter extends FilterBase, MarginedFilter {
|
||||||
|
type: FilterType.VOICE_LEVEL;
|
||||||
|
|
||||||
|
getLevel() : number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StateFilter extends FilterBase {
|
||||||
|
type: FilterType.STATE;
|
||||||
|
|
||||||
|
setState(state: boolean);
|
||||||
|
isActive() : boolean; /* if true the filter allows data to pass */
|
||||||
|
}
|
||||||
|
|
||||||
|
export type FilterTypeClass<T extends FilterType> =
|
||||||
|
T extends FilterType.STATE ? StateFilter :
|
||||||
|
T extends FilterType.VOICE_LEVEL ? VoiceLevelFilter :
|
||||||
|
T extends FilterType.THRESHOLD ? ThresholdFilter :
|
||||||
|
never;
|
||||||
|
|
||||||
|
export type Filter = ThresholdFilter | VoiceLevelFilter | StateFilter;
|
|
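
A brief sketch of how the FilterTypeClass conditional type above is meant to be consumed: the filter type passed to a factory determines the returned interface at compile time. The factory declaration mirrors AbstractInput.createFilter further below; the concrete calls and values are illustrative.

import {FilterType, FilterTypeClass, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";

/* Sketch: a factory typed with FilterTypeClass returns the matching filter interface. */
declare function createFilter<T extends FilterType>(type: T, priority: number): FilterTypeClass<T>;

const threshold: ThresholdFilter = createFilter(FilterType.THRESHOLD, 100);
threshold.setThreshold(25);

const gate: StateFilter = createFilter(FilterType.STATE, 100);
gate.setState(true); /* RecorderProfile below uses "true" as the filtered-by-default state */
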
@ -1,122 +1,98 @@
|
||||||
export interface InputDevice {
|
import {IDevice} from "tc-shared/audio/recorder";
|
||||||
unique_id: string;
|
import {Registry} from "tc-shared/events";
|
||||||
driver: string;
|
import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
|
||||||
name: string;
|
|
||||||
default_input: boolean;
|
|
||||||
|
|
||||||
supported: boolean;
|
|
||||||
|
|
||||||
sample_rate: number;
|
|
||||||
channels: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export enum InputConsumerType {
|
export enum InputConsumerType {
|
||||||
CALLBACK,
|
CALLBACK,
|
||||||
NODE,
|
NODE,
|
||||||
NATIVE
|
NATIVE
|
||||||
}
|
}
|
||||||
|
export interface CallbackInputConsumer {
|
||||||
export interface InputConsumer {
|
type: InputConsumerType.CALLBACK;
|
||||||
type: InputConsumerType;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CallbackInputConsumer extends InputConsumer {
|
|
||||||
callback_audio?: (buffer: AudioBuffer) => any;
|
callback_audio?: (buffer: AudioBuffer) => any;
|
||||||
callback_buffer?: (buffer: Float32Array, samples: number, channels: number) => any;
|
callback_buffer?: (buffer: Float32Array, samples: number, channels: number) => any;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface NodeInputConsumer extends InputConsumer {
|
export interface NodeInputConsumer {
|
||||||
|
type: InputConsumerType.NODE;
|
||||||
callback_node: (source_node: AudioNode) => any;
|
callback_node: (source_node: AudioNode) => any;
|
||||||
callback_disconnect: (source_node: AudioNode) => any;
|
callback_disconnect: (source_node: AudioNode) => any;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface NativeInputConsumer {
|
||||||
export namespace filter {
|
type: InputConsumerType.NATIVE;
|
||||||
export enum Type {
|
|
||||||
THRESHOLD,
|
|
||||||
VOICE_LEVEL,
|
|
||||||
STATE
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Filter {
|
|
||||||
type: Type;
|
|
||||||
|
|
||||||
is_enabled() : boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface MarginedFilter {
|
|
||||||
get_margin_frames() : number;
|
|
||||||
set_margin_frames(value: number);
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ThresholdFilter extends Filter, MarginedFilter {
|
|
||||||
get_threshold() : number;
|
|
||||||
set_threshold(value: number) : Promise<void>;
|
|
||||||
|
|
||||||
get_attack_smooth() : number;
|
|
||||||
get_release_smooth() : number;
|
|
||||||
|
|
||||||
set_attack_smooth(value: number);
|
|
||||||
set_release_smooth(value: number);
|
|
||||||
|
|
||||||
callback_level?: (value: number) => any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface VoiceLevelFilter extends Filter, MarginedFilter {
|
|
||||||
get_level() : number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface StateFilter extends Filter {
|
|
||||||
set_state(state: boolean) : Promise<void>;
|
|
||||||
is_active() : boolean; /* if true the the filter allows data to pass */
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type InputConsumer = CallbackInputConsumer | NodeInputConsumer | NativeInputConsumer;
|
||||||
|
|
||||||
|
|
||||||
export enum InputState {
|
export enum InputState {
|
||||||
|
/* Input recording has been paused */
|
||||||
PAUSED,
|
PAUSED,
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Recording has been requested, and is currently initializing.
|
||||||
|
* This state may persist, when the audio context hasn't been initialized yet
|
||||||
|
*/
|
||||||
INITIALIZING,
|
INITIALIZING,
|
||||||
RECORDING,
|
|
||||||
DRY
|
/* we're currently recording the input */
|
||||||
|
RECORDING
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum InputStartResult {
|
export enum InputStartResult {
|
||||||
EOK = "eok",
|
EOK = "eok",
|
||||||
EUNKNOWN = "eunknown",
|
EUNKNOWN = "eunknown",
|
||||||
|
EDEVICEUNKNOWN = "edeviceunknown",
|
||||||
EBUSY = "ebusy",
|
EBUSY = "ebusy",
|
||||||
ENOTALLOWED = "enotallowed",
|
ENOTALLOWED = "enotallowed",
|
||||||
ENOTSUPPORTED = "enotsupported"
|
ENOTSUPPORTED = "enotsupported"
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AbstractInput {
|
export interface InputEvents {
|
||||||
callback_begin: () => any;
|
notify_voice_start: {},
|
||||||
callback_end: () => any;
|
notify_voice_end: {}
|
||||||
|
}
|
||||||
|
|
||||||
current_state() : InputState;
|
export interface AbstractInput {
|
||||||
|
readonly events: Registry<InputEvents>;
|
||||||
|
|
||||||
|
currentState() : InputState;
|
||||||
|
|
||||||
start() : Promise<InputStartResult>;
|
start() : Promise<InputStartResult>;
|
||||||
stop() : Promise<void>;
|
stop() : Promise<void>;
|
||||||
|
|
||||||
current_device() : InputDevice | undefined;
|
/*
|
||||||
set_device(device: InputDevice | undefined) : Promise<void>;
|
* Returns true if the input is currently filtered.
|
||||||
|
* If the current state isn't recording, then it will return true.
|
||||||
|
*/
|
||||||
|
isFiltered() : boolean;
|
||||||
|
|
||||||
current_consumer() : InputConsumer | undefined;
|
currentDeviceId() : string | undefined;
|
||||||
set_consumer(consumer: InputConsumer) : Promise<void>;
|
|
||||||
|
|
||||||
get_filter(type: filter.Type) : filter.Filter | undefined;
|
/*
|
||||||
supports_filter(type: filter.Type) : boolean;
|
* This method should not throw!
|
||||||
|
* If the target device is unknown then it should return EDEVICEUNKNOWN on start.
|
||||||
|
* After changing the device, the input state falls to InputState.PAUSED.
|
||||||
|
*/
|
||||||
|
setDeviceId(device: string | undefined) : Promise<void>;
|
||||||
|
|
||||||
clear_filter();
|
currentConsumer() : InputConsumer | undefined;
|
||||||
disable_filter(type: filter.Type);
|
setConsumer(consumer: InputConsumer) : Promise<void>;
|
||||||
enable_filter(type: filter.Type);
|
|
||||||
|
|
||||||
get_volume() : number;
|
supportsFilter(type: FilterType) : boolean;
|
||||||
set_volume(volume: number);
|
createFilter<T extends FilterType>(type: T, priority: number) : FilterTypeClass<T>;
|
||||||
|
removeFilter(filter: Filter);
|
||||||
|
/* resetFilter(); */
|
||||||
|
|
||||||
|
getVolume() : number;
|
||||||
|
setVolume(volume: number);
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface LevelMeter {
|
export interface LevelMeter {
|
||||||
device() : InputDevice;
|
device() : IDevice;
|
||||||
|
|
||||||
set_observer(callback: (value: number) => any);
|
set_observer(callback: (value: number) => any);
|
||||||
|
|
||||||
destory();
|
destroy();
|
||||||
}
|
}
|
|
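
A hedged sketch of how a caller might drive the reworked AbstractInput interface above. The device id and threshold value are placeholders, and error handling is reduced to the bare minimum.

import {AbstractInput, InputStartResult} from "tc-shared/voice/RecorderBase";
import {FilterType} from "tc-shared/voice/Filter";

/* Sketch: wire up an input, gate it with a threshold filter and start recording. */
async function setupInput(input: AbstractInput, deviceId: string) {
    input.events.on("notify_voice_start", () => console.log("voice started"));
    input.events.on("notify_voice_end", () => console.log("voice ended"));

    const threshold = input.createFilter(FilterType.THRESHOLD, 100);
    threshold.setThreshold(25);
    threshold.setEnabled(true);

    await input.setDeviceId(deviceId);

    const result = await input.start();
    if(result !== InputStartResult.EOK) {
        throw new Error("failed to start the input: " + result);
    }
}
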
@ -1,12 +1,13 @@
|
||||||
import * as log from "tc-shared/log";
|
import * as log from "tc-shared/log";
|
||||||
import {AbstractInput, filter, InputDevice} from "tc-shared/voice/RecorderBase";
|
import {LogCategory, logWarn} from "tc-shared/log";
|
||||||
|
import {AbstractInput} from "tc-shared/voice/RecorderBase";
|
||||||
import {KeyDescriptor, KeyHook} from "tc-shared/PPTListener";
|
import {KeyDescriptor, KeyHook} from "tc-shared/PPTListener";
|
||||||
import {LogCategory} from "tc-shared/log";
|
|
||||||
import {Settings, settings} from "tc-shared/settings";
|
import {Settings, settings} from "tc-shared/settings";
|
||||||
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
|
||||||
import * as aplayer from "tc-backend/audio/player";
|
import * as aplayer from "tc-backend/audio/player";
|
||||||
import * as arecorder from "tc-backend/audio/recorder";
|
|
||||||
import * as ppt from "tc-backend/ppt";
|
import * as ppt from "tc-backend/ppt";
|
||||||
|
import {getRecorderBackend, IDevice} from "tc-shared/audio/recorder";
|
||||||
|
import {FilterType, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";
|
||||||
|
|
||||||
export type VadType = "threshold" | "push_to_talk" | "active";
|
export type VadType = "threshold" | "push_to_talk" | "active";
|
||||||
export interface RecorderProfileConfig {
|
export interface RecorderProfileConfig {
|
||||||
|
@ -37,6 +38,7 @@ export let default_recorder: RecorderProfile; /* needs initialize */
|
||||||
export function set_default_recorder(recorder: RecorderProfile) {
|
export function set_default_recorder(recorder: RecorderProfile) {
|
||||||
default_recorder = recorder;
|
default_recorder = recorder;
|
||||||
}
|
}
|
||||||
|
|
||||||
export class RecorderProfile {
|
export class RecorderProfile {
|
||||||
readonly name;
|
readonly name;
|
||||||
readonly volatile; /* not saving profile */
|
readonly volatile; /* not saving profile */
|
||||||
|
@ -46,18 +48,25 @@ export class RecorderProfile {
|
||||||
|
|
||||||
current_handler: ConnectionHandler;
|
current_handler: ConnectionHandler;
|
||||||
|
|
||||||
callback_input_change: (old_input: AbstractInput, new_input: AbstractInput) => Promise<void>;
|
/* attention: this callback will only be called when the audio input hasn't been initialized! */
|
||||||
|
callback_input_initialized: (input: AbstractInput) => void;
|
||||||
callback_start: () => any;
|
callback_start: () => any;
|
||||||
callback_stop: () => any;
|
callback_stop: () => any;
|
||||||
|
|
||||||
callback_unmount: () => any; /* called if somebody else takes the ownership */
|
callback_unmount: () => any; /* called if somebody else takes the ownership */
|
||||||
|
|
||||||
record_supported: boolean;
|
private readonly pptHook: KeyHook;
|
||||||
|
|
||||||
private pptHook: KeyHook;
|
|
||||||
private pptTimeout: number;
|
private pptTimeout: number;
|
||||||
private pptHookRegistered: boolean;
|
private pptHookRegistered: boolean;
|
||||||
|
|
||||||
|
private registeredFilter = {
|
||||||
|
"ppt-gate": undefined as StateFilter,
|
||||||
|
"threshold": undefined as ThresholdFilter,
|
||||||
|
|
||||||
|
/* disable voice transmission by default, e.g. when reinitializing filters etc. */
|
||||||
|
"default-disabled": undefined as StateFilter
|
||||||
|
}
|
||||||
|
|
||||||
constructor(name: string, volatile?: boolean) {
|
constructor(name: string, volatile?: boolean) {
|
||||||
this.name = name;
|
this.name = name;
|
||||||
this.volatile = typeof(volatile) === "boolean" ? volatile : false;
|
this.volatile = typeof(volatile) === "boolean" ? volatile : false;
|
||||||
|
@ -68,88 +77,95 @@ export class RecorderProfile {
|
||||||
clearTimeout(this.pptTimeout);
|
clearTimeout(this.pptTimeout);
|
||||||
|
|
||||||
this.pptTimeout = setTimeout(() => {
|
this.pptTimeout = setTimeout(() => {
|
||||||
const f = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
this.registeredFilter["ppt-gate"]?.setState(true);
|
||||||
if(f) f.set_state(true);
|
|
||||||
}, Math.max(this.config.vad_push_to_talk.delay, 0));
|
}, Math.max(this.config.vad_push_to_talk.delay, 0));
|
||||||
},
|
},
|
||||||
|
|
||||||
callback_press: () => {
|
callback_press: () => {
|
||||||
if(this.pptTimeout)
|
if(this.pptTimeout)
|
||||||
clearTimeout(this.pptTimeout);
|
clearTimeout(this.pptTimeout);
|
||||||
|
|
||||||
const f = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
this.registeredFilter["ppt-gate"]?.setState(false);
|
||||||
if(f) f.set_state(false);
|
|
||||||
},
|
},
|
||||||
|
|
||||||
cancel: false
|
cancel: false
|
||||||
} as KeyHook;
|
} as KeyHook;
|
||||||
this.pptHookRegistered = false;
|
this.pptHookRegistered = false;
|
||||||
this.record_supported = true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async initialize() : Promise<void> {
|
async initialize() : Promise<void> {
|
||||||
|
{
|
||||||
|
let config = {};
|
||||||
|
try {
|
||||||
|
config = settings.static_global(Settings.FN_PROFILE_RECORD(this.name), {}) as RecorderProfileConfig;
|
||||||
|
} catch (error) {
|
||||||
|
logWarn(LogCategory.AUDIO, tr("Failed to load old recorder profile config for %s"), this.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* default values */
|
||||||
|
this.config = {
|
||||||
|
version: 1,
|
||||||
|
device_id: undefined,
|
||||||
|
volume: 100,
|
||||||
|
|
||||||
|
vad_threshold: {
|
||||||
|
threshold: 25
|
||||||
|
},
|
||||||
|
vad_type: "threshold",
|
||||||
|
vad_push_to_talk: {
|
||||||
|
delay: 300,
|
||||||
|
key_alt: false,
|
||||||
|
key_ctrl: false,
|
||||||
|
key_shift: false,
|
||||||
|
key_windows: false,
|
||||||
|
key_code: 't'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Object.assign(this.config, config || {});
|
||||||
|
}
|
||||||
|
|
||||||
aplayer.on_ready(async () => {
|
aplayer.on_ready(async () => {
|
||||||
this.initialize_input();
|
await getRecorderBackend().getDeviceList().awaitInitialized();
|
||||||
await this.load();
|
|
||||||
await this.reinitialize_filter();
|
await this.initializeInput();
|
||||||
|
await this.reinitializeFilter();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
private initialize_input() {
|
private async initializeInput() {
|
||||||
this.input = arecorder.create_input();
|
this.input = getRecorderBackend().createInput();
|
||||||
this.input.callback_begin = () => {
|
|
||||||
|
this.input.events.on("notify_voice_start", () => {
|
||||||
log.debug(LogCategory.VOICE, "Voice start");
|
log.debug(LogCategory.VOICE, "Voice start");
|
||||||
if(this.callback_start)
|
if(this.callback_start)
|
||||||
this.callback_start();
|
this.callback_start();
|
||||||
};
|
});
|
||||||
|
|
||||||
this.input.callback_end = () => {
|
this.input.events.on("notify_voice_end", () => {
|
||||||
log.debug(LogCategory.VOICE, "Voice end");
|
log.debug(LogCategory.VOICE, "Voice end");
|
||||||
if(this.callback_stop)
|
if(this.callback_stop)
|
||||||
this.callback_stop();
|
this.callback_stop();
|
||||||
};
|
});
|
||||||
|
|
||||||
//TODO: Await etc?
|
this.registeredFilter["default-disabled"] = this.input.createFilter(FilterType.STATE, 20);
|
||||||
this.callback_input_change && this.callback_input_change(undefined, this.input);
|
await this.registeredFilter["default-disabled"].setState(true); /* filter */
|
||||||
}
|
this.registeredFilter["default-disabled"].setEnabled(true);
|
||||||
|
|
||||||
private async load() {
|
this.registeredFilter["ppt-gate"] = this.input.createFilter(FilterType.STATE, 100);
|
||||||
const config = settings.static_global(Settings.FN_PROFILE_RECORD(this.name), {}) as RecorderProfileConfig;
|
this.registeredFilter["ppt-gate"].setEnabled(false);
|
||||||
|
|
||||||
/* default values */
|
this.registeredFilter["threshold"] = this.input.createFilter(FilterType.THRESHOLD, 100);
|
||||||
this.config = {
|
this.registeredFilter["threshold"].setEnabled(false);
|
||||||
version: 1,
|
|
||||||
device_id: undefined,
|
|
||||||
volume: 100,
|
|
||||||
|
|
||||||
vad_threshold: {
|
if(this.callback_input_initialized) {
|
||||||
threshold: 25
|
this.callback_input_initialized(this.input);
|
||||||
},
|
|
||||||
vad_type: "threshold",
|
|
||||||
vad_push_to_talk: {
|
|
||||||
delay: 300,
|
|
||||||
key_alt: false,
|
|
||||||
key_ctrl: false,
|
|
||||||
key_shift: false,
|
|
||||||
key_windows: false,
|
|
||||||
key_code: 't'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Object.assign(this.config, config || {});
|
|
||||||
this.input.set_volume(this.config.volume / 100);
|
|
||||||
|
|
||||||
{
|
|
||||||
const all_devices = arecorder.devices();
|
|
||||||
const devices = all_devices.filter(e => e.default_input || e.unique_id === this.config.device_id);
|
|
||||||
const device = devices.find(e => e.unique_id === this.config.device_id) || devices[0];
|
|
||||||
|
|
||||||
log.info(LogCategory.VOICE, tr("Loaded record profile device %s | %o (%o)"), this.config.device_id, device, all_devices);
|
|
||||||
try {
|
|
||||||
await this.input.set_device(device);
|
|
||||||
} catch(error) {
|
|
||||||
log.error(LogCategory.VOICE, tr("Failed to set input device (%o)"), error);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* apply initial config values */
|
||||||
|
this.input.setVolume(this.config.volume / 100);
|
||||||
|
await this.input.setDeviceId(this.config.device_id);
|
||||||
}
|
}
|
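
For orientation, a condensed, hedged sketch of the gating scheme initializeInput() sets up above: a low-priority "default-disabled" state filter mutes everything while the profile is (re)configured, and either the threshold filter or the "ppt-gate" state filter is enabled later depending on the configured VAD type. The helper below reuses the diff's names and priorities but is illustrative, not a drop-in replacement.

import {AbstractInput} from "tc-shared/voice/RecorderBase";
import {FilterType, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";

/* Sketch of the filter registration used by RecorderProfile. */
function registerProfileFilters(input: AbstractInput) {
    const defaultDisabled: StateFilter = input.createFilter(FilterType.STATE, 20);
    defaultDisabled.setState(true);   /* block audio until initialization has finished */
    defaultDisabled.setEnabled(true);

    const pptGate: StateFilter = input.createFilter(FilterType.STATE, 100);
    pptGate.setEnabled(false);        /* only enabled for the "push_to_talk" VAD type */

    const threshold: ThresholdFilter = input.createFilter(FilterType.THRESHOLD, 100);
    threshold.setEnabled(false);      /* only enabled for the "threshold" VAD type */

    return { defaultDisabled, pptGate, threshold };
}
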
||||||
|
|
||||||
private save() {
|
private save() {
|
||||||
|
@ -157,39 +173,63 @@ export class RecorderProfile {
|
||||||
settings.changeGlobal(Settings.FN_PROFILE_RECORD(this.name), this.config);
|
settings.changeGlobal(Settings.FN_PROFILE_RECORD(this.name), this.config);
|
||||||
}
|
}
|
||||||
|
|
||||||
private async reinitialize_filter() {
|
private reinitializePPTHook() {
|
||||||
|
if(this.config.vad_type !== "push_to_talk")
|
||||||
|
return;
|
||||||
|
|
||||||
|
if(this.pptHookRegistered) {
|
||||||
|
ppt.unregister_key_hook(this.pptHook);
|
||||||
|
this.pptHookRegistered = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||||
|
this.pptHook[key] = this.config.vad_push_to_talk[key];
|
||||||
|
|
||||||
|
ppt.register_key_hook(this.pptHook);
|
||||||
|
this.pptHookRegistered = true;
|
||||||
|
|
||||||
|
this.registeredFilter["ppt-gate"]?.setState(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async reinitializeFilter() {
|
||||||
if(!this.input) return;
|
if(!this.input) return;
|
||||||
|
|
||||||
this.input.clear_filter();
|
/* don't let any audio pass while we initialize the other filters */
|
||||||
|
this.registeredFilter["default-disabled"].setEnabled(true);
|
||||||
|
|
||||||
|
/* disable all filter */
|
||||||
|
this.registeredFilter["threshold"].setEnabled(false);
|
||||||
|
this.registeredFilter["ppt-gate"].setEnabled(false);
|
||||||
|
|
||||||
if(this.pptHookRegistered) {
|
if(this.pptHookRegistered) {
|
||||||
ppt.unregister_key_hook(this.pptHook);
|
ppt.unregister_key_hook(this.pptHook);
|
||||||
this.pptHookRegistered = false;
|
this.pptHookRegistered = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if(this.config.vad_type === "threshold") {
|
if(this.config.vad_type === "threshold") {
|
||||||
const filter_ = this.input.get_filter(filter.Type.THRESHOLD) as filter.ThresholdFilter;
|
const filter = this.registeredFilter["threshold"];
|
||||||
await filter_.set_threshold(this.config.vad_threshold.threshold);
|
filter.setEnabled(true);
|
||||||
await filter_.set_margin_frames(10); /* 500ms */
|
filter.setThreshold(this.config.vad_threshold.threshold);
|
||||||
|
|
||||||
/* legacy client support */
|
filter.setMarginFrames(10); /* 500ms */
|
||||||
if('set_attack_smooth' in filter_)
|
filter.setAttackSmooth(.25);
|
||||||
filter_.set_attack_smooth(.25);
|
filter.setReleaseSmooth(.9);
|
||||||
|
|
||||||
if('set_release_smooth' in filter_)
|
|
||||||
filter_.set_release_smooth(.9);
|
|
||||||
|
|
||||||
this.input.enable_filter(filter.Type.THRESHOLD);
|
|
||||||
} else if(this.config.vad_type === "push_to_talk") {
|
} else if(this.config.vad_type === "push_to_talk") {
|
||||||
const filter_ = this.input.get_filter(filter.Type.STATE) as filter.StateFilter;
|
const filter = this.registeredFilter["ppt-gate"];
|
||||||
await filter_.set_state(true);
|
filter.setEnabled(true);
|
||||||
|
filter.setState(true); /* by default set filtered */
|
||||||
|
|
||||||
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
for(const key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||||
this.pptHook[key] = this.config.vad_push_to_talk[key];
|
this.pptHook[key] = this.config.vad_push_to_talk[key];
|
||||||
|
|
||||||
ppt.register_key_hook(this.pptHook);
|
ppt.register_key_hook(this.pptHook);
|
||||||
this.pptHookRegistered = true;
|
this.pptHookRegistered = true;
|
||||||
|
} else if(this.config.vad_type === "active") {
|
||||||
|
/* we don't have to initialize any filters */
|
||||||
|
}
|
||||||
|
|
||||||
this.input.enable_filter(filter.Type.STATE);
|
|
||||||
} else if(this.config.vad_type === "active") {}
|
this.registeredFilter["default-disabled"].setEnabled(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
async unmount() : Promise<void> {
|
async unmount() : Promise<void> {
|
||||||
|
@ -199,13 +239,13 @@ export class RecorderProfile {
|
||||||
|
|
||||||
if(this.input) {
|
if(this.input) {
|
||||||
try {
|
try {
|
||||||
await this.input.set_consumer(undefined);
|
await this.input.setConsumer(undefined);
|
||||||
} catch(error) {
|
} catch(error) {
|
||||||
log.warn(LogCategory.VOICE, tr("Failed to unmount input consumer for profile (%o)"), error);
|
log.warn(LogCategory.VOICE, tr("Failed to unmount input consumer for profile (%o)"), error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
this.callback_input_change = undefined;
|
this.callback_input_initialized = undefined;
|
||||||
this.callback_start = undefined;
|
this.callback_start = undefined;
|
||||||
this.callback_stop = undefined;
|
this.callback_stop = undefined;
|
||||||
this.callback_unmount = undefined;
|
this.callback_unmount = undefined;
|
||||||
|
@ -216,11 +256,12 @@ export class RecorderProfile {
|
||||||
set_vad_type(type: VadType) : boolean {
|
set_vad_type(type: VadType) : boolean {
|
||||||
if(this.config.vad_type === type)
|
if(this.config.vad_type === type)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
if(["push_to_talk", "threshold", "active"].findIndex(e => e === type) == -1)
|
if(["push_to_talk", "threshold", "active"].findIndex(e => e === type) == -1)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
this.config.vad_type = type;
|
this.config.vad_type = type;
|
||||||
this.reinitialize_filter();
|
this.reinitializeFilter();
|
||||||
this.save();
|
this.save();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -231,7 +272,7 @@ export class RecorderProfile {
|
||||||
return;
|
return;
|
||||||
|
|
||||||
this.config.vad_threshold.threshold = value;
|
this.config.vad_threshold.threshold = value;
|
||||||
this.reinitialize_filter();
|
this.registeredFilter["threshold"]?.setThreshold(this.config.vad_threshold.threshold);
|
||||||
this.save();
|
this.save();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -240,7 +281,7 @@ export class RecorderProfile {
|
||||||
for(const _key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
for(const _key of ["key_alt", "key_ctrl", "key_shift", "key_windows", "key_code"])
|
||||||
this.config.vad_push_to_talk[_key] = key[_key];
|
this.config.vad_push_to_talk[_key] = key[_key];
|
||||||
|
|
||||||
this.reinitialize_filter();
|
this.reinitializePPTHook();
|
||||||
this.save();
|
this.save();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -250,25 +291,23 @@ export class RecorderProfile {
|
||||||
return;
|
return;
|
||||||
|
|
||||||
this.config.vad_push_to_talk.delay = value;
|
this.config.vad_push_to_talk.delay = value;
|
||||||
this.reinitialize_filter();
|
|
||||||
this.save();
|
this.save();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getDeviceId() : string { return this.config.device_id; }
|
||||||
current_device() : InputDevice | undefined { return this.input?.current_device(); }
|
set_device(device: IDevice | undefined) : Promise<void> {
|
||||||
set_device(device: InputDevice | undefined) : Promise<void> {
|
this.config.device_id = device ? device.deviceId : IDevice.NoDeviceId;
|
||||||
this.config.device_id = device ? device.unique_id : undefined;
|
|
||||||
this.save();
|
this.save();
|
||||||
return this.input.set_device(device);
|
return this.input?.setDevice(device) || Promise.resolve();
|
||||||
}
|
}
|
||||||
|
|
||||||
get_volume() : number { return this.input ? (this.input.get_volume() * 100) : this.config.volume; }
|
get_volume() : number { return this.input ? (this.input.getVolume() * 100) : this.config.volume; }
|
||||||
set_volume(volume: number) {
|
set_volume(volume: number) {
|
||||||
if(this.config.volume === volume)
|
if(this.config.volume === volume)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
this.config.volume = volume;
|
this.config.volume = volume;
|
||||||
this.input && this.input.set_volume(volume / 100);
|
this.input?.setVolume(volume / 100);
|
||||||
this.save();
|
this.save();
|
||||||
}
|
}
|
||||||
}
|
}
|
5
shared/svg-sprites/client-icons.d.ts
vendored
5
shared/svg-sprites/client-icons.d.ts
vendored
File diff suppressed because one or more lines are too long
|
@ -370,16 +370,29 @@ export function replace_processor(config: Configuration, cache: VolatileTransfor
|
||||||
throw new Error(source_location(ignoreAttribute) + ": Invalid attribute value of type " + SyntaxKind[ignoreAttribute.expression.kind]);
|
throw new Error(source_location(ignoreAttribute) + ": Invalid attribute value of type " + SyntaxKind[ignoreAttribute.expression.kind]);
|
||||||
}
|
}
|
||||||
|
|
||||||
if(element.children.length !== 1)
|
if(element.children.length < 1) {
|
||||||
throw new Error(source_location(element) + ": Element has been called with an invalid arguments (" + (element.children.length === 0 ? "too few" : "too many") + ")");
|
throw new Error(source_location(element) + ": Element has been called with invalid arguments (too few)");
|
||||||
|
}
|
||||||
|
|
||||||
const text = element.children[0] as ts.JsxText;
|
let text = element.children.map(element => {
|
||||||
if(text.kind != SyntaxKind.JsxText)
|
if(element.kind === SyntaxKind.JsxText) {
|
||||||
throw new Error(source_location(element) + ": Element has invalid children. Expected JsxText but got " + SyntaxKind[text.kind]);
|
return element.text;
|
||||||
|
} else if(element.kind === SyntaxKind.JsxSelfClosingElement) {
|
||||||
|
if(element.tagName.kind !== SyntaxKind.Identifier) {
|
||||||
|
throw new Error(source_location(element.tagName) + ": Expected a JsxSelfClosingElement, but received " + SyntaxKind[element.tagName.kind]);
|
||||||
|
}
|
||||||
|
|
||||||
|
if(element.tagName.escapedText !== "br") {
|
||||||
|
throw new Error(source_location(element.tagName) + ": Expected a br element, but received " + element.tagName.escapedText);
|
||||||
|
}
|
||||||
|
|
||||||
|
return "\n";
|
||||||
|
}
|
||||||
|
}).join("");
|
||||||
|
|
||||||
let { line, character } = source_file.getLineAndCharacterOfPosition(node.getStart());
|
let { line, character } = source_file.getLineAndCharacterOfPosition(node.getStart());
|
||||||
cache.translations.push({
|
cache.translations.push({
|
||||||
message: text.text,
|
message: text,
|
||||||
line: line,
|
line: line,
|
||||||
character: character,
|
character: character,
|
||||||
filename: (source_file || {fileName: "unknown"}).fileName,
|
filename: (source_file || {fileName: "unknown"}).fileName,
|
||||||
|
|
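
To make the effect of this transformer change concrete: JsxText children keep their text while a self-closing <br /> becomes "\n", so a body such as <Translatable>First line<br />Second line</Translatable> is registered under the single message "First line\nSecond line". A hedged sketch of that flattening rule follows; the helper is illustrative and only assumes the TypeScript compiler API types already used above.

import * as ts from "typescript";

/* Sketch: JsxText keeps its text, a self-closing element (asserted to be <br /> above) becomes "\n". */
function flattenTranslatableChildren(children: readonly ts.JsxChild[]): string {
    return children.map(child => {
        if(child.kind === ts.SyntaxKind.JsxText) {
            return child.text;
        } else if(child.kind === ts.SyntaxKind.JsxSelfClosingElement) {
            return "\n";
        }
        throw new Error("unsupported Translatable child of kind " + ts.SyntaxKind[child.kind]);
    }).join("");
}
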
588
web/app/audio/Recorder.ts
Normal file
588
web/app/audio/Recorder.ts
Normal file
|
@ -0,0 +1,588 @@
|
||||||
|
import {AudioRecorderBacked, DeviceList, IDevice,} from "tc-shared/audio/recorder";
|
||||||
|
import {Registry} from "tc-shared/events";
|
||||||
|
import {
|
||||||
|
AbstractInput,
|
||||||
|
InputConsumer,
|
||||||
|
InputConsumerType,
|
||||||
|
InputEvents,
|
||||||
|
InputStartResult,
|
||||||
|
InputState,
|
||||||
|
LevelMeter,
|
||||||
|
NodeInputConsumer
|
||||||
|
} from "tc-shared/voice/RecorderBase";
|
||||||
|
import * as log from "tc-shared/log";
|
||||||
|
import {LogCategory, logDebug, logWarn} from "tc-shared/log";
|
||||||
|
import * as aplayer from "./player";
|
||||||
|
import {JAbstractFilter, JStateFilter, JThresholdFilter} from "./RecorderFilter";
|
||||||
|
import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
|
||||||
|
import {inputDeviceList} from "tc-backend/web/audio/RecorderDeviceList";
|
||||||
|
|
||||||
|
declare global {
|
||||||
|
interface MediaStream {
|
||||||
|
stop();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WebIDevice extends IDevice {
|
||||||
|
groupId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function requestMicrophoneMediaStream(constraints: MediaTrackConstraints, updateDeviceList: boolean) : Promise<InputStartResult | MediaStream> {
|
||||||
|
try {
|
||||||
|
log.info(LogCategory.AUDIO, tr("Requesting a microphone stream for device %s in group %s"), constraints.deviceId, constraints.groupId);
|
||||||
|
const stream = await navigator.mediaDevices.getUserMedia({ audio: constraints });
|
||||||
|
|
||||||
|
if(updateDeviceList && inputDeviceList.getStatus() === "no-permissions") {
|
||||||
|
inputDeviceList.refresh().then(() => {}); /* added the then body to avoid an inspection warning... */
|
||||||
|
}
|
||||||
|
|
||||||
|
return stream;
|
||||||
|
} catch(error) {
|
||||||
|
if('name' in error) {
|
||||||
|
if(error.name === "NotAllowedError") {
|
||||||
|
log.warn(LogCategory.AUDIO, tr("Microphone request failed (No permissions). Browser message: %o"), error.message);
|
||||||
|
return InputStartResult.ENOTALLOWED;
|
||||||
|
} else {
|
||||||
|
log.warn(LogCategory.AUDIO, tr("Microphone request failed. Request resulted in error: %o: %o"), error.name, error);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
log.warn(LogCategory.AUDIO, tr("Failed to initialize recording stream (%o)"), error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return InputStartResult.EUNKNOWN;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* request permission for devices only one at a time! */
|
||||||
|
let currentMediaStreamRequest: Promise<MediaStream | InputStartResult>;
|
||||||
|
async function requestMediaStream(deviceId: string, groupId: string) : Promise<MediaStream | InputStartResult> {
|
||||||
|
/* wait for the current media stream requests to finish */
|
||||||
|
while(currentMediaStreamRequest) {
|
||||||
|
try {
|
||||||
|
await currentMediaStreamRequest;
|
||||||
|
} catch(error) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
const audioConstrains: MediaTrackConstraints = {};
|
||||||
|
if(window.detectedBrowser?.name === "firefox") {
|
||||||
|
/*
|
||||||
|
* Firefox only allows one microphone to be opened at a time and decides on its own which input device that is.
|
||||||
|
* It does not respect the deviceId nor the groupId
|
||||||
|
*/
|
||||||
|
} else {
|
||||||
|
audioConstrains.deviceId = deviceId;
|
||||||
|
audioConstrains.groupId = groupId;
|
||||||
|
}
|
||||||
|
|
||||||
|
audioConstrains.echoCancellation = true;
|
||||||
|
audioConstrains.autoGainControl = true;
|
||||||
|
audioConstrains.noiseSuppression = true;
|
||||||
|
|
||||||
|
const promise = (currentMediaStreamRequest = requestMicrophoneMediaStream(audioConstrains, true));
|
||||||
|
try {
|
||||||
|
return await currentMediaStreamRequest;
|
||||||
|
} finally {
|
||||||
|
if(currentMediaStreamRequest === promise)
|
||||||
|
currentMediaStreamRequest = undefined;
|
||||||
|
}
|
||||||
|
}
|
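
The two functions above serialize getUserMedia calls so that only one permission prompt is ever in flight; a generic, hedged sketch of that pattern (browser DOM types assumed, no TeaSpeak specifics):

/* Sketch: serialize an async request so concurrent callers wait for the pending one to settle. */
let pendingRequest: Promise<MediaStream> | undefined;

async function requestStreamSerialized(constraints: MediaTrackConstraints): Promise<MediaStream> {
    while(pendingRequest) {
        try {
            await pendingRequest;
        } catch (_) { /* the original caller handles its own error */ }
    }

    const promise = navigator.mediaDevices.getUserMedia({ audio: constraints });
    pendingRequest = promise;
    try {
        return await promise;
    } finally {
        if(pendingRequest === promise) {
            pendingRequest = undefined;
        }
    }
}
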
||||||
|
|
||||||
|
export class WebAudioRecorder implements AudioRecorderBacked {
|
||||||
|
createInput(): AbstractInput {
|
||||||
|
return new JavascriptInput();
|
||||||
|
}
|
||||||
|
|
||||||
|
async createLevelMeter(device: IDevice): Promise<LevelMeter> {
|
||||||
|
const meter = new JavascriptLevelMeter(device as any);
|
||||||
|
await meter.initialize();
|
||||||
|
return meter;
|
||||||
|
}
|
||||||
|
|
||||||
|
getDeviceList(): DeviceList {
|
||||||
|
return inputDeviceList;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class JavascriptInput implements AbstractInput {
|
||||||
|
public readonly events: Registry<InputEvents>;
|
||||||
|
|
||||||
|
private state: InputState = InputState.PAUSED;
|
||||||
|
private deviceId: string | undefined;
|
||||||
|
private consumer: InputConsumer;
|
||||||
|
|
||||||
|
private currentStream: MediaStream;
|
||||||
|
private currentAudioStream: MediaStreamAudioSourceNode;
|
||||||
|
|
||||||
|
private audioContext: AudioContext;
|
||||||
|
private sourceNode: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
|
||||||
|
private audioNodeCallbackConsumer: ScriptProcessorNode;
|
||||||
|
private readonly audioScriptProcessorCallback;
|
||||||
|
private audioNodeVolume: GainNode;
|
||||||
|
|
||||||
|
/* The node is connected to the audio context. Used for the script processor so it has a sink */
|
||||||
|
private audioNodeMute: GainNode;
|
||||||
|
|
||||||
|
private registeredFilters: (Filter & JAbstractFilter<AudioNode>)[] = [];
|
||||||
|
private inputFiltered: boolean = false;
|
||||||
|
|
||||||
|
private startPromise: Promise<InputStartResult>;
|
||||||
|
|
||||||
|
private volumeModifier: number = 1;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.events = new Registry<InputEvents>();
|
||||||
|
|
||||||
|
aplayer.on_ready(() => this.handleAudioInitialized());
|
||||||
|
this.audioScriptProcessorCallback = this.handleAudio.bind(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleAudioInitialized() {
|
||||||
|
this.audioContext = aplayer.context();
|
||||||
|
this.audioNodeMute = this.audioContext.createGain();
|
||||||
|
this.audioNodeMute.gain.value = 0;
|
||||||
|
this.audioNodeMute.connect(this.audioContext.destination);
|
||||||
|
|
||||||
|
this.audioNodeCallbackConsumer = this.audioContext.createScriptProcessor(1024 * 4);
|
||||||
|
this.audioNodeCallbackConsumer.connect(this.audioNodeMute);
|
||||||
|
|
||||||
|
this.audioNodeVolume = this.audioContext.createGain();
|
||||||
|
this.audioNodeVolume.gain.value = this.volumeModifier;
|
||||||
|
|
||||||
|
this.initializeFilters();
|
||||||
|
if(this.state === InputState.INITIALIZING) {
|
||||||
|
this.start().catch(error => {
|
||||||
|
logWarn(LogCategory.AUDIO, tr("Failed to automatically start audio recording: %s"), error);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private initializeFilters() {
|
||||||
|
this.registeredFilters.forEach(e => e.finalize());
|
||||||
|
this.registeredFilters.sort((a, b) => a.priority - b.priority);
|
||||||
|
|
||||||
|
if(this.audioContext && this.audioNodeVolume) {
|
||||||
|
const activeFilters = this.registeredFilters.filter(e => e.isEnabled());
|
||||||
|
|
||||||
|
let chain = "output <- ";
|
||||||
|
let currentSource: AudioNode = this.audioNodeVolume;
|
||||||
|
for(const f of activeFilters) {
|
||||||
|
f.initialize(this.audioContext, currentSource);
|
||||||
|
f.setPaused(false);
|
||||||
|
|
||||||
|
currentSource = f.audioNode;
|
||||||
|
chain += FilterType[f.type] + " <- ";
|
||||||
|
}
|
||||||
|
chain += "input";
|
||||||
|
logDebug(LogCategory.AUDIO, tr("Input filter chain: %s"), chain);
|
||||||
|
|
||||||
|
this.switchSourceNode(currentSource);
|
||||||
|
}
|
||||||
|
}
|
||||||
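
A condensed, hedged sketch of the chain building in initializeFilters() above: enabled filters are sorted by priority and wired one behind another starting at the volume gain node, and the last filter's output becomes the node that consumers attach to. The ChainableFilter interface below only restates the parts of JAbstractFilter the loop relies on.

/* Sketch: rebuild an AudioNode chain from prioritized filters; returns the node consumers attach to. */
interface ChainableFilter {
    priority: number;
    audioNode: AudioNode;
    isEnabled(): boolean;
    initialize(context: AudioContext, source: AudioNode): void;
}

function buildFilterChain(context: AudioContext, volumeNode: GainNode, filters: ChainableFilter[]): AudioNode {
    let currentSource: AudioNode = volumeNode;
    const enabled = filters.slice().sort((a, b) => a.priority - b.priority).filter(f => f.isEnabled());
    for(const filter of enabled) {
        filter.initialize(context, currentSource);  /* the filter consumes the previous stage */
        currentSource = filter.audioNode;           /* and its output feeds the next one */
    }
    return currentSource;
}
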
|
|
||||||
|
private handleAudio(event: AudioProcessingEvent) {
|
||||||
|
if(this.consumer?.type !== InputConsumerType.CALLBACK) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(this.consumer.callback_audio) {
|
||||||
|
this.consumer.callback_audio(event.inputBuffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
if(this.consumer.callback_buffer) {
|
||||||
|
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async start() : Promise<InputStartResult> {
|
||||||
|
while(this.startPromise) {
|
||||||
|
try {
|
||||||
|
await this.startPromise;
|
||||||
|
} catch {}
|
||||||
|
}
|
||||||
|
|
||||||
|
if(this.state != InputState.PAUSED)
|
||||||
|
return;
|
||||||
|
|
||||||
|
return await (this.startPromise = this.doStart());
|
||||||
|
}
|
||||||
|
|
||||||
|
private async doStart() : Promise<InputStartResult> {
|
||||||
|
try {
|
||||||
|
if(this.state != InputState.PAUSED)
|
||||||
|
throw tr("recorder already started");
|
||||||
|
|
||||||
|
this.state = InputState.INITIALIZING;
|
||||||
|
if(!this.deviceId) {
|
||||||
|
throw tr("invalid device");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(!this.audioContext) {
|
||||||
|
/* Awaiting the audio context to be initialized */
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestResult = await requestMediaStream(this.deviceId, undefined);
|
||||||
|
if(!(requestResult instanceof MediaStream)) {
|
||||||
|
this.state = InputState.PAUSED;
|
||||||
|
return requestResult;
|
||||||
|
}
|
||||||
|
this.currentStream = requestResult;
|
||||||
|
|
||||||
|
for(const filter of this.registeredFilters) {
|
||||||
|
if(filter.isEnabled()) {
|
||||||
|
filter.setPaused(false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/* TODO: Only add if we're really having a callback consumer */
|
||||||
|
this.audioNodeCallbackConsumer.addEventListener('audioprocess', this.audioScriptProcessorCallback);
|
||||||
|
|
||||||
|
this.currentAudioStream = this.audioContext.createMediaStreamSource(this.currentStream);
|
||||||
|
this.currentAudioStream.connect(this.audioNodeVolume);
|
||||||
|
|
||||||
|
this.state = InputState.RECORDING;
|
||||||
|
this.recalculateFilterStatus(true);
|
||||||
|
|
||||||
|
return InputStartResult.EOK;
|
||||||
|
} catch(error) {
|
||||||
|
if(this.state == InputState.INITIALIZING) {
|
||||||
|
this.state = InputState.PAUSED;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
} finally {
|
||||||
|
this.startPromise = undefined;
|
||||||
|
}
|
||||||
|
}

    async stop() {
        /* await any pending start */
        if(this.startPromise) {
            try {
                await this.startPromise;
            } catch {}
        }

        this.state = InputState.PAUSED;
        if(this.currentAudioStream) {
            this.currentAudioStream.disconnect();
        }

        if(this.currentStream) {
            if(this.currentStream.stop) {
                this.currentStream.stop();
            } else {
                this.currentStream.getTracks().forEach(value => {
                    value.stop();
                });
            }
        }

        this.currentStream = undefined;
        this.currentAudioStream = undefined;
        for(const f of this.registeredFilters) {
            if(f.isEnabled()) {
                f.setPaused(true);
            }
        }

        if(this.audioNodeCallbackConsumer) {
            this.audioNodeCallbackConsumer.removeEventListener('audioprocess', this.audioScriptProcessorCallback);
        }
        return undefined;
    }

    async setDeviceId(deviceId: string | undefined) {
        if(this.deviceId === deviceId)
            return;

        try {
            await this.stop();
        } catch(error) {
            log.warn(LogCategory.AUDIO, tr("Failed to stop previous record session (%o)"), error);
        }

        this.deviceId = deviceId;
    }

    createFilter<T extends FilterType>(type: T, priority: number): FilterTypeClass<T> {
        let filter: JAbstractFilter<AudioNode> & Filter;
        switch (type) {
            case FilterType.STATE:
                filter = new JStateFilter(priority);
                break;

            case FilterType.THRESHOLD:
                filter = new JThresholdFilter(priority);
                break;

            case FilterType.VOICE_LEVEL:
                throw tr("voice filter isn't supported!");

            default:
                throw tr("unknown filter type");
        }

        filter.callback_active_change = () => this.recalculateFilterStatus(false);
        filter.callback_enabled_change = () => this.initializeFilters();

        this.registeredFilters.push(filter);
        this.initializeFilters();
        this.recalculateFilterStatus(false);
        return filter as any;
    }
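For illustration, a short sketch of how a threshold filter created here could be configured. The `input` variable, the priority, and the chosen numbers are assumptions for the example, not values taken from this commit.

    // Hypothetical configuration of a freshly created threshold filter.
    const threshold = input.createFilter(FilterType.THRESHOLD, 100);
    threshold.setThreshold(25);      // treat the input as silence below a level of 25
    threshold.setMarginFrames(5);    // keep passing audio for ~5 analysis frames after dropping below
    threshold.setEnabled(true);      // enabling rebuilds the filter chain via callback_enabled_change
    threshold.registerLevelCallback(level => console.log("Current input level: %d", level));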

    supportsFilter(type: FilterType): boolean {
        switch (type) {
            case FilterType.THRESHOLD:
            case FilterType.STATE:
                return true;
            default:
                return false;
        }
    }

    resetFilter() {
        for(const filter of this.registeredFilters) {
            filter.finalize();
            filter.enabled = false;
        }

        this.registeredFilters = [];
        this.initializeFilters();
        this.recalculateFilterStatus(false);
    }

    removeFilter(filterInstance: Filter) {
        const index = this.registeredFilters.indexOf(filterInstance as any);
        if(index === -1) return;

        const [ filter ] = this.registeredFilters.splice(index, 1);
        filter.finalize();
        filter.enabled = false;

        this.initializeFilters();
        this.recalculateFilterStatus(false);
    }

    private recalculateFilterStatus(forceUpdate: boolean) {
        let filtered = this.registeredFilters.filter(e => e.isEnabled()).filter(e => e.active).length > 0;
        if(filtered === this.inputFiltered && !forceUpdate)
            return;

        this.inputFiltered = filtered;
        if(filtered) {
            this.events.fire("notify_voice_end");
        } else {
            this.events.fire("notify_voice_start");
        }
    }

    isRecording(): boolean {
        return !this.inputFiltered;
    }

    async setConsumer(consumer: InputConsumer) {
        if(this.consumer) {
            if(this.consumer.type == InputConsumerType.NODE) {
                if(this.sourceNode)
                    (this.consumer as NodeInputConsumer).callback_disconnect(this.sourceNode);
            } else if(this.consumer.type === InputConsumerType.CALLBACK) {
                if(this.sourceNode)
                    this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
            }
        }

        if(consumer) {
            if(consumer.type == InputConsumerType.CALLBACK) {
                if(this.sourceNode)
                    this.sourceNode.connect(this.audioNodeCallbackConsumer);
            } else if(consumer.type == InputConsumerType.NODE) {
                if(this.sourceNode)
                    (consumer as NodeInputConsumer).callback_node(this.sourceNode);
            } else {
                throw "native callback consumers are not supported!";
            }
        }
        this.consumer = consumer;
    }
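A minimal sketch of a NODE consumer, which receives the tail of the filter chain as an AudioNode. The destination gain node and the `input`/`audioContext` variables are assumptions for illustration.

    // Hypothetical NODE consumer: wire the recorder's output node into our own graph.
    const consumerGain = audioContext.createGain();
    await input.setConsumer({
        type: InputConsumerType.NODE,
        callback_node: node => node.connect(consumerGain),
        callback_disconnect: node => node.disconnect(consumerGain)
    } as NodeInputConsumer);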

    private switchSourceNode(newNode: AudioNode) {
        if(this.consumer) {
            if(this.consumer.type == InputConsumerType.NODE) {
                const node_consumer = this.consumer as NodeInputConsumer;
                if(this.sourceNode) {
                    node_consumer.callback_disconnect(this.sourceNode);
                }

                if(newNode) {
                    node_consumer.callback_node(newNode);
                }
            } else if(this.consumer.type == InputConsumerType.CALLBACK) {
                this.sourceNode.disconnect(this.audioNodeCallbackConsumer);
                if(newNode) {
                    newNode.connect(this.audioNodeCallbackConsumer);
                }
            }
        }

        this.sourceNode = newNode;
    }

    currentConsumer(): InputConsumer | undefined {
        return this.consumer;
    }

    currentDeviceId(): string | undefined {
        return this.deviceId;
    }

    currentState(): InputState {
        return this.state;
    }

    getVolume(): number {
        return this.volumeModifier;
    }

    setVolume(volume: number) {
        if(volume === this.volumeModifier)
            return;
        this.volumeModifier = volume;
        this.audioNodeVolume.gain.value = volume;
    }

    isFiltered(): boolean {
        return this.state === InputState.RECORDING ? this.inputFiltered : true;
    }
}

class JavascriptLevelMeter implements LevelMeter {
    private static meterInstances: JavascriptLevelMeter[] = [];
    private static meterUpdateTask: number;

    readonly _device: WebIDevice;

    private _callback: (num: number) => any;

    private _context: AudioContext;
    private _gain_node: GainNode;
    private _source_node: MediaStreamAudioSourceNode;
    private _analyser_node: AnalyserNode;

    private _media_stream: MediaStream;

    private _analyse_buffer: Uint8Array;

    private _current_level = 0;

    constructor(device: WebIDevice) {
        this._device = device;
    }

    async initialize() {
        try {
            await new Promise((resolve, reject) => {
                const timeout = setTimeout(reject, 5000);
                aplayer.on_ready(() => {
                    clearTimeout(timeout);
                    resolve();
                });
            });
        } catch(error) {
            throw tr("audio context timeout");
        }
        this._context = aplayer.context();
        if(!this._context) throw tr("invalid context");

        this._gain_node = this._context.createGain();
        this._gain_node.gain.setValueAtTime(0, 0);

        /* analyser node */
        this._analyser_node = this._context.createAnalyser();

        const optimal_ftt_size = Math.ceil(this._context.sampleRate * (JThresholdFilter.update_task_interval / 1000));
        this._analyser_node.fftSize = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));

        if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser_node.fftSize)
            this._analyse_buffer = new Uint8Array(this._analyser_node.fftSize);

        /* starting stream */
        const _result = await requestMediaStream(this._device.deviceId, this._device.groupId);
        if(!(_result instanceof MediaStream)){
            if(_result === InputStartResult.ENOTALLOWED)
                throw tr("No permissions");
            if(_result === InputStartResult.ENOTSUPPORTED)
                throw tr("Not supported");
            if(_result === InputStartResult.EBUSY)
                throw tr("Device busy");
            if(_result === InputStartResult.EUNKNOWN)
                throw tr("an error occurred");
            throw _result;
        }
        this._media_stream = _result;

        this._source_node = this._context.createMediaStreamSource(this._media_stream);
        this._source_node.connect(this._analyser_node);
        this._analyser_node.connect(this._gain_node);
        this._gain_node.connect(this._context.destination);

        JavascriptLevelMeter.meterInstances.push(this);
        if(JavascriptLevelMeter.meterInstances.length == 1) {
            clearInterval(JavascriptLevelMeter.meterUpdateTask);
            JavascriptLevelMeter.meterUpdateTask = setInterval(() => JavascriptLevelMeter._analyse_all(), JThresholdFilter.update_task_interval) as any;
        }
    }

    destroy() {
        JavascriptLevelMeter.meterInstances.remove(this);
        if(JavascriptLevelMeter.meterInstances.length == 0) {
            clearInterval(JavascriptLevelMeter.meterUpdateTask);
            JavascriptLevelMeter.meterUpdateTask = 0;
        }

        if(this._source_node) {
            this._source_node.disconnect();
            this._source_node = undefined;
        }
        if(this._media_stream) {
            if(this._media_stream.stop)
                this._media_stream.stop();
            else
                this._media_stream.getTracks().forEach(value => {
                    value.stop();
                });
            this._media_stream = undefined;
        }
        if(this._gain_node) {
            this._gain_node.disconnect();
            this._gain_node = undefined;
        }
        if(this._analyser_node) {
            this._analyser_node.disconnect();
            this._analyser_node = undefined;
        }
    }

    device(): IDevice {
        return this._device;
    }

    set_observer(callback: (value: number) => any) {
        this._callback = callback;
    }

    private static _analyse_all() {
        for(const instance of [...this.meterInstances])
            instance._analyse();
    }

    private _analyse() {
        this._analyser_node.getByteTimeDomainData(this._analyse_buffer);

        this._current_level = JThresholdFilter.calculateAudioLevel(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
        if(this._callback)
            this._callback(this._current_level);
    }
}

190 web/app/audio/RecorderDeviceList.ts Normal file

@ -0,0 +1,190 @@
import {
    AbstractDeviceList,
    DeviceListEvents,
    DeviceListState,
    IDevice,
    PermissionState
} from "tc-shared/audio/recorder";
import * as log from "tc-shared/log";
import {LogCategory, logWarn} from "tc-shared/log";
import {Registry} from "tc-shared/events";
import {WebIDevice} from "tc-backend/web/audio/Recorder";
import * as loader from "tc-loader";

async function requestMicrophonePermissions() : Promise<PermissionState> {
    const begin = Date.now();
    try {
        await navigator.mediaDevices.getUserMedia({ audio: { deviceId: "default" }, video: false });
        return "granted";
    } catch (error) {
        const end = Date.now();
        const isSystem = (end - begin) < 250;
        log.debug(LogCategory.AUDIO, tr("Microphone device request took %d milliseconds. System answered: %s"), end - begin, isSystem);
        return "denied";
    }
}
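The `isSystem` flag is only logged here: a rejection arriving within ~250 ms most likely comes from an already persisted browser decision rather than from the user dismissing a fresh prompt. A minimal, hypothetical call site for this helper (real callers go through WebInputDeviceList.requestPermissions() below):

    // Illustrative only.
    const state = await requestMicrophonePermissions();
    if(state === "denied") {
        console.warn("Microphone access denied; input recording stays disabled.");
    }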

export let inputDeviceList: WebInputDeviceList;
class WebInputDeviceList extends AbstractDeviceList {
    private devices: WebIDevice[];

    private deviceListQueryPromise: Promise<void>;

    constructor() {
        super();

        this.devices = [];
    }

    async initialize() {
        if('permissions' in navigator && 'query' in navigator.permissions) {
            try {
                const result = await navigator.permissions.query({ name: "microphone" });
                switch (result.state) {
                    case "denied":
                        this.setPermissionState("denied");
                        break;

                    case "granted":
                        this.setPermissionState("granted");
                        break;

                    default:
                        return "unknown";
                }
            } catch (error) {
                logWarn(LogCategory.GENERAL, tr("Failed to query for microphone permissions: %s"), error);
            }
        }
    }

    getDefaultDeviceId(): string {
        return "default";
    }

    getDevices(): IDevice[] {
        return this.devices;
    }

    getEvents(): Registry<DeviceListEvents> {
        return this.events;
    }

    getStatus(): DeviceListState {
        return this.listState;
    }

    isRefreshAvailable(): boolean {
        return true;
    }

    refresh(askPermissions?: boolean): Promise<void> {
        return this.queryDevices(askPermissions === true);
    }

    async requestPermissions(): Promise<PermissionState> {
        if(this.permissionState !== "unknown")
            return this.permissionState;

        let result = await requestMicrophonePermissions();
        if(result === "granted" && this.listState === "no-permissions") {
            /* if called within doQueryDevices, queryDevices will just return the promise */
            this.queryDevices(false).then(() => {});
        }
        this.setPermissionState(result);
        return result;
    }

    private queryDevices(askPermissions: boolean) : Promise<void> {
        if(this.deviceListQueryPromise) {
            return this.deviceListQueryPromise;
        }

        this.deviceListQueryPromise = this.doQueryDevices(askPermissions).catch(error => {
            log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);

            if(this.listState !== "healthy") {
                this.setState("error");
            }
        }).then(() => {
            this.deviceListQueryPromise = undefined;
        });

        return this.deviceListQueryPromise;
    }

    private async doQueryDevices(askPermissions: boolean) {
        let devices = await navigator.mediaDevices.enumerateDevices();
        let hasPermissions = devices.findIndex(e => e.label !== "") !== -1;

        if(!hasPermissions && askPermissions) {
            this.setState("no-permissions");

            /* request permissions */
            hasPermissions = await this.requestPermissions() === "granted";
            if(hasPermissions) {
                devices = await navigator.mediaDevices.enumerateDevices();
            }
        }
        if(hasPermissions) {
            this.setPermissionState("granted");
        }

        if(window.detectedBrowser?.name === "firefox") {
            devices = [{
                label: tr("Default Firefox device"),
                groupId: "default",
                deviceId: "default",
                kind: "audioinput",

                toJSON: undefined
            }];
        }

        const inputDevices = devices.filter(e => e.kind === "audioinput");

        const oldDeviceList = this.devices;
        this.devices = [];

        let devicesAdded = 0;
        for(const device of inputDevices) {
            const oldIndex = oldDeviceList.findIndex(e => e.deviceId === device.deviceId);
            if(oldIndex === -1) {
                devicesAdded++;
            } else {
                oldDeviceList.splice(oldIndex, 1);
            }

            this.devices.push({
                deviceId: device.deviceId,
                driver: "WebAudio",
                groupId: device.groupId,
                name: device.label
            });
        }

        this.events.fire("notify_list_updated", { addedDeviceCount: devicesAdded, removedDeviceCount: oldDeviceList.length });
        if(hasPermissions) {
            this.setState("healthy");
        } else {
            this.setState("no-permissions");
        }
    }
}

loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
    function: async () => {
        inputDeviceList = new WebInputDeviceList();
        await inputDeviceList.initialize();
    },
    priority: 80,
    name: "initialize media devices"
});

loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
    function: async () => {
        inputDeviceList.refresh(false).then(() => {});
    },
    priority: 10,
    name: "query media devices"
});
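A short usage sketch of the exported device list, assuming the Registry event API exposes an on() subscription; the consumer side (a settings controller) is an assumption for illustration.

    // Hypothetical consumer, e.g. a microphone settings controller.
    inputDeviceList.getEvents().on("notify_list_updated", () => {
        const devices = inputDeviceList.getDevices();
        console.log("Microphone list changed, %d device(s) available", devices.length);
    });
    await inputDeviceList.refresh(true); // true => may trigger a permission prompt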

280 web/app/audio/RecorderFilter.ts Normal file

@ -0,0 +1,280 @@
import {FilterType, StateFilter, ThresholdFilter} from "tc-shared/voice/Filter";

export abstract class JAbstractFilter<NodeType extends AudioNode> {
    readonly priority: number;

    source_node: AudioNode;
    audioNode: NodeType;

    context: AudioContext;
    enabled: boolean = false;

    active: boolean = false; /* if true, the filter is actively filtering (muting) the input */

    callback_active_change: (new_state: boolean) => any;
    callback_enabled_change: () => any;

    paused: boolean = true;

    constructor(priority: number) {
        this.priority = priority;
    }

    /* Attention: after initialization, paused is the default state */
    abstract initialize(context: AudioContext, sourceNode: AudioNode);
    abstract finalize();

    /* whether the input has been paused and we don't expect any input */
    abstract setPaused(flag: boolean);
    abstract isPaused() : boolean;

    isEnabled(): boolean {
        return this.enabled;
    }

    setEnabled(flag: boolean) {
        this.enabled = flag;

        if(this.callback_enabled_change) {
            this.callback_enabled_change();
        }
    }
}

export class JThresholdFilter extends JAbstractFilter<GainNode> implements ThresholdFilter {
    public static update_task_interval = 20; /* 20ms */

    readonly type = FilterType.THRESHOLD;

    private threshold = 50;

    private analyzeTask: any;
    private audioAnalyserNode: AnalyserNode;
    private analyseBuffer: Uint8Array;

    private silenceCount = 0;
    private marginFrames = 5;

    private currentLevel = 0;
    private smoothRelease = 0;
    private smoothAttack = 0;

    private levelCallbacks: ((level: number) => void)[] = [];

    finalize() {
        this.paused = true;
        this.shutdownAnalyzer();

        if(this.source_node) {
            try { this.source_node.disconnect(this.audioAnalyserNode) } catch (error) {}
            try { this.source_node.disconnect(this.audioNode) } catch (error) {}
        }

        this.audioAnalyserNode = undefined;
        this.source_node = undefined;
        this.audioNode = undefined;
        this.context = undefined;
    }

    initialize(context: AudioContext, source_node: AudioNode) {
        this.paused = true;

        this.context = context;
        this.source_node = source_node;

        this.audioNode = context.createGain();
        this.audioAnalyserNode = context.createAnalyser();

        const optimal_ftt_size = Math.ceil((source_node.context || context).sampleRate * (JThresholdFilter.update_task_interval / 1000));
        const base2_ftt = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
        this.audioAnalyserNode.fftSize = base2_ftt;

        if(!this.analyseBuffer || this.analyseBuffer.length < this.audioAnalyserNode.fftSize)
            this.analyseBuffer = new Uint8Array(this.audioAnalyserNode.fftSize);

        this.active = false;
        this.audioNode.gain.value = 0; /* silence by default */

        this.source_node.connect(this.audioNode);
        this.source_node.connect(this.audioAnalyserNode);
    }

    getMarginFrames(): number { return this.marginFrames; }
    setMarginFrames(value: number) {
        this.marginFrames = value;
    }

    getAttackSmooth(): number {
        return this.smoothAttack;
    }

    getReleaseSmooth(): number {
        return this.smoothRelease;
    }

    setAttackSmooth(value: number) {
        this.smoothAttack = value;
    }

    setReleaseSmooth(value: number) {
        this.smoothRelease = value;
    }

    getThreshold(): number {
        return this.threshold;
    }

    setThreshold(value: number) {
        this.threshold = value;
        this.updateGainNode(false);
    }

    public static calculateAudioLevel(buffer: Uint8Array, fttSize: number, previous: number, smooth: number) : number {
        let level;
        {
            let total = 0, float, rms;

            for(let index = 0; index < fttSize; index++) {
                float = ( buffer[index++] / 0x7f ) - 1;
                total += (float * float);
            }
            rms = Math.sqrt(total / fttSize);
            let db = 20 * ( Math.log(rms) / Math.log(10) );
            // sanity check

            db = Math.max(-192, Math.min(db, 0));
            level = 100 + ( db * 1.92 );
        }

        return previous * smooth + level * (1 - smooth);
    }
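In words: the byte-domain samples are mapped to roughly [-1, 1), their RMS is converted to dBFS, clamped to [-192, 0] and then rescaled so that 0 dB maps to a level of 100, and the result is exponentially smoothed against the previous value (note that the loop increments `index` both in the header and in the body, so only every other byte contributes to the sum). A small worked example with illustrative numbers, not taken from the commit:

    // An RMS of 0.1 gives 20 * log10(0.1) = -20 dB, which lies inside the [-192, 0] clamp
    // and maps to level = 100 + (-20 * 1.92) = 61.6.
    // With previous = 50 and smooth = 0.75 the smoothed result is
    // 50 * 0.75 + 61.6 * 0.25 ≈ 52.9, so the reported level eases towards the new value
    // instead of jumping, which is what the meter UI relies on.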

    private analyzeAnalyseBuffer() {
        if(!this.audioNode || !this.audioAnalyserNode)
            return;

        this.audioAnalyserNode.getByteTimeDomainData(this.analyseBuffer);

        let smooth;
        if(this.silenceCount == 0)
            smooth = this.smoothRelease;
        else
            smooth = this.smoothAttack;

        this.currentLevel = JThresholdFilter.calculateAudioLevel(this.analyseBuffer, this.audioAnalyserNode.fftSize, this.currentLevel, smooth);

        this.updateGainNode(true);
        for(const callback of this.levelCallbacks)
            callback(this.currentLevel);
    }

    private updateGainNode(increaseSilenceCount: boolean) {
        let state;
        if(this.currentLevel > this.threshold) {
            this.silenceCount = 0;
            state = true;
        } else {
            state = this.silenceCount < this.marginFrames;
            if(increaseSilenceCount)
                this.silenceCount++;
        }

        if(state) {
            this.audioNode.gain.value = 1;
            if(this.active) {
                this.active = false;
                this.callback_active_change(false);
            }
        } else {
            this.audioNode.gain.value = 0;
            if(!this.active) {
                this.active = true;
                this.callback_active_change(true);
            }
        }
    }

    isPaused(): boolean {
        return this.paused;
    }

    setPaused(flag: boolean) {
        if(flag === this.paused) {
            return;
        }

        this.paused = flag;
        this.initializeAnalyzer();
    }

    registerLevelCallback(callback: (value: number) => void) {
        this.levelCallbacks.push(callback);
    }

    removeLevelCallback(callback: (value: number) => void) {
        this.levelCallbacks.remove(callback);
    }

    private initializeAnalyzer() {
        if(this.analyzeTask) {
            return;
        }

        /* by default we're consuming the input */
        this.active = true;
        this.audioNode.gain.value = 0;

        this.analyzeTask = setInterval(() => this.analyzeAnalyseBuffer(), JThresholdFilter.update_task_interval);
    }

    private shutdownAnalyzer() {
        clearInterval(this.analyzeTask);
        this.analyzeTask = undefined;
    }
}

export class JStateFilter extends JAbstractFilter<GainNode> implements StateFilter {
    public readonly type = FilterType.STATE;

    finalize() {
        if(this.source_node) {
            try { this.source_node.disconnect(this.audioNode) } catch (error) {}
        }

        this.source_node = undefined;
        this.audioNode = undefined;
        this.context = undefined;
    }

    initialize(context: AudioContext, source_node: AudioNode) {
        this.context = context;
        this.source_node = source_node;

        this.audioNode = context.createGain();
        this.audioNode.gain.value = this.active ? 0 : 1;

        this.source_node.connect(this.audioNode);
    }

    isActive(): boolean {
        return this.active;
    }

    setState(state: boolean) {
        if(this.active === state)
            return;

        this.active = state;
        if(this.audioNode)
            this.audioNode.gain.value = state ? 0 : 1;
        this.callback_active_change(state);
    }

    isPaused(): boolean {
        return this.paused;
    }

    setPaused(flag: boolean) {
        this.paused = flag;
    }
}
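The state filter can serve as a push-to-talk style mute switch: setState(true) mutes the chain, setState(false) lets audio through. A hypothetical sketch of how a hotkey handler could drive it; the `input` variable, the priority and the key binding are assumptions, not part of this file.

    // Illustrative push-to-talk wiring on top of JStateFilter.
    const pttFilter = input.createFilter(FilterType.STATE, 100);
    pttFilter.setEnabled(true);
    pttFilter.setState(true);                                     // muted until the key is pressed

    document.addEventListener("keydown", event => {
        if(event.code === "KeyT") { pttFilter.setState(false); }  // transmit
    });
    document.addEventListener("keyup", event => {
        if(event.code === "KeyT") { pttFilter.setState(true); }   // mute again
    });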

@ -1,25 +1,3 @@
-/*
-import {Device} from "tc-shared/audio/player";
-
-export function initialize() : boolean;
-export function initialized() : boolean;
-
-export function context() : AudioContext;
-export function get_master_volume() : number;
-export function set_master_volume(volume: number);
-
-export function destination() : AudioNode;
-
-export function on_ready(cb: () => any);
-
-export function available_devices() : Promise<Device[]>;
-export function set_device(device_id: string) : Promise<void>;
-
-export function current_device() : Device;
-
-export function initializeFromGesture();
-*/
 import {Device} from "tc-shared/audio/player";
 import * as log from "tc-shared/log";
 import {LogCategory} from "tc-shared/log";

@ -52,6 +30,10 @@ function fire_initialized() {

 function createNewContext() {
     audioContextInstance = new (window.webkitAudioContext || window.AudioContext)();
+    audioContextInstance.onstatechange = () => {
+        if(audioContextInstance.state === "running")
+            fire_initialized();
+    };

     audioContextInitializeCallbacks.unshift(() => {
         globalAudioGainInstance = audioContextInstance.createGain();

@ -128,9 +110,7 @@ export function current_device() : Device {
 export function initializeFromGesture() {
     if(audioContextInstance) {
         if(audioContextInstance.state !== "running") {
-            audioContextInstance.resume().then(() => {
-                fire_initialized();
-            }).catch(error => {
+            audioContextInstance.resume().catch(error => {
                 log.error(LogCategory.AUDIO, tr("Failed to initialize audio context instance from gesture: %o"), error);
             });
         }

@ -1,877 +0,0 @@
|
||||||
import {
|
|
||||||
AbstractInput, CallbackInputConsumer,
|
|
||||||
InputConsumer,
|
|
||||||
InputConsumerType,
|
|
||||||
InputDevice, InputStartResult,
|
|
||||||
InputState,
|
|
||||||
LevelMeter, NodeInputConsumer
|
|
||||||
} from "tc-shared/voice/RecorderBase";
|
|
||||||
import * as log from "tc-shared/log";
|
|
||||||
import * as loader from "tc-loader";
|
|
||||||
import {LogCategory} from "tc-shared/log";
|
|
||||||
import * as aplayer from "./player";
|
|
||||||
import * as rbase from "tc-shared/voice/RecorderBase";
|
|
||||||
|
|
||||||
declare global {
|
|
||||||
interface MediaStream {
|
|
||||||
stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let _queried_devices: JavascriptInputDevice[];
|
|
||||||
let _queried_permissioned: boolean = false;
|
|
||||||
|
|
||||||
export interface JavascriptInputDevice extends InputDevice {
|
|
||||||
device_id: string;
|
|
||||||
group_id: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function getUserMediaFunctionPromise() : (constraints: MediaStreamConstraints) => Promise<MediaStream> {
|
|
||||||
if('mediaDevices' in navigator && 'getUserMedia' in navigator.mediaDevices)
|
|
||||||
return constraints => navigator.mediaDevices.getUserMedia(constraints);
|
|
||||||
|
|
||||||
const _callbacked_function = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
|
|
||||||
if(!_callbacked_function)
|
|
||||||
return undefined;
|
|
||||||
|
|
||||||
return constraints => new Promise<MediaStream>((resolve, reject) => _callbacked_function(constraints, resolve, reject));
|
|
||||||
}
|
|
||||||
|
|
||||||
async function query_devices() {
|
|
||||||
const general_supported = !!getUserMediaFunctionPromise();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const context = aplayer.context();
|
|
||||||
const devices = await navigator.mediaDevices.enumerateDevices();
|
|
||||||
|
|
||||||
_queried_permissioned = false;
|
|
||||||
if(devices.filter(e => !!e.label).length > 0)
|
|
||||||
_queried_permissioned = true;
|
|
||||||
|
|
||||||
_queried_devices = devices.filter(e => e.kind === "audioinput").map((e: MediaDeviceInfo): JavascriptInputDevice => {
|
|
||||||
return {
|
|
||||||
channels: context ? context.destination.channelCount : 2,
|
|
||||||
sample_rate: context ? context.sampleRate : 44100,
|
|
||||||
|
|
||||||
default_input: e.deviceId == "default",
|
|
||||||
|
|
||||||
driver: "WebAudio",
|
|
||||||
name: e.label || "device-id{" + e.deviceId+ "}",
|
|
||||||
|
|
||||||
supported: general_supported,
|
|
||||||
|
|
||||||
device_id: e.deviceId,
|
|
||||||
group_id: e.groupId,
|
|
||||||
|
|
||||||
unique_id: e.deviceId
|
|
||||||
}
|
|
||||||
});
|
|
||||||
if(_queried_devices.length > 0 && _queried_devices.filter(e => e.default_input).length == 0)
|
|
||||||
_queried_devices[0].default_input = true;
|
|
||||||
} catch(error) {
|
|
||||||
log.error(LogCategory.AUDIO, tr("Failed to query microphone devices (%o)"), error);
|
|
||||||
_queried_devices = [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function devices() : InputDevice[] {
|
|
||||||
if(typeof(_queried_devices) === "undefined")
|
|
||||||
query_devices();
|
|
||||||
|
|
||||||
return _queried_devices || [];
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
export function device_refresh_available() : boolean { return true; }
|
|
||||||
export function refresh_devices() : Promise<void> { return query_devices(); }
|
|
||||||
|
|
||||||
export function create_input() : AbstractInput { return new JavascriptInput(); }
|
|
||||||
|
|
||||||
export async function create_levelmeter(device: InputDevice) : Promise<LevelMeter> {
|
|
||||||
const meter = new JavascriptLevelmeter(device as any);
|
|
||||||
await meter.initialize();
|
|
||||||
return meter;
|
|
||||||
}
|
|
||||||
|
|
||||||
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
|
|
||||||
function: async () => { query_devices(); }, /* May wait for it? */
|
|
||||||
priority: 10,
|
|
||||||
name: "query media devices"
|
|
||||||
});
|
|
||||||
|
|
||||||
export namespace filter {
|
|
||||||
export abstract class JAbstractFilter<NodeType extends AudioNode> implements rbase.filter.Filter {
|
|
||||||
type;
|
|
||||||
|
|
||||||
source_node: AudioNode;
|
|
||||||
audio_node: NodeType;
|
|
||||||
|
|
||||||
context: AudioContext;
|
|
||||||
enabled: boolean = false;
|
|
||||||
|
|
||||||
active: boolean = false; /* if true the filter filters! */
|
|
||||||
callback_active_change: (new_state: boolean) => any;
|
|
||||||
|
|
||||||
paused: boolean = true;
|
|
||||||
|
|
||||||
abstract initialize(context: AudioContext, source_node: AudioNode);
|
|
||||||
abstract finalize();
|
|
||||||
|
|
||||||
/* whatever the input has been paused and we don't expect any input */
|
|
||||||
abstract set_pause(flag: boolean);
|
|
||||||
|
|
||||||
is_enabled(): boolean {
|
|
||||||
return this.enabled;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class JThresholdFilter extends JAbstractFilter<GainNode> implements rbase.filter.ThresholdFilter {
|
|
||||||
public static update_task_interval = 20; /* 20ms */
|
|
||||||
|
|
||||||
type = rbase.filter.Type.THRESHOLD;
|
|
||||||
callback_level?: (value: number) => any;
|
|
||||||
|
|
||||||
private _threshold = 50;
|
|
||||||
|
|
||||||
private _update_task: any;
|
|
||||||
private _analyser: AnalyserNode;
|
|
||||||
private _analyse_buffer: Uint8Array;
|
|
||||||
|
|
||||||
private _silence_count = 0;
|
|
||||||
private _margin_frames = 5;
|
|
||||||
|
|
||||||
private _current_level = 0;
|
|
||||||
private _smooth_release = 0;
|
|
||||||
private _smooth_attack = 0;
|
|
||||||
|
|
||||||
finalize() {
|
|
||||||
this.set_pause(true);
|
|
||||||
|
|
||||||
if(this.source_node) {
|
|
||||||
try { this.source_node.disconnect(this._analyser) } catch (error) {}
|
|
||||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
this._analyser = undefined;
|
|
||||||
this.source_node = undefined;
|
|
||||||
this.audio_node = undefined;
|
|
||||||
this.context = undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
initialize(context: AudioContext, source_node: AudioNode) {
|
|
||||||
this.context = context;
|
|
||||||
this.source_node = source_node;
|
|
||||||
|
|
||||||
this.audio_node = context.createGain();
|
|
||||||
this._analyser = context.createAnalyser();
|
|
||||||
|
|
||||||
const optimal_ftt_size = Math.ceil((source_node.context || context).sampleRate * (JThresholdFilter.update_task_interval / 1000));
|
|
||||||
const base2_ftt = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));
|
|
||||||
this._analyser.fftSize = base2_ftt;
|
|
||||||
|
|
||||||
if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser.fftSize)
|
|
||||||
this._analyse_buffer = new Uint8Array(this._analyser.fftSize);
|
|
||||||
|
|
||||||
this.active = false;
|
|
||||||
this.audio_node.gain.value = 1;
|
|
||||||
|
|
||||||
this.source_node.connect(this.audio_node);
|
|
||||||
this.source_node.connect(this._analyser);
|
|
||||||
|
|
||||||
/* force update paused state */
|
|
||||||
this.set_pause(!(this.paused = !this.paused));
|
|
||||||
}
|
|
||||||
|
|
||||||
get_margin_frames(): number { return this._margin_frames; }
|
|
||||||
set_margin_frames(value: number) {
|
|
||||||
this._margin_frames = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
get_attack_smooth(): number {
|
|
||||||
return this._smooth_attack;
|
|
||||||
}
|
|
||||||
|
|
||||||
get_release_smooth(): number {
|
|
||||||
return this._smooth_release;
|
|
||||||
}
|
|
||||||
|
|
||||||
set_attack_smooth(value: number) {
|
|
||||||
this._smooth_attack = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
set_release_smooth(value: number) {
|
|
||||||
this._smooth_release = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
get_threshold(): number {
|
|
||||||
return this._threshold;
|
|
||||||
}
|
|
||||||
|
|
||||||
set_threshold(value: number): Promise<void> {
|
|
||||||
this._threshold = value;
|
|
||||||
return Promise.resolve();
|
|
||||||
}
|
|
||||||
|
|
||||||
public static process(buffer: Uint8Array, ftt_size: number, previous: number, smooth: number) {
|
|
||||||
let level;
|
|
||||||
{
|
|
||||||
let total = 0, float, rms;
|
|
||||||
|
|
||||||
for(let index = 0; index < ftt_size; index++) {
|
|
||||||
float = ( buffer[index++] / 0x7f ) - 1;
|
|
||||||
total += (float * float);
|
|
||||||
}
|
|
||||||
rms = Math.sqrt(total / ftt_size);
|
|
||||||
let db = 20 * ( Math.log(rms) / Math.log(10) );
|
|
||||||
// sanity check
|
|
||||||
|
|
||||||
db = Math.max(-192, Math.min(db, 0));
|
|
||||||
level = 100 + ( db * 1.92 );
|
|
||||||
}
|
|
||||||
|
|
||||||
return previous * smooth + level * (1 - smooth);
|
|
||||||
}
|
|
||||||
|
|
||||||
private _analyse() {
|
|
||||||
this._analyser.getByteTimeDomainData(this._analyse_buffer);
|
|
||||||
|
|
||||||
let smooth;
|
|
||||||
if(this._silence_count == 0)
|
|
||||||
smooth = this._smooth_release;
|
|
||||||
else
|
|
||||||
smooth = this._smooth_attack;
|
|
||||||
|
|
||||||
this._current_level = JThresholdFilter.process(this._analyse_buffer, this._analyser.fftSize, this._current_level, smooth);
|
|
||||||
|
|
||||||
this._update_gain_node();
|
|
||||||
if(this.callback_level)
|
|
||||||
this.callback_level(this._current_level);
|
|
||||||
}
|
|
||||||
|
|
||||||
private _update_gain_node() {
|
|
||||||
let state;
|
|
||||||
if(this._current_level > this._threshold) {
|
|
||||||
this._silence_count = 0;
|
|
||||||
state = true;
|
|
||||||
} else {
|
|
||||||
state = this._silence_count++ < this._margin_frames;
|
|
||||||
}
|
|
||||||
if(state) {
|
|
||||||
this.audio_node.gain.value = 1;
|
|
||||||
if(this.active) {
|
|
||||||
this.active = false;
|
|
||||||
this.callback_active_change(false);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
this.audio_node.gain.value = 0;
|
|
||||||
if(!this.active) {
|
|
||||||
this.active = true;
|
|
||||||
this.callback_active_change(true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
set_pause(flag: boolean) {
|
|
||||||
if(flag === this.paused) return;
|
|
||||||
this.paused = flag;
|
|
||||||
|
|
||||||
if(this.paused) {
|
|
||||||
clearInterval(this._update_task);
|
|
||||||
this._update_task = undefined;
|
|
||||||
|
|
||||||
if(this.active) {
|
|
||||||
this.active = false;
|
|
||||||
this.callback_active_change(false);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if(!this._update_task && this._analyser)
|
|
||||||
this._update_task = setInterval(() => this._analyse(), JThresholdFilter.update_task_interval);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class JStateFilter extends JAbstractFilter<GainNode> implements rbase.filter.StateFilter {
|
|
||||||
type = rbase.filter.Type.STATE;
|
|
||||||
|
|
||||||
finalize() {
|
|
||||||
if(this.source_node) {
|
|
||||||
try { this.source_node.disconnect(this.audio_node) } catch (error) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.source_node = undefined;
|
|
||||||
this.audio_node = undefined;
|
|
||||||
this.context = undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
initialize(context: AudioContext, source_node: AudioNode) {
|
|
||||||
this.context = context;
|
|
||||||
this.source_node = source_node;
|
|
||||||
|
|
||||||
this.audio_node = context.createGain();
|
|
||||||
this.audio_node.gain.value = this.active ? 0 : 1;
|
|
||||||
|
|
||||||
this.source_node.connect(this.audio_node);
|
|
||||||
}
|
|
||||||
|
|
||||||
is_active(): boolean {
|
|
||||||
return this.active;
|
|
||||||
}
|
|
||||||
|
|
||||||
set_state(state: boolean): Promise<void> {
|
|
||||||
if(this.active === state)
|
|
||||||
return Promise.resolve();
|
|
||||||
|
|
||||||
this.active = state;
|
|
||||||
if(this.audio_node)
|
|
||||||
this.audio_node.gain.value = state ? 0 : 1;
|
|
||||||
this.callback_active_change(state);
|
|
||||||
return Promise.resolve();
|
|
||||||
}
|
|
||||||
|
|
||||||
set_pause(flag: boolean) {
|
|
||||||
this.paused = flag;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class JavascriptInput implements AbstractInput {
|
|
||||||
private _state: InputState = InputState.PAUSED;
|
|
||||||
private _current_device: JavascriptInputDevice | undefined;
|
|
||||||
private _current_consumer: InputConsumer;
|
|
||||||
|
|
||||||
private _current_stream: MediaStream;
|
|
||||||
private _current_audio_stream: MediaStreamAudioSourceNode;
|
|
||||||
|
|
||||||
private _audio_context: AudioContext;
|
|
||||||
private _source_node: AudioNode; /* last node which could be connected to the target; target might be the _consumer_node */
|
|
||||||
private _consumer_callback_node: ScriptProcessorNode;
|
|
||||||
private readonly _consumer_audio_callback;
|
|
||||||
private _volume_node: GainNode;
|
|
||||||
private _mute_node: GainNode;
|
|
||||||
|
|
||||||
private _filters: rbase.filter.Filter[] = [];
|
|
||||||
private _filter_active: boolean = false;
|
|
||||||
|
|
||||||
private _volume: number = 1;
|
|
||||||
|
|
||||||
callback_begin: () => any = undefined;
|
|
||||||
callback_end: () => any = undefined;
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
aplayer.on_ready(() => this._audio_initialized());
|
|
||||||
this._consumer_audio_callback = this._audio_callback.bind(this);
|
|
||||||
}
|
|
||||||
|
|
||||||
private _audio_initialized() {
|
|
||||||
this._audio_context = aplayer.context();
|
|
||||||
if(!this._audio_context)
|
|
||||||
return;
|
|
||||||
|
|
||||||
this._mute_node = this._audio_context.createGain();
|
|
||||||
this._mute_node.gain.value = 0;
|
|
||||||
this._mute_node.connect(this._audio_context.destination);
|
|
||||||
|
|
||||||
this._consumer_callback_node = this._audio_context.createScriptProcessor(1024 * 4);
|
|
||||||
this._consumer_callback_node.connect(this._mute_node);
|
|
||||||
|
|
||||||
this._volume_node = this._audio_context.createGain();
|
|
||||||
this._volume_node.gain.value = this._volume;
|
|
||||||
|
|
||||||
this._initialize_filters();
|
|
||||||
if(this._state === InputState.INITIALIZING)
|
|
||||||
this.start();
|
|
||||||
}
|
|
||||||
|
|
||||||
private _initialize_filters() {
|
|
||||||
const filters = this._filters as any as filter.JAbstractFilter<AudioNode>[];
|
|
||||||
for(const filter of filters) {
|
|
||||||
if(filter.is_enabled())
|
|
||||||
filter.finalize();
|
|
||||||
}
|
|
||||||
|
|
||||||
if(this._audio_context && this._volume_node) {
|
|
||||||
const active_filter = filters.filter(e => e.is_enabled());
|
|
||||||
let stream: AudioNode = this._volume_node;
|
|
||||||
for(const f of active_filter) {
|
|
||||||
f.initialize(this._audio_context, stream);
|
|
||||||
stream = f.audio_node;
|
|
||||||
}
|
|
||||||
this._switch_source_node(stream);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private _audio_callback(event: AudioProcessingEvent) {
|
|
||||||
if(!this._current_consumer || this._current_consumer.type !== InputConsumerType.CALLBACK)
|
|
||||||
return;
|
|
||||||
|
|
||||||
const callback = this._current_consumer as CallbackInputConsumer;
|
|
||||||
if(callback.callback_audio)
|
|
||||||
callback.callback_audio(event.inputBuffer);
|
|
||||||
|
|
||||||
if(callback.callback_buffer) {
|
|
||||||
log.warn(LogCategory.AUDIO, tr("AudioInput has callback buffer, but this isn't supported yet!"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
current_state() : InputState { return this._state; };
|
|
||||||
|
|
||||||
private _start_promise: Promise<InputStartResult>;
|
|
||||||
async start() : Promise<InputStartResult> {
|
|
||||||
if(this._start_promise) {
|
|
||||||
try {
|
|
||||||
await this._start_promise;
|
|
||||||
if(this._state != InputState.PAUSED)
|
|
||||||
return;
|
|
||||||
} catch(error) {
|
|
||||||
log.debug(LogCategory.AUDIO, tr("JavascriptInput:start() Start promise await resulted in an error: %o"), error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return await (this._start_promise = this._start());
|
|
||||||
}
|
|
||||||
|
|
||||||
/* request permission for devices only one per time! */
|
|
||||||
private static _running_request: Promise<MediaStream | InputStartResult>;
|
|
||||||
static async request_media_stream(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
|
|
||||||
while(this._running_request) {
|
|
||||||
try {
|
|
||||||
await this._running_request;
|
|
||||||
} catch(error) { }
|
|
||||||
}
|
|
||||||
const promise = (this._running_request = this.request_media_stream0(device_id, group_id));
|
|
||||||
try {
|
|
||||||
return await this._running_request;
|
|
||||||
} finally {
|
|
||||||
if(this._running_request === promise)
|
|
||||||
this._running_request = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static async request_media_stream0(device_id: string, group_id: string) : Promise<MediaStream | InputStartResult> {
|
|
||||||
const media_function = getUserMediaFunctionPromise();
|
|
||||||
if(!media_function) return InputStartResult.ENOTSUPPORTED;
|
|
||||||
|
|
||||||
try {
|
|
||||||
log.info(LogCategory.AUDIO, tr("Requesting a microphone stream for device %s in group %s"), device_id, group_id);
|
|
||||||
|
|
||||||
const audio_constrains: MediaTrackConstraints = {};
|
|
||||||
audio_constrains.deviceId = device_id;
|
|
||||||
audio_constrains.groupId = group_id;
|
|
||||||
|
|
||||||
audio_constrains.echoCancellation = true;
|
|
||||||
audio_constrains.autoGainControl = true;
|
|
||||||
audio_constrains.noiseSuppression = true;
|
|
||||||
/* disabled because most the time we get a OverconstrainedError */ //audio_constrains.sampleSize = {min: 420, max: 960 * 10, ideal: 960};
|
|
||||||
|
|
||||||
const stream = await media_function({
|
|
||||||
audio: audio_constrains,
|
|
||||||
video: undefined
|
|
||||||
});
|
|
||||||
if(!_queried_permissioned) query_devices(); /* we now got permissions, requery devices */
|
|
||||||
return stream;
|
|
||||||
} catch(error) {
|
|
||||||
if('name' in error) {
|
|
||||||
if(error.name === "NotAllowedError") {
|
|
||||||
//createErrorModal(tr("Failed to create microphone"), tr("Microphone recording failed. Please allow TeaWeb access to your microphone")).open();
|
|
||||||
//FIXME: Move this to somewhere else!
|
|
||||||
|
|
||||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed (No permissions). Browser message: %o"), error.message);
|
|
||||||
return InputStartResult.ENOTALLOWED;
|
|
||||||
} else {
|
|
||||||
log.warn(LogCategory.AUDIO, tr("Microphone request failed. Request resulted in error: %o: %o"), error.name, error);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
log.warn(LogCategory.AUDIO, tr("Failed to initialize recording stream (%o)"), error);
|
|
||||||
}
|
|
||||||
return InputStartResult.EUNKNOWN;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async _start() : Promise<InputStartResult> {
|
|
||||||
try {
|
|
||||||
if(this._state != InputState.PAUSED)
|
|
||||||
throw tr("recorder already started");
|
|
||||||
|
|
||||||
this._state = InputState.INITIALIZING;
|
|
||||||
if(!this._current_device)
|
|
||||||
throw tr("invalid device");
|
|
||||||
|
|
||||||
if(!this._audio_context) {
|
|
||||||
debugger;
|
|
||||||
throw tr("missing audio context");
|
|
||||||
}
|
|
||||||
|
|
||||||
const _result = await JavascriptInput.request_media_stream(this._current_device.device_id, this._current_device.group_id);
|
|
||||||
if(!(_result instanceof MediaStream)) {
|
|
||||||
this._state = InputState.PAUSED;
|
|
||||||
return _result;
|
|
||||||
}
|
|
||||||
this._current_stream = _result;
|
|
||||||
|
|
||||||
for(const f of this._filters)
|
|
||||||
if(f.is_enabled() && f instanceof filter.JAbstractFilter)
|
|
||||||
f.set_pause(false);
|
|
||||||
this._consumer_callback_node.addEventListener('audioprocess', this._consumer_audio_callback);
|
|
||||||
|
|
||||||
this._current_audio_stream = this._audio_context.createMediaStreamSource(this._current_stream);
|
|
||||||
this._current_audio_stream.connect(this._volume_node);
|
|
||||||
this._state = InputState.RECORDING;
|
|
||||||
return InputStartResult.EOK;
|
|
||||||
} catch(error) {
|
|
||||||
if(this._state == InputState.INITIALIZING) {
|
|
||||||
this._state = InputState.PAUSED;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
} finally {
|
|
||||||
this._start_promise = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async stop() {
|
|
||||||
/* await all starts */
|
|
||||||
try {
|
|
||||||
if(this._start_promise)
|
|
||||||
await this._start_promise;
|
|
||||||
} catch(error) {}
|
|
||||||
|
|
||||||
this._state = InputState.PAUSED;
|
|
||||||
if(this._current_audio_stream)
|
|
||||||
this._current_audio_stream.disconnect();
|
|
||||||
|
|
||||||
if(this._current_stream) {
|
|
||||||
if(this._current_stream.stop)
|
|
||||||
this._current_stream.stop();
|
|
||||||
else
|
|
||||||
this._current_stream.getTracks().forEach(value => {
|
|
||||||
value.stop();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
this._current_stream = undefined;
|
|
||||||
this._current_audio_stream = undefined;
|
|
||||||
for(const f of this._filters)
|
|
||||||
if(f.is_enabled() && f instanceof filter.JAbstractFilter)
|
|
||||||
f.set_pause(true);
|
|
||||||
if(this._consumer_callback_node)
|
|
||||||
this._consumer_callback_node.removeEventListener('audioprocess', this._consumer_audio_callback);
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
current_device(): InputDevice | undefined {
|
|
||||||
return this._current_device;
|
|
||||||
}
|
|
||||||
|
|
||||||
async set_device(device: InputDevice | undefined) {
|
|
||||||
if(this._current_device === device)
|
|
||||||
return;
|
|
||||||
|
|
||||||
|
|
||||||
const saved_state = this._state;
|
|
||||||
try {
|
|
||||||
await this.stop();
|
|
||||||
} catch(error) {
|
|
||||||
log.warn(LogCategory.AUDIO, tr("Failed to stop previous record session (%o)"), error);
|
|
||||||
}
|
|
||||||
|
|
||||||
this._current_device = device as any; /* TODO: Test for device_id and device_group */
|
|
||||||
if(!device) {
|
|
||||||
this._state = saved_state === InputState.PAUSED ? InputState.PAUSED : InputState.DRY;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if(saved_state !== InputState.PAUSED) {
|
|
||||||
try {
|
|
||||||
await this.start()
|
|
||||||
} catch(error) {
|
|
||||||
log.warn(LogCategory.AUDIO, tr("Failed to start new recording stream (%o)"), error);
|
|
||||||
throw "failed to start record";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
get_filter(type: rbase.filter.Type): rbase.filter.Filter | undefined {
|
|
||||||
for(const filter of this._filters)
|
|
||||||
if(filter.type == type)
|
|
||||||
return filter;
|
|
||||||
|
|
||||||
let new_filter: filter.JAbstractFilter<AudioNode>;
|
|
||||||
switch (type) {
|
|
||||||
case rbase.filter.Type.STATE:
|
|
||||||
new_filter = new filter.JStateFilter();
|
|
||||||
break;
|
|
||||||
case rbase.filter.Type.VOICE_LEVEL:
|
|
||||||
throw "voice filter isn't supported!";
|
|
||||||
case rbase.filter.Type.THRESHOLD:
|
|
||||||
new_filter = new filter.JThresholdFilter();
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw "invalid filter type, or type isn't implemented! (" + type + ")";
|
|
||||||
}
|
|
||||||
|
|
||||||
new_filter.callback_active_change = () => this._recalculate_filter_status();
|
|
||||||
this._filters.push(new_filter as any);
|
|
||||||
this.enable_filter(type);
|
|
||||||
return new_filter as any;
|
|
||||||
}
|
|
||||||
|
|
||||||
supports_filter(type: rbase.filter.Type) : boolean {
|
|
||||||
switch (type) {
|
|
||||||
case rbase.filter.Type.THRESHOLD:
|
|
||||||
case rbase.filter.Type.STATE:
|
|
||||||
return true;
|
|
||||||
default:
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private find_filter(type: rbase.filter.Type) : filter.JAbstractFilter<AudioNode> | undefined {
|
|
||||||
for(const filter of this._filters)
|
|
||||||
if(filter.type == type)
|
|
||||||
return filter as any;
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
clear_filter() {
|
|
||||||
for(const _filter of this._filters) {
|
|
||||||
if(!_filter.is_enabled())
|
|
||||||
continue;
|
|
||||||
|
|
||||||
const c_filter = _filter as any as filter.JAbstractFilter<AudioNode>;
|
|
||||||
c_filter.finalize();
|
|
||||||
c_filter.enabled = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
this._initialize_filters();
|
|
||||||
this._recalculate_filter_status();
|
|
||||||
}
|
|
||||||
|
|
||||||
disable_filter(type: rbase.filter.Type) {
|
|
||||||
const filter = this.find_filter(type);
|
|
||||||
if(!filter) return;
|
|
||||||
|
|
||||||
/* test if the filter is active */
|
|
||||||
if(!filter.is_enabled())
|
|
||||||
return;
|
|
||||||
|
|
||||||
filter.enabled = false;
|
|
||||||
filter.set_pause(true);
|
|
||||||
filter.finalize();
|
|
||||||
this._initialize_filters();
|
|
||||||
this._recalculate_filter_status();
|
|
||||||
}
|
|
||||||
|
|
||||||
enable_filter(type: rbase.filter.Type) {
|
|
||||||
const filter = this.get_filter(type) as any as filter.JAbstractFilter<AudioNode>;
|
|
||||||
if(filter.is_enabled())
|
|
||||||
return;
|
|
||||||
|
|
||||||
filter.enabled = true;
|
|
||||||
filter.set_pause(typeof this._current_audio_stream !== "object");
|
|
||||||
this._initialize_filters();
|
|
||||||
this._recalculate_filter_status();
|
|
||||||
}
|
|
||||||
|
|
||||||
private _recalculate_filter_status() {
|
|
||||||
let filtered = this._filters.filter(e => e.is_enabled()).filter(e => (e as any as filter.JAbstractFilter<AudioNode>).active).length > 0;
|
|
||||||
if(filtered === this._filter_active)
|
|
||||||
return;
|
|
||||||
|
|
||||||
this._filter_active = filtered;
|
|
||||||
if(filtered) {
|
|
||||||
if(this.callback_end)
|
|
||||||
this.callback_end();
|
|
||||||
} else {
|
|
||||||
if(this.callback_begin)
|
|
||||||
this.callback_begin();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
current_consumer(): InputConsumer | undefined {
|
|
||||||
return this._current_consumer;
|
|
||||||
}
|
|
||||||
|
|
||||||
async set_consumer(consumer: InputConsumer) {
|
|
||||||
if(this._current_consumer) {
|
|
||||||
if(this._current_consumer.type == InputConsumerType.NODE) {
|
|
||||||
if(this._source_node)
|
|
||||||
(this._current_consumer as NodeInputConsumer).callback_disconnect(this._source_node)
|
|
||||||
} else if(this._current_consumer.type === InputConsumerType.CALLBACK) {
|
|
||||||
if(this._source_node)
|
|
||||||
this._source_node.disconnect(this._consumer_callback_node);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if(consumer) {
|
|
||||||
if(consumer.type == InputConsumerType.CALLBACK) {
|
|
||||||
if(this._source_node)
|
|
||||||
this._source_node.connect(this._consumer_callback_node);
|
|
||||||
} else if(consumer.type == InputConsumerType.NODE) {
|
|
||||||
if(this._source_node)
|
|
||||||
(consumer as NodeInputConsumer).callback_node(this._source_node);
|
|
||||||
} else {
|
|
||||||
throw "native callback consumers are not supported!";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this._current_consumer = consumer;
|
|
||||||
}
|
|
||||||
|
|
||||||
    private _switch_source_node(new_node: AudioNode) {
        if(this._current_consumer) {
            if(this._current_consumer.type == InputConsumerType.NODE) {
                const node_consumer = this._current_consumer as NodeInputConsumer;
                if(this._source_node)
                    node_consumer.callback_disconnect(this._source_node);
                if(new_node)
                    node_consumer.callback_node(new_node);
            } else if(this._current_consumer.type == InputConsumerType.CALLBACK) {
                this._source_node.disconnect(this._consumer_callback_node);
                if(new_node)
                    new_node.connect(this._consumer_callback_node);
            }
        }
        this._source_node = new_node;
    }

    get_volume(): number {
        return this._volume;
    }

    set_volume(volume: number) {
        if(volume === this._volume)
            return;
        this._volume = volume;
        this._volume_node.gain.value = volume;
    }
}

class JavascriptLevelmeter implements LevelMeter {
    private static _instances: JavascriptLevelmeter[] = [];
    private static _update_task: number;

    readonly _device: JavascriptInputDevice;

    private _callback: (num: number) => any;

    private _context: AudioContext;
    private _gain_node: GainNode;
    private _source_node: MediaStreamAudioSourceNode;
    private _analyser_node: AnalyserNode;

    private _media_stream: MediaStream;

    private _analyse_buffer: Uint8Array;

    private _current_level = 0;

    constructor(device: JavascriptInputDevice) {
        this._device = device;
    }

    async initialize() {
        try {
            await new Promise((resolve, reject) => {
                const timeout = setTimeout(reject, 5000);
                aplayer.on_ready(() => {
                    clearTimeout(timeout);
                    resolve();
                });
            });
        } catch(error) {
            throw tr("audio context timeout");
        }
        this._context = aplayer.context();
        if(!this._context) throw tr("invalid context");

        this._gain_node = this._context.createGain();
        this._gain_node.gain.setValueAtTime(0, 0);

        /* analyser node */
        this._analyser_node = this._context.createAnalyser();

        const optimal_ftt_size = Math.ceil(this._context.sampleRate * (filter.JThresholdFilter.update_task_interval / 1000));
        this._analyser_node.fftSize = Math.pow(2, Math.ceil(Math.log2(optimal_ftt_size)));

        if(!this._analyse_buffer || this._analyse_buffer.length < this._analyser_node.fftSize)
            this._analyse_buffer = new Uint8Array(this._analyser_node.fftSize);

        /* starting stream */
        const _result = await JavascriptInput.request_media_stream(this._device.device_id, this._device.group_id);
        if(!(_result instanceof MediaStream)) {
            if(_result === InputStartResult.ENOTALLOWED)
                throw tr("No permissions");
            if(_result === InputStartResult.ENOTSUPPORTED)
                throw tr("Not supported");
            if(_result === InputStartResult.EBUSY)
                throw tr("Device busy");
            if(_result === InputStartResult.EUNKNOWN)
                throw tr("an error occurred");
            throw _result;
        }
        this._media_stream = _result;

        this._source_node = this._context.createMediaStreamSource(this._media_stream);
        this._source_node.connect(this._analyser_node);
        this._analyser_node.connect(this._gain_node);
        this._gain_node.connect(this._context.destination);

        JavascriptLevelmeter._instances.push(this);
        if(JavascriptLevelmeter._instances.length == 1) {
            clearInterval(JavascriptLevelmeter._update_task);
            JavascriptLevelmeter._update_task = setInterval(() => JavascriptLevelmeter._analyse_all(), filter.JThresholdFilter.update_task_interval) as any;
        }
    }

    destory() {
        JavascriptLevelmeter._instances.remove(this);
        if(JavascriptLevelmeter._instances.length == 0) {
            clearInterval(JavascriptLevelmeter._update_task);
            JavascriptLevelmeter._update_task = 0;
        }

        if(this._source_node) {
            this._source_node.disconnect();
            this._source_node = undefined;
        }
        if(this._media_stream) {
            if(this._media_stream.stop)
                this._media_stream.stop();
            else
                this._media_stream.getTracks().forEach(value => {
                    value.stop();
                });
            this._media_stream = undefined;
        }
        if(this._gain_node) {
            this._gain_node.disconnect();
            this._gain_node = undefined;
        }
        if(this._analyser_node) {
            this._analyser_node.disconnect();
            this._analyser_node = undefined;
        }
    }

    device(): InputDevice {
        return this._device;
    }

    set_observer(callback: (value: number) => any) {
        this._callback = callback;
    }

    private static _analyse_all() {
        for(const instance of [...this._instances])
            instance._analyse();
    }

    private _analyse() {
        this._analyser_node.getByteTimeDomainData(this._analyse_buffer);

        this._current_level = filter.JThresholdFilter.process(this._analyse_buffer, this._analyser_node.fftSize, this._current_level, .75);
        if(this._callback)
            this._callback(this._current_level);
    }
}
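JavascriptLevelmeter provides live input-level readings (for example for the microphone settings view): it opens its own media stream, routes it through an AnalyserNode into a muted GainNode, and all live instances share a single polling task. A usage sketch built only from the methods shown above; how the JavascriptInputDevice is obtained is outside this excerpt:

    /* Sketch (not part of this commit): observe the input level of a device. */
    async function observeLevel(device: JavascriptInputDevice): Promise<JavascriptLevelmeter> {
        const meter = new JavascriptLevelmeter(device);
        await meter.initialize();                 /* may throw, e.g. tr("No permissions") on denied access */
        meter.set_observer(level => console.log("input level: %o", level));
        return meter;                             /* dispose with destory() when done (method name as spelled above) */
    }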
@@ -19,7 +19,6 @@ import {EventType} from "tc-shared/ui/frames/log/Definitions";
 import {WrappedWebSocket} from "tc-backend/web/connection/WrappedWebSocket";
 import {AbstractVoiceConnection} from "tc-shared/connection/VoiceConnection";
 import {DummyVoiceConnection} from "tc-shared/connection/DummyVoiceConnection";
-import {ServerConnectionFactory, setServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
 
 class ReturnListener<T> {
     resolve: (value?: T | PromiseLike<T>) => void;
@@ -103,7 +103,7 @@ export class WrappedWebSocket {
 
         try {
             if(this.socket.readyState === WebSocket.OPEN) {
-                this.socket.close();
+                this.socket.close(3000);
             } else if(this.socket.readyState === WebSocket.CONNECTING) {
                 if(kPreventOpeningWebSocketClosing) {
                     /* to prevent the "WebSocket is closed before the connection is established." warning in the console */
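The close() call now passes the application-defined close code 3000; per RFC 6455, codes in the 3000-3999 range are available for libraries and applications, so the peer can tell a deliberate shutdown from an abnormal closure. A sketch of how a receiving side could distinguish the two (the handler below is illustrative, not taken from this commit):

    /* Sketch: react to the application-defined close code. */
    declare const socket: WebSocket;

    socket.onclose = (event: CloseEvent) => {
        if(event.code === 3000) {
            /* closed deliberately by the application */
        } else {
            /* e.g. 1006: connection dropped without a proper close frame */
        }
    };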
web/app/hooks/AudioRecorder.ts (new file, 4 additions)
@@ -0,0 +1,4 @@
+import {setRecorderBackend} from "tc-shared/audio/recorder";
+import {WebAudioRecorder} from "../audio/Recorder";
+
+setRecorderBackend(new WebAudioRecorder());
@@ -2,7 +2,8 @@ import "webrtc-adapter";
 import "./index.scss";
 import "./FileTransfer";
 
-import "./factories/ServerConnection";
-import "./factories/ExternalModal";
+import "./hooks/ServerConnection";
+import "./hooks/ExternalModal";
+import "./hooks/AudioRecorder";
 
 export = require("tc-shared/main");
@@ -88,16 +88,15 @@ export class VoiceConnection extends AbstractVoiceConnection {
         if(this.currentAudioSource === recorder && !enforce)
             return;
 
-        if(recorder) {
-            await recorder.unmount();
-        }
-
         if(this.currentAudioSource) {
             await this.voiceBridge?.setInput(undefined);
             this.currentAudioSource.callback_unmount = undefined;
             await this.currentAudioSource.unmount();
         }
 
+        /* unmount our target recorder */
+        await recorder?.unmount();
+
         this.handleRecorderStop();
         this.currentAudioSource = recorder;
@@ -108,18 +107,24 @@ export class VoiceConnection extends AbstractVoiceConnection {
             recorder.callback_start = this.handleRecorderStart.bind(this);
             recorder.callback_stop = this.handleRecorderStop.bind(this);
 
-            recorder.callback_input_change = async (oldInput, newInput) => {
+            recorder.callback_input_initialized = async input => {
                 if(!this.voiceBridge)
                     return;
 
-                if(this.voiceBridge.getInput() && this.voiceBridge.getInput() !== oldInput) {
-                    logWarn(LogCategory.VOICE,
-                        tr("Having a recorder input change, but our voice bridge still has another input (Having: %o, Expecting: %o)!"),
-                        this.voiceBridge.getInput(), oldInput);
-                }
-
-                await this.voiceBridge.setInput(newInput);
+                await this.voiceBridge.setInput(input);
             };
 
+            if(recorder.input && this.voiceBridge) {
+                await this.voiceBridge.setInput(recorder.input);
+            }
+
+            if(!recorder.input || recorder.input.isFiltered()) {
+                this.handleRecorderStop();
+            } else {
+                this.handleRecorderStart();
+            }
+        } else {
+            await this.voiceBridge.setInput(undefined);
         }
 
         this.events.fire("notify_recorder_changed");
@@ -61,11 +61,12 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
     }
 
     async setInput(input: AbstractInput | undefined) {
+        console.error("SET INPUT: %o", input);
         if (this.currentInput === input)
            return;
 
        if (this.currentInput) {
-            await this.currentInput.set_consumer(undefined);
+            await this.currentInput.setConsumer(undefined);
            this.currentInput = undefined;
        }
 
@@ -73,7 +74,7 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
 
         if (this.currentInput) {
             try {
-                await this.currentInput.set_consumer({
+                await this.currentInput.setConsumer({
                     type: InputConsumerType.NODE,
                     callback_node: node => node.connect(this.localAudioDestinationNode),
                     callback_disconnect: node => node.disconnect(this.localAudioDestinationNode)
@@ -91,6 +92,7 @@ export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
         packet[2] = (this.voicePacketId >> 8) & 0xFF; //HIGHT (voiceID)
         packet[3] = (this.voicePacketId >> 0) & 0xFF; //LOW (voiceID)
         packet[4] = codec; //Codec
+        this.voicePacketId++;
     }
 
     sendStopSignal(codec: number) {
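For context, the bytes visible in this hunk write the 16-bit voice packet id big-endian into offsets 2 and 3 and the codec into offset 4 (offsets 0 and 1 lie outside the hunk); the added line makes the id increment once per sent packet. The same encoding as a standalone sketch:

    /* Sketch: offsets follow the hunk above; bytes 0 and 1 are not shown there. */
    function writeVoiceHeader(packet: Uint8Array, packetId: number, codec: number) {
        packet[2] = (packetId >> 8) & 0xFF; /* high byte of the packet id */
        packet[3] = (packetId >> 0) & 0xFF; /* low byte of the packet id */
        packet[4] = codec;                  /* codec identifier */
    }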
@@ -1,35 +0,0 @@
-html, body {
-    overflow-y: hidden;
-    height: 100%;
-    width: 100%;
-    position: fixed;
-}
-
-.app-container {
-    display: flex;
-    justify-content: stretch;
-    position: absolute;
-    top: 1.5em !important;
-    bottom: 0;
-    transition: all 0.5s linear;
-}
-.app-container .app {
-    width: 100%;
-    height: 100%;
-    margin: 0;
-    display: flex;
-    flex-direction: column;
-    resize: both;
-}
-
-@media only screen and (max-width: 650px) {
-    html, body {
-        padding: 0 !important;
-    }
-
-    .app-container {
-        bottom: 0;
-    }
-}
-
-/*# sourceMappingURL=main.css.map */
@@ -1 +0,0 @@
-{"version":3,"sourceRoot":"","sources":["main.scss"],"names":[],"mappings":"AAAA;EACC;EAEG;EACA;EACA;;;AAGJ;EACC;EACA;EACA;EAEA;EACG;EAEA;;AAEH;EACC;EACA;EACA;EAEA;EAAe;EAAwB;;;AAKzC;EACC;IACC;;;EAGD;IACC","file":"main.css"}