Removed the audio lib

master
WolverinDEV 2021-03-16 15:55:27 +01:00
parent 1b2de3ed63
commit a82d474b32
39 changed files with 82 additions and 5676 deletions

31
package-lock.json generated
View File

@ -13001,14 +13001,12 @@
"source-list-map": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz",
"integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==",
"dev": true
"integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw=="
},
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
},
"source-map-resolve": {
"version": "0.5.3",
@ -16092,6 +16090,31 @@
}
}
},
"webpack-manifest-plugin": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-3.1.0.tgz",
"integrity": "sha512-7jgB8Kb0MRWXq3YaDfe+0smv5c7MLMfze8YvG6eBEXZmy6fhwMe/eT47A0KEIF30c0DDEYKbbYTXzaMQETaZ0Q==",
"requires": {
"tapable": "^2.0.0",
"webpack-sources": "^2.2.0"
},
"dependencies": {
"tapable": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.0.tgz",
"integrity": "sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw=="
},
"webpack-sources": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-2.2.0.tgz",
"integrity": "sha512-bQsA24JLwcnWGArOKUxYKhX3Mz/nK1Xf6hxullKERyktjNMC4x8koOeaDNTA2fEJ09BdWLbM/iTW0ithREUP0w==",
"requires": {
"source-list-map": "^2.0.1",
"source-map": "^0.6.1"
}
}
}
},
"webpack-sources": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz",

View File

@ -118,6 +118,7 @@
"twemoji": "^13.0.0",
"url-knife": "^3.1.3",
"webcrypto-liner": "^1.2.3",
"webpack-manifest-plugin": "^3.1.0",
"webrtc-adapter": "^7.5.1"
}
}

View File

@ -22,8 +22,10 @@ mdRenderer.renderer.rules.link_open = function (tokens, idx, options /*, env */)
let href = `<a href="${tokens[idx].href}" ${title} ${target} ></a>`;
href = dompurify.sanitize(href);
if (href.substr(-4) !== "</a>")
if (href.substr(-4) !== "</a>") {
return "<-- invalid link open... -->";
}
return href.substr(0, href.length - 4);
};
@ -98,13 +100,25 @@ export const WhatsNew = (props: { changesUI?: ChangeLog, changesClient?: ChangeL
let versionUIDate = props.changesUI?.currentVersion, versionNativeDate = props.changesClient?.currentVersion;
if (__build.target === "web") {
subtitleLong = <Translatable key={"sub-web"}>We've successfully updated the web client for you.</Translatable>;
infoText = <VariadicTranslatable key={"info-web"}
text={"The web client has been updated to the version from {}."}>{versionUIDate}</VariadicTranslatable>;
subtitleLong = (
<Translatable key={"sub-web"}>We've successfully updated the web client for you.</Translatable>
);
infoText = (
<VariadicTranslatable key={"info-web"}
text={"The web client has been updated to the version from {}."}
>
{versionUIDate}
</VariadicTranslatable>
);
} else if (props.changesUI && props.changesClient) {
subtitleLong =
<Translatable key={"sub-native-client-ui"}>We've successfully updated the native client and its UI for
you.</Translatable>;
subtitleLong = (
<Translatable
key={"sub-native-client-ui"}
>
We've successfully updated the native client and its UI for you.
</Translatable>
);
infoText = (
<React.Fragment key={"info-native-client-ui"}>
<VariadicTranslatable
@ -114,15 +128,33 @@ export const WhatsNew = (props: { changesUI?: ChangeLog, changesClient?: ChangeL
</React.Fragment>
);
} else if (props.changesClient) {
subtitleLong = <Translatable key={"sub-native-client"}>We've successfully updated the native client for
you.</Translatable>;
infoText = <VariadicTranslatable key={"info-native-client"}
text={"The native client has been updated to the version {}."}>{versionNativeDate}</VariadicTranslatable>;
subtitleLong = (
<Translatable key={"sub-native-client"}>
We've successfully updated the native client for you.
</Translatable>
);
infoText = (
<VariadicTranslatable
key={"info-native-client"}
text={"The native client has been updated to the version {}."}>
{versionNativeDate}
</VariadicTranslatable>
);
} else if (props.changesUI) {
subtitleLong = <Translatable key={"sub-native-ui"}>We've successfully updated the native clients UI for
you.</Translatable>;
infoText = <VariadicTranslatable key={"info-native-ui"}
text={"The native clients UI has been updated to the version from 18.08.2020."}>{versionUIDate}</VariadicTranslatable>;
subtitleLong = (
<Translatable
key={"sub-native-ui"}
>
We've successfully updated the native clients UI for you.
</Translatable>
);
infoText = (
<VariadicTranslatable key={"info-native-ui"}
text={"The native clients UI has been updated to the version from 18.08.2020."}
>
{versionUIDate}
</VariadicTranslatable>
);
}
const changes = [changesClient, changesUI].filter(e => !!e);

View File

@ -1,291 +0,0 @@
import {
AbstractVoiceConnection,
VoiceConnectionStatus,
WhisperSessionInitializer
} from "tc-shared/connection/VoiceConnection";
import {VoiceConnection} from "tc-backend/web/legacy/voice/VoiceHandler";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {VoiceClient} from "tc-shared/voice/VoiceClient";
import {WhisperSession, WhisperTarget} from "tc-shared/voice/VoiceWhisper";
import {AbstractServerConnection, ConnectionStatistics} from "tc-shared/connection/ConnectionBase";
import {EventDispatchType, Registry} from "tc-shared/events";
import {VoicePlayerEvents, VoicePlayerLatencySettings, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
import { tr } from "tc-shared/i18n/localize";
import {RtpVoiceConnection} from "tc-backend/web/voice/Connection";
class ProxiedVoiceClient implements VoiceClient {
readonly clientId: number;
readonly events: Registry<VoicePlayerEvents>;
handle: VoiceClient;
private volume: number;
private latencySettings: VoicePlayerLatencySettings | undefined;
private eventDisconnect: () => void;
constructor(clientId: number) {
this.clientId = clientId;
this.events = new Registry<VoicePlayerEvents>();
this.volume = 1;
}
setHandle(handle: VoiceClient | undefined) {
if(this.eventDisconnect) {
this.eventDisconnect();
this.eventDisconnect = undefined;
}
this.handle = handle;
if(this.latencySettings) {
this.handle?.setLatencySettings(this.latencySettings);
}
this.handle?.setVolume(this.volume);
if(this.handle) {
const targetEvents = this.events;
this.eventDisconnect = this.handle.events.registerConsumer({
handleEvent(mode: EventDispatchType, type: string, data: any) {
switch (mode) {
case "later":
targetEvents.fire_later(type as any, data);
break;
case "react":
targetEvents.fire_react(type as any, data);
break;
case "sync":
targetEvents.fire(type as any, data);
break;
}
}
});
}
}
abortReplay() {
this.handle?.abortReplay();
}
flushBuffer() {
this.handle?.flushBuffer();
}
getClientId(): number {
return this.clientId;
}
getLatencySettings(): Readonly<VoicePlayerLatencySettings> {
return this.handle?.getLatencySettings() || this.latencySettings || { maxBufferTime: 200, minBufferTime: 10 };
}
getState(): VoicePlayerState {
return this.handle ? this.handle.getState() : VoicePlayerState.STOPPED;
}
getVolume(): number {
return this.handle?.getVolume() || this.volume;
}
resetLatencySettings() {
this.handle.resetLatencySettings();
this.latencySettings = undefined;
}
setLatencySettings(settings: VoicePlayerLatencySettings) {
this.latencySettings = settings;
this.handle?.setLatencySettings(this.latencySettings);
}
setVolume(volume: number) {
this.volume = volume;
this.handle?.setVolume(volume);
}
}
export class LegacySupportVoiceBridge extends AbstractVoiceConnection {
private readonly newVoiceBride: RtpVoiceConnection;
private readonly oldVoiceBridge: VoiceConnection;
private activeBridge: AbstractVoiceConnection;
private disconnectEvents: () => void;
private encoderCodec: number;
private currentRecorder: RecorderProfile;
private registeredClients: ProxiedVoiceClient[] = [];
constructor(connection: AbstractServerConnection, oldVoiceBridge: VoiceConnection, newVoiceBride: RtpVoiceConnection) {
super(connection);
this.oldVoiceBridge = oldVoiceBridge;
this.newVoiceBride = newVoiceBride;
}
async setVoiceBridge(type: "old" | "new" | "unset") {
const oldState = this.getConnectionState();
this.registeredClients.forEach(e => {
if(e.handle) {
this.activeBridge.unregisterVoiceClient(e.handle);
e.setHandle(undefined);
}
});
if(this.disconnectEvents) {
this.disconnectEvents();
this.disconnectEvents = undefined;
}
this.activeBridge = type === "old" ? this.oldVoiceBridge : type === "new" ? this.newVoiceBride : undefined;
if(this.activeBridge) {
const targetEvents = this.events;
this.disconnectEvents = this.activeBridge.events.registerConsumer({
handleEvent(mode: EventDispatchType, type: string, data: any) {
switch (mode) {
case "later":
targetEvents.fire_later(type as any, data);
break;
case "react":
targetEvents.fire_react(type as any, data);
break;
case "sync":
targetEvents.fire(type as any, data);
break;
}
}
});
this.registeredClients.forEach(e => {
if(!e.handle) {
e.setHandle(this.activeBridge.registerVoiceClient(e.clientId));
}
});
await this.activeBridge.acquireVoiceRecorder(this.currentRecorder);
/* FIXME: Fire only if the state changed */
this.events.fire("notify_connection_status_changed", { oldStatus: oldState, newStatus: this.activeBridge.getConnectionState() });
this.events.fire("notify_voice_replay_state_change", { replaying: this.activeBridge.isReplayingVoice() });
} else {
/* FIXME: Fire only if the state changed */
this.events.fire("notify_connection_status_changed", { oldStatus: oldState, newStatus: VoiceConnectionStatus.Disconnected });
this.events.fire("notify_voice_replay_state_change", { replaying: false });
}
}
acquireVoiceRecorder(recorder: RecorderProfile | undefined): Promise<void> {
this.currentRecorder = recorder;
return this.activeBridge?.acquireVoiceRecorder(recorder);
}
decodingSupported(codec: number): boolean {
return !!this.activeBridge?.decodingSupported(codec);
}
encodingSupported(codec: number): boolean {
return !!this.activeBridge?.encodingSupported(codec);
}
dropWhisperSession(session: WhisperSession) {
this.activeBridge?.dropWhisperSession(session);
}
getConnectionState(): VoiceConnectionStatus {
return this.activeBridge ? this.activeBridge.getConnectionState() : VoiceConnectionStatus.Disconnected;
}
getConnectionStats(): Promise<ConnectionStatistics> {
return this.activeBridge ? this.activeBridge.getConnectionStats() : Promise.resolve({
bytesSend: 0,
bytesReceived: 0
});
}
getEncoderCodec(): number {
return this.activeBridge ? this.activeBridge.getEncoderCodec() : this.encoderCodec;
}
getFailedMessage(): string {
return this.activeBridge?.getFailedMessage();
}
getRetryTimestamp(): number | 0 {
return this.activeBridge ? this.activeBridge.getRetryTimestamp() : 0;
}
getWhisperSessionInitializer(): WhisperSessionInitializer | undefined {
return this.activeBridge?.getWhisperSessionInitializer();
}
getWhisperSessions(): WhisperSession[] {
return this.activeBridge?.getWhisperSessions() || [];
}
getWhisperTarget(): WhisperTarget | undefined {
return this.activeBridge?.getWhisperTarget();
}
isReplayingVoice(): boolean {
return !!this.activeBridge?.isReplayingVoice();
}
availableVoiceClients(): VoiceClient[] {
return this.registeredClients;
}
registerVoiceClient(clientId: number) {
if(this.registeredClients.findIndex(e => e.clientId === clientId) !== -1) {
throw tr("voice client already exists");
}
const client = new ProxiedVoiceClient(clientId);
client.setHandle(this.activeBridge?.registerVoiceClient(clientId));
this.registeredClients.push(client);
return client;
}
setEncoderCodec(codec: number) {
this.encoderCodec = codec;
this.newVoiceBride.setEncoderCodec(codec);
this.oldVoiceBridge.setEncoderCodec(codec);
}
setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined) {
this.newVoiceBride.setWhisperSessionInitializer(initializer);
this.oldVoiceBridge.setWhisperSessionInitializer(initializer);
}
startWhisper(target: WhisperTarget): Promise<void> {
return this.activeBridge ? this.activeBridge.startWhisper(target) : Promise.reject(tr("voice bridge not connected"));
}
stopAllVoiceReplays() {
this.activeBridge?.stopAllVoiceReplays();
}
stopWhisper() {
this.oldVoiceBridge?.stopWhisper();
this.newVoiceBride?.stopWhisper();
}
unregisterVoiceClient(client: VoiceClient) {
if(!(client instanceof ProxiedVoiceClient)) {
throw tr("invalid voice client");
}
const index = this.registeredClients.indexOf(client);
if(index === -1) { return; }
this.registeredClients.splice(index, 1);
if(client.handle) {
this.activeBridge?.unregisterVoiceClient(client.handle);
}
}
voiceRecorder(): RecorderProfile {
return this.currentRecorder;
}
}

View File

@ -19,12 +19,11 @@ import {parseCommand} from "tc-backend/web/connection/CommandParser";
import {ServerAddress} from "tc-shared/tree/Server";
import {RtpVoiceConnection} from "tc-backend/web/voice/Connection";
import {VideoConnection} from "tc-shared/connection/VideoConnection";
import {VoiceConnection} from "tc-backend/web/legacy/voice/VoiceHandler";
import {LegacySupportVoiceBridge} from "tc-backend/web/connection/LegacySupportVoiceBridge";
import {ServerFeature} from "tc-shared/connection/ServerFeatures";
import {RTCConnection} from "tc-shared/connection/rtc/Connection";
import {RtpVideoConnection} from "tc-shared/connection/rtc/video/Connection";
import { tr } from "tc-shared/i18n/localize";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
class ReturnListener<T> {
resolve: (value?: T | PromiseLike<T>) => void;
@ -51,10 +50,6 @@ export class ServerConnection extends AbstractServerConnection {
private voiceConnection: RtpVoiceConnection;
private videoConnection: RtpVideoConnection;
/* legacy */
private oldVoiceConnection: VoiceConnection;
private legacyVoiceConnection: LegacySupportVoiceBridge;
private pingStatistics = {
thread_id: 0,
@ -82,9 +77,6 @@ export class ServerConnection extends AbstractServerConnection {
this.rtcConnection = new RTCConnection(this, true);
this.voiceConnection = new RtpVoiceConnection(this, this.rtcConnection);
this.videoConnection = new RtpVideoConnection(this.rtcConnection);
this.oldVoiceConnection = new VoiceConnection(this);
this.legacyVoiceConnection = new LegacySupportVoiceBridge(this, this.oldVoiceConnection, this.voiceConnection);
}
destroy() {
@ -115,11 +107,6 @@ export class ServerConnection extends AbstractServerConnection {
this.voiceConnection && this.voiceConnection.destroy();
this.voiceConnection = undefined;
this.oldVoiceConnection?.destroy();
this.oldVoiceConnection = undefined;
this.legacyVoiceConnection = undefined;
this.commandHandlerBoss && this.commandHandlerBoss.destroy();
this.commandHandlerBoss = undefined;
@ -379,8 +366,6 @@ export class ServerConnection extends AbstractServerConnection {
this.events.fire("notify_ping_updated", { newPing: this.ping() });
//logDebug(LogCategory.NETWORKING, tr("Received new pong. Updating ping to: JS: %o Native: %o"), this._ping.value.toFixed(3), this._ping.value_native.toFixed(3));
}
} else if(json["type"] === "WebRTC") {
this.oldVoiceConnection?.handleControlPacket(json);
} else {
logWarn(LogCategory.NETWORKING, tr("Unknown command type %o"), json["type"]);
}
@ -409,22 +394,12 @@ export class ServerConnection extends AbstractServerConnection {
}
if(this.client.serverFeatures.supportsFeature(ServerFeature.VIDEO, 1)) {
this.legacyVoiceConnection.setVoiceBridge("new").then(() => {
this.rtcConnection.doInitialSetup();
}).catch(error => {
logError(LogCategory.VOICE, tr("Failed to setup the voice bridge: %o"), error);
/* FIXME: Some kind of error modal? */
});
} else{
/* old voice connection */
logDebug(LogCategory.NETWORKING, tr("Using legacy voice connection for TeaSpeak server bellow 1.4.5"));
this.legacyVoiceConnection.setVoiceBridge("old").then(() => {
this.oldVoiceConnection.startVoiceBridge();
logDebug(LogCategory.NETWORKING, tr("Using legacy voice connection for TeaSpeak server bellow 1.5"));
createErrorModal(tr("Server outdated"), tr("Please update your server in order to use the WebClient")).open();
this.rtcConnection.setNotSupported();
}).catch(error => {
logError(LogCategory.VOICE, tr("Failed to setup the old voice bridge: %o"), error);
/* FIXME: Some kind of error modal? */
});
}
});
}
@ -487,7 +462,7 @@ export class ServerConnection extends AbstractServerConnection {
}
getVoiceConnection(): AbstractVoiceConnection {
return this.legacyVoiceConnection;
return this.voiceConnection;
}
getVideoConnection(): VideoConnection {

View File

@ -1,39 +0,0 @@
import {AudioLibrary} from "./index";
import {LogCategory, logWarn} from "tc-shared/log";
import { tr } from "tc-shared/i18n/localize";
export class AudioClient {
private readonly handle: AudioLibrary;
private readonly clientId: number;
public callback_decoded: (buffer: AudioBuffer) => void;
public callback_ended: () => void;
constructor(handle: AudioLibrary, clientId: number) {
this.handle = handle;
this.clientId = clientId;
}
async initialize() { }
destroy() {
this.callback_ended = undefined;
this.callback_decoded = undefined;
this.handle.destroyClient(this.clientId);
}
enqueueBuffer(buffer: Uint8Array, packetId: number, codec: number, head: boolean) {
this.handle.getWorker().executeThrow("enqueue-audio-packet", {
clientId: this.clientId,
codec: codec,
packetId: packetId,
head: head,
buffer: buffer.buffer,
byteLength: buffer.byteLength,
byteOffset: buffer.byteOffset,
}, 5000, [buffer.buffer]).catch(error => {
logWarn(LogCategory.AUDIO, tr("Failed to enqueue audio buffer for audio client %d: %o"), this.clientId, error);
});
}
}

View File

@ -1,51 +0,0 @@
/* from handle to worker */
export interface AWCommand {
"initialize": {},
"create-client": {},
"enqueue-audio-packet": {
clientId: number,
packetId: number,
codec: number,
head: boolean,
buffer: ArrayBuffer,
byteLength: number,
byteOffset: number,
},
"destroy-client": {
clientId: number
}
}
/* from worker to handle */
export interface AWCommandResponse {
"create-client-result": { clientId: number }
}
export interface AWMessageRelations {
"initialize": void,
"create-client": "create-client-result",
"create-client-result": never,
"enqueue-audio-packet": void,
"destroy-client": void
}
/* host to worker notifies */
export interface AWNotifies {}
/* worker to host notifies */
export interface AWNotifiesWorker {
"notify-decoded-audio": {
clientId: number,
buffer: ArrayBuffer,
byteLength: number,
byteOffset: number,
channelCount: number,
sampleRate: number
}
}

View File

@ -1,99 +0,0 @@
import {WorkerOwner} from "tc-shared/workers/WorkerOwner";
import {
AWCommand,
AWCommandResponse,
AWMessageRelations,
AWNotifies,
AWNotifiesWorker
} from "./WorkerMessages";
import {AudioClient} from "./AudioClient";
import {LogCategory, logWarn} from "tc-shared/log";
import * as loader from "tc-loader";
import {Stage} from "tc-loader";
import { tr } from "tc-shared/i18n/localize";
export type AudioLibraryWorker = WorkerOwner<AWCommand, AWCommandResponse, AWMessageRelations, AWNotifies, AWNotifiesWorker>;
export class AudioLibrary {
private readonly worker: AudioLibraryWorker;
private registeredClients: {[key: number]: AudioClient} = {};
constructor() {
this.worker = new WorkerOwner(AudioLibrary.spawnNewWorker);
}
private static spawnNewWorker() : Worker {
/*
* Attention don't use () => new Worker(...).
* This confuses the worker plugin and will not emit any modules
*/
return new Worker("./worker/index.ts", { type: "module" });
}
async initialize() {
await this.worker.spawnWorker();
await this.worker.executeThrow("initialize", {}, 10000);
this.worker.registerNotifyHandler("notify-decoded-audio", payload => {
if(payload.channelCount === 0 || payload.byteLength === 0) {
this.registeredClients[payload.clientId]?.callback_ended();
return;
}
let buffer = new Float32Array(payload.buffer, payload.byteOffset, payload.byteLength / 4);
let audioBuffer = new AudioBuffer({ length: buffer.length / payload.channelCount, numberOfChannels: payload.channelCount, sampleRate: payload.sampleRate });
for(let channel = 0; channel < payload.channelCount; channel++) {
audioBuffer.copyToChannel(buffer.subarray(channel * audioBuffer.length), channel);
}
this.registeredClients[payload.clientId]?.callback_decoded(audioBuffer);
});
}
async createClient() {
const { clientId } = await this.worker.executeThrow("create-client", {}, 5000);
const wrapper = new AudioClient(this, clientId);
try {
await wrapper.initialize();
} catch (error) {
this.worker.executeThrow("destroy-client", { clientId: clientId }).catch(error => {
logWarn(LogCategory.AUDIO, tr("Failed to destroy client after a failed initialialization: %o"), error);
});
throw error;
}
this.registeredClients[clientId] = wrapper;
return wrapper;
}
destroyClient(clientId: number) {
delete this.registeredClients[clientId];
this.worker.execute("destroy-client", { clientId: clientId }).then(result => {
if(result.success === false) {
logWarn(LogCategory.AUDIO, tr("Failed to destroy audio client %d: %s"), clientId, result.error);
}
});
}
getWorker() : AudioLibraryWorker {
return this.worker;
}
}
let audioLibrary: AudioLibrary;
export function getAudioLibrary() {
return audioLibrary;
}
loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
name: "audio lib init",
priority: 10,
function: async () => {
audioLibrary = new AudioLibrary();
try {
await audioLibrary.initialize();
} catch (error) {
loader.critical_error("Audio library initialisation failed", "Lookup the console for more details");
throw error;
}
}
});

View File

@ -1,7 +0,0 @@
import * as lib from "tc-backend/audio-lib/index";
export type AudioLibrary = (typeof lib) & {
memory: WebAssembly.Memory
}
export function getAudioLibraryInstance() : Promise<AudioLibrary>;

View File

@ -1,13 +0,0 @@
/*
* This is more a hack than any thing else.
* TypeScript can't keep the async import statements, but we require it to initialize the wasm object...
*/
module.exports = {
getAudioLibraryInstance() {
return import("tc-backend/audio-lib/index_bg.wasm").then(wasm => {
return import("tc-backend/audio-lib/index").then(pkg => {
return Object.assign(pkg, { memory: wasm.memory });
});
});
}
};

View File

@ -1,69 +0,0 @@
// @ts-ignore
__webpack_public_path__ = './';
import {WorkerHandler} from "tc-shared/workers/WorkerHandler";
import {
AWCommand,
AWCommandResponse,
AWMessageRelations,
AWNotifies,
AWNotifiesWorker
} from "../WorkerMessages";
import {AudioLibrary, getAudioLibraryInstance} from "./async_require";
/*
* Fix since rust wasm is used to run in normal space, not as worker.
*/
(self as any).Window = (self as any).DedicatedWorkerGlobalScope;
let audioLibrary: AudioLibrary;
export async function initializeAudioLib() {
audioLibrary = await getAudioLibraryInstance();
const error = audioLibrary.initialize();
if(typeof error === "string") {
console.error("Failed to initialize the audio lib: %s", error);
}
}
const workerHandler = new WorkerHandler<AWCommand, AWCommandResponse, AWMessageRelations, AWNotifies, AWNotifiesWorker>();
workerHandler.initialize();
workerHandler.registerMessageHandler("create-client", () => {
const client = audioLibrary.audio_client_create();
audioLibrary.audio_client_buffer_callback(client, (ptr, samples, channels) => {
try {
const sendBuffer = new Uint8Array(samples * channels * 4);
sendBuffer.set(new Uint8Array(audioLibrary.memory.buffer, ptr, samples * channels * 4));
workerHandler.notify("notify-decoded-audio", {
buffer: sendBuffer.buffer,
byteLength: sendBuffer.byteLength,
byteOffset: sendBuffer.byteOffset,
clientId: client,
sampleRate: 48000,
channelCount: channels
});
} catch (error) {
console.error(error);
}
});
return {
clientId: client
}
});
workerHandler.registerMessageHandler("destroy-client", payload => {
audioLibrary.audio_client_destroy(payload.clientId);
})
workerHandler.registerMessageHandler("initialize", async () => {
await initializeAudioLib();
})
workerHandler.registerMessageHandler("enqueue-audio-packet", payload => {
audioLibrary.audio_client_enqueue_buffer(payload.clientId, new Uint8Array(payload.buffer, payload.byteOffset, payload.byteLength), payload.packetId, payload.codec, payload.head);
});

View File

@ -1,56 +0,0 @@
import {LogCategory, logWarn} from "tc-shared/log";
import { tr } from "tc-shared/i18n/localize";
const OfflineAudioContext = window.webkitOfflineAudioContext || window.OfflineAudioContext;
export class AudioResampler {
private readonly targetSampleRate: number;
private readonly isPromiseResponse: boolean;
constructor(targetSampleRate: number){
this.targetSampleRate = targetSampleRate;
this.isPromiseResponse = navigator.browserSpecs.name != 'Safari';
if(this.targetSampleRate < 3000 || this.targetSampleRate > 384000) {
throw tr("The target sample rate is outside the range [3000, 384000].");
}
}
getTargetSampleRate() : number {
return this.targetSampleRate;
}
async resample(buffer: AudioBuffer) : Promise<AudioBuffer> {
if(!buffer) {
logWarn(LogCategory.AUDIO, tr("Received empty buffer as input! Returning empty output!"));
return buffer;
}
if(buffer.sampleRate == this.targetSampleRate) {
return buffer;
}
const context = new OfflineAudioContext(
buffer.numberOfChannels,
Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate),
this.targetSampleRate
);
let source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(0);
if(this.isPromiseResponse) {
return await context.startRendering();
} else {
return await new Promise<AudioBuffer>((resolve, reject) => {
context.oncomplete = event => resolve(event.renderedBuffer);
try {
context.startRendering();
} catch (ex) {
reject(ex);
}
});
}
}
}

View File

@ -1,15 +0,0 @@
import {VoiceClient} from "tc-shared/voice/VoiceClient";
import {WebVoicePlayer} from "./VoicePlayer";
export class VoiceClientController extends WebVoicePlayer implements VoiceClient {
private readonly clientId: number;
constructor(clientId) {
super();
this.clientId = clientId;
}
getClientId(): number {
return this.clientId;
}
}

View File

@ -1,547 +0,0 @@
import * as log from "tc-shared/log";
import {LogCategory, logDebug, logError, logInfo, logTrace, logWarn} from "tc-shared/log";
import * as aplayer from "../../audio/player";
import {ServerConnection} from "../../connection/ServerConnection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {VoiceClientController} from "./VoiceClient";
import {tr} from "tc-shared/i18n/localize";
import {
AbstractVoiceConnection,
VoiceConnectionStatus,
WhisperSessionInitializer
} from "tc-shared/connection/VoiceConnection";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
import {ConnectionStatistics, ServerConnectionEvents} from "tc-shared/connection/ConnectionBase";
import {ConnectionState} from "tc-shared/ConnectionHandler";
import {VoiceBridge, VoicePacket, VoiceWhisperPacket} from "./bridge/VoiceBridge";
import {NativeWebRTCVoiceBridge} from "./bridge/NativeWebRTCVoiceBridge";
import {
kUnknownWhisperClientUniqueId,
WhisperSession,
WhisperSessionState,
WhisperTarget
} from "tc-shared/voice/VoiceWhisper";
import {VoiceClient} from "tc-shared/voice/VoiceClient";
import {WebWhisperSession} from "./VoiceWhisper";
import {VoicePlayerState} from "tc-shared/voice/VoicePlayer";
type CancelableWhisperTarget = WhisperTarget & { canceled: boolean };
export class VoiceConnection extends AbstractVoiceConnection {
readonly connection: ServerConnection;
private readonly serverConnectionStateListener;
private connectionState: VoiceConnectionStatus;
private failedConnectionMessage: string;
private localAudioStarted = false;
private connectionLostModalOpen = false;
private connectAttemptCounter = 0;
private awaitingAudioInitialize = false;
private currentAudioSource: RecorderProfile;
private voiceClients: {[key: number]: VoiceClientController} = {};
private whisperSessionInitializer: WhisperSessionInitializer;
private whisperSessions: {[key: number]: WebWhisperSession} = {};
private whisperTarget: CancelableWhisperTarget | undefined;
private whisperTargetInitialize: Promise<void>;
private voiceBridge: VoiceBridge;
private encoderCodec: number = 5;
private lastConnectAttempt: number = 0;
private currentlyReplayingVoice: boolean = false;
private readonly voiceClientStateChangedEventListener;
private readonly whisperSessionStateChangedEventListener;
constructor(connection: ServerConnection) {
super(connection);
this.setWhisperSessionInitializer(undefined);
this.connectionState = VoiceConnectionStatus.Disconnected;
this.connection = connection;
this.connection.events.on("notify_connection_state_changed",
this.serverConnectionStateListener = this.handleServerConnectionStateChanged.bind(this));
this.voiceClientStateChangedEventListener = this.handleVoiceClientStateChange.bind(this);
this.whisperSessionStateChangedEventListener = this.handleWhisperSessionStateChange.bind(this);
}
getConnectionState(): VoiceConnectionStatus {
return this.connectionState;
}
getFailedMessage(): string {
return this.failedConnectionMessage;
}
getRetryTimestamp(): number | 0 {
return 0;
}
destroy() {
this.connection.events.off(this.serverConnectionStateListener);
this.dropVoiceBridge();
this.acquireVoiceRecorder(undefined, true).catch(error => {
logWarn(LogCategory.VOICE, tr("Failed to release voice recorder: %o"), error);
}).then(() => {
for(const client of Object.keys(this.voiceClients).map(clientId => this.voiceClients[clientId])) {
client.abortReplay();
}
this.voiceClients = undefined;
this.currentAudioSource = undefined;
});
if(Object.keys(this.voiceClients).length !== 0) {
logWarn(LogCategory.AUDIO, tr("Voice connection will be destroyed, but some voice clients are still left (%d)."), Object.keys(this.voiceClients).length);
}
const whisperSessions = Object.keys(this.whisperSessions);
whisperSessions.forEach(session => this.whisperSessions[session].destroy());
this.whisperSessions = {};
this.events.destroy();
}
reset() {
this.dropVoiceBridge();
}
async acquireVoiceRecorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
if(this.currentAudioSource === recorder && !enforce) {
return;
}
if(this.currentAudioSource) {
await this.voiceBridge?.setInput(undefined);
this.currentAudioSource.callback_unmount = undefined;
await this.currentAudioSource.unmount();
}
/* unmount our target recorder */
await recorder?.unmount();
this.handleRecorderStop();
const oldRecorder = recorder;
this.currentAudioSource = recorder;
if(recorder) {
recorder.current_handler = this.connection.client;
recorder.callback_unmount = this.handleRecorderUnmount.bind(this);
recorder.callback_start = this.handleRecorderStart.bind(this);
recorder.callback_stop = this.handleRecorderStop.bind(this);
recorder.callback_input_initialized = async input => {
if(!this.voiceBridge)
return;
await this.voiceBridge.setInput(input);
};
if(recorder.input && this.voiceBridge) {
await this.voiceBridge.setInput(recorder.input);
}
if(!recorder.input || recorder.input.isFiltered()) {
this.handleRecorderStop();
} else {
this.handleRecorderStart();
}
} else {
await this.voiceBridge?.setInput(undefined);
}
this.events.fire("notify_recorder_changed", {
oldRecorder: oldRecorder,
newRecorder: recorder
});
}
/**
 * Establishes (or re-establishes) the WebRTC voice bridge to the currently
 * connected server. When the audio player backend isn't initialized yet the
 * call is deferred until it reports ready.
 */
public startVoiceBridge() {
if(!aplayer.initialized()) {
logDebug(LogCategory.VOICE, tr("Audio player isn't initialized yet. Waiting for it to initialize."));
/* register the deferred start only once, even if called repeatedly */
if(!this.awaitingAudioInitialize) {
this.awaitingAudioInitialize = true;
aplayer.on_ready(() => this.startVoiceBridge());
}
return;
}
/* only bring up the voice bridge while the main server connection is established */
if(this.connection.getConnectionState() !== ConnectionState.CONNECTED) {
return;
}
this.lastConnectAttempt = Date.now();
this.connectAttemptCounter++;
/* silently tear down any previous bridge (no disconnect callback) before replacing it */
if(this.voiceBridge) {
this.voiceBridge.callbackDisconnect = undefined;
this.voiceBridge.disconnect();
}
this.voiceBridge = new NativeWebRTCVoiceBridge();
this.voiceBridge.callback_incoming_voice = packet => this.handleVoicePacket(packet);
this.voiceBridge.callback_incoming_whisper = packet => this.handleWhisperPacket(packet);
/* control/signaling data is tunneled through the main server connection as JSON */
this.voiceBridge.callback_send_control_data = (request, payload) => {
this.connection.sendData(JSON.stringify(Object.assign({
type: "WebRTC",
request: request
}, payload)))
};
this.voiceBridge.callbackDisconnect = () => {
this.connection.client.log.log("connection.voice.dropped", { });
/* show the "connection lost" modal at most once at a time */
if(!this.connectionLostModalOpen) {
this.connectionLostModalOpen = true;
const modal = createErrorModal(tr("Voice connection lost"), tr("Lost voice connection to the target server. Trying to reconnect..."));
modal.close_listener.push(() => this.connectionLostModalOpen = false);
modal.open();
}
logInfo(LogCategory.WEBRTC, tr("Lost voice connection to target server. Trying to reconnect."));
this.executeVoiceBridgeReconnect();
}
this.connection.client.log.log("connection.voice.connect", { attemptCount: this.connectAttemptCounter });
this.setConnectionState(VoiceConnectionStatus.Connecting);
this.voiceBridge.connect().then(result => {
if(result.type === "success") {
/* connect succeeded: reset the reconnect bookkeeping */
this.lastConnectAttempt = 0;
this.connectAttemptCounter = 0;
this.connection.client.log.log("connection.voice.connect.succeeded", { });
/* re-attach the currently acquired recorder input, if any */
const currentInput = this.voiceRecorder()?.input;
if(currentInput) {
this.voiceBridge.setInput(currentInput).catch(error => {
createErrorModal(tr("Input recorder attechment failed"), tr("Failed to apply the current microphone recorder to the voice sender.")).open();
logWarn(LogCategory.VOICE, tr("Failed to apply the input to the voice bridge: %o"), error);
this.handleRecorderUnmount();
});
}
this.setConnectionState(VoiceConnectionStatus.Connected);
} else if(result.type === "canceled") {
/* we've to do nothing here */
} else if(result.type === "failed") {
/* give up after five consecutive failed attempts */
let doReconnect = result.allowReconnect && this.connectAttemptCounter < 5;
logWarn(LogCategory.VOICE, tr("Failed to setup voice bridge: %s. Reconnect: %o"), result.message, doReconnect);
this.connection.client.log.log("connection.voice.connect.failed", {
reason: result.message,
reconnect_delay: doReconnect ? 1 : 0
});
if(doReconnect) {
this.executeVoiceBridgeReconnect();
} else {
this.failedConnectionMessage = result.message;
this.setConnectionState(VoiceConnectionStatus.Failed);
}
}
});
}
/* Retry the voice bridge connection (currently without any back-off delay). */
private executeVoiceBridgeReconnect() {
/* TODO: May some kind of incremental timeout? */
this.startVoiceBridge();
}
/* Tear down the active voice bridge (if any) and mark the voice connection as disconnected. */
private dropVoiceBridge() {
    const bridge = this.voiceBridge;
    if(bridge) {
        /* suppress the disconnect callback; this is an intentional shutdown */
        bridge.callbackDisconnect = undefined;
        bridge.disconnect();
        this.voiceBridge = undefined;
    }
    this.setConnectionState(VoiceConnectionStatus.Disconnected);
}
/**
 * Dispatch a WebRTC control message, received via the main server connection,
 * to the active voice bridge.
 *
 * @param json Parsed control payload; its "request" key selects the action.
 */
handleControlPacket(json) {
    /* The bridge may already have been dropped (server disconnect / reconnect)
       while a control packet was still in flight; guard against a TypeError. */
    this.voiceBridge?.handleControlData(json["request"], json);
}
/* Route an incoming voice packet to the registered voice client for playback. */
protected handleVoicePacket(packet: VoicePacket) {
const chandler = this.connection.client;
if(chandler.isSpeakerMuted() || chandler.isSpeakerDisabled()) /* we dont need to do anything with sound playback when we're not listening to it */
return;
let client = this.findVoiceClient(packet.clientId);
if(!client) {
/* voice data for a client which was never registered via registerVoiceClient() */
logError(LogCategory.VOICE, tr("Having voice from unknown audio client? (ClientID: %o)"), packet.clientId);
return;
}
client.enqueueAudioPacket(packet.voiceId, packet.codec, packet.head, packet.payload);
}
/*
 * Called when the local recorder stops delivering audio; clears our own
 * "speaking" flag and tells the bridge to stop the outgoing stream.
 * NOTE(review): the return values are inconsistent (`false` on the early
 * exits, `undefined` otherwise); all visible callers ignore the result.
 */
private handleRecorderStop() {
const chandler = this.connection.client;
const ch = chandler.getClient();
if(ch) ch.speaking = false;
if(!chandler.connected)
return false;
if(chandler.isMicrophoneMuted())
return false;
logInfo(LogCategory.VOICE, tr("Local voice ended"));
this.localAudioStarted = false;
this.voiceBridge?.sendStopSignal(this.encoderCodec);
}
/* Called when the local recorder starts delivering audio; flags our own client as speaking. */
private handleRecorderStart() {
    const clientHandler = this.connection.client;

    /* a "started" event while muted indicates an inconsistent recorder state */
    if(clientHandler.isMicrophoneMuted()) {
        logWarn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted!"));
        return;
    }

    this.localAudioStarted = true;
    logInfo(LogCategory.VOICE, tr("Local voice started"));

    const ownClient = clientHandler.getClient();
    if(ownClient) {
        ownClient.speaking = true;
    }
}
/* Invoked when the recorder is unmounted externally; detaches it from this connection. */
private handleRecorderUnmount() {
logInfo(LogCategory.VOICE, "Lost recorder!");
this.currentAudioSource = undefined;
this.acquireVoiceRecorder(undefined, true); /* we can ignore the promise because we should finish this directly */
}
/* Update the voice connection state and notify listeners, but only on an actual change. */
private setConnectionState(state: VoiceConnectionStatus) {
    if(this.connectionState === state) {
        return;
    }

    const previousState = this.connectionState;
    this.connectionState = state;
    this.events.fire("notify_connection_status_changed", { newStatus: state, oldStatus: previousState });
}
/* Track the main server connection: reset reconnect bookkeeping and drop the bridge on disconnect. */
private handleServerConnectionStateChanged(event: ServerConnectionEvents["notify_connection_state_changed"]) {
if(event.newState === ConnectionState.CONNECTED) {
/* startVoiceBridge() will be called by the server connection if we're using this old voice bridge */
/* this.startVoiceBridge(); */
} else {
this.connectAttemptCounter = 0;
this.lastConnectAttempt = 0;
this.dropVoiceBridge();
}
}
/* Returns the currently attached recorder profile, or undefined when none is acquired. */
voiceRecorder(): RecorderProfile {
return this.currentAudioSource;
}
/* List every currently registered voice client. */
availableVoiceClients(): VoiceClient[] {
    return Object.keys(this.voiceClients).map(clientId => this.voiceClients[clientId]);
}
/* Look up the voice client controller registered for the given client id. */
findVoiceClient(clientId: number) : VoiceClientController | undefined {
return this.voiceClients[clientId];
}
/*
 * Remove a voice client from the registry and destroy it.
 * Throws when the client wasn't created by this connection
 * (i.e. isn't a VoiceClientController).
 */
unregisterVoiceClient(client: VoiceClient) {
if(!(client instanceof VoiceClientController))
throw "Invalid client type";
client.events.off("notify_state_changed", this.voiceClientStateChangedEventListener);
delete this.voiceClients[client.getClientId()];
client.destroy();
}
/* Create and register a voice client controller for the given client id. */
registerVoiceClient(clientId: number): VoiceClient {
    if(typeof this.voiceClients[clientId] !== "undefined") {
        throw tr("voice client already registered");
    }

    const controller = new VoiceClientController(clientId);
    this.voiceClients[clientId] = controller;
    controller.events.on("notify_state_changed", this.voiceClientStateChangedEventListener);
    return controller;
}
/* Only codec ids 4 and 5 (presumably Opus voice/music — confirm against the protocol) can be decoded. */
decodingSupported(codec: number): boolean {
    const kMinSupportedCodec = 4;
    const kMaxSupportedCodec = 5;
    return codec >= kMinSupportedCodec && codec <= kMaxSupportedCodec;
}
/* Mirrors decodingSupported(): only codec ids 4 and 5 can be encoded. */
encodingSupported(codec: number): boolean {
    const kMinSupportedCodec = 4;
    const kMaxSupportedCodec = 5;
    return codec >= kMinSupportedCodec && codec <= kMaxSupportedCodec;
}
/* Returns the codec id currently used for encoding outgoing voice. */
getEncoderCodec(): number {
return this.encoderCodec;
}
/* Set the codec id used for encoding outgoing voice (not validated against encodingSupported()). */
setEncoderCodec(codec: number) {
this.encoderCodec = codec;
}
/* Abort playback for every registered voice client. */
stopAllVoiceReplays() {
    for(const voiceClient of this.availableVoiceClients()) {
        voiceClient.abortReplay();
    }
    /* TODO: Whisper sessions as well */
}
/* Whether any voice client or whisper session is currently audible (see updateVoiceReplaying()). */
isReplayingVoice(): boolean {
return this.currentlyReplayingVoice;
}
/* A voice client's player state changed; recompute the aggregate replay flag. */
private handleVoiceClientStateChange(/* event: VoicePlayerEvents["notify_state_changed"] */) {
this.updateVoiceReplaying();
}
/* A whisper session's state changed; recompute the aggregate replay flag. */
private handleWhisperSessionStateChange() {
this.updateVoiceReplaying();
}
/*
 * Recompute whether any voice client or whisper session is currently audible
 * and fire a (deferred) notification when the aggregate state changed.
 */
private updateVoiceReplaying() {
    let replaying = false;

    if(this.connectionState === VoiceConnectionStatus.Connected) {
        const anyClientAudible = this.availableVoiceClients().some(client => {
            const state = client.getState();
            return state === VoicePlayerState.PLAYING || state === VoicePlayerState.BUFFERING;
        });

        /* whisper sessions only need checking when no regular client is audible */
        replaying = anyClientAudible
            || this.getWhisperSessions().some(session => session.getSessionState() === WhisperSessionState.PLAYING);
    }

    if(this.currentlyReplayingVoice !== replaying) {
        this.currentlyReplayingVoice = replaying;
        this.events.fire_later("notify_voice_replay_state_change", { replaying: replaying });
    }
}
/*
 * Handle an incoming whisper packet. The first packet from a client lazily
 * creates a WebWhisperSession and kicks off its (async) initialization via
 * the registered whisperSessionInitializer; every packet is forwarded to the
 * session, which buffers while still initializing.
 */
protected handleWhisperPacket(packet: VoiceWhisperPacket) {
const clientId = packet.clientId;
let session = this.whisperSessions[clientId];
if(typeof session !== "object") {
logDebug(LogCategory.VOICE, tr("Received new whisper from %d (%s)"), packet.clientId, packet.clientNickname);
session = (this.whisperSessions[clientId] = new WebWhisperSession(packet));
session.events.on("notify_state_changed", this.whisperSessionStateChangedEventListener);
this.whisperSessionInitializer(session).then(result => {
session.initializeFromData(result).then(() => {
/* the session may have been dropped and replaced while initializing */
if(this.whisperSessions[clientId] !== session) {
/* seems to be an old session */
return;
}
this.events.fire("notify_whisper_initialized", { session });
}).catch(error => {
logError(LogCategory.VOICE, tr("Failed to internally initialize a voice whisper session: %o"), error);
session.setSessionState(WhisperSessionState.INITIALIZE_FAILED);
});
}).catch(error => {
logError(LogCategory.VOICE, tr("Failed to initialize whisper session: %o."), error);
session.initializeFailed();
});
/* drop sessions which stopped receiving data (timeout managed by the session itself) */
session.events.on("notify_timed_out", () => {
logTrace(LogCategory.VOICE, tr("Whisper session %d timed out. Dropping session."), session.getClientId());
this.dropWhisperSession(session);
});
this.events.fire("notify_whisper_created", { session: session });
}
session.enqueueWhisperPacket(packet);
}
/* List all currently active whisper sessions. */
getWhisperSessions(): WhisperSession[] {
return Object.values(this.whisperSessions);
}
/* Unregister a whisper session and destroy it. Throws for foreign session implementations. */
dropWhisperSession(session: WhisperSession) {
    if(session instanceof WebWhisperSession) {
        session.events.off("notify_state_changed", this.whisperSessionStateChangedEventListener);
        delete this.whisperSessions[session.getClientId()];
        session.destroy();
    } else {
        throw tr("Session isn't an instance of the web whisper system");
    }
}
/*
 * Install the whisper session initializer.
 * When none is supplied, a default initializer is installed which logs a
 * warning and blocks every incoming whisper.
 */
setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined) {
    const blockingFallback: WhisperSessionInitializer = async session => {
        logWarn(LogCategory.VOICE, tr("Missing whisper session initializer. Blocking whisper from %d (%s)"), session.getClientId(), session.getClientUniqueId());
        return {
            clientName: session.getClientName() || tr("Unknown client"),
            clientUniqueId: session.getClientUniqueId() || kUnknownWhisperClientUniqueId,
            blocked: true,
            volume: 1,
            sessionTimeout: 60 * 1000
        };
    };

    this.whisperSessionInitializer = initializer ? initializer : blockingFallback;
}
/* Returns the currently installed whisper session initializer. */
getWhisperSessionInitializer(): WhisperSessionInitializer | undefined {
return this.whisperSessionInitializer;
}
/*
 * Start whispering to the given target.
 * Any in-flight whisper setup is canceled first (by flagging its target as
 * canceled and awaiting its completion) before the new target is applied.
 */
async startWhisper(target: WhisperTarget): Promise<void> {
while(this.whisperTargetInitialize) {
this.whisperTarget.canceled = true;
await this.whisperTargetInitialize;
}
/* copy the target so we own the mutable "canceled" flag */
this.whisperTarget = Object.assign({ canceled: false }, target);
try {
await (this.whisperTargetInitialize = this.doStartWhisper(this.whisperTarget));
} finally {
this.whisperTargetInitialize = undefined;
}
}
/*
 * Register the whisper target at the server and start the whisper stream.
 * Only the "echo" (whisper-to-self) target is implemented; every other
 * target kind throws.
 */
private async doStartWhisper(target: CancelableWhisperTarget) {
if(target.target === "echo") {
await this.connection.send_command("setwhispertarget", {
type: 0x10, /* self */
target: 0,
id: 0
}, { flagset: ["new"] });
} else if(target.target === "channel-clients") {
throw "target not yet supported";
} else if(target.target === "groups") {
throw "target not yet supported";
} else {
throw "target not yet supported";
}
/* startWhisper() may have canceled us while the command was in flight */
if(target.canceled) {
return;
}
this.voiceBridge.startWhispering();
}
/* Returns the currently configured whisper target, if any. */
getWhisperTarget(): WhisperTarget | undefined {
return this.whisperTarget;
}
/* Cancel any pending whisper setup, clear the target server-side and stop the whisper stream. */
stopWhisper() {
    const target = this.whisperTarget;
    if(target) {
        target.canceled = true;
        this.whisperTargetInitialize = undefined;
        /* best effort: a failure to clear the target is only logged */
        this.connection.send_command("clearwhispertarget").catch(error => {
            logWarn(LogCategory.CLIENT, tr("Failed to clear the whisper target: %o"), error);
        });
    }

    this.voiceBridge?.stopWhispering();
}
/* Traffic statistics are not tracked by this implementation; always reports zero bytes. */
async getConnectionStats(): Promise<ConnectionStatistics> {
return {
bytesSend: 0,
bytesReceived: 0
};
}
}

View File

@ -1,293 +0,0 @@
import {
VoicePlayer,
VoicePlayerEvents,
VoicePlayerLatencySettings,
VoicePlayerState
} from "tc-shared/voice/VoicePlayer";
import {AudioClient} from "../audio-lib/AudioClient";
import {AudioResampler} from "./AudioResampler";
import {Registry} from "tc-shared/events";
import * as aplayer from "tc-backend/web/audio/player";
import {getAudioLibrary} from "../audio-lib";
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
import { tr } from "tc-shared/i18n/localize";
/* Default jitter-buffer window applied to every new voice player (values in milliseconds). */
const kDefaultLatencySettings = {
minBufferTime: 60,
maxBufferTime: 400
} as VoicePlayerLatencySettings;
/**
 * Voice playback implementation backed by the Web Audio API.
 *
 * Encoded packets are handed to an AudioClient for decoding, resampled to
 * 48kHz and scheduled onto the shared AudioContext. A small jitter buffer
 * (latencySettings.minBufferTime) is filled before playback starts; buffers
 * arriving more than maxBufferTime behind are dropped.
 */
export class WebVoicePlayer implements VoicePlayer {
public readonly events: Registry<VoicePlayerEvents>;

/* shared audio context and the per-player gain node used for volume / fade-in */
private speakerContext: AudioContext;
private gainNode: GainNode;

private playerState = VoicePlayerState.STOPPED;

/* absolute AudioContext time up to which buffers have already been scheduled */
private currentPlaybackTime: number = 0;

/* timeout flushing the buffer when (pre)buffering takes too long */
private bufferTimeout: number;

/* decoded-but-unscheduled buffers and their accumulated duration (seconds) */
private bufferQueueTime: number = 0;
private bufferQueue: AudioBuffer[] = [];

/* source nodes currently scheduled or playing */
private playingNodes: AudioBufferSourceNode[] = [];

private currentVolume: number = 1;
private latencySettings: VoicePlayerLatencySettings;

/* decoder backend; created lazily once the audio player is ready */
private audioInitializePromise: Promise<void>;
private audioClient: AudioClient;
private resampler: AudioResampler;

constructor() {
this.events = new Registry<VoicePlayerEvents>();
this.resampler = new AudioResampler(48000);
/* the audio graph can only be built once the global player is ready */
aplayer.on_ready(() => {
this.speakerContext = aplayer.context();
this.gainNode = aplayer.context().createGain();
this.gainNode.connect(this.speakerContext.destination);
this.gainNode.gain.value = this.currentVolume;
this.initializeAudio();
});
this.resetLatencySettings();
this.setPlayerState(VoicePlayerState.STOPPED);
}

/* Immediately abort playback, dropping all queued and playing buffers. */
abortReplay() {
this.stopAudio(true);
}

/* Drop all queued buffers and stop every currently playing source node. */
flushBuffer() {
this.bufferQueue = [];
this.bufferQueueTime = 0;
for(const entry of this.playingNodes) {
entry.stop(0);
}
this.playingNodes = [];
}

getState(): VoicePlayerState {
return this.playerState;
}

getVolume(): number {
return this.currentVolume;
}

/* Set the playback volume (applied via the gain node once it exists). */
setVolume(volume: number) {
if(this.currentVolume == volume) {
return;
}
this.currentVolume = volume;
if(this.gainNode) {
this.gainNode.gain.value = volume;
}
}

getLatencySettings(): Readonly<VoicePlayerLatencySettings> {
return this.latencySettings;
}

setLatencySettings(settings: VoicePlayerLatencySettings) {
this.latencySettings = settings
}

resetLatencySettings() {
this.latencySettings = kDefaultLatencySettings;
}

/*
 * Queue an encoded audio packet for decoding and playback.
 * NOTE(review): packets arriving before the audio client exists are silently
 * dropped, while the else-branch awaits the (by then already resolved)
 * initialize promise before enqueueing — presumably to serialize with a
 * concurrent destroy(); confirm the early-return is intentional.
 */
enqueueAudioPacket(packetId: number, codec: number, head: boolean, buffer: Uint8Array) {
if(!this.audioClient) {
return;
} else {
this.initializeAudio().then(() => {
if(!this.audioClient) {
/* we've already been destroyed */
return;
}
this.audioClient.enqueueBuffer(buffer, packetId, codec, head);
});
}
}

destroy() {
this.audioClient?.destroy();
this.audioClient = undefined;
this.events.destroy();
}

/* Lazily create the decoder client; the promise is cached so this runs at most once. */
private initializeAudio() : Promise<void> {
if(this.audioInitializePromise) {
return this.audioInitializePromise;
}
this.audioInitializePromise = (async () => {
this.audioClient = await getAudioLibrary().createClient();
/* decoded PCM is resampled to the target rate before being scheduled */
this.audioClient.callback_decoded = buffer => {
this.resampler.resample(buffer).then(buffer => {
this.playbackAudioBuffer(buffer);
});
}
this.audioClient.callback_ended = () => {
this.stopAudio(false);
};
})();
return this.audioInitializePromise;
}

/*
 * Feed one decoded buffer into the playback pipeline, handling the
 * prebuffering -> playing state machine and latency-based dropping.
 */
playbackAudioBuffer(buffer: AudioBuffer) {
if(!buffer) {
logWarn(LogCategory.VOICE, tr("[AudioController] Got empty or undefined buffer! Dropping it"));
return;
}
if(!this.speakerContext) {
logWarn(LogCategory.VOICE, tr("[AudioController] Failed to replay audio. Global audio context not initialized yet!"));
return;
}
if (buffer.sampleRate != this.speakerContext.sampleRate) {
logWarn(LogCategory.VOICE, tr("[AudioController] Source sample rate isn't equal to playback sample rate! (%o | %o)"), buffer.sampleRate, this.speakerContext.sampleRate);
}
if(this.playerState == VoicePlayerState.STOPPED || this.playerState == VoicePlayerState.STOPPING) {
logDebug(LogCategory.VOICE, tr("[Audio] Starting new playback"));
this.setPlayerState(VoicePlayerState.PREBUFFERING);
}
if(this.playerState === VoicePlayerState.PREBUFFERING || this.playerState === VoicePlayerState.BUFFERING) {
this.resetBufferTimeout(true);
this.bufferQueue.push(buffer);
this.bufferQueueTime += buffer.duration;
/* keep collecting until the minimum buffer window is filled */
if(this.bufferQueueTime <= this.latencySettings.minBufferTime / 1000) {
return;
}
/* finished buffering */
if(this.playerState == VoicePlayerState.PREBUFFERING) {
logDebug(LogCategory.VOICE, tr("[Audio] Prebuffering succeeded (Replaying now)"));
} else {
logDebug(LogCategory.VOICE, tr("[Audio] Buffering succeeded (Replaying now)"));
}
/* short fade-in to avoid an audible click when playback (re)starts */
this.gainNode.gain.value = 0;
this.gainNode.gain.linearRampToValueAtTime(this.currentVolume, this.speakerContext.currentTime + .1);
this.replayBufferQueue();
this.setPlayerState(VoicePlayerState.PLAYING);
} else if(this.playerState === VoicePlayerState.PLAYING) {
const latency = this.getCurrentPlaybackLatency();
/* drop buffers once we're too far behind real time */
if(latency > (this.latencySettings.maxBufferTime / 1000)) {
logWarn(LogCategory.VOICE, tr("Dropping replay buffer because of too high replay latency. (Current: %f, Max: %f)"),
latency.toFixed(3), (this.latencySettings.maxBufferTime / 1000).toFixed(3));
return;
}
this.enqueueBufferForPayback(buffer);
} else {
logError(LogCategory.AUDIO, tr("This block should be unreachable!"));
return;
}
}

/* Seconds of audio scheduled ahead of the context's current playback position. */
getCurrentPlaybackLatency() {
return Math.max(this.currentPlaybackTime - this.speakerContext.currentTime, 0);
}

/* Stop playback; either aborting immediately or draining the remaining buffers. */
stopAudio(abortPlayback: boolean) {
if(abortPlayback) {
this.setPlayerState(VoicePlayerState.STOPPED);
this.flushBuffer();
} else {
this.setPlayerState(VoicePlayerState.STOPPING);
/* replay all pending buffers */
this.replayBufferQueue();
/* test if there are any buffers which are currently played, if not the state will change to stopped */
this.testReplayState();
}
}

/* Schedule every queued buffer for playback and clear the queue. */
private replayBufferQueue() {
for(const buffer of this.bufferQueue)
this.enqueueBufferForPayback(buffer);
this.bufferQueue = [];
this.bufferQueueTime = 0;
}

/* Schedule one buffer directly after the previously scheduled audio. */
private enqueueBufferForPayback(buffer: AudioBuffer) {
/* advance the playback time index, we seem to be behind a bit */
if(this.currentPlaybackTime < this.speakerContext.currentTime)
this.currentPlaybackTime = this.speakerContext.currentTime;
const player = this.speakerContext.createBufferSource();
player.buffer = buffer;
player.onended = () => this.handleBufferPlaybackEnded(player);
this.playingNodes.push(player);
player.connect(this.gainNode);
player.start(this.currentPlaybackTime);
this.currentPlaybackTime += buffer.duration;
}

private handleBufferPlaybackEnded(node: AudioBufferSourceNode) {
/* NOTE: Array.prototype.remove is a project-provided extension — not standard JS */
this.playingNodes.remove(node);
this.testReplayState();
}

/* Once all buffers drained: finish stopping, or enter buffering on an underflow. */
private testReplayState() {
if(this.bufferQueue.length > 0 || this.playingNodes.length > 0) {
return;
}
if(this.playerState === VoicePlayerState.STOPPING) {
/* All buffers have been replayed successfully */
this.setPlayerState(VoicePlayerState.STOPPED);
} else if(this.playerState === VoicePlayerState.PLAYING) {
logDebug(LogCategory.VOICE, tr("Voice player has a buffer underflow. Changing state to buffering."));
this.setPlayerState(VoicePlayerState.BUFFERING);
this.resetBufferTimeout(true);
}
}

/***
 * Schedule a new buffer timeout.
 * The buffer timeout is used to playback even small amounts of audio, which are less than the min. buffer size.
 * @param scheduleNewTimeout
 * @private
 */
private resetBufferTimeout(scheduleNewTimeout: boolean) {
clearTimeout(this.bufferTimeout);
if(scheduleNewTimeout) {
this.bufferTimeout = setTimeout(() => {
if(this.playerState == VoicePlayerState.PREBUFFERING || this.playerState == VoicePlayerState.BUFFERING) {
logWarn(LogCategory.VOICE, tr("[Audio] Buffering exceeded timeout. Flushing and stopping replay."));
this.stopAudio(false);
}
this.bufferTimeout = undefined;
}, 1000);
}
}

/* Update the player state and notify listeners, but only on an actual change. */
private setPlayerState(state: VoicePlayerState) {
if(this.playerState === state) {
return;
}
const oldState = this.playerState;
this.playerState = state;
this.events.fire("notify_state_changed", {
oldState: oldState,
newState: state
});
}
}

View File

@ -1,159 +0,0 @@
import {WhisperSession, WhisperSessionEvents, WhisperSessionState} from "tc-shared/voice/VoiceWhisper";
import {Registry} from "tc-shared/events";
import {VoicePlayer, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
import {WhisperSessionInitializeData} from "tc-shared/connection/VoiceConnection";
import {VoiceWhisperPacket} from "./bridge/VoiceBridge";
import {WebVoicePlayer} from "./VoicePlayer";
const kMaxUninitializedBuffers = 10;
/**
 * A whisper session for a single remote client.
 *
 * The session starts in INITIALIZING, buffering up to kMaxUninitializedBuffers
 * packets until the session initializer supplies its configuration. It then
 * owns a WebVoicePlayer for actual playback and times itself out when no
 * packets arrive within the configured session timeout.
 */
export class WebWhisperSession implements WhisperSession {
readonly events: Registry<WhisperSessionEvents>;
private readonly clientId: number;
private clientName: string;
private clientUniqueId: string;

private sessionState: WhisperSessionState;
private sessionBlocked: boolean;

/* timeout duration (ms) and the id of the currently scheduled timeout */
private sessionTimeout: number;
private sessionTimeoutId: number;

private lastWhisperTimestamp: number;

/* packets received while still initializing (bounded by kMaxUninitializedBuffers) */
private packetBuffer: VoiceWhisperPacket[] = [];
private voicePlayer: WebVoicePlayer;

constructor(initialPacket: VoiceWhisperPacket) {
this.events = new Registry<WhisperSessionEvents>();
this.clientId = initialPacket.clientId;
this.clientName = initialPacket.clientNickname;
this.clientUniqueId = initialPacket.clientUniqueId;
this.sessionState = WhisperSessionState.INITIALIZING;
}

getClientId(): number {
return this.clientId;
}

getClientName(): string | undefined {
return this.clientName;
}

getClientUniqueId(): string | undefined {
return this.clientUniqueId;
}

/* NOTE(review): this field is never assigned within this class — always undefined here. */
getLastWhisperTimestamp(): number {
return this.lastWhisperTimestamp;
}

getSessionState(): WhisperSessionState {
return this.sessionState;
}

getSessionTimeout(): number {
return this.sessionTimeout;
}

getVoicePlayer(): VoicePlayer | undefined {
return this.voicePlayer;
}

/* Update the timeout duration and reschedule the pending timeout accordingly. */
setSessionTimeout(timeout: number) {
this.sessionTimeout = timeout;
this.resetSessionTimeout();
}

isBlocked(): boolean {
return this.sessionBlocked;
}

setBlocked(blocked: boolean) {
this.sessionBlocked = blocked;
}

/*
 * Apply the configuration produced by the session initializer and create the
 * playback player. The player's state drives this session's PLAYING/PAUSED state.
 * NOTE(review): packets collected in packetBuffer are not replayed here — they
 * appear to be discarded once initialization completes; confirm this is intended.
 */
async initializeFromData(data: WhisperSessionInitializeData) {
this.clientName = data.clientName;
this.clientUniqueId = data.clientUniqueId;
this.sessionBlocked = data.blocked;
this.sessionTimeout = data.sessionTimeout;
this.voicePlayer = new WebVoicePlayer();
this.voicePlayer.setVolume(data.volume);
this.voicePlayer.events.on("notify_state_changed", event => {
if(event.newState === VoicePlayerState.BUFFERING) {
return;
}
this.resetSessionTimeout();
if(event.newState === VoicePlayerState.PLAYING || event.newState === VoicePlayerState.STOPPING) {
this.setSessionState(WhisperSessionState.PLAYING);
} else {
this.setSessionState(WhisperSessionState.PAUSED);
}
});
this.setSessionState(WhisperSessionState.PAUSED);
}

initializeFailed() {
this.setSessionState(WhisperSessionState.INITIALIZE_FAILED);
/* if we're receiving nothing for more than 5 seconds we can try it again */
this.sessionTimeout = 5000;
this.resetSessionTimeout();
}

destroy() {
clearTimeout(this.sessionTimeoutId);
this.events.destroy();
this.voicePlayer?.destroy();
this.voicePlayer = undefined;
}

/* Feed a packet into the session: buffer while initializing, otherwise play it. */
enqueueWhisperPacket(packet: VoiceWhisperPacket) {
this.resetSessionTimeout();
if(this.sessionBlocked) {
/* do nothing, the session has been blocked */
return;
}
if(this.sessionState === WhisperSessionState.INITIALIZE_FAILED) {
return;
} else if(this.sessionState === WhisperSessionState.INITIALIZING) {
this.packetBuffer.push(packet);
/* NOTE: Array.prototype.pop_front is a project-provided extension — not standard JS */
while(this.packetBuffer.length > kMaxUninitializedBuffers) {
this.packetBuffer.pop_front();
}
} else {
this.voicePlayer?.enqueueAudioPacket(packet.voiceId, packet.codec, packet.head, packet.payload);
}
}

/* Update the session state and notify listeners, but only on an actual change. */
setSessionState(state: WhisperSessionState) {
if(this.sessionState === state) {
return;
}
const oldState = this.sessionState;
this.sessionState = state;
this.events.fire("notify_state_changed", { oldState: oldState, newState: state });
}

/* (Re)schedule the inactivity timeout, unless playing or still initializing. */
private resetSessionTimeout() {
clearTimeout(this.sessionTimeoutId);
if(this.sessionState === WhisperSessionState.PLAYING) {
/* no need to reschedule a session timeout if we're currently playing */
return;
} else if(this.sessionState === WhisperSessionState.INITIALIZING) {
/* we're still initializing; a session timeout hasn't been set */
return;
}
this.sessionTimeoutId = setTimeout(() => {
this.events.fire("notify_timed_out");
}, Math.max(this.sessionTimeout, 1000));
}
}

View File

@ -1,163 +0,0 @@
import {AbstractInput, InputConsumerType, NodeInputConsumer} from "tc-shared/voice/RecorderBase";
import * as aplayer from "tc-backend/web/audio/player";
import * as log from "tc-shared/log";
import {LogCategory, logDebug, logWarn} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import {WebRTCVoiceBridge} from "./WebRTCVoiceBridge";
import {VoiceWhisperPacket} from "./VoiceBridge";
import {CryptoHelper} from "tc-shared/profiles/identities/TeamSpeakIdentity";
import arraybuffer_to_string = CryptoHelper.arraybufferToString;
/**
 * WebRTC voice bridge using the browser's native audio pipeline.
 *
 * The recorder input is routed into one of two MediaStream destination nodes
 * (regular voice vs. whisper) whose tracks are attached to the RTC peer
 * connection; incoming voice/whisper packets arrive over data channels and
 * are decoded from a custom binary layout.
 */
export class NativeWebRTCVoiceBridge extends WebRTCVoiceBridge {
/* Feature check: requires AudioContext with createMediaStreamDestination. */
static isSupported(): boolean {
const context = window.webkitAudioContext || window.AudioContext;
if (!context)
return false;
if (!context.prototype.createMediaStreamDestination)
return false; /* Required, but not available within edge */
return true;
}

/* stream sinks for outgoing audio; which one the input feeds depends on the whisper state */
private readonly localVoiceDestinationNode: MediaStreamAudioDestinationNode;
private readonly localWhisperDestinationNode: MediaStreamAudioDestinationNode;
private currentInputNode: AudioNode;
private currentInput: AbstractInput;
private whispering: boolean;

constructor() {
super();
this.whispering = false;
/* assumes the global audio player is already initialized — TODO confirm */
this.localVoiceDestinationNode = aplayer.context().createMediaStreamDestination();
this.localWhisperDestinationNode = aplayer.context().createMediaStreamDestination();
}

/* This bridge only sends audio; receiving happens via the data channels. */
protected generateRtpOfferOptions(): RTCOfferOptions {
let options: RTCOfferOptions = {};
options.offerToReceiveAudio = false;
options.offerToReceiveVideo = false;
options.voiceActivityDetection = true;
return options;
}

protected initializeRtpConnection(connection: RTCPeerConnection) {
connection.addTrack(this.localVoiceDestinationNode.stream.getAudioTracks()[0]);
connection.addTrack(this.localWhisperDestinationNode.stream.getAudioTracks()[0]);
}

/*
 * Decode an incoming voice packet.
 * Layout (big endian): [0..1] packet id, [2..3] client id, [4] codec, [5..] payload.
 */
protected handleVoiceDataChannelMessage(message: MessageEvent) {
super.handleVoiceDataChannelMessage(message);
let bin = new Uint8Array(message.data);
let clientId = bin[2] << 8 | bin[3];
let packetId = bin[0] << 8 | bin[1];
let codec = bin[4];
this.callback_incoming_voice({
clientId: clientId,
voiceId: packetId,
codec: codec,
head: false,
payload: new Uint8Array(message.data, 5)
});
}

/*
 * Decode an incoming whisper packet.
 * Layout: [0] flags (bit 0 = head). Head packets additionally carry the
 * sender's 28 byte unique id and a length-prefixed nickname. Then follow
 * voice id (2 bytes), client id (2 bytes), codec (1 byte) and the payload.
 */
protected handleWhisperDataChannelMessage(message: MessageEvent) {
super.handleWhisperDataChannelMessage(message);
let payload = new Uint8Array(message.data);
let payload_offset = 0;
const flags = payload[payload_offset++];
let packet = {
head: (flags & 0x01) === 1
} as VoiceWhisperPacket;
if(packet.head) {
packet.clientUniqueId = arraybuffer_to_string(payload.subarray(payload_offset, payload_offset + 28));
payload_offset += 28;
packet.clientNickname = arraybuffer_to_string(payload.subarray(payload_offset + 1, payload_offset + 1 + payload[payload_offset]));
payload_offset += payload[payload_offset] + 1;
}
packet.voiceId = payload[payload_offset] << 8 | payload[payload_offset + 1];
payload_offset += 2;
packet.clientId = payload[payload_offset] << 8 | payload[payload_offset + 1];
payload_offset += 2;
packet.codec = payload[payload_offset++];
packet.payload = new Uint8Array(message.data, payload_offset);
this.callback_incoming_whisper(packet);
}

getInput(): AbstractInput | undefined {
return this.currentInput;
}

/*
 * Swap the recorder input. The previous input's consumer is detached first;
 * the new input's audio node is connected to the voice or whisper sink
 * depending on the current whisper state.
 */
async setInput(input: AbstractInput | undefined) {
if (this.currentInput === input)
return;
if (this.currentInput) {
await this.currentInput.setConsumer(undefined);
this.currentInput = undefined;
}
this.currentInput = input;
if (this.currentInput) {
try {
await this.currentInput.setConsumer({
type: InputConsumerType.NODE,
callbackNode: node => {
this.currentInputNode = node;
node.connect(this.whispering ? this.localWhisperDestinationNode : this.localVoiceDestinationNode);
},
callbackDisconnect: node => {
this.currentInputNode = undefined;
node.disconnect(this.whispering ? this.localWhisperDestinationNode : this.localVoiceDestinationNode);
}
} as NodeInputConsumer);
logDebug(LogCategory.VOICE, tr("Successfully set/updated to the new input for the recorder"));
} catch (e) {
logWarn(LogCategory.VOICE, tr("Failed to set consumer to the new recorder input: %o"), e);
}
}
}

sendStopSignal(codec: number) {
/*
* No stop signal needs to be send.
* The server will automatically send one, when the stream contains silence.
*/
}

/* Reroute the input node from the voice sink to the whisper sink. */
startWhispering() {
if(this.whispering) {
return;
}
this.whispering = true;
if(this.currentInputNode) {
this.currentInputNode.disconnect(this.localVoiceDestinationNode);
this.currentInputNode.connect(this.localWhisperDestinationNode);
}
}

/* Reroute the input node from the whisper sink back to the voice sink. */
stopWhispering() {
if(!this.whispering) {
return;
}
this.whispering = false;
if(this.currentInputNode) {
this.currentInputNode.connect(this.localVoiceDestinationNode);
this.currentInputNode.disconnect(this.localWhisperDestinationNode);
}
}
}

View File

@ -1,56 +0,0 @@
import {AbstractInput} from "tc-shared/voice/RecorderBase";
/* Outcome of a voice bridge connect attempt. */
export type VoiceBridgeConnectResult = {
type: "success"
} | {
type: "canceled"
} | {
type: "failed",
message: string,
allowReconnect: boolean
};

/* A single encoded voice packet as exchanged with the server. */
export interface VoicePacket {
voiceId: number;
clientId: number;
codec: number;
head: boolean;
payload: Uint8Array;
}

/* Whisper packets optionally carry the sender's identity (parsed from head packets). */
export interface VoiceWhisperPacket extends VoicePacket {
clientUniqueId?: string;
clientNickname?: string;
}
/**
 * Abstract transport for sending and receiving voice data.
 *
 * Concrete implementations wire up the transport (e.g. WebRTC) and invoke the
 * callback_* hooks for incoming packets, while control/signaling data is
 * exchanged via callback_send_control_data / handleControlData.
 */
export abstract class VoiceBridge {
    protected muted: boolean;

    /* hooks installed by the owner of the bridge */
    callback_send_control_data: (request: string, payload: any) => void;
    callback_incoming_voice: (packet: VoicePacket) => void;
    callback_incoming_whisper: (packet: VoiceWhisperPacket) => void;
    callbackDisconnect: () => void;

    setMuted(flag: boolean) {
        this.muted = flag;
    }

    isMuted(): boolean {
        return this.muted;
    }

    /* Control data received from the server; default implementation ignores it. */
    handleControlData(request: string, payload: any): void { }

    /* Establish the transport; never rejects, failures are reported via the result. */
    abstract connect(): Promise<VoiceBridgeConnectResult>;
    abstract disconnect(): void;

    abstract getInput(): AbstractInput | undefined;
    abstract setInput(input: AbstractInput | undefined): Promise<void>;

    /* Signal the end of the current voice stream for the given codec. */
    abstract sendStopSignal(codec: number): void;

    abstract startWhispering(): void;
    abstract stopWhispering(): void;
}

View File

@ -1,411 +0,0 @@
import * as aplayer from "tc-backend/web/audio/player";
import * as log from "tc-shared/log";
import {LogCategory, logDebug, logError, logInfo, logTrace, logWarn} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import {VoiceBridge, VoiceBridgeConnectResult} from "./VoiceBridge";
export abstract class WebRTCVoiceBridge extends VoiceBridge {
private readonly muteAudioNode: GainNode;
private connectionState: "unconnected" | "connecting" | "connected";
private rtcConnection: RTCPeerConnection;
private voiceDataChannel: RTCDataChannel;
private whisperDataChannel: RTCDataChannel;
private cachedIceCandidates: RTCIceCandidateInit[];
private localIceCandidateCount: number;
private callbackRtcAnswer: (answer: any) => void;
private callbackConnectCanceled: (() => void)[] = [];
private callbackRtcConnected: () => void;
private callbackRtcConnectFailed: (error: any) => void;
private callbackVoiceDataChannelOpened: (() => void)[] = [];
private allowReconnect: boolean;
protected constructor() {
super();
this.connectionState = "unconnected";
/* assumes the global audio player is already initialized — TODO confirm */
const audioContext = aplayer.context();
this.muteAudioNode = audioContext.createGain();
}
/*
 * Establish the WebRTC connection. The returned promise always resolves with
 * a VoiceBridgeConnectResult; a concurrent abort resolves it as "canceled"
 * via the registered cancel handler, and the shared cancelState flag makes
 * the in-flight doConnect() result a no-op afterwards.
 */
connect(): Promise<VoiceBridgeConnectResult> {
this.disconnect(); /* just to ensure */
this.connectionState = "connecting";
this.allowReconnect = true;
return new Promise<VoiceBridgeConnectResult>(resolve => {
let cancelState = { value: false };
const cancelHandler = () => {
cancelState.value = true;
resolve({ type: "canceled" });
}
this.callbackConnectCanceled.push(cancelHandler);
this.doConnect(cancelState).then(() => {
if(cancelState.value) return;
/* NOTE: Array.prototype.remove is a project-provided extension — not standard JS */
this.callbackConnectCanceled.remove(cancelHandler);
this.connectionState = "connected";
resolve({ type: "success" });
}).catch(error => {
if(cancelState.value) return;
this.callbackConnectCanceled.remove(cancelHandler);
this.connectionState = "unconnected";
this.cleanupRtcResources();
resolve({ type: "failed", message: error, allowReconnect: this.allowReconnect === true });
})
});
}
/* Disconnect the bridge: abort a pending connect attempt or tear down an established session. */
disconnect() {
    if(this.connectionState === "connecting") {
        this.abortConnectionAttempt();
    } else if(this.connectionState === "connected") {
        this.doDisconnect();
    }
    /* "unconnected": nothing to do */
}
/**
 * Execute the WebRTC connect handshake with the server.
 *
 * Steps: create the RTCPeerConnection and both data channels, generate and
 * apply a local offer, ship it via the control channel ("create"), await the
 * server's answer, apply it, replay cached remote ICE candidates and finally
 * wait for the connection and the main data channel to open.
 *
 * @param canceled shared cancellation flag; checked after every await so an
 *                 aborted attempt stops silently without resolving/rejecting
 * @throws a (translated) human readable error string on failure
 */
private async doConnect(canceled: { value: boolean }) {
    {
        /* create the peer connection (a public STUN server is used for candidate gathering) */
        let rtcConfig: RTCConfiguration = {};
        rtcConfig.iceServers = [];
        rtcConfig.iceServers.push({ urls: ['stun:stun.l.google.com:19302'] });
        this.rtcConnection = new RTCPeerConnection(rtcConfig);

        this.rtcConnection.onicegatheringstatechange = this.handleIceGatheringStateChange.bind(this);
        this.rtcConnection.oniceconnectionstatechange = this.handleIceConnectionStateChange.bind(this);
        this.rtcConnection.onicecandidate = this.handleIceCandidate.bind(this);
        this.rtcConnection.onicecandidateerror = this.handleIceCandidateError.bind(this);
        this.rtcConnection.onconnectionstatechange = this.handleRtcConnectionStateChange.bind(this);

        this.initializeRtpConnection(this.rtcConnection);
    }

    {
        /* main voice data channel: unordered and without retransmits */
        const dataChannelConfig = { ordered: false, maxRetransmits: 0 };

        this.voiceDataChannel = this.rtcConnection.createDataChannel('main', dataChannelConfig);
        this.voiceDataChannel.onmessage = this.handleVoiceDataChannelMessage.bind(this);
        this.voiceDataChannel.onopen = this.handleVoiceDataChannelOpen.bind(this);
        this.voiceDataChannel.binaryType = "arraybuffer";
    }

    {
        /* whisper data channel, configured the same way as the main channel */
        const dataChannelConfig = { ordered: false, maxRetransmits: 0 };

        this.whisperDataChannel = this.rtcConnection.createDataChannel('voice-whisper', dataChannelConfig);
        this.whisperDataChannel.onmessage = this.handleWhisperDataChannelMessage.bind(this);
        this.whisperDataChannel.onopen = this.handleWhisperDataChannelOpen.bind(this);
        this.whisperDataChannel.binaryType = "arraybuffer";
    }

    /* setting a dummy connect failed handler in case the rtc peer connection changes it's state to failed */
    const connectFailedPromise = new Promise((resolve, reject) => this.callbackRtcConnectFailed = reject);
    /* races any awaited promise against the "connection failed" rejection above */
    const wrapWithError = <T>(promise: Promise<T>) : Promise<T> => Promise.race([ promise, connectFailedPromise ]) as any;

    let offer: RTCSessionDescriptionInit;
    try {
        offer = await wrapWithError(this.rtcConnection.createOffer(this.generateRtpOfferOptions()));
        if(canceled.value) return;
    } catch (error) {
        logError(LogCategory.VOICE, tr("Failed to generate RTC offer: %o"), error);
        throw tr("failed to generate local offer");
    }

    /* reset the counter before setLocalDescription starts ICE gathering (see handleIceCandidate) */
    this.localIceCandidateCount = 0;
    try {
        await wrapWithError(this.rtcConnection.setLocalDescription(offer));
        if(canceled.value) return;
    } catch (error) {
        logError(LogCategory.VOICE, tr("Failed to apply local description: %o"), error);
        throw tr("failed to apply local description");
    }

    /* cache all ICE candidates until we've received our answer */
    this.cachedIceCandidates = [];

    /* exchange the offer and answer */
    let answer;
    {
        this.callback_send_control_data("create", {
            msg: {
                type: offer.type,
                sdp: offer.sdp
            }
        });

        /* wait up to five seconds for the server to deliver the answer via handleControlData("answer", ...) */
        answer = await wrapWithError(new Promise((resolve, reject) => {
            const timeout = setTimeout(() => {
                if(canceled.value) {
                    resolve();
                    return;
                }

                this.callbackRtcAnswer = undefined;
                reject(tr("failed to received a WebRTC answer (timeout)"));
            }, 5000);

            this.callbackRtcAnswer = answer => {
                this.callbackRtcAnswer = undefined;
                clearTimeout(timeout);

                resolve(answer);
            };
        }));
        if(canceled.value) return;
    }

    if(!('msg' in answer)) {
        throw tr("Missing msg in servers answer");
    }

    try {
        await wrapWithError(this.rtcConnection.setRemoteDescription(new RTCSessionDescription(answer.msg)));
        if(canceled.value) return;
    } catch (error) {
        /* strip the browser's boilerplate prefix so only the human readable parse error remains */
        const kParseErrorPrefix = "Failed to execute 'setRemoteDescription' on 'RTCPeerConnection': ";
        if(error instanceof DOMException && error.message.startsWith(kParseErrorPrefix))
            throw error.message.substring(kParseErrorPrefix.length);

        logError(LogCategory.VOICE, tr("Failed to apply remotes description: %o"), error);
        throw tr("failed to apply remotes description");
    }

    /* replay the remote ICE candidates which arrived before the answer had been applied */
    while(this.cachedIceCandidates.length > 0) {
        this.registerRemoteIceCandidate(this.cachedIceCandidates.pop_front());
    }

    /* ATTENTION: Do not use wrapWithError from now on (this.callbackRtcConnectFailed has been changed) */
    await new Promise((resolve, reject) => {
        if(this.rtcConnection.connectionState === "connected") {
            resolve();
            return;
        }

        /* allow the ICE/DTLS connection up to twenty seconds to establish */
        const timeout = setTimeout(() => {
            reject(tr("failed to establish a connection"));
        }, 20 * 1000);

        this.callbackRtcConnected = () => {
            clearTimeout(timeout);
            resolve();
        };

        this.callbackRtcConnectFailed = error => {
            clearTimeout(timeout);
            reject(error);
        };
    });
    if(canceled.value) return;

    logDebug(LogCategory.WEBRTC, tr("Successfully connected to server. Awaiting main data channel to open."));
    try {
        await this.awaitMainChannelOpened(10 * 1000);
    } catch {
        throw tr("failed to open the main data channel");
    }

    logInfo(LogCategory.WEBRTC, tr("Successfully initialized session with server."));
}
/* Tear down an established session and inform the disconnect listener, if any. */
private doDisconnect() {
    this.cleanupRtcResources();
    this.connectionState = "unconnected";

    const disconnectListener = this.callbackDisconnect;
    if(disconnectListener) {
        disconnectListener();
    }
}
/*
 * Cancel a connect attempt which is still in flight.
 * All registered cancel callbacks fire first (resolving the pending connect
 * promise with a "canceled" result), then the RTC resources get released.
 */
private abortConnectionAttempt() {
    while(this.callbackConnectCanceled.length > 0) {
        const cancelCallback = this.callbackConnectCanceled.pop();
        cancelCallback();
    }

    this.cleanupRtcResources();
    this.connectionState = "unconnected";
}
/**
 * Release all WebRTC related resources.
 * Event handlers are unregistered before closing so tearing the objects down
 * does not fire our own state-change callbacks.
 */
private cleanupRtcResources() {
    if(this.voiceDataChannel) {
        this.voiceDataChannel.onclose = undefined;
        this.voiceDataChannel.close();
        this.voiceDataChannel = undefined;
    }

    /* the whisper channel is created in doConnect as well; previously it was never cleaned up */
    if(this.whisperDataChannel) {
        this.whisperDataChannel.onclose = undefined;
        this.whisperDataChannel.close();
        this.whisperDataChannel = undefined;
    }

    if(this.rtcConnection) {
        this.rtcConnection.onicegatheringstatechange = undefined;
        this.rtcConnection.oniceconnectionstatechange = undefined;
        this.rtcConnection.onicecandidate = undefined;
        this.rtcConnection.onicecandidateerror = undefined;
        this.rtcConnection.onconnectionstatechange = undefined;

        this.rtcConnection.close();
        this.rtcConnection = undefined;
    }

    this.cachedIceCandidates = undefined;
}
/**
 * Resolve as soon as the main voice data channel is open.
 * @param timeout maximal time to wait in milliseconds
 * @throws when no main channel exists or the timeout elapses
 */
protected async awaitMainChannelOpened(timeout: number) {
    const mainChannel = this.voiceDataChannel;
    if(mainChannel === undefined) {
        throw tr("missing main data channel");
    }

    if(mainChannel.readyState === "open") {
        return;
    }

    await new Promise((resolve, reject) => {
        const timeoutId = setTimeout(reject, timeout);
        this.callbackVoiceDataChannelOpened.push(() => {
            clearTimeout(timeoutId);
            resolve();
        });
    });
}
/**
 * Register a remote ICE candidate on the local peer connection.
 * An empty candidate string marks the end of the remote candidates for that
 * media line; non-TCP candidates get dropped.
 */
private registerRemoteIceCandidate(candidate: RTCIceCandidateInit) {
    const connection = this.rtcConnection;
    if(!connection) {
        logDebug(LogCategory.WEBRTC, tr("Tried to register a remote ICE candidate without a RTC connection. Dropping candidate."));
        return;
    }

    if(candidate.candidate !== "") {
        const parsedCandidate = new RTCIceCandidate(candidate);
        if(parsedCandidate.protocol !== "tcp") {
            /* UDP does not work currently */
            return;
        }

        logTrace(LogCategory.WEBRTC, tr("Adding remote ICE candidate %s for media line %d: %s"), parsedCandidate.foundation, candidate.sdpMLineIndex, candidate.candidate);
        connection.addIceCandidate(parsedCandidate).catch(error => {
            logWarn(LogCategory.WEBRTC, tr("Failed to add remote ICE candidate %s: %o"), parsedCandidate.foundation, error);
        });
    } else {
        logDebug(LogCategory.WEBRTC, tr("Remote send candidate finish for channel %d."), candidate.sdpMLineIndex);
        connection.addIceCandidate(candidate).catch(error => {
            logWarn(LogCategory.WEBRTC, tr("Failed to add remote ICE end candidate to local rtc connection: %o"), error);
        });
    }
}
/* Forward RTCPeerConnection state changes to the matching connect/disconnect callbacks. */
private handleRtcConnectionStateChange() {
    const rtcState = this.rtcConnection.connectionState;
    logDebug(LogCategory.WEBRTC, tr("Connection state changed to %s (Local connection state: %s)"), rtcState, this.connectionState);

    if(rtcState === "connected") {
        if(this.callbackRtcConnected) {
            this.callbackRtcConnected();
        }
    } else if(rtcState === "failed") {
        /* while connecting the failure is routed to the connect logic, otherwise it counts as a disconnect */
        if(this.callbackRtcConnectFailed) {
            this.callbackRtcConnectFailed(tr("connect attempt failed"));
        } else if(this.callbackDisconnect) {
            this.callbackDisconnect();
        }
    } else if(rtcState === "disconnected" || rtcState === "closed") {
        if(this.callbackDisconnect) {
            this.callbackDisconnect();
        }
    }
}
/* Trace-log ICE gathering state transitions (diagnostics only). */
private handleIceGatheringStateChange() {
    const gatheringState = this.rtcConnection.iceGatheringState;
    logTrace(LogCategory.WEBRTC, tr("ICE gathering state changed to %s"), gatheringState);
}
/* Trace-log ICE connection state transitions (diagnostics only). */
private handleIceConnectionStateChange() {
    const iceState = this.rtcConnection.iceConnectionState;
    logTrace(LogCategory.WEBRTC, tr("ICE connection state changed to %s"), iceState);
}
/**
 * Handle a locally gathered ICE candidate.
 * TCP candidates are forwarded to the server; a null candidate signals the
 * end of gathering and triggers either "ice_finish" or a fatal failure when
 * not a single candidate was found.
 */
private handleIceCandidate(event: RTCPeerConnectionIceEvent) {
    const candidate = event.candidate;

    /* only TCP candidates are usable right now */
    if(candidate && candidate.protocol !== "tcp") {
        return;
    }

    if(candidate) {
        this.localIceCandidateCount++;
        logDebug(LogCategory.WEBRTC, tr("Gathered local ice candidate for stream %d: %s"), candidate.sdpMLineIndex, candidate.candidate);
        this.callback_send_control_data("ice", { msg: candidate.toJSON() });
        return;
    }

    /* gathering has finished (null candidate) */
    if(this.localIceCandidateCount === 0) {
        logError(LogCategory.WEBRTC, tr("Failed to gather any local ice candidates... This is a fatal error."));
        /* we don't allow a reconnect here since it's most the times not fixable by just trying again */
        this.allowReconnect = false;
        if(this.callbackRtcConnectFailed) {
            this.callbackRtcConnectFailed(tr("failed to gather any local ICE candidates"));
        }
    } else {
        logDebug(LogCategory.WEBRTC, tr("Local ICE candidate gathering finish."));
        this.callback_send_control_data("ice_finish", {});
    }
}
/* Log ICE candidate errors; only errors during active gathering are warnings. */
private handleIceCandidateError(event: RTCPeerConnectionIceErrorEvent) {
    if(this.rtcConnection.iceGatheringState !== "gathering") {
        logTrace(LogCategory.WEBRTC, tr("Ice candidate %s (%s) errored: %d/%s"),
            event.url, event.hostCandidate, event.errorCode, event.errorText);
        return;
    }

    logWarn(LogCategory.WEBRTC, tr("Received error while gathering the ice candidates: %d/%s for %s (url: %s)"),
        event.errorCode, event.errorText, event.hostCandidate, event.url);
}
/* Fire and clear all waiters registered via awaitMainChannelOpened. */
protected handleVoiceDataChannelOpen() {
    logDebug(LogCategory.WEBRTC, tr("Voice data channel is open now"));

    while(this.callbackVoiceDataChannelOpened.length > 0) {
        const openCallback = this.callbackVoiceDataChannelOpened.pop();
        openCallback();
    }
}
protected handleVoiceDataChannelMessage(message: MessageEvent) { }
/* Only logged here; subclasses may override for their own bookkeeping. */
protected handleWhisperDataChannelOpen() {
    logDebug(LogCategory.WEBRTC, tr("Whisper data channel is open now"));
}
protected handleWhisperDataChannelMessage(message: MessageEvent) { }
/**
 * Dispatch a control message received from the server.
 *
 * Handled requests:
 *  - "answer":              the remote SDP answer for our pending offer
 *  - "ice" / "ice_finish":  remote ICE candidates (cached while the answer
 *                           hasn't been applied yet)
 *  - "status":              connection status updates; a "failed" state
 *                           aborts a pending connect attempt
 *
 * @param request the request/command name
 * @param payload the request payload object
 */
handleControlData(request: string, payload: any) {
    super.handleControlData(request, payload);

    if(request === "answer") {
        if(typeof this.callbackRtcAnswer === "function") {
            this.callbackRtcAnswer(payload);
        } else {
            logWarn(LogCategory.WEBRTC, tr("Received answer, but we're not expecting one. Dropping it."));
        }
        return;
    } else if(request === "ice" || request === "ice_finish") {
        if(this.cachedIceCandidates) {
            /* the answer hasn't been applied yet; queue the candidate for later */
            this.cachedIceCandidates.push(payload["msg"]);
        } else {
            this.registerRemoteIceCandidate(payload["msg"]);
        }
    } else if(request === "status") {
        /* Fix: these fields live on the payload object.
           The old code indexed the request *name string* ("status"), so the
           failed branch could never trigger. */
        if(payload["state"] === "failed") {
            if(this.callbackRtcConnectFailed) {
                this.allowReconnect = payload["allow_reconnect"];
                this.callbackRtcConnectFailed(payload["reason"]);
            }
            return;
        }
    }
}
/**
 * The "main" RTC data channel created during connect.
 * Note: may be undefined while not connected (cleanupRtcResources clears it).
 */
public getMainDataChannel() : RTCDataChannel {
    return this.voiceDataChannel;
}
protected abstract initializeRtpConnection(connection: RTCPeerConnection);
protected abstract generateRtpOfferOptions() : RTCOfferOptions;
}

View File

@ -1,2 +0,0 @@
target
pkg

890
web/audio-lib/Cargo.lock generated
View File

@ -1,890 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "arc-swap"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d25d88fd6b8041580a654f9d0c581a047baee2b3efee13275f2fc392fc75034"
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "bumpalo"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
[[package]]
name = "bytes"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38"
[[package]]
name = "cc"
version = "1.0.59"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66120af515773fb005778dc07c261bd201ec8ce50bd6e7144c927753fe013381"
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "chrono"
version = "0.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "942f72db697d8767c22d46a598e01f2d3b475501ea43d0db4f16d90259182d0b"
dependencies = [
"num-integer",
"num-traits",
"time",
]
[[package]]
name = "cloudabi"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4344512281c643ae7638bbabc3af17a11307803ec8f0fcad9fae512a8bf36467"
dependencies = [
"bitflags",
]
[[package]]
name = "cmake"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e56268c17a6248366d66d4a47a3381369d068cce8409bb1716ed77ea32163bb"
dependencies = [
"cc",
]
[[package]]
name = "colored"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4ffc801dacf156c5854b9df4f425a626539c3a6ef7893cc0c5084a23f0b6c59"
dependencies = [
"atty",
"lazy_static",
"winapi 0.3.9",
]
[[package]]
name = "console_error_panic_hook"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8d976903543e0c48546a91908f21588a680a8c8f984df9a5d69feccb2b2a211"
dependencies = [
"cfg-if",
"wasm-bindgen",
]
[[package]]
name = "console_log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501a375961cef1a0d44767200e66e4a559283097e91d0730b1d75dfb2f8a1494"
dependencies = [
"log",
"web-sys",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
dependencies = [
"bitflags",
"fuchsia-zircon-sys",
]
[[package]]
name = "fuchsia-zircon-sys"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "futures"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e05b85ec287aac0dc34db7d4a569323df697f9c55b99b15d6b4ef8cde49f613"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f366ad74c28cca6ba456d95e6422883cfb4b252a83bed929c83abfdbbf2967d5"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
name = "futures-core"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59f5fff90fd5d971f936ad674802482ba441b6f09ba5e15fd8b39145582ca399"
[[package]]
name = "futures-executor"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10d6bb888be1153d3abeb9006b11b02cf5e9b209fda28693c31ae1e4e012e314"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de27142b013a8e869c14957e6d2edeef89e97c289e69d042ee3a49acd8b51789"
[[package]]
name = "futures-macro"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0b5a30a4328ab5473878237c447333c093297bded83a4983d10f4deea240d39"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "futures-sink"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f2032893cb734c7a05d85ce0cc8b8c4075278e93b24b66f9de99d6eb0fa8acc"
[[package]]
name = "futures-task"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdb66b5f09e22019b1ab0830f7785bcea8e7a42148683f99214f73f8ec21a626"
dependencies = [
"once_cell",
]
[[package]]
name = "futures-util"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8764574ff08b701a084482c3c7031349104b07ac897393010494beaa18ce32c6"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project",
"pin-utils",
"proc-macro-hack",
"proc-macro-nested",
"slab",
]
[[package]]
name = "hermit-abi"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3deed196b6e7f9e44a2ae8d94225d80302d81208b1bb673fd21fe634645c85a9"
dependencies = [
"libc",
]
[[package]]
name = "instant"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b141fdc7836c525d4d594027d318c84161ca17aaf8113ab1f81ab93ae897485"
[[package]]
name = "iovec"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
dependencies = [
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85a7e2c92a4804dd459b86c339278d0fe87cf93757fae222c3fa3ae75458bc73"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "755456fae044e6fa1ebbbd1b3e902ae19e73097ed4ed87bb79934a867c007bc3"
[[package]]
name = "lock_api"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28247cc5a5be2f05fbcd76dd0cf2c7d3b5400cb978a28042abcd4fa0b3f8261c"
dependencies = [
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
dependencies = [
"cfg-if",
]
[[package]]
name = "memchr"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
[[package]]
name = "mio"
version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
dependencies = [
"cfg-if",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"iovec",
"kernel32-sys",
"libc",
"log",
"miow 0.2.1",
"net2",
"slab",
"winapi 0.2.8",
]
[[package]]
name = "mio-named-pipes"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656"
dependencies = [
"log",
"mio",
"miow 0.3.5",
"winapi 0.3.9",
]
[[package]]
name = "mio-uds"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0"
dependencies = [
"iovec",
"libc",
"mio",
]
[[package]]
name = "miow"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
dependencies = [
"kernel32-sys",
"net2",
"winapi 0.2.8",
"ws2_32-sys",
]
[[package]]
name = "miow"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07b88fb9795d4d36d62a012dfbf49a8f5cf12751f36d31a9dbe66d528e58979e"
dependencies = [
"socket2",
"winapi 0.3.9",
]
[[package]]
name = "net2"
version = "0.2.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7"
dependencies = [
"cfg-if",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "ntest"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3bcdcff64ae6b41b371185e5b477f70564c5029dbc976df49cc6dda2318734e"
dependencies = [
"ntest_proc_macro_helper",
"ntest_test_cases",
"ntest_timeout",
]
[[package]]
name = "ntest_proc_macro_helper"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e706ef0984ef05fb01dc543caec79860e15db933a1082d81a6c85c47cb97e76"
[[package]]
name = "ntest_test_cases"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f33171238da099f222e7a6cd93146d6fe5b83ea454d40870fcc7e3d060f3034"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "ntest_timeout"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "399f8e1b696f74e4eb62afce1a86c547fe007e84dfb10b58ba4d758190d5d108"
dependencies = [
"ntest_proc_macro_helper",
"proc-macro-crate",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "num-integer"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d59457e662d541ba17869cf51cf177c0b5f0cbf476c66bdc90bf1edac4f875b"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "260e51e7efe62b592207e9e13a68e43692a7a279171d6ba57abd208bf23645ad"
[[package]]
name = "opus-cmake-sys"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23a8c33afebd07b24b2497e4a0d8c390f62a100fd0ce1bace892c66424cdadaf"
dependencies = [
"cc",
"cmake",
"pkg-config",
]
[[package]]
name = "opus-prebuild-wasm"
version = "0.1.1"
source = "git+https://github.com/WolverinDEV/opus-prebuild-wasm.git#2b42102f979954a930b3716f9892a413a60458eb"
[[package]]
name = "parking_lot"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4893845fa2ca272e647da5d0e46660a314ead9c2fdd9a883aabc32e481a8733"
dependencies = [
"instant",
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b"
dependencies = [
"cfg-if",
"cloudabi",
"instant",
"libc",
"redox_syscall",
"smallvec",
"winapi 0.3.9",
]
[[package]]
name = "pin-project"
version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca4433fff2ae79342e497d9f8ee990d174071408f28f726d6d83af93e58e48aa"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c0e815c3ee9a031fdf5af21c10aa17c573c9c6a566328d99e3936c34e36461f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-lite"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282adbf10f2698a7a77f8e983a74b2d18176c19a7fd32a45446139ae7b02b715"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkg-config"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d36492546b6af1463394d46f0c834346f31548646f6ba10849802c9c9a27ac33"
[[package]]
name = "proc-macro-crate"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785"
dependencies = [
"toml",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99c605b9a0adc77b7211c6b1f722dcb613d68d66859a44f3d485a6da332b0598"
[[package]]
name = "proc-macro-nested"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a"
[[package]]
name = "proc-macro2"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12"
dependencies = [
"unicode-xid",
]
[[package]]
name = "quote"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.1.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.115"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e54c9a88f2da7238af84b5101443f0c0d0a3bbdc455e34a5c9497b1903ed55d5"
[[package]]
name = "signal-hook-registry"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e12110bc539e657a646068aaf5eb5b63af9d0c1f7b29c97113fad80e15f035"
dependencies = [
"arc-swap",
"libc",
]
[[package]]
name = "simple_logger"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13a53ed2efd04911c8280f2da7bf9abd350c931b86bc7f9f2386fbafbf525ff9"
dependencies = [
"atty",
"chrono",
"colored",
"log",
"winapi 0.3.9",
]
[[package]]
name = "slab"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252"
[[package]]
name = "socket2"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"winapi 0.3.9",
]
[[package]]
name = "syn"
version = "1.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891d8d6567fe7c7f8835a3a98af4208f3846fba258c1bc3c31d6e506239f11f9"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "teaweb-audo-lib"
version = "0.1.0"
dependencies = [
"console_error_panic_hook",
"console_log",
"futures",
"js-sys",
"log",
"ntest",
"once_cell",
"opus-cmake-sys",
"opus-prebuild-wasm",
"simple_logger",
"tokio",
"tokio-test",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-timer",
]
[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"libc",
"wasi",
"winapi 0.3.9",
]
[[package]]
name = "tokio"
version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d34ca54d84bf2b5b4d7d31e901a8464f7b60ac145a284fba25ceb801f2ddccd"
dependencies = [
"bytes",
"fnv",
"futures-core",
"iovec",
"lazy_static",
"libc",
"memchr",
"mio",
"mio-named-pipes",
"mio-uds",
"num_cpus",
"pin-project-lite",
"signal-hook-registry",
"slab",
"tokio-macros",
"winapi 0.3.9",
]
[[package]]
name = "tokio-macros"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0c3acc6aa564495a0f2e1d59fab677cd7f81a19994cfc7f3ad0e64301560389"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tokio-test"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0049c119b6d505c4447f5c64873636c7af6c75ab0d45fd9f618d82acb8016d"
dependencies = [
"bytes",
"futures-core",
"tokio",
]
[[package]]
name = "toml"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffc92d160b1eef40665be3a05630d003936a3bc7da7421277846c2613e92c71a"
dependencies = [
"serde",
]
[[package]]
name = "unicode-xid"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "wasm-bindgen"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0563a9a4b071746dd5aedbc3a28c6fe9be4586fb3fbadb67c400d4f53c6b16c"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc71e4c5efa60fb9e74160e89b93353bc24059999c0ae0fb03affc39770310b0"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95f8d235a77f880bcef268d379810ea6c0af2eacfa90b1ad5af731776e0c4699"
dependencies = [
"cfg-if",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97c57cefa5fa80e2ba15641578b44d36e7a64279bc5ed43c6dbaf329457a2ed2"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841a6d1c35c6f596ccea1f82504a192a60378f64b3bb0261904ad8f2f5657556"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93b162580e34310e5931c4b792560108b10fd14d64915d7fff8ff00180e70092"
[[package]]
name = "wasm-timer"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f"
dependencies = [
"futures",
"js-sys",
"parking_lot",
"pin-utils",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]]
name = "web-sys"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dda38f4e5ca63eda02c059d243aa25b5f35ab98451e518c51612cd0f1bd19a47"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "ws2_32-sys"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]

View File

@ -1,36 +0,0 @@
[package]
name = "teaweb-audo-lib"
version = "0.1.0"
authors = ["WolverinDEV <git@teaspeak.de>"]
edition = "2018"
[lib]
crate-type = ["cdylib"]
[dependencies]
wasm-bindgen = "0.2"
wasm-bindgen-futures = "0.4.17"
js-sys = "0.3.44"
wasm-timer = "0.2.4"
futures = "0.3.5"
log = "0.4"
once_cell = "1.4.1"
[dev-dependencies]
tokio = { version = "0.2", features = ["full"] }
tokio-test = "0.2.1"
ntest = "0.7.1"
[target.'cfg(target_arch = "wasm32")'.dependencies]
# opus-prebuild-wasm = { path = "D:\\git\\web\\opus-prebuild-wasm" }
opus-prebuild-wasm = { git = "https://github.com/WolverinDEV/opus-prebuild-wasm.git" }
console_log = "0.2.0"
console_error_panic_hook = "0.1.6"
# Used for the tests as an alternative (Attention: The opus library version differs!)
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
opus-cmake-sys = "1.0.6"
simple_logger = "1.6.0"
[package.metadata.wasm-pack.profile.release]
wasm-opt = ["-O2", "--enable-mutable-globals"]

View File

@ -1,184 +0,0 @@
use std::ops::{Add, Sub};
pub mod packet_queue;
pub mod codec;
pub mod decoder;
pub mod converter;
/// A wrapper around an u16 to represent an audio packet id.
/// Packet ids increase monotonically and wrap around at the u16 boundary.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct PacketId {
    pub packet_id: u16
}
impl PacketId {
    /// Wrap a raw u16 sequence number.
    pub fn new(packet_id: u16) -> PacketId {
        PacketId{ packet_id }
    }

    /// Ordering test that optionally tolerates u16 wrap-around.
    ///
    /// With `clipping_window = Some(window)`, ids within `window` of the u16
    /// boundaries are compared as if the counter had wrapped (e.g. 0xFFFF is
    /// considered less than 2 for a big enough window — see the unit tests).
    /// With `None` this is a plain `<` comparison.
    pub fn is_less(&self, other: &Self, clipping_window: Option<u16>) -> bool {
        if let Some(window) = clipping_window {
            if self.packet_id < window {
                // self sits just after a wrap; very large `other` counts as older
                self.packet_id < other.packet_id && other.packet_id < 0xFFFF - window
            } else if self.packet_id > 0xFFFF - window {
                // self sits just before a wrap; a small `other` may already have wrapped
                self.packet_id < other.packet_id || self.packet_id.wrapping_add(window) >= other.packet_id
            } else {
                self.packet_id < other.packet_id
            }
        } else {
            self.packet_id < other.packet_id
        }
    }

    /// Absolute distance between two ids, optionally accounting for
    /// wrap-around within `clipping_window` of the u16 boundaries.
    /// Symmetric: `a.difference(&b, w) == b.difference(&a, w)` (unit tested).
    pub fn difference(&self, other: &Self, clipping_window: Option<u16>) -> u16 {
        if let Some(window) = clipping_window {
            if self.packet_id < window {
                // self just wrapped; if other is near the top, measure across the boundary
                return if other.packet_id > 0xFFFF - window {
                    (0xFFFF - other.packet_id) + self.packet_id + 1
                } else if other.packet_id > self.packet_id {
                    other.packet_id - self.packet_id
                } else {
                    self.packet_id - other.packet_id
                }
            } else if other.packet_id < window {
                // mirror case: other just wrapped
                return if self.packet_id > 0xFFFF - window {
                    (0xFFFF - self.packet_id) + other.packet_id + 1
                } else if self.packet_id > other.packet_id {
                    self.packet_id - other.packet_id
                } else {
                    other.packet_id - self.packet_id
                }
            }
        }
        // No wrap involved: plain absolute difference.
        if self.packet_id > other.packet_id {
            self.packet_id - other.packet_id
        } else {
            other.packet_id - self.packet_id
        }
    }
}
impl Add<u16> for PacketId {
type Output = PacketId;
fn add(self, rhs: u16) -> Self::Output {
PacketId::new(self.packet_id.wrapping_add(rhs))
}
}
impl Sub<u16> for PacketId {
type Output = PacketId;
fn sub(self, rhs: u16) -> Self::Output {
PacketId::new(self.packet_id.wrapping_sub(rhs))
}
}
/// Audio codecs known to the TeaSpeak protocol; the discriminants are the
/// on-the-wire codec ids (see `Codec::from_u8`).
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum Codec {
    /// Speex narrow band, not supported any more
    SpeexNarrow = 0x00,
    /// Speex wide band, not supported any more
    SpeexWide = 0x01,
    /// Speex ultra wide band, not supported any more
    SpeexUltraWide = 0x02,
    /// Celt, not supported any more
    Celt = 0x03,
    /// Opus using the VoIP quality
    Opus = 0x04,
    /// Opus using the stereo music quality
    OpusMusic = 0x05,
    /// A lossless compression codec, currently not yet supported, but planned to
    Flac = 0x10,
    /// The codec is unknown
    Unknown = 0xFF
}
impl Codec {
    /// Map a wire-protocol codec byte onto its `Codec` variant.
    /// Bytes without a dedicated variant become `Codec::Unknown`.
    /// The literal arms mirror the enum discriminants above.
    pub fn from_u8(value: u8) -> Codec {
        match value {
            0x00 => Codec::SpeexNarrow,
            0x01 => Codec::SpeexWide,
            0x02 => Codec::SpeexUltraWide,
            0x03 => Codec::Celt,
            0x04 => Codec::Opus,
            0x05 => Codec::OpusMusic,
            0x10 => Codec::Flac,
            _ => Codec::Unknown,
        }
    }
}
/// A single received voice packet, already stripped down to the fields the
/// audio pipeline needs.
#[derive(PartialEq, Debug)]
pub struct AudioPacket {
    pub client_id: u16,       // id of the speaking client
    pub packet_id: PacketId,  // sequence number used for ordering/loss detection
    pub codec: Codec,         // codec the payload is encoded with
    pub payload: Vec<u8>,     // raw encoded audio; empty marks a stream stop
}
impl AudioPacket {
    /// An audio packet without any payload signals the end of a voice stream.
    pub fn is_stop(&self) -> bool {
        let payload_size = self.payload.len();
        payload_size == 0
    }
}
#[cfg(test)]
mod tests {
    use crate::audio::PacketId;

    /// Assert `a.is_less(&b, clipping_window) == result`.
    fn test_packet_id(a: u16, b: u16, result: bool, clipping_window: Option<u16>) {
        let a = PacketId{ packet_id: a };
        let b = PacketId{ packet_id: b };
        assert_eq!(a.is_less(&b, clipping_window), result);
    }

    /// Assert the distance between `a` and `b`, checking both directions
    /// to pin the documented symmetry of `difference`.
    fn test_packet_difference(a: u16, b: u16, expected: u16, clipping_window: Option<u16>) {
        let a = PacketId{ packet_id: a };
        let b = PacketId{ packet_id: b };
        assert_eq!(a.difference(&b, clipping_window), expected);
        assert_eq!(b.difference(&a, clipping_window), expected);
    }

    #[test]
    fn packet_id_is_less_basic() {
        test_packet_id(2, 3, true, None);
        test_packet_id(4, 3, false, None);
    }

    #[test]
    fn packet_id_is_less_clipping() {
        // Without a window, wrap-around is not considered.
        test_packet_id(0xFFFF, 0, false, None);
        test_packet_id(0xFFFF, 1, false, None);
        test_packet_id(0xFFFF, 2, false, None);
        // With a window, ids near the u16 boundary compare as wrapped.
        test_packet_id(0xFFFF, 2, true, Some(4));
        test_packet_id(0xFFFF, 2, false, Some(2));
        test_packet_id(2, 0xFFFF, false, Some(4));
        for i in 1..0x2Fu16 {
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF1), true, Some(2));
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF5), true, Some(6));
            test_packet_id(i.wrapping_add(0xFFF6), i.wrapping_add(0xFFF0), false, Some(6));
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF6), true, Some(6));
        }
    }

    #[test]
    fn packet_id_difference() {
        test_packet_difference(0, 0, 0, None);
        test_packet_difference(0xFFFF, 0, 0xFFFF, None);
        // With a window of 1, ids 0xFFFF and 0 are direct neighbours.
        test_packet_difference(0xFFFF, 0, 1, Some(1));
        for i in 0..0xFFu16 {
            test_packet_difference(0xFF8F_u16.wrapping_add(i), 0xFF9F_u16.wrapping_add(i), 16, Some(16));
        }
    }
}

View File

@ -1 +0,0 @@
pub mod opus;

View File

@ -1,845 +0,0 @@
// Copyright 2016 Tad Hardesty
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! High-level bindings for libopus.
//!
//! Only brief descriptions are included here. For detailed information, consult
//! the [libopus documentation](https://opus-codec.org/docs/opus_api-1.1.2/).
#![warn(missing_docs)]
#![allow(dead_code)]
#[cfg(target_arch = "wasm32")]
extern crate opus_prebuild_wasm as ffi;
#[cfg(not(target_arch = "wasm32"))]
extern crate opus_cmake_sys as ffi;
use std::marker::PhantomData;
use std::os::raw::c_int;
use std::ffi::CStr;
// ============================================================================
// Constants
// Generic CTLs
// NOTE(review): the request numbers and special bitrate values below appear
// to mirror libopus' opus_defines.h — confirm against the linked libopus
// version before changing any of them.
const OPUS_RESET_STATE: c_int = 4028; // void
const OPUS_GET_FINAL_RANGE: c_int = 4031; // out *u32
const OPUS_GET_BANDWIDTH: c_int = 4009; // out *i32
const OPUS_GET_SAMPLE_RATE: c_int = 4029; // out *i32

// Encoder CTLs
const OPUS_SET_BITRATE: c_int = 4002; // in i32
const OPUS_GET_BITRATE: c_int = 4003; // out *i32
const OPUS_SET_VBR: c_int = 4006; // in i32
const OPUS_GET_VBR: c_int = 4007; // out *i32
const OPUS_SET_VBR_CONSTRAINT: c_int = 4020; // in i32
const OPUS_GET_VBR_CONSTRAINT: c_int = 4021; // out *i32
const OPUS_SET_INBAND_FEC: c_int = 4012; // in i32
const OPUS_GET_INBAND_FEC: c_int = 4013; // out *i32
const OPUS_SET_PACKET_LOSS_PERC: c_int = 4014; // in i32
const OPUS_GET_PACKET_LOSS_PERC: c_int = 4015; // out *i32
const OPUS_GET_LOOKAHEAD: c_int = 4027; // out *i32

// Decoder CTLs
const OPUS_SET_GAIN: c_int = 4034; // in i32
const OPUS_GET_GAIN: c_int = 4045; // out *i32
const OPUS_GET_LAST_PACKET_DURATION: c_int = 4039; // out *i32
const OPUS_GET_PITCH: c_int = 4033; // out *i32

// Bitrate: special sentinel values accepted/returned by the bitrate CTLs.
const OPUS_AUTO: c_int = -1000;
const OPUS_BITRATE_MAX: c_int = -1;
/// The possible applications for the codec.
///
/// NOTE(review): the discriminants look like libopus' OPUS_APPLICATION_*
/// constants — confirm against opus_defines.h.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Application {
    /// Best for most VoIP/videoconference applications where listening quality
    /// and intelligibility matter most.
    Voip = 2048,
    /// Best for broadcast/high-fidelity application where the decoded audio
    /// should be as close as possible to the input.
    Audio = 2049,
    /// Only use when lowest-achievable latency is what matters most.
    LowDelay = 2051,
}
/// The available channel settings.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Channels {
    /// One channel.
    Mono = 1,
    /// Two channels, left and right.
    Stereo = 2,
}
/// The available bandwidth level settings.
///
/// The discriminants are the raw values exchanged with libopus
/// (see `Bandwidth::from_int`).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Bandwidth {
    /// Auto/default setting.
    Auto = -1000,
    /// 4kHz bandpass.
    Narrowband = 1101,
    /// 6kHz bandpass.
    Mediumband = 1102,
    /// 8kHz bandpass.
    Wideband = 1103,
    /// 12kHz bandpass.
    Superwideband = 1104,
    /// 20kHz bandpass.
    Fullband = 1105,
}
impl Bandwidth {
fn from_int(value: i32) -> Option<Bandwidth> {
Some(match value {
-1000 => Bandwidth::Auto,
1101 => Bandwidth::Narrowband,
1102 => Bandwidth::Mediumband,
1103 => Bandwidth::Wideband,
1104 => Bandwidth::Superwideband,
1105 => Bandwidth::Fullband,
_ => return None,
})
}
fn decode(value: i32, what: &'static str) -> Result<Bandwidth> {
match Bandwidth::from_int(value) {
Some(bandwidth) => Ok(bandwidth),
None => Err(Error::bad_arg(what)),
}
}
}
/// Possible error codes.
///
/// The discriminants match the negative return codes reported by libopus
/// (see `ErrorCode::from_int`, which maps `ffi::OPUS_*` onto these variants).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ErrorCode {
    /// One or more invalid/out of range arguments.
    BadArg = -1,
    /// Not enough bytes allocated in the buffer.
    BufferTooSmall = -2,
    /// An internal error was detected.
    InternalError = -3,
    /// The compressed data passed is corrupted.
    InvalidPacket = -4,
    /// Invalid/unsupported request number.
    Unimplemented = -5,
    /// An encoder or decoder structure is invalid or already freed.
    InvalidState = -6,
    /// Memory allocation has failed.
    AllocFail = -7,
    /// An unknown failure.
    Unknown = -8,
}
impl ErrorCode {
    /// Map a raw libopus return code onto an `ErrorCode`; anything not
    /// recognised collapses into `Unknown`.
    fn from_int(value: c_int) -> ErrorCode {
        use ErrorCode::*;
        match value {
            ffi::OPUS_BAD_ARG => BadArg,
            ffi::OPUS_BUFFER_TOO_SMALL => BufferTooSmall,
            ffi::OPUS_INTERNAL_ERROR => InternalError,
            ffi::OPUS_INVALID_PACKET => InvalidPacket,
            ffi::OPUS_UNIMPLEMENTED => Unimplemented,
            ffi::OPUS_INVALID_STATE => InvalidState,
            ffi::OPUS_ALLOC_FAIL => AllocFail,
            _ => Unknown,
        }
    }

    /// Get a human-readable error string for this error code.
    pub fn description(self) -> &'static str {
        // should always be ASCII and non-null for any input
        unsafe { CStr::from_ptr(ffi::opus_strerror(self as c_int)) }.to_str().unwrap()
    }
}
/// Possible bitrates.
///
/// Converted to/from the OPUS_AUTO / OPUS_BITRATE_MAX sentinels by the
/// `set_bitrate`/`get_bitrate` encoder CTLs.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Bitrate {
    /// Explicit bitrate choice (in bits/second).
    Bits(i32),
    /// Maximum bitrate allowed (up to maximum number of bytes for the packet).
    Max,
    /// Default bitrate decided by the encoder (not recommended).
    Auto,
}
/// Get the libopus version string.
///
/// Applications may look for the substring "-fixed" in the version string to
/// determine whether they have a fixed-point or floating-point build at
/// runtime.
pub fn version() -> &'static str {
    // version string should always be ASCII
    unsafe { CStr::from_ptr(ffi::opus_get_version_string()) }.to_str().unwrap()
}
// Invoke an `ffi::` function; a negative return code becomes an early
// `return Err(...)` (so this may only be used inside functions returning
// `Result`), otherwise the macro evaluates to the non-negative code.
macro_rules! ffi {
    ($f:ident $(, $rest:expr)*) => {
        match unsafe { ffi::$f($($rest),*) } {
            code if code < 0 => return Err(Error::from_code(stringify!($f), code)),
            code => code,
        }
    }
}
// Issue a CTL request on `$this.ptr` via the given `ffi::` ctl function.
// Negative return codes become an early `return Err(...)` whose message
// names both the ctl function and the request; success evaluates to ().
macro_rules! ctl {
    ($f:ident, $this:ident, $ctl:ident, $($rest:expr),*) => {
        match unsafe { ffi::$f($this.ptr, $ctl, $($rest),*) } {
            code if code < 0 => return Err(Error::from_code(
                concat!(stringify!($f), "(", stringify!($ctl), ")"),
                code,
            )),
            _ => (),
        }
    }
}
// ============================================================================
// Encoder
// Shorthand for `ctl!` against `opus_encoder_ctl`.
macro_rules! enc_ctl {
    ($this:ident, $ctl:ident $(, $rest:expr)*) => {
        ctl!(opus_encoder_ctl, $this, $ctl, $($rest),*)
    }
}
/// An Opus encoder with associated state.
#[derive(Debug)]
pub struct Encoder {
    ptr: *mut ffi::OpusEncoder, // owned; released in Drop via opus_encoder_destroy
    channels: Channels,         // channel layout the encoder was created with
}
impl Encoder {
/// Create and initialize an encoder.
pub fn new(sample_rate: u32, channels: Channels, mode: Application) -> Result<Encoder> {
let mut error = 0;
let ptr = unsafe { ffi::opus_encoder_create(
sample_rate as i32,
channels as c_int,
mode as c_int,
&mut error) };
if error != ffi::OPUS_OK || ptr.is_null() {
Err(Error::from_code("opus_encoder_create", error))
} else {
Ok(Encoder { ptr: ptr, channels: channels })
}
}
/// Encode an Opus frame.
pub fn encode(&mut self, input: &[i16], output: &mut [u8]) -> Result<usize> {
let len = ffi!(opus_encode, self.ptr,
input.as_ptr(), len(input) / self.channels as c_int,
output.as_mut_ptr(), len(output));
Ok(len as usize)
}
/// Encode an Opus frame from floating point input.
pub fn encode_float(&mut self, input: &[f32], output: &mut [u8]) -> Result<usize> {
let len = ffi!(opus_encode_float, self.ptr,
input.as_ptr(), len(input) / self.channels as c_int,
output.as_mut_ptr(), len(output));
Ok(len as usize)
}
/// Encode an Opus frame to a new buffer.
pub fn encode_vec(&mut self, input: &[i16], max_size: usize) -> Result<Vec<u8>> {
let mut output: Vec<u8> = vec![0; max_size];
let result = self.encode(input, output.as_mut_slice()).unwrap();
output.truncate(result);
Ok(output)
}
/// Encode an Opus frame from floating point input to a new buffer.
pub fn encode_vec_float(&mut self, input: &[f32], max_size: usize) -> Result<Vec<u8>> {
let mut output: Vec<u8> = vec![0; max_size];
let result = self.encode_float(input, output.as_mut_slice()).unwrap();
output.truncate(result);
Ok(output)
}
// ------------
// Generic CTLs
/// Reset the codec state to be equivalent to a freshly initialized state.
pub fn reset_state(&mut self) -> Result<()> {
enc_ctl!(self, OPUS_RESET_STATE);
Ok(())
}
/// Get the final range of the codec's entropy coder.
pub fn get_final_range(&mut self) -> Result<u32> {
let mut value: u32 = 0;
enc_ctl!(self, OPUS_GET_FINAL_RANGE, &mut value);
Ok(value)
}
/// Get the encoder's configured bandpass.
pub fn get_bandwidth(&mut self) -> Result<Bandwidth> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_BANDWIDTH, &mut value);
Bandwidth::decode(value, "opus_encoder_ctl(OPUS_GET_BANDWIDTH)")
}
/// Get the samping rate the encoder was intialized with.
pub fn get_sample_rate(&mut self) -> Result<u32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_SAMPLE_RATE, &mut value);
Ok(value as u32)
}
// ------------
// Encoder CTLs
/// Set the encoder's bitrate.
pub fn set_bitrate(&mut self, value: Bitrate) -> Result<()> {
let value: i32 = match value {
Bitrate::Auto => OPUS_AUTO,
Bitrate::Max => OPUS_BITRATE_MAX,
Bitrate::Bits(b) => b,
};
enc_ctl!(self, OPUS_SET_BITRATE, value);
Ok(())
}
/// Get the encoder's bitrate.
pub fn get_bitrate(&mut self) -> Result<Bitrate> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_BITRATE, &mut value);
Ok(match value {
OPUS_AUTO => Bitrate::Auto,
OPUS_BITRATE_MAX => Bitrate::Max,
_ => Bitrate::Bits(value),
})
}
/// Enable or disable variable bitrate.
pub fn set_vbr(&mut self, vbr: bool) -> Result<()> {
let value: i32 = if vbr { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_VBR, value);
Ok(())
}
/// Determine if variable bitrate is enabled.
pub fn get_vbr(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_VBR, &mut value);
Ok(value != 0)
}
/// Enable or disable constrained VBR.
pub fn set_vbr_constraint(&mut self, vbr: bool) -> Result<()> {
let value: i32 = if vbr { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_VBR_CONSTRAINT, value);
Ok(())
}
/// Determine if constrained VBR is enabled.
pub fn get_vbr_constraint(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_VBR_CONSTRAINT, &mut value);
Ok(value != 0)
}
/// Configures the encoder's use of inband forward error correction (FEC).
pub fn set_inband_fec(&mut self, value: bool) -> Result<()> {
let value: i32 = if value { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_INBAND_FEC, value);
Ok(())
}
/// Gets encoder's configured use of inband forward error correction.
pub fn get_inband_fec(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_INBAND_FEC, &mut value);
Ok(value != 0)
}
/// Sets the encoder's expected packet loss percentage.
pub fn set_packet_loss_perc(&mut self, value: i32) -> Result<()> {
enc_ctl!(self, OPUS_SET_PACKET_LOSS_PERC, value);
Ok(())
}
/// Gets the encoder's expected packet loss percentage.
pub fn get_packet_loss_perc(&mut self) -> Result<i32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_PACKET_LOSS_PERC, &mut value);
Ok(value)
}
/// Gets the total samples of delay added by the entire codec.
pub fn get_lookahead(&mut self) -> Result<i32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_LOOKAHEAD, &mut value);
Ok(value)
}
// TODO: Encoder-specific CTLs
}
impl Drop for Encoder {
    /// Release the native encoder state allocated in `Encoder::new`.
    fn drop(&mut self) {
        unsafe { ffi::opus_encoder_destroy(self.ptr) }
    }
}

// "A single codec state may only be accessed from a single thread at
// a time and any required locking must be performed by the caller. Separate
// streams must be decoded with separate decoder states and can be decoded
// in parallel unless the library was compiled with NONTHREADSAFE_PSEUDOSTACK
// defined."
//
// In other words, opus states may be moved between threads at will. A special
// compilation mode intended for embedded platforms forbids multithreaded use
// of the library as a whole rather than on a per-state basis, but the opus-sys
// crate does not use this mode.
//
// Note: Send only — an Encoder is deliberately NOT Sync.
unsafe impl Send for Encoder {}
// ============================================================================
// Decoder
// Shorthand for `ctl!` against `opus_decoder_ctl`.
macro_rules! dec_ctl {
    ($this:ident, $ctl:ident $(, $rest:expr)*) => {
        ctl!(opus_decoder_ctl, $this, $ctl, $($rest),*)
    }
}
/// An Opus decoder with associated state.
#[derive(Debug)]
pub struct Decoder {
    ptr: *mut ffi::OpusDecoder, // owned; released in Drop via opus_decoder_destroy
    channels: Channels,         // channel layout the decoder was created with
}
impl Decoder {
    /// Create and initialize a decoder.
    pub fn new(sample_rate: u32, channels: Channels) -> Result<Decoder> {
        let mut error = 0;
        let ptr = unsafe { ffi::opus_decoder_create(
            sample_rate as i32,
            channels as c_int,
            &mut error) };
        if error != ffi::OPUS_OK || ptr.is_null() {
            Err(Error::from_code("opus_decoder_create", error))
        } else {
            Ok(Decoder { ptr: ptr, channels: channels })
        }
    }

    /// Decode an Opus packet.
    ///
    /// An empty `input` is handed to libopus as NULL, which signals a lost
    /// packet; `fec` requests in-band forward-error-correction recovery.
    /// Returns the sample count reported by `opus_decode`.
    pub fn decode(&mut self, input: &[u8], output: &mut [i16], fec: bool) -> Result<usize> {
        let ptr = match input.len() {
            0 => std::ptr::null(),
            _ => input.as_ptr(),
        };
        let len = ffi!(opus_decode, self.ptr,
            ptr, len(input),
            output.as_mut_ptr(), len(output) / self.channels as c_int,
            fec as c_int);
        Ok(len as usize)
    }

    /// Decode an Opus packet with floating point output.
    ///
    /// Same NULL-on-empty-input convention as `decode`.
    pub fn decode_float(&mut self, input: &[u8], output: &mut [f32], fec: bool) -> Result<usize> {
        let ptr = match input.len() {
            0 => std::ptr::null(),
            _ => input.as_ptr(),
        };
        let len = ffi!(opus_decode_float, self.ptr,
            ptr, len(input),
            output.as_mut_ptr(), len(output) / self.channels as c_int,
            fec as c_int);
        Ok(len as usize)
    }

    /// Get the number of samples of an Opus packet.
    pub fn get_nb_samples(&self, packet: &[u8]) -> Result<usize> {
        let len = ffi!(opus_decoder_get_nb_samples, self.ptr,
            packet.as_ptr(), packet.len() as i32);
        Ok(len as usize)
    }

    // ------------
    // Generic CTLs

    /// Reset the codec state to be equivalent to a freshly initialized state.
    pub fn reset_state(&mut self) -> Result<()> {
        dec_ctl!(self, OPUS_RESET_STATE);
        Ok(())
    }

    /// Get the final range of the codec's entropy coder.
    pub fn get_final_range(&mut self) -> Result<u32> {
        let mut value: u32 = 0;
        dec_ctl!(self, OPUS_GET_FINAL_RANGE, &mut value);
        Ok(value)
    }

    /// Get the decoder's last bandpass.
    pub fn get_bandwidth(&mut self) -> Result<Bandwidth> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_BANDWIDTH, &mut value);
        Bandwidth::decode(value, "opus_decoder_ctl(OPUS_GET_BANDWIDTH)")
    }

    /// Get the sampling rate the decoder was initialized with.
    pub fn get_sample_rate(&mut self) -> Result<u32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_SAMPLE_RATE, &mut value);
        Ok(value as u32)
    }

    // ------------
    // Decoder CTLs

    /// Configures decoder gain adjustment.
    ///
    /// Scales the decoded output by a factor specified in Q8 dB units. This has
    /// a maximum range of -32768 to 32768 inclusive, and returns `BadArg`
    /// otherwise. The default is zero indicating no adjustment. This setting
    /// survives decoder reset.
    ///
    /// `gain = pow(10, x / (20.0 * 256))`
    pub fn set_gain(&mut self, gain: i32) -> Result<()> {
        dec_ctl!(self, OPUS_SET_GAIN, gain);
        Ok(())
    }

    /// Gets the decoder's configured gain adjustment.
    pub fn get_gain(&mut self) -> Result<i32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_GAIN, &mut value);
        Ok(value)
    }

    /// Gets the duration (in samples) of the last packet successfully decoded
    /// or concealed.
    pub fn get_last_packet_duration(&mut self) -> Result<u32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_LAST_PACKET_DURATION, &mut value);
        Ok(value as u32)
    }

    /// Gets the pitch of the last decoded frame, if available.
    ///
    /// This can be used for any post-processing algorithm requiring the use of
    /// pitch, e.g. time stretching/shortening. If the last frame was not
    /// voiced, or if the pitch was not coded in the frame, then zero is
    /// returned.
    pub fn get_pitch(&mut self) -> Result<i32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_PITCH, &mut value);
        Ok(value)
    }
}
impl Drop for Decoder {
    /// Release the native decoder state allocated in `Decoder::new`.
    fn drop(&mut self) {
        unsafe { ffi::opus_decoder_destroy(self.ptr) }
    }
}

// See `unsafe impl Send for Encoder`.
unsafe impl Send for Decoder {}
// ============================================================================
// Packet Analysis
/// Analyze raw Opus packets.
///
/// Thin wrappers around libopus' stateless `opus_packet_*` helpers; none of
/// these require an encoder or decoder instance.
pub mod packet {
    use super::*;
    use super::ffi;
    use std::{ptr, slice};

    /// Get the bandwidth of an Opus packet.
    pub fn get_bandwidth(packet: &[u8]) -> Result<Bandwidth> {
        // The TOC byte must exist before it can be inspected.
        if packet.len() < 1 {
            return Err(Error::bad_arg("opus_packet_get_bandwidth"));
        }
        let bandwidth = ffi!(opus_packet_get_bandwidth, packet.as_ptr());
        Bandwidth::decode(bandwidth, "opus_packet_get_bandwidth")
    }

    /// Get the number of channels from an Opus packet.
    pub fn get_nb_channels(packet: &[u8]) -> Result<Channels> {
        if packet.len() < 1 {
            return Err(Error::bad_arg("opus_packet_get_nb_channels"));
        }
        let channels = ffi!(opus_packet_get_nb_channels, packet.as_ptr());
        match channels {
            1 => Ok(Channels::Mono),
            2 => Ok(Channels::Stereo),
            _ => Err(Error::bad_arg("opus_packet_get_nb_channels")),
        }
    }

    /// Get the number of frames in an Opus packet.
    pub fn get_nb_frames(packet: &[u8]) -> Result<usize> {
        let frames = ffi!(opus_packet_get_nb_frames, packet.as_ptr(), len(packet));
        Ok(frames as usize)
    }

    /// Get the number of samples of an Opus packet.
    pub fn get_nb_samples(packet: &[u8], sample_rate: u32) -> Result<usize> {
        let frames = ffi!(opus_packet_get_nb_samples,
            packet.as_ptr(), len(packet),
            sample_rate as c_int);
        Ok(frames as usize)
    }

    /// Get the number of samples per frame from an Opus packet.
    pub fn get_samples_per_frame(packet: &[u8], sample_rate: u32) -> Result<usize> {
        if packet.len() < 1 {
            return Err(Error::bad_arg("opus_packet_get_samples_per_frame"))
        }
        let samples = ffi!(opus_packet_get_samples_per_frame,
            packet.as_ptr(), sample_rate as c_int);
        Ok(samples as usize)
    }

    /// Parse an Opus packet into one or more frames.
    ///
    /// The returned `Packet` borrows its frame slices from `packet`.
    pub fn parse(packet: &[u8]) -> Result<Packet> {
        let mut toc: u8 = 0;
        // 48 is the maximum number of frames a single packet may carry.
        let mut frames = [ptr::null(); 48];
        let mut sizes = [0i16; 48];
        let mut payload_offset: i32 = 0;
        let num_frames = ffi!(opus_packet_parse,
            packet.as_ptr(), len(packet),
            &mut toc, frames.as_mut_ptr(),
            sizes.as_mut_ptr(), &mut payload_offset);
        // Rebuild safe slices from the raw frame pointers libopus returned.
        let mut frames_vec = Vec::with_capacity(num_frames as usize);
        for i in 0..num_frames as usize {
            frames_vec.push(unsafe { slice::from_raw_parts(frames[i], sizes[i] as usize) });
        }
        Ok(Packet {
            toc: toc,
            frames: frames_vec,
            payload_offset: payload_offset as usize,
        })
    }

    /// A parsed Opus packet, returned from `parse`.
    #[derive(Debug)]
    pub struct Packet<'a> {
        /// The TOC byte of the packet.
        pub toc: u8,
        /// The frames contained in the packet.
        pub frames: Vec<&'a [u8]>,
        /// The offset into the packet at which the payload is located.
        pub payload_offset: usize,
    }

    /// Pad a given Opus packet to a larger size.
    ///
    /// The packet will be extended from the first `prev_len` bytes of the
    /// buffer into the rest of the available space.
    pub fn pad(packet: &mut [u8], prev_len: usize) -> Result<usize> {
        let result = ffi!(opus_packet_pad, packet.as_mut_ptr(),
            check_len(prev_len), len(packet));
        Ok(result as usize)
    }

    /// Remove all padding from a given Opus packet and rewrite the TOC sequence
    /// to minimize space usage.
    pub fn unpad(packet: &mut [u8]) -> Result<usize> {
        let result = ffi!(opus_packet_unpad, packet.as_mut_ptr(), len(packet));
        Ok(result as usize)
    }
}
// ============================================================================
// Float Soft Clipping
/// Soft-clipping to bring a float signal within the [-1,1] range.
#[derive(Debug)]
pub struct SoftClip {
    channels: Channels,
    memory: [f32; 2], // per-channel filter state carried between apply() calls
}

impl SoftClip {
    /// Initialize a new soft-clipping state.
    pub fn new(channels: Channels) -> SoftClip {
        SoftClip { channels: channels, memory: [0.0; 2] }
    }

    /// Apply soft-clipping to a float signal (in place, interleaved samples).
    pub fn apply(&mut self, signal: &mut [f32]) {
        unsafe { ffi::opus_pcm_soft_clip(
            signal.as_mut_ptr(),
            len(signal) / self.channels as c_int,
            self.channels as c_int,
            self.memory.as_mut_ptr()) };
    }
}
// ============================================================================
// Repacketizer
/// A repacketizer used to merge together or split apart multiple Opus packets.
#[derive(Debug)]
pub struct Repacketizer {
    ptr: *mut ffi::OpusRepacketizer, // owned; released in Drop
}

impl Repacketizer {
    /// Create and initialize a repacketizer.
    pub fn new() -> Result<Repacketizer> {
        let ptr = unsafe { ffi::opus_repacketizer_create() };
        if ptr.is_null() {
            Err(Error::from_code("opus_repacketizer_create", ffi::OPUS_ALLOC_FAIL))
        } else {
            Ok(Repacketizer { ptr })
        }
    }

    /// Shortcut to combine several smaller packets into one larger one.
    ///
    /// Returns the size of the combined packet written to `output`.
    /// Fix: an invalid input packet is now reported as `Err(...)` via `?`
    /// instead of panicking through `.unwrap()`.
    pub fn combine(&mut self, input: &[&[u8]], output: &mut [u8]) -> Result<usize> {
        let mut state = self.begin();
        for &packet in input {
            state.cat(packet)?;
        }
        state.out(output)
    }

    /// Begin using the repacketizer.
    pub fn begin<'rp, 'buf>(&'rp mut self) -> RepacketizerState<'rp, 'buf> {
        unsafe { ffi::opus_repacketizer_init(self.ptr); }
        RepacketizerState { rp: self, phantom: PhantomData }
    }
}
impl Drop for Repacketizer {
    /// Release the native repacketizer state allocated in `Repacketizer::new`.
    fn drop(&mut self) {
        unsafe { ffi::opus_repacketizer_destroy(self.ptr) }
    }
}

// See `unsafe impl Send for Encoder`.
unsafe impl Send for Repacketizer {}
// To understand why these lifetime bounds are needed, imagine that the
// repacketizer keeps an internal Vec<&'buf [u8]>, which is added to by cat()
// and accessed by get_nb_frames(), out(), and out_range(). To prove that these
// lifetime bounds are correct, a dummy implementation with the same signatures
// but a real Vec<&'buf [u8]> rather than unsafe blocks may be substituted.
/// An in-progress repacketization.
///
/// `'rp` borrows the `Repacketizer`; `'buf` ties the state to the packet
/// buffers submitted via `cat` (see the lifetime discussion above).
#[derive(Debug)]
pub struct RepacketizerState<'rp, 'buf> {
    rp: &'rp mut Repacketizer,
    phantom: PhantomData<&'buf [u8]>,
}
impl<'rp, 'buf> RepacketizerState<'rp, 'buf> {
    /// Add a packet to the current repacketizer state.
    pub fn cat(&mut self, packet: &'buf [u8]) -> Result<()> {
        ffi!(opus_repacketizer_cat, self.rp.ptr,
            packet.as_ptr(), len(packet));
        Ok(())
    }

    /// Add a packet to the current repacketizer state, moving it.
    ///
    /// Fix: an invalid packet is now reported as `Err(...)` via `?` instead
    /// of panicking through `.unwrap()` (the `Result` return type always
    /// promised this).
    #[inline]
    pub fn cat_move<'b2>(self, packet: &'b2 [u8]) -> Result<RepacketizerState<'rp, 'b2>> where 'buf: 'b2 {
        let mut shorter = self;
        shorter.cat(packet)?;
        Ok(shorter)
    }

    /// Get the total number of frames contained in packet data submitted so
    /// far via `cat`.
    pub fn get_nb_frames(&mut self) -> usize {
        unsafe { ffi::opus_repacketizer_get_nb_frames(self.rp.ptr) as usize }
    }

    /// Construct a new packet from data previously submitted via `cat`.
    ///
    /// All previously submitted frames are used.
    pub fn out(&mut self, buffer: &mut [u8]) -> Result<usize> {
        let result = ffi!(opus_repacketizer_out, self.rp.ptr,
            buffer.as_mut_ptr(), len(buffer));
        Ok(result as usize)
    }

    /// Construct a new packet from data previously submitted via `cat`, with
    /// a manually specified subrange.
    ///
    /// The `end` index should not exceed the value of `get_nb_frames()`.
    pub fn out_range(&mut self, begin: usize, end: usize, buffer: &mut [u8]) -> Result<usize> {
        let result = ffi!(opus_repacketizer_out_range, self.rp.ptr,
            check_len(begin), check_len(end),
            buffer.as_mut_ptr(), len(buffer));
        Ok(result as usize)
    }
}
// ============================================================================
// TODO: Multistream API
// ============================================================================
// Error Handling
/// Opus error Result alias.
pub type Result<T> = std::result::Result<T, Error>;

/// An error generated by the Opus library.
#[derive(Debug)]
pub struct Error {
    function: &'static str, // name of the libopus call (or ctl) that failed
    code: ErrorCode,        // classified libopus error code
}
impl Error {
    /// Construct a `BadArg` error attributed to `what` (used for argument
    /// checks performed on the Rust side, before calling into libopus).
    fn bad_arg(what: &'static str) -> Error {
        Error { function: what, code: ErrorCode::BadArg }
    }

    /// Construct an error from a raw libopus return code.
    fn from_code(what: &'static str, code: c_int) -> Error {
        Error { function: what, code: ErrorCode::from_int(code) }
    }

    /// Get the name of the Opus function from which the error originated.
    #[inline]
    pub fn function(&self) -> &'static str { self.function }

    /// Get a textual description of the error provided by Opus.
    #[inline]
    pub fn description(&self) -> &'static str { self.code.description() }

    /// Get the Opus error code of the error.
    #[inline]
    pub fn code(&self) -> ErrorCode { self.code }
}
impl std::fmt::Display for Error {
    /// Render as "<failing function>: <libopus description>".
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let origin = self.function;
        let message = self.description();
        write!(f, "{}: {}", origin, message)
    }
}
impl std::error::Error for Error {
    // NOTE(review): Error::description is deprecated in newer Rust in favour
    // of Display — kept here as written; confirm the crate's MSRV before
    // removing it.
    fn description(&self) -> &str {
        self.code.description()
    }
}
/// Convert a usize length into a `c_int`, panicking if the value does not
/// survive the round trip (i.e. it overflows the C int range).
fn check_len(val: usize) -> c_int {
    let converted = val as c_int;
    // Converting back and comparing detects any truncation or sign change.
    assert!(converted as usize == val, "length out of range: {}", val);
    converted
}
/// Slice length as a `c_int`, panicking via `check_len` on overflow.
#[inline]
fn len<T>(slice: &[T]) -> c_int {
    let element_count = slice.len();
    check_len(element_count)
}

View File

@ -1,31 +0,0 @@
#![allow(dead_code)]
/* source and target should not be intersecting! */
/// Convert channel-sequenced samples (all of channel 0, then all of
/// channel 1, …) into interleaved order (one frame per sample index).
///
/// Both `src` and `dest` must hold `sample_count * channel_count` samples.
/// Idiom cleanup: the manual `+= 1 as usize` index bookkeeping is replaced
/// by explicit index arithmetic; the element order written is unchanged.
pub fn sequenced2interleaved(src: &[f32], dest: &mut [f32], sample_count: u32, channel_count: u32) {
    let samples = sample_count as usize;
    let channels = channel_count as usize;
    for channel in 0..channels {
        for sample in 0..samples {
            // sequenced index: channel * samples + sample
            // interleaved index: sample * channels + channel
            dest[sample * channels + channel] = src[channel * samples + sample];
        }
    }
}
/* source and target should not be intersecting! */
/// Convert interleaved samples (one frame per sample index) into
/// channel-sequenced order (all of channel 0, then all of channel 1, …).
///
/// Both `src` and `dest` must hold `sample_count * channel_count` samples.
/// Idiom cleanup: the manual `+= 1 as usize` index bookkeeping is replaced
/// by explicit index arithmetic; the element order written is unchanged.
pub fn interleaved2sequenced(src: &[f32], dest: &mut [f32], sample_count: u32, channel_count: u32) {
    let samples = sample_count as usize;
    let channels = channel_count as usize;
    for channel in 0..channels {
        for sample in 0..samples {
            // interleaved index: sample * channels + channel
            // sequenced index: channel * samples + sample
            dest[channel * samples + sample] = src[sample * channels + channel];
        }
    }
}

View File

@ -1,264 +0,0 @@
use crate::audio::{AudioPacket, Codec};
use crate::audio::codec::opus::{Channels};
use std::rc::Rc;
use std::cell::RefCell;
use std::fmt::Formatter;
/// Errors surfaced by `AudioDecoder` when decoding an incoming packet.
#[derive(Debug, PartialEq)]
pub enum AudioDecodeError {
    /// The packet's codec byte matched no known codec.
    UnknownCodec,
    /// The codec is known but no decoder exists for it (Speex/Celt/Flac).
    UnsupportedCodec,
    /// Creating the underlying decoder failed; the bool flags whether the
    /// failure happened during this call (true) or on an earlier attempt.
    DecoderInitializeFailed(String, bool /* just now initialized */),
    /// A decoder was required but has not been initialized yet.
    DecoderUninitialized,
    InvalidPacket,
    /// Any other decode failure, with the underlying error message.
    UnknownDecodeError(String)
}
/// Lifecycle of a per-codec decoder: lazily created, and once creation
/// fails the error is cached so it is not retried.
enum DecoderState {
    Unset,
    Initialized(Rc<RefCell<dyn AudioCodecDecoder>>),
    InitializeFailed(String)
}
impl std::fmt::Debug for DecoderState {
    /// Manual Debug: the trait object inside `Initialized` cannot derive
    /// Debug, so only the variant names (and the cached error) are printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            DecoderState::Unset => f.write_str("DecoderState::Unset"),
            DecoderState::Initialized(_) => f.write_str("DecoderState::Initialized"),
            DecoderState::InitializeFailed(error) => {
                write!(f, "DecoderState::InitializeFailed({:?})", error)
            }
        }
    }
}
/// Per-client audio decoder holding one lazily created decoder per
/// supported Opus mode.
pub struct AudioDecoder {
    opus_decoder: DecoderState,       // mono VoIP-quality Opus
    opus_music_decoder: DecoderState, // stereo music-quality Opus
    last_decoded_codec: Codec,        // codec of the last successful decode; used by decode_lost
}
impl AudioDecoder {
    /// Create a decoder with no codec decoders initialized yet.
    pub fn new() -> AudioDecoder {
        AudioDecoder {
            opus_music_decoder: DecoderState::Unset,
            opus_decoder: DecoderState::Unset,
            // Default so decode_lost has a codec before the first decode.
            last_decoded_codec: Codec::Opus,
        }
    }

    /// Resolve the state slot for `codec`, rejecting codecs that are known
    /// but unsupported, or unknown altogether.
    fn decoder_state(&mut self, codec: Codec) -> Result<&mut DecoderState, AudioDecodeError> {
        match codec {
            Codec::Opus => {
                Ok(&mut self.opus_decoder)
            }
            Codec::OpusMusic => {
                Ok(&mut self.opus_music_decoder)
            }
            Codec::SpeexNarrow | Codec::SpeexWide | Codec::SpeexUltraWide | Codec::Celt | Codec::Flac => {
                Err(AudioDecodeError::UnsupportedCodec)
            }
            _ => {
                Err(AudioDecodeError::UnknownCodec)
            }
        }
    }

    /// Fetch the decoder for `codec`, creating and initializing it on first
    /// use when `initialize` is set. A failed initialization is cached and
    /// reported (with the `just now initialized` flag false) on later calls.
    fn get_decoder(&mut self, codec: Codec, initialize: bool) -> Result<Rc<RefCell<dyn AudioCodecDecoder>>, AudioDecodeError> {
        let decoder_state = self.decoder_state(codec)?;
        match decoder_state {
            DecoderState::Initialized(decoder) => {
                Ok(decoder.clone())
            }
            DecoderState::InitializeFailed(error) => {
                Err(AudioDecodeError::DecoderInitializeFailed(error.clone(), false))
            }
            DecoderState::Unset => {
                if !initialize {
                    return Err(AudioDecodeError::DecoderUninitialized);
                }
                let decoder: Option<Rc<RefCell<dyn AudioCodecDecoder>>>;
                match codec {
                    Codec::Opus => {
                        decoder = Some(Rc::new(RefCell::new(decoder::AudioOpusDecoder::new(Channels::Mono))));
                    }
                    Codec::OpusMusic => {
                        decoder = Some(Rc::new(RefCell::new(decoder::AudioOpusDecoder::new(Channels::Stereo))));
                    }
                    _ => {
                        // decoder_state() already rejected every other codec.
                        panic!("This should never be reached");
                    }
                }
                let decoder = decoder.unwrap();
                if let Err(error) = decoder.borrow_mut().initialize() {
                    // Cache the failure so the next call fails fast.
                    *decoder_state = DecoderState::InitializeFailed(error.clone());
                    return Err(AudioDecodeError::DecoderInitializeFailed(error, true));
                }
                *decoder_state = DecoderState::Initialized(decoder.clone());
                Ok(decoder)
            }
        }
    }

    /// Decode `packet` into `dest`, returning the decoded sample count and
    /// channel count as reported by the codec decoder.
    pub fn decode(&mut self, packet: &AudioPacket, dest: &mut Vec<f32>) -> Result<(usize /* samples */, u8 /* channels */), AudioDecodeError> {
        let audio_decoder = self.get_decoder(packet.codec, true)?;
        let mut audio_decoder = audio_decoder.borrow_mut();
        let result = audio_decoder.decode(&packet.payload, dest)?;
        // Remember the codec so packet loss is reported to the right decoder.
        self.last_decoded_codec = packet.codec;
        Ok(result)
    }

    /// Inform the active decoder about lost packets so it can conceal them.
    pub fn decode_lost(&mut self, _packet_count: usize) -> Result<(), AudioDecodeError> {
        /* if the decoder hasn't been initialized or something similar it's not worth creating one */
        if let Ok(decoder) = self.get_decoder(self.last_decoded_codec, false) {
            decoder.borrow_mut().decode_lost()?;
        }
        Ok(())
    }
}
/// Interface every codec decoder implementation has to provide.
trait AudioCodecDecoder {
    /// Initialize the decoder.
    /// On error occurrence, the error message will be returned
    fn initialize(&mut self) -> Result<(), String>;

    /// Decode the audio packet to float 32 interleaved samples.
    /// Returns the amount of samples decoded.
    fn decode(&mut self, src: &Vec<u8>, dest: &mut Vec<f32>) -> Result<(usize /* samples */, u8 /* channels */), AudioDecodeError>;

    /// Signal one lost packet; the decoder may prepare concealment for the next decode.
    fn decode_lost(&mut self) -> Result<(), AudioDecodeError>;
}
mod decoder {
    /* the opus implementation */
    use crate::audio::codec::opus::{Decoder, Channels, ErrorCode};
    use crate::audio::decoder::{AudioCodecDecoder, AudioDecodeError};
    use log::warn;

    /// Opus backed `AudioCodecDecoder` implementation.
    pub struct AudioOpusDecoder {
        // Channel layout the decoder was created with.
        pub channel_count: Channels,
        // Sample rate in Hz (fixed to 48 kHz in new()).
        pub sample_rate: u32,
        // Underlying opus decoder; None until initialize() succeeded.
        decoder: Option<Decoder>,
        /// If it's set it indicates that we have to do some FEC decoding.
        /// The option will hold the packet size, used for the FEC decoding.
        fec_decode: Option<usize>
    }

    impl AudioOpusDecoder {
        /// Create an uninitialized decoder for the given channel layout.
        pub fn new(channels: Channels) -> AudioOpusDecoder {
            AudioOpusDecoder {
                decoder: None,
                channel_count: channels,
                sample_rate: 48_000,
                fec_decode: None
            }
        }
    }

    impl AudioCodecDecoder for AudioOpusDecoder {
        fn initialize(&mut self) -> Result<(), String> {
            let decoder = Decoder::new(self.sample_rate, self.channel_count).map_err(|error| String::from(error.description()))?;
            self.decoder = Some(decoder);
            Ok(())
        }

        fn decode(&mut self, src: &Vec<u8>, dest: &mut Vec<f32>) -> Result<(usize, u8), AudioDecodeError> {
            if let Some(ref mut decoder) = self.decoder {
                /* number of samples (per channel) contained in this packet */
                let sample_count = decoder.get_nb_samples(src.as_slice())
                    .map_err(|_error| AudioDecodeError::InvalidPacket)?;
                let mut total_sample_count = 0;
                if let Some(fec_size) = self.fec_decode {
                    self.fec_decode = None;
                    /* reserve room for the concealed (FEC) samples plus the actual packet */
                    dest.resize(
                        fec_size as usize * self.channel_count as usize +
                        sample_count * self.channel_count as usize, 0f32);
                    /* fec=true recovers the previously lost packet from this packet's FEC data */
                    match decoder.decode_float(src.as_slice(), &mut dest[0..(fec_size * self.channel_count as usize)], true) {
                        Ok(sample_count) => total_sample_count += sample_count,
                        Err(error) => {
                            /* FEC failure is not fatal; we only lose the concealed samples.
                               NOTE(review): on failure `dest` was already resized to include the
                               FEC region, but total_sample_count stays 0, so the region is
                               overwritten by the regular decode below — confirm the trailing
                               fec_size*channels floats are ignored by callers. */
                            warn!("Failed to FEC decode opus packet: {}", error.description());
                        }
                    };
                } else {
                    dest.resize(sample_count * self.channel_count as usize, 0f32);
                }
                /* decode the actual packet behind the already produced FEC samples */
                match decoder.decode_float(src.as_slice(), &mut dest[(total_sample_count * self.channel_count as usize)..], false) {
                    Ok(sample_count) => Ok((total_sample_count + sample_count, self.channel_count as u8)),
                    Err(error) => match error.code() {
                        ErrorCode::InvalidPacket => {
                            Err(AudioDecodeError::InvalidPacket)
                        }
                        _ => {
                            Err(AudioDecodeError::UnknownDecodeError(String::from(error.description())))
                        }
                    }
                }
            } else {
                Err(AudioDecodeError::DecoderUninitialized)
            }
        }

        fn decode_lost(&mut self) -> Result<(), AudioDecodeError> {
            if let Some(ref mut decoder) = self.decoder {
                /* 960 is the default packet size for TeaSpeak */
                let packet_size = decoder.get_last_packet_duration().unwrap_or(960) as usize;
                /* remember to FEC decode this many samples with the next packet */
                self.fec_decode = Some(packet_size);
                Ok(())
            } else {
                Err(AudioDecodeError::DecoderUninitialized)
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::audio::decoder::{AudioDecoder, AudioDecodeError};
    use crate::audio::{AudioPacket, PacketId, Codec};

    /// Error behaviour for empty, minimal and unsupported-codec packets.
    #[test]
    fn test_invalid_packet() {
        let mut decoder = AudioDecoder::new();
        let mut buffer: Vec<f32> = Vec::new();

        /* an empty opus payload is not a valid packet */
        let packet = AudioPacket {
            codec: Codec::Opus,
            payload: vec![],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        assert_eq!(decoder.decode(&packet, &mut buffer), Err(AudioDecodeError::InvalidPacket));

        /* a minimal, syntactically valid opus frame must decode without error */
        let packet = AudioPacket {
            codec: Codec::Opus,
            payload: vec![0, 0, 1],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        decoder.decode(&packet, &mut buffer).expect("expected a result");

        /* FLAC is known, but no decoder implementation exists */
        let packet = AudioPacket {
            codec: Codec::Flac,
            payload: vec![],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        assert_eq!(decoder.decode(&packet, &mut buffer), Err(AudioDecodeError::UnsupportedCodec));
    }
}

View File

@ -1,548 +0,0 @@
#![allow(dead_code)]
use std::task::{Poll, Context, Waker};
use std::collections::VecDeque;
use std::ops::{ Deref };
use std::time::{SystemTime, Duration, UNIX_EPOCH};
use futures::{FutureExt};
use crate::audio::{AudioPacket, PacketId};
/// Events emitted by the `AudioPacketQueue`.
#[derive(Debug, PartialEq)]
pub enum AudioPacketQueueEvent {
    /// The next in-order packet of the stream, ready to be decoded.
    AudioPacket(Box<AudioPacket>),
    /// One or more consecutive packets have been declared as lost.
    PacketsLost(
        PacketLostReason /* reason for these packets to be counted as lost*/,
        u16 /* first lost packet id */,
        u16 /* lost packets */
    )
}
/// Why packets have been counted as lost.
#[derive(Debug, PartialEq)]
pub enum PacketLostReason {
    /// The packets have been failed to be received within a certain timeout
    Timeout,
    /// A packet sequence has been found after this packet.
    /// We've declared this packet as lost
    Sequence,
    /// We've enough new packets, which can be replayed.
    /// This is also the reason if we're resetting the sequence.
    ForceEnqueue
}
/// Reorders incoming, possibly out-of-order audio packets and turns them
/// into an ordered stream of `AudioPacketQueueEvent`s.
#[derive(Debug)]
pub struct AudioPacketQueue {
    /// The window size for packet id clipping.
    /// Must be at least 1!
    pub clipping_window: u16,
    /// Number of packets in a sequence to skip ahead to these packets and count the missing pieces as dropped
    pub skip_sequence_length: u32,
    /// Number of buffered packets after which the first packet gets forcefully replayed
    pub force_enqueue_buffer_length: u32,
    /// Timeout in milliseconds after which a packet will forcefully be replayed.
    /// The missing slices will be counted as lost
    pub packet_buffer_timeout: u32,
    /// Max size of the event queue
    pub event_queue_max_size: u32,
    /// Timestamp (millis) of the last handled packet
    last_packet_timestamp: i64,
    /// Last packet which has been handled
    last_packet_id: PacketId,
    /// The event waker will be called as soon new events have been scheduled.
    event_waker: Option<Waker>,
    /// The event queue contains all audio queue events which needs to get polled
    event_queue: VecDeque<AudioPacketQueueEvent>,
    /// Buffer for the out of order packets, kept sorted by packet id.
    /// The buffer should be at least the capacity of force_enqueue_buffer_length + 1 to prevent
    /// unwanted allocations.
    packet_buffer: VecDeque<EnqueuedPacket>,
    /// A timer which is used for processing non sequence packets after a certain timeout
    packet_buffer_timer: wasm_timer::Delay
}
/// Reasons why `AudioPacketQueue::enqueue_packet` can reject a packet.
#[derive(Debug, PartialEq)]
pub enum EnqueueError {
    /// A packet with that id already exists
    PacketAlreadyExists,
    /// The packet is too old
    PacketTooOld,
    /// Containing the current sequence packet id
    PacketSequenceMismatch(PacketId),
    /// Event queue is too long (You need to poll some events first)
    EventQueueOverflow
}
/// Current wall-clock time in milliseconds since the unix epoch.
/// Uses `js_sys::Date` in the browser and `SystemTime` everywhere else.
fn current_time_millis() -> i64 {
    #[cfg(target_arch = "wasm32")]
    let millis = js_sys::Date::now() as i64;

    #[cfg(not(target_arch = "wasm32"))]
    let millis = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_millis() as i64;

    millis
}
/// An audio packet held in the reorder buffer.
#[derive(Debug)]
struct EnqueuedPacket {
    /// The actual audio packet
    packet: Box<AudioPacket>,
    /// The timestamp of the enqueueing used for the packet timeout
    enqueue_timestamp: i64
}
impl Deref for EnqueuedPacket {
    type Target = AudioPacket;

    /// Grant direct access to the wrapped audio packet's fields.
    fn deref(&self) -> &Self::Target {
        &*self.packet
    }
}
impl AudioPacketQueue {
    /// Default window (in packet ids) used to decide whether an id counts as
    /// "newer" despite u16 wrap-around.
    const DEFAULT_CLIPPING_WINDOW: u16 = 256;

    /// Create a queue with the default tuning parameters.
    pub fn new() -> AudioPacketQueue {
        let instance = AudioPacketQueue {
            clipping_window: AudioPacketQueue::DEFAULT_CLIPPING_WINDOW,
            skip_sequence_length: 3,
            force_enqueue_buffer_length: 5,
            packet_buffer_timeout: 50,
            event_queue_max_size: 64,
            /* Decrease by one since we expect the initial packet to be enqueued soonly. */
            // NOTE(review): the id is simply 0 here; the first real packet (re)initializes
            // the sequence via enqueue_packet()'s timestamp check — confirm the comment.
            last_packet_id: PacketId{ packet_id: 0 },
            last_packet_timestamp: 0,
            packet_buffer: VecDeque::with_capacity(30),
            packet_buffer_timer: wasm_timer::Delay::new(Duration::from_millis(0)),
            event_waker: None,
            event_queue: VecDeque::with_capacity(30)
        };
        instance
    }

    /// Check whether `packet` plausibly belongs to the current sequence:
    /// it must be newer than the last replayed id, but not more than 20 ids ahead.
    fn test_sequence(&self, packet: &Box<AudioPacket>) -> Result<(), EnqueueError> {
        if !self.last_packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
            return Err(EnqueueError::PacketTooOld);
        } else if self.last_packet_id.difference(&packet.packet_id, Some(self.clipping_window)) > 20 {
            return Err(EnqueueError::PacketSequenceMismatch(self.last_packet_id.clone()));
        }
        Ok(())
    }

    /// Start a fresh sequence at `packet`, flushing (not dropping) any pending buffer.
    fn initialize_sequence(&mut self, packet: &Box<AudioPacket>) {
        self.reset_sequence(false);
        self.last_packet_timestamp = current_time_millis();
        self.last_packet_id = packet.packet_id - 1; /* reduce the last packet id by one so this packet is the next packet */
    }

    /// Enqueue a new audio packet
    pub fn enqueue_packet(&mut self, packet: Box<AudioPacket>, is_head_packet: bool) -> Result<(), EnqueueError> {
        let current_time = current_time_millis();
        /* check if we're expecting a sequence (last packet less than a second ago) */
        if current_time - self.last_packet_timestamp < 1000 {
            let sequence_result = self.test_sequence(&packet);
            if let Err(error) = sequence_result {
                if !is_head_packet {
                    return Err(error);
                }
                /* enforce a new sequence */
                self.initialize_sequence(&packet);
            }
        } else {
            /* we've a new sequence */
            self.initialize_sequence(&packet);
        }

        /* find the sorted insert position within the reorder buffer */
        let mut index = 0;
        while index < self.packet_buffer.len() {
            let element = &self.packet_buffer[index];
            if !element.packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
                break;
            }
            index += 1;
        }

        let packet = EnqueuedPacket{ packet, enqueue_timestamp: current_time };
        if self.event_queue.len() > self.event_queue_max_size as usize {
            return Err(EnqueueError::EventQueueOverflow);
        }

        if index >= self.packet_buffer.len() {
            self.packet_buffer.push_back(packet);
        } else if self.packet_buffer[index].packet_id == packet.packet_id {
            return Err(EnqueueError::PacketAlreadyExists);
        } else {
            self.packet_buffer.insert(index, packet);
        }

        self.try_assemble_packets();
        Ok(())
    }

    /// Reset the current packet sequence.
    /// If you want to enqueue the pending packet buffer, which sequence hasn't been finished yet,
    /// set the first parameter to false
    pub fn reset_sequence(&mut self, drop_pending_buffers: bool) {
        self.last_packet_id = PacketId{ packet_id: 0 };
        self.last_packet_timestamp = 0;
        if drop_pending_buffers {
            self.clear_buffers();
        } else if !self.packet_buffer.is_empty() {
            /* collect first: draining borrows the buffer while we mutate the event queue */
            for packet in self.packet_buffer.drain(..).collect::<Vec<EnqueuedPacket>>() {
                self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::ForceEnqueue);
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
            }
            if let Some(waker) = &self.event_waker {
                waker.wake_by_ref();
            }
        }
    }

    /// Advance the last packet it to the target packet it.
    /// If the new packet id isn't consecutive to the current one, an PacketsLost event will be enqueued.
    /// The event waker will not be called.
    fn advance_last_packet(&mut self, packet_id: PacketId, drop_reason: PacketLostReason) {
        if self.last_packet_id + 1 != packet_id {
            self.event_queue.push_back(AudioPacketQueueEvent::PacketsLost(
                drop_reason,
                self.last_packet_id.packet_id.wrapping_add(1),
                self.last_packet_id.difference(&packet_id, Some(self.clipping_window)) - 1
            ));
        }
        self.last_packet_id = packet_id;
    }

    /// Clear all pending audio packets
    fn clear_buffers(&mut self) {
        self.packet_buffer.clear();
    }

    /// Get the number of pending events
    pub fn pending_events(&self) -> usize {
        self.event_queue.len()
    }

    /// Get the next event, manly used for testing purposes
    pub fn pop_event(&mut self) -> Option<AudioPacketQueueEvent> {
        self.event_queue.pop_front()
    }

    /// Poll for a events.
    /// This method should be invoked regularly, else not every packet will be processed property.
    /// Registers the task's waker so new events wake the caller.
    pub fn poll_event(&mut self, cx: &mut Context<'_>) -> Poll<AudioPacketQueueEvent> {
        /* expire timed-out packets whenever the buffer timer fires */
        if let Poll::Ready(_) = self.packet_buffer_timer.poll_unpin(cx) {
            self.update_packet_timeouts(Some(cx));
        }
        if let Some(event) = self.pop_event() {
            Poll::Ready(event)
        } else {
            self.event_waker = Some(cx.waker().clone());
            Poll::Pending
        }
    }

    /// Move as many buffered packets as possible into the event queue:
    /// consecutive head packets first, then sequence skips, then forced replays.
    fn try_assemble_packets(&mut self) {
        while let Some(head) = self.packet_buffer.front() {
            if head.packet_id == self.last_packet_id + 1 {
                /* yeah, we received the next packet in the sequence */
                let packet = self.packet_buffer.pop_front().unwrap();
                self.last_packet_id = packet.packet_id;
                self.last_packet_timestamp = current_time_millis();
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                if let Some(waker) = &self.event_waker {
                    waker.wake_by_ref();
                }
            } else {
                break;
            }
        }
        if self.packet_buffer.is_empty() {
            return;
        }

        /* test if somewhere are more than three packets in a row */
        {
            let mut index = 0;
            let mut sequence_index = 0;
            let mut sequence_count = 0;
            let mut expected_packet_id = self.packet_buffer.front().unwrap().packet_id;
            while index < self.packet_buffer.len() {
                if self.packet_buffer[index].packet_id != expected_packet_id {
                    /* gap found: restart counting at this packet */
                    sequence_index = index;
                    sequence_count = 1;
                    expected_packet_id = self.packet_buffer[index].packet_id + 1;
                } else {
                    sequence_count += 1;
                    expected_packet_id = expected_packet_id + 1;
                    if sequence_count == self.skip_sequence_length {
                        break;
                    }
                }
                index += 1;
            }
            if sequence_count == self.skip_sequence_length {
                /* okey we can skip */
                /* include the first packet of the sequence */
                let packets: Vec<EnqueuedPacket> = self.packet_buffer.drain(0..(sequence_index + 1)).collect();
                for packet in packets {
                    self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::Sequence);
                    self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                }
                self.last_packet_timestamp = current_time_millis();
                if let Some(waker) = &self.event_waker {
                    waker.wake_by_ref();
                }
                /* now lets replay the next sequence */
                self.try_assemble_packets();
                return;
            } else {
                /* we've no sequence in a row */
            }
        }

        /* force replay first packet, the a bit seek behind mode */
        {
            if self.packet_buffer.len() > self.force_enqueue_buffer_length as usize {
                let packets: Vec<EnqueuedPacket> = self.packet_buffer.drain(0..(self.packet_buffer.len() - self.force_enqueue_buffer_length as usize)).collect();
                for packet in packets {
                    self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::ForceEnqueue);
                    self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                    self.last_packet_timestamp = current_time_millis();
                }
            }
        }
        self.update_packet_timeouts(None);
    }

    /// Replay head packets whose buffer time exceeded `packet_buffer_timeout`
    /// and (re)arm the timer for the next pending packet.
    fn update_packet_timeouts(&mut self, cx: Option<&mut Context<'_>>) {
        let timeout_time = current_time_millis() - self.packet_buffer_timeout as i64;
        let mut packet_scheduled = false;
        // NOTE(review): the unconditional `break` below makes this `while` act like an
        // `if` — only the head packet can expire per call; presumably all timed-out
        // packets were meant to be flushed — confirm.
        while let Some(head) = self.packet_buffer.front() {
            if timeout_time > head.enqueue_timestamp {
                let packet = self.packet_buffer.pop_front().unwrap();
                self.advance_last_packet(packet.packet_id, PacketLostReason::Timeout);
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                packet_scheduled = true;
            }
            break;
        }
        if packet_scheduled {
            if let Some(waker) = &self.event_waker {
                waker.wake_by_ref();
            }
        }
        if let Some(head) = self.packet_buffer.front() {
            let current_time = current_time_millis();
            if let Some(cx) = cx {
                let passed_millis = current_time - head.enqueue_timestamp;
                // NOTE(review): this compares an elapsed duration against the absolute
                // timestamp `timeout_time`; presumably it should compare against
                // `self.packet_buffer_timeout as i64` — confirm.
                if passed_millis >= timeout_time {
                    cx.waker().wake_by_ref();
                } else {
                    /* rearm the delay for the remaining time of the head packet */
                    self.packet_buffer_timer.reset(Duration::from_millis((self.packet_buffer_timeout as i64 - passed_millis) as u64));
                    let _ = self.packet_buffer_timer.poll_unpin(cx);
                }
            }
        }
    }
}
// NOTE(review): the queue stores a `Waker`; this manual Send is presumably only
// sound in the single-threaded wasm environment — confirm.
unsafe impl Send for AudioPacketQueue {}

impl Drop for AudioPacketQueue {
    fn drop(&mut self) {
        /* drop any still-buffered packets */
        self.clear_buffers();
    }
}
#[cfg(test)]
mod tests {
    use super::{ AudioPacketQueue, EnqueueError };
    use crate::audio::packet_queue::{AudioPacketQueueEvent, PacketLostReason};
    use tokio::future::poll_fn;
    use tokio_test::block_on;
    use std::sync::{Arc, Mutex};
    use ntest::timeout;
    use crate::audio::{AudioPacket, PacketId, Codec};

    /// Enqueue a dummy opus packet with the given id (payload content is irrelevant here).
    fn enqueue_packet(queue: &mut AudioPacketQueue, packet_id: u16) -> Result<(), EnqueueError> {
        queue.enqueue_packet(Box::new(AudioPacket {
            packet_id: PacketId{ packet_id },
            client_id: 0,
            codec: Codec::Opus,
            payload: vec![]
        }), false)
    }

    /// Drain and print all pending events; fails if events occur unexpectedly.
    /// (Name is presumably a typo of "drain_queued_events".)
    fn darin_queued_events(queue: &mut AudioPacketQueue, _expect_events: bool) {
        let mut events_processed = 0;
        while let Some(event) = queue.pop_event() {
            match event {
                AudioPacketQueueEvent::AudioPacket(packet) => {
                    println!("Having an audio packet: {:?}", packet);
                },
                AudioPacketQueueEvent::PacketsLost(reason, first_packet, count) => {
                    println!("{:?} packets got lost due to {:?} (first packet id: {:?})", count, reason, first_packet);
                }
            }
            events_processed += 1;
        }
        if !_expect_events && events_processed > 0 {
            assert!(false, "we haven't expected any events but processed {} events", events_processed);
        }
    }

    /// Expect the next event to be an audio packet, optionally with a specific id.
    fn expect_queued_packet_event(queue: &mut AudioPacketQueue, packet_id: Option<u16>) {
        if let Some(event) = queue.pop_event() {
            match event {
                AudioPacketQueueEvent::AudioPacket(packet) => {
                    if let Some(packet_id) = packet_id {
                        assert_eq!(packet_id, packet.packet_id.packet_id);
                    } else {
                        println!("Having an audio packet: {:?}", packet);
                    }
                },
                _ => {
                    assert!(false, "Expected a packet event");
                }
            }
        } else {
            assert!(false, "expected an event, but there wasn't one");
        }
    }

    /// Out-of-order insertion across the u16 wrap-around boundary,
    /// including duplicate rejection and loss reporting.
    #[test]
    //#[timeout(3000)]
    fn queue_insert_0() {
        let mut queue =AudioPacketQueue::new();
        enqueue_packet(&mut queue, 0xFFFC).unwrap();
        //enqueue_packet(&mut queue, 0xFFFF).unwrap();
        //enqueue_packet(&mut queue, 0xFFFD).unwrap();
        enqueue_packet(&mut queue, 0xFFFE).unwrap();
        enqueue_packet(&mut queue, 2).unwrap();
        enqueue_packet(&mut queue, 0).unwrap();
        enqueue_packet(&mut queue, 2).expect_err("packet should be already registered");
        enqueue_packet(&mut queue, 1).unwrap();
        enqueue_packet(&mut queue, 2).expect_err("packet should be already registered");

        expect_queued_packet_event(&mut queue,Some(0xFFFC));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 0xFFFD, 1));
        expect_queued_packet_event(&mut queue,Some(0xFFFE));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 0xFFFF, 1));
        expect_queued_packet_event(&mut queue,Some(0));
        expect_queued_packet_event(&mut queue,Some(1));
        expect_queued_packet_event(&mut queue,Some(2));
        darin_queued_events(&mut queue, false);
    }

    /// Forced replay once the buffer exceeds force_enqueue_buffer_length,
    /// followed by sequence-skip replays.
    #[test]
    fn test_queue_force_window() {
        let mut queue = AudioPacketQueue::new();
        queue.force_enqueue_buffer_length = 5;
        queue.skip_sequence_length = 3;

        enqueue_packet(&mut queue, 0).expect("failed to enqueue packet");
        expect_queued_packet_event(&mut queue, Some(0));
        enqueue_packet(&mut queue, 2).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 4).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 6).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 8).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 10).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 12).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::ForceEnqueue, 1, 1));
        expect_queued_packet_event(&mut queue, Some(2));
        enqueue_packet(&mut queue, 13).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::ForceEnqueue, 3, 1));
        expect_queued_packet_event(&mut queue, Some(4));
        enqueue_packet(&mut queue, 14).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 5, 1));
        expect_queued_packet_event(&mut queue, Some(6));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 7, 1));
        expect_queued_packet_event(&mut queue, Some(8));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 9, 1));
        expect_queued_packet_event(&mut queue, Some(10));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 11, 1));
        expect_queued_packet_event(&mut queue, Some(12));
        expect_queued_packet_event(&mut queue, Some(13));
        expect_queued_packet_event(&mut queue, Some(14));
        darin_queued_events(&mut queue, false);
    }

    /// A buffered packet with a missing predecessor must time out and be
    /// reported as lost via poll_event().
    #[test]
    #[timeout(500)]
    fn test_queue_packet_timeout() {
        block_on(async {
            let queue = Arc::new(Mutex::new(AudioPacketQueue::new()));
            {
                let mut queue = queue.lock().unwrap();
                enqueue_packet(&mut queue, 0).expect("failed to enqueue packet");
                expect_queued_packet_event(&mut queue, Some(0));
                darin_queued_events(&mut queue, false);
                enqueue_packet(&mut queue, 2).expect("failed to enqueue packet");
                darin_queued_events(&mut queue, false);
            }
            {
                let queue = queue.clone();
                let next_event = poll_fn(move |cx| { queue.lock().unwrap().poll_event(cx) }).await;
                assert_eq!(next_event, AudioPacketQueueEvent::PacketsLost(PacketLostReason::Timeout, 1, 1));
            }
            {
                let mut queue = queue.lock().unwrap();
                darin_queued_events(&mut queue, true);
            }
        });
    }
}

View File

@ -1,133 +0,0 @@
use std::collections::HashMap;
use std::sync::{ Arc, Mutex };
use std::sync::atomic::{ AtomicU32, Ordering };
use once_cell::sync::Lazy;
use crate::audio::packet_queue::{AudioPacketQueue, AudioPacketQueueEvent, EnqueueError};
use futures;
use crate::audio::decoder::{AudioDecoder};
use wasm_bindgen_futures::spawn_local;
use futures::future::{ poll_fn };
use crate::audio::{AudioPacket};
use log::*;
/// Id used by the JS side to reference an `AudioClient`.
pub type AudioClientId = u32;

/// Consumer of decoded audio for one client.
pub trait AudioCallback {
    /// Allocate the vector the result should be stored into
    fn callback_buffer(&mut self) -> &mut Vec<f32>;
    /// Invoked after a packet has been decoded into the callback buffer.
    fn handle_audio(&mut self, sample_count: usize, channel_count: u8);
    /// Invoked when the audio stream signalled its end (stop packet).
    fn handle_stop(&mut self);
}
/// One remote audio source: queues incoming packets, decodes them and
/// forwards the result to the registered callback.
pub struct AudioClient {
    pub client_id: AudioClientId,
    // Reorders / de-jitters the incoming packets.
    packet_queue: Mutex<AudioPacketQueue>,
    // Per-codec opus decoders.
    decoder: Mutex<AudioDecoder>,
    // Handle used to cancel the loop spawned by dispatch_processing_in_this_thread().
    audio_process_abort_handle: Mutex<Option<futures::future::AbortHandle>>,
    // Consumer of decoded audio; None until registered from JS.
    audio_callback: Mutex<Option<Box<dyn AudioCallback>>>,
}
/// Global registry of all live audio clients, keyed by client id.
type AudioClientRegistry = Mutex<HashMap<AudioClientId, Arc<AudioClient>>>;

// Next client id to hand out (starts at 1, so 0 is never a valid id).
static AUDIO_CLIENT_ID: AtomicU32 = AtomicU32::new(1);
static AUDIO_CLIENT_INSTANCES: Lazy<AudioClientRegistry> = Lazy::new(|| Mutex::new(HashMap::new()));
impl AudioClient {
    /// Look up a client by id in the global registry.
    pub fn find_client(client_id: AudioClientId) -> Option<Arc<AudioClient>> {
        AUDIO_CLIENT_INSTANCES.lock().unwrap().get(&client_id).map(|client| client.clone())
    }

    /// Create a new client, assign it a unique id and register it globally.
    pub fn new() -> Arc<AudioClient> {
        let client_id = AUDIO_CLIENT_ID.fetch_add(1, Ordering::Relaxed);
        let instance = Arc::new(AudioClient {
            client_id,
            packet_queue: Mutex::new(AudioPacketQueue::new()),
            decoder: Mutex::new(AudioDecoder::new()),
            audio_callback: Mutex::new(None),
            audio_process_abort_handle: Mutex::new(None)
        });
        AUDIO_CLIENT_INSTANCES.lock().unwrap().insert(client_id, instance.clone());
        instance
    }

    /// Unregister the client and stop its processing loop.
    pub fn destroy(&self) {
        AUDIO_CLIENT_INSTANCES.lock().unwrap().remove(&self.client_id);
        self.abort_audio_processing();
    }

    /// Hand a received packet to the reorder queue.
    pub fn enqueue_audio_packet(&self, packet: Box<AudioPacket>, is_head_packet: bool) -> Result<(), EnqueueError> {
        self.packet_queue.lock().unwrap().enqueue_packet(packet, is_head_packet)?;
        Ok(())
    }

    /// Register (or clear) the consumer of decoded audio.
    pub fn set_audio_callback(&self, callback: Option<Box<dyn AudioCallback>>) {
        *self.audio_callback.lock().unwrap() = callback;
    }

    /// Abort the processing loop, if one is currently running.
    pub fn abort_audio_processing(&self) {
        let handle = &mut *self.audio_process_abort_handle.lock().unwrap();
        if let Some(ref abort_handle) = handle {
            abort_handle.abort()
        }
        *handle = None;
    }

    /// Spawn the packet processing loop on the current thread. The loop polls
    /// queue events, drives the decoder and forwards decoded audio to the
    /// registered callback until aborted.
    pub fn dispatch_processing_in_this_thread(client: Arc<AudioClient>) {
        let client_copy = client.clone();
        let (future, abort_handle) = futures::future::abortable(async move {
            loop {
                /* re-clone each iteration: the poll_fn closure takes ownership of `client` */
                let client = client_copy.clone();
                let packet_event = poll_fn(|cx| client.packet_queue.lock().unwrap().poll_event(cx)).await;
                let client = client_copy.clone();
                match packet_event {
                    AudioPacketQueueEvent::PacketsLost(_reason, _first_packet, count) => {
                        //debug!("{:?} packets got lost due to {:?} (first packet id: {:?})", count, reason, first_packet);
                        /* let the decoder prepare loss concealment for the next packet */
                        if let Err(error) = client.decoder.lock().unwrap().decode_lost(count.into()) {
                            error!("Failed to execute decode lost packet: {:?}", error);
                        };
                    }
                    AudioPacketQueueEvent::AudioPacket(packet) => {
                        if packet.is_stop() {
                            if let Some(ref mut callback) = *client.audio_callback.lock().unwrap() {
                                callback.handle_stop();
                            }
                        } else {
                            let mut callback = client.audio_callback.lock().unwrap();
                            if callback.is_none() {
                                // NOTE(review): without a registered callback the loop
                                // terminates permanently instead of skipping the packet —
                                // confirm this is intended.
                                break;
                            }
                            let callback = callback.as_mut().unwrap();
                            /* decode directly into the callback-provided buffer */
                            let callback_buffer = callback.callback_buffer();
                            let decode_result = client.decoder.lock().unwrap().decode(&*packet, callback_buffer);
                            if let Ok(decoded) = decode_result {
                                callback.handle_audio(decoded.0, decoded.1);
                            } else {
                                warn!("Failed to decode audio packet: {:?}", decode_result.unwrap_err());
                            }
                        }
                    }
                }
            }
        });
        *client.audio_process_abort_handle.lock().unwrap() = Some(abort_handle);
        spawn_local(async { let _ = future.await; });
    }
}
impl Drop for AudioClient {
    fn drop(&mut self) {
        /* make sure the spawned loop can't outlive the client */
        self.abort_audio_processing();
        debug!("Audio client destroyed");
    }
}

// NOTE(review): manual Send/Sync for a type holding `Box<dyn AudioCallback>`
// (not declared Send) is presumably only sound in the single-threaded wasm
// build — confirm.
unsafe impl Sync for AudioClient {}
unsafe impl Send for AudioClient {}

View File

@ -1,141 +0,0 @@
#![feature(c_variadic)]
extern crate wasm_bindgen;
#[cfg(target_arch = "wasm32")]
extern crate console_error_panic_hook;
mod audio;
mod audio_client;
use wasm_bindgen::prelude::*;
use js_sys;
use log::*;
use crate::audio::codec::opus;
use crate::audio_client::{AudioClientId, AudioClient, AudioCallback};
use crate::audio::{AudioPacket, Codec, PacketId};
use crate::audio::packet_queue::EnqueueError;
use crate::audio::converter::interleaved2sequenced;
use once_cell::unsync::Lazy;
#[cfg(not(target_arch = "wasm32"))]
extern crate simple_logger;
/* Browser functions imported via wasm-bindgen. */
#[wasm_bindgen]
extern {
    /// Browser `console.log`.
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);

    /// Browser `alert`.
    #[wasm_bindgen]
    fn alert(s: &str);
}
/// Initialize the audio library (logging + readable panic messages).
/// If the initialization failed, optional result will contain the error.
#[wasm_bindgen]
pub fn initialize() -> Option<String> {
    #[cfg(target_arch = "wasm32")]
    {
        /* Forward `log` records to the browser console. The original ignored this
           Result although the doc promises to return initialization errors. */
        if let Err(error) = console_log::init_with_level(Level::Trace) {
            return Some(error.to_string());
        }
        /* Print readable panic messages instead of "unreachable executed". */
        std::panic::set_hook(Box::new(console_error_panic_hook::hook));
    }

    info!("Initializing audio lib with opus version: {}", opus::version());
    None
}
/// Create a new audio client, start its processing loop on the current
/// thread and return the new client's id.
#[wasm_bindgen]
pub fn audio_client_create() -> AudioClientId {
    let client = AudioClient::new();
    AudioClient::dispatch_processing_in_this_thread(client.clone());
    client.client_id
}
/// Enqueue a received audio packet for the given client.
/// If an error occurs or the client isn't known an exception will be thrown.
#[wasm_bindgen]
pub fn audio_client_enqueue_buffer(client_id: AudioClientId, buffer: &[u8], packet_id: u16, codec: u8, is_head_packet: bool) -> Result<(), JsValue> {
    let client = AudioClient::find_client(client_id).ok_or_else(|| JsValue::from_str("missing audio client"))?;

    let packet = Box::new(AudioPacket {
        client_id: 0,
        codec: Codec::from_u8(codec),
        packet_id: PacketId{ packet_id },
        payload: buffer.to_vec()
    });

    /* map queue errors onto JS exceptions */
    client.enqueue_audio_packet(packet, is_head_packet).map_err(|error| {
        let message = match error {
            EnqueueError::PacketAlreadyExists => "packet already exists",
            EnqueueError::PacketSequenceMismatch(_) => "packet belongs to an invalid sequence",
            EnqueueError::PacketTooOld => "packet is too old",
            EnqueueError::EventQueueOverflow => "event queue overflow"
        };
        JsValue::from_str(message)
    })
}
/// Bridges decoded audio from the processing loop into a JavaScript callback.
struct JsAudioCallback {
    // JS function invoked as callback(bufferPtr, sampleCount, channelCount).
    callback: js_sys::Function,
}

/* No locking needed, within the web client no multi threading is needed */
// NOTE(review): `static mut` is only sound while the wasm build stays
// single-threaded — confirm before enabling threads.
static mut AUDIO_SEQUENCED_BUFFER: Lazy<Vec<f32>> = Lazy::new(|| Vec::new());
static mut AUDIO_BUFFER: Lazy<Vec<f32>> = Lazy::new(|| Vec::new());
impl AudioCallback for JsAudioCallback {
    /// Decoders write interleaved samples into this shared buffer.
    fn callback_buffer(&mut self) -> &mut Vec<f32> {
        unsafe { &mut *AUDIO_BUFFER }
    }

    /// Forward decoded audio to JS as (buffer pointer, sample count, channel count).
    fn handle_audio(&mut self, sample_count: usize, channel_count: u8) {
        if channel_count > 1 {
            /* the JS side expects sequenced (planar) samples; the decoder produced interleaved ones */
            let sequenced_buffer = unsafe { &mut *AUDIO_SEQUENCED_BUFFER };
            sequenced_buffer.resize(sample_count * channel_count as usize, 0f32);
            interleaved2sequenced(
                unsafe { &mut *AUDIO_BUFFER }.as_slice(),
                sequenced_buffer.as_mut_slice(),
                sample_count as u32,
                channel_count as u32
            );
            let _ = self.callback.call3(
                &JsValue::undefined(),
                &JsValue::from(sequenced_buffer.as_ptr() as u32),
                &JsValue::from(sample_count as u16),
                &JsValue::from(channel_count)
            );
        } else {
            /* mono: the decode buffer can be passed directly */
            let _ = self.callback.call3(
                &JsValue::undefined(),
                &JsValue::from(unsafe { &mut *AUDIO_BUFFER }.as_ptr() as u32),
                &JsValue::from(sample_count as u16),
                &JsValue::from(channel_count)
            );
        }
    }

    /// Signal end of stream: undefined buffer with zero counts.
    fn handle_stop(&mut self) {
        let _ = self.callback.call3(
            &JsValue::undefined(),
            &JsValue::undefined(),
            &JsValue::from(0),
            &JsValue::from(0)
        );
    }
}
/// Register a JS function which receives the client's decoded audio buffers.
#[wasm_bindgen]
pub fn audio_client_buffer_callback(client_id: AudioClientId, callback: js_sys::Function) -> Result<(), JsValue> {
    let client = AudioClient::find_client(client_id)
        .ok_or_else(|| JsValue::from_str("missing audio client"))?;

    let js_callback = JsAudioCallback { callback };
    client.set_audio_callback(Some(Box::new(js_callback)));
    Ok(())
}
/// Destroy the given audio client and stop its processing loop.
#[wasm_bindgen]
pub fn audio_client_destroy(client_id: AudioClientId) -> Result<(), JsValue> {
    let client = AudioClient::find_client(client_id)
        .ok_or_else(|| JsValue::from_str("missing audio client"))?;

    client.destroy();
    debug!("Destroying client");
    Ok(())
}

View File

@ -1,6 +0,0 @@
generated/
build_/
libraries/opus/build_
libraries/opus/out
cmake-build-*/
libraries/opus/*

View File

@ -1,25 +0,0 @@
# Emscripten build of the TeaWeb opus worker module.
cmake_minimum_required(VERSION 3.9)
project(TeaWeb-Native)
set (CMAKE_CXX_STANDARD 17)

# Build the bundled opus library for emscripten.
function(import_opus)
    # Native SIMD isn't supported yet by most browsers (only experimental)
    # But since opus already detects if emscripten is able to handle SIMD we have no need to disable this explicitly

    # Disable the math.h warning spam:
    # #warning "Don't have the functions lrint() and lrintf ()."
    # #warning "Replacing these functions with a standard C cast."
    set(CMAKE_C_FLAGS "-Wno-#warnings")

    set(OPUS_STACK_PROTECTOR OFF CACHE BOOL "" FORCE)
    add_subdirectory(libraries/opus/)
endfunction()
import_opus()

# Emscripten compile/link flags for the worker wasm module; init.js runs before the module.
set(CMAKE_CXX_FLAGS "-O3 --llvm-lto 1 --memory-init-file 0 -s WASM=1 -s ASSERTIONS=1") # -s ALLOW_MEMORY_GROWTH=1 -O3
set(CMAKE_EXE_LINKER_FLAGS "-s MODULARIZE=1 -s EXPORTED_FUNCTIONS='[\"_malloc\", \"_free\"]' -s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -s ENVIRONMENT='worker' --pre-js ${CMAKE_SOURCE_DIR}/init.js") #
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/generated/")

add_executable(TeaWeb-Worker-Codec-Opus src/opus.cpp)
target_link_libraries(TeaWeb-Worker-Codec-Opus opus)

View File

@ -1,23 +0,0 @@
#!/bin/bash
# Build the emscripten opus worker into build_/ (always from a clean directory).
cd "$(dirname "$0")" || { echo "Failed to enter base dir"; exit 1; }

# Remove any previous build directory.
[[ -d build_ ]] && {
    rm -r build_ || { echo "failed to remove old build directory"; exit 1; }
}

mkdir build_ || exit 1
cd build_ || exit 1

# Configure via emscripten's cmake wrapper; retried once since the first run
# occasionally fails.
emcmake cmake .. || {
    echo "emcmake cmake failed for the first time, trying it again" #IDKW but sometimes it does not work the first try
    cd . # Sometimes helps
    emcmake cmake .. || {
        echo "Failed to execute cmake"
        exit 1
    }
}

# Build with all available cores.
emmake make -j"$(nproc --all)" || {
    echo "Failed to build file"
    exit 1
}

View File

@ -1,35 +0,0 @@
#!/usr/bin/env bash
# Fetches the prebuilt Opus worker (JS glue + WASM binary) from web.teaspeak.de
# into the generated/ directory, recreating that directory from scratch.

cd "$(dirname "$0")" || exit 1

# Start with an empty generated/ directory.
if [[ -d generated/ ]]; then
    rm -r generated || {
        echo "Failed to remove old directory!"
        exit 1
    }
fi

mkdir generated || {
    echo "Failed to create the 'generated' directory!"
    exit 1
}

# Ensure curl is available before attempting any download.
curl --version &> /dev/null || {
    echo "Missing CURL. Please install it"
    exit 1
}

# download_file <url> <target> <description>
# Fix: --fail makes curl exit non-zero on HTTP errors (e.g. 404); without it
# the server's error page would be saved as the target file and the script
# would report success.
download_file() {
    curl --fail "$1" --output "$2" || {
        echo "Failed to download $3"
        exit 1
    }
}

download_file https://web.teaspeak.de/wasm/TeaWeb-Worker-Codec-Opus.js generated/TeaWeb-Worker-Codec-Opus.js "opus worker library"
download_file https://web.teaspeak.de/wasm/TeaWeb-Worker-Codec-Opus.wasm generated/TeaWeb-Worker-Codec-Opus.wasm "opus worker library natives"

echo "Files downloaded successfully"

View File

@ -1,2 +0,0 @@
/* Run every registered emscripten module-init hook against the Module object.
   A missing or non-array __init_em_module simply results in no callbacks. */
(Array.isArray(self.__init_em_module) ? self.__init_em_module : []).forEach(callback => callback(Module));

View File

@ -1,68 +0,0 @@
//
// Created by WolverinDEV on 12/06/2020.
//
/* source and target should not be intersecting! */
/* Converts planar audio (all samples of channel 0, then channel 1, ...)
   into interleaved frames (c0,c1,c0,c1,...). */
template <size_t kChannelCount>
void sequenced2interleaved(float* source, float* target, size_t sample_count) {
#pragma unroll
    for(size_t channel = 0; channel < kChannelCount; channel++) {
        const float* plane = source + channel * sample_count;
        for(size_t sample = 0; sample < sample_count; sample++) {
            target[sample * kChannelCount + channel] = plane[sample];
        }
    }
}
/* source and target should not be intersecting! */
/* Converts interleaved frames (c0,c1,c0,c1,...) into planar audio
   (all samples of channel 0, then channel 1, ...). */
template <size_t kChannelCount>
void interleaved2sequenced(float* source, float* target, size_t sample_count) {
#pragma unroll
    for(size_t channel = 0; channel < kChannelCount; channel++) {
        float* plane = target + channel * sample_count;
        for(size_t sample = 0; sample < sample_count; sample++) {
            plane[sample] = source[sample * kChannelCount + channel];
        }
    }
}
/* Scratch space (8K floats) shared by the in-place conversions below. */
#define kTempBufferSize (1024 * 8)
/* since js is single threaded we need no lock here */
float temp_buffer[kTempBufferSize];

/* In-place variant of interleaved2sequenced: the data is first copied to
   scratch space because the plain converter requires non-overlapping buffers.
   NOTE(review): the malloc fallback for oversized inputs is not checked for
   nullptr — confirm allocation failure is acceptable to ignore here. */
template <size_t kChannelCount>
void interleaved2sequenced_intersecting(float* buffer, size_t sample_count) {
    auto temp = temp_buffer;
    /* Fall back to a heap allocation when the data exceeds the scratch buffer. */
    if(sample_count * kChannelCount > kTempBufferSize)
        temp = (float*) malloc(sample_count * sizeof(float) * kChannelCount);

    memcpy(temp, buffer, sample_count * sizeof(float) * kChannelCount);
    interleaved2sequenced<kChannelCount>(temp, buffer, sample_count);

    if(temp != temp_buffer)
        free(temp);
}

/* In-place variant of sequenced2interleaved; same scratch-copy approach
   (and the same unchecked-malloc caveat) as above. */
template <size_t kChannelCount>
void sequenced2interleaved_intersecting(float* buffer, size_t sample_count) {
    auto temp = temp_buffer;
    if(sample_count * kChannelCount > kTempBufferSize)
        temp = (float*) malloc(sample_count * sizeof(float) * kChannelCount);

    memcpy(temp, buffer, sample_count * sizeof(float) * kChannelCount);
    sequenced2interleaved<kChannelCount>(temp, buffer, sample_count);

    if(temp != temp_buffer)
        free(temp);
}

View File

@ -1,113 +0,0 @@
#include <opus.h>
#include <array>
#include <string_view>
#include <emscripten.h>
#include <string>
#include "./ilarvecon.cpp"
/* RAII wrappers: the deleter is the matching opus_*_destroy function. */
typedef std::unique_ptr<OpusEncoder, decltype(opus_encoder_destroy)*> opus_encoder_t;
typedef std::unique_ptr<OpusDecoder, decltype(opus_decoder_destroy)*> opus_decoder_t;

/* Per-stream codec state, handed across the JS/WASM boundary as an opaque pointer. */
struct OpusHandle {
    opus_encoder_t encoder{nullptr, opus_encoder_destroy};
    opus_decoder_t decoder{nullptr, opus_decoder_destroy};

    /* Codec parameters; applied when the reinitialize_* helpers (re)create the states. */
    size_t channelCount{1};
    size_t sampleRate{48000};
    int opusType{OPUS_APPLICATION_AUDIO};
};
/* Human readable messages for the negative libopus error codes
   (OPUS_BAD_ARG = -1 … OPUS_ALLOC_FAIL = -7), indexed by abs(code) - 1. */
constexpr std::array<std::string_view, 7> opus_errors = {
        "One or more invalid/out of range arguments", //-1 (OPUS_BAD_ARG)
        "Not enough bytes allocated in the buffer", //-2 (OPUS_BUFFER_TOO_SMALL)
        "An internal error was detected", //-3 (OPUS_INTERNAL_ERROR)
        "The compressed data passed is corrupted", //-4 (OPUS_INVALID_PACKET)
        "Invalid/unsupported request number", //-5 (OPUS_UNIMPLEMENTED)
        "An encoder or decoder structure is invalid or already freed", //-6 (OPUS_INVALID_STATE)
        "Memory allocation has failed" //-7 (OPUS_ALLOC_FAIL)
};

/*
 * Maps a libopus error code to a human readable message.
 * Accepts either the negative code or its absolute value;
 * unknown codes yield "undefined error".
 */
inline std::string_view opus_error_message(int error) {
    error = abs(error);
    /* Bound by the table size instead of the former magic constant 7 so the
       check stays correct if entries are ever added or removed. */
    if (error > 0 && static_cast<size_t>(error) <= opus_errors.size()) return opus_errors[error - 1];
    return "undefined error";
}
/* (Re)creates the decoder state from the parameters stored on the handle.
   Returns false (and logs the libopus error) when creation fails. */
inline bool reinitialize_decoder(OpusHandle *handle) {
    int error;
    auto *decoder = opus_decoder_create(handle->sampleRate, handle->channelCount, &error);
    handle->decoder.reset(decoder);

    if(error == OPUS_OK) {
        return true;
    }

    printf("Failed to create decoder (%s)\n", opus_error_message(error).data());
    return false;
}
/* (Re)creates the encoder state from the parameters stored on the handle and
   applies the complexity setting. Returns false (and logs) on any failure. */
inline bool reinitialize_encoder(OpusHandle *handle) {
    int error;
    auto *encoder = opus_encoder_create(handle->sampleRate, handle->channelCount, handle->opusType, &error);
    handle->encoder.reset(encoder);

    if (error != OPUS_OK) {
        printf("Failed to create encoder (%s)\n", opus_error_message(error).data());
        return false;
    }

    error = opus_encoder_ctl(&*handle->encoder, OPUS_SET_COMPLEXITY(1));
    if (error != OPUS_OK) {
        printf("Failed to setup encoder (%s)\n", opus_error_message(error).data());
        return false;
    }

    //TODO: May set OPUS_SET_BITRATE(4740)?
    //TODO: Is the encoder event needed anymore? Or is it just overhead
    return true;
}
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Allocates and initializes a codec handle for the given channel count and
 * opus application type. Returns nullptr when either coder state cannot be
 * created (the failure is logged by the reinitialize_* helpers).
 */
EMSCRIPTEN_KEEPALIVE
OpusHandle *codec_opus_createNativeHandle(size_t channelCount, int type) {
    auto codec = new OpusHandle{};
    codec->opusType = type;
    codec->channelCount = channelCount;
    codec->sampleRate = 48000;

    /* Fix: the original leaked `codec` on the failure paths. */
    if (!reinitialize_decoder(codec) || !reinitialize_encoder(codec)) {
        delete codec;
        return nullptr;
    }
    return codec;
}
/* Destroys a handle created by codec_opus_createNativeHandle; nullptr is a no-op. */
EMSCRIPTEN_KEEPALIVE
void codec_opus_deleteNativeHandle(OpusHandle *codec) {
    if (!codec) {
        return;
    }

    /* The unique_ptr members release the opus coder states in ~OpusHandle. */
    delete codec;
}
/* Encodes the float PCM in `buffer` in place: the packet is written over the
   input bytes. Returns the packet length, or a negative opus error code. */
EMSCRIPTEN_KEEPALIVE
int codec_opus_encode(OpusHandle *handle, uint8_t *buffer, size_t byte_length, size_t maxLength) {
    /* Stereo arrives as two sequential channel planes; opus expects interleaved samples. */
    if(handle->channelCount == 2) {
        sequenced2interleaved_intersecting<2>((float *) buffer, byte_length / (sizeof(float) * 2));
    }

    const auto samples_per_channel = byte_length / (handle->channelCount * sizeof(float));
    return opus_encode_float(&*handle->encoder, (float *) buffer, samples_per_channel, buffer, maxLength);
}
/* Decodes the opus packet in `buffer` in place: the float PCM overwrites the
   packet bytes. Returns decoded samples per channel, or a negative error code. */
EMSCRIPTEN_KEEPALIVE
int codec_opus_decode(OpusHandle *handle, uint8_t *buffer, size_t byte_length, size_t buffer_max_byte_length) {
    const auto max_samples = buffer_max_byte_length / sizeof(float) / handle->channelCount;
    const auto result = opus_decode_float(&*handle->decoder, buffer, byte_length, (float *) buffer, max_samples, false);

    /* On success, convert interleaved stereo back to sequential channel planes. */
    if(result >= 0 && handle->channelCount == 2) {
        interleaved2sequenced_intersecting<2>((float *) buffer, result);
    }
    return result;
}
/* Recreates both coder states from the handle's stored parameters.
   Returns 1 on success, 0 when either state could not be created. */
EMSCRIPTEN_KEEPALIVE
int codec_opus_reset(OpusHandle *handle) {
    const bool success = reinitialize_decoder(handle) && reinitialize_encoder(handle);
    return success ? 1 : 0;
}
#ifdef __cplusplus
}
#endif

View File

@ -1,6 +1,5 @@
import * as path from "path";
import * as config_base from "./webpack.config";
const WasmPackPlugin = require("@wasm-tool/wasm-pack-plugin");
export = () => config_base.config("web").then(config => {
Object.assign(config.entry, {
@ -10,19 +9,11 @@ export = () => config_base.config("web").then(config => {
Object.assign(config.resolve.alias, {
"tc-shared": path.resolve(__dirname, "shared/js"),
"tc-backend/audio-lib": path.resolve(__dirname, "web/audio-lib/pkg"),
"tc-backend/web": path.resolve(__dirname, "web/app"),
"tc-backend": path.resolve(__dirname, "web/app"),
});
config.node = config.node || {};
config.node["fs"] = "empty";
config.plugins.push(new WasmPackPlugin({
crateDirectory: path.resolve(__dirname, "web", "audio-lib"),
outName: "index",
//forceMode: "profiling",
outDir: "pkg"
}));
return Promise.resolve(config);
});

View File

@ -16,6 +16,7 @@ const ManifestGenerator = require("./webpack/ManifestPlugin");
const WorkerPlugin = require('worker-plugin');
const TerserPlugin = require('terser-webpack-plugin');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const { WebpackManifestPlugin } = require('webpack-manifest-plugin');
export let isDevelopment = process.env.NODE_ENV === 'development';
console.log("Webpacking for %s (%s)", isDevelopment ? "development" : "production", process.env.NODE_ENV || "NODE_ENV not specified");
@ -119,7 +120,6 @@ export const config = async (target: "web" | "client"): Promise<Configuration> =
}
}
}),
new LoaderIndexGenerator({
buildTarget: target,
output: path.join(__dirname, "dist/index.html"),