Reworked the web audio decoding

canary
WolverinDEV 2020-09-01 12:53:42 +02:00
parent a98503285f
commit 1c50051a4a
39 changed files with 3183 additions and 1364 deletions

View File

@ -1,4 +1,9 @@
# Changelog:
* **31.08.20**
- Reworked the audio decode system
- Improved audio decode performance
- Heavily improved the audio quality for users with packet loss
* **24.08.20**
- Fixed the country icon path for the native client
- Fixed the context menu for the native client (It errored because some icons generated by the sprite generator were misaligned)

54
package-lock.json generated
View File

@ -1578,6 +1578,48 @@
"integrity": "sha512-l5ID65aPDctN/dZYkDgLOEBuoHrD8S9TyfD5soORUtVHKyOs7Wr66iNxAtcmT/tER1GeYqp51jR6l08gmsRcZg==",
"dev": true
},
"@wasm-tool/wasm-pack-plugin": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/@wasm-tool/wasm-pack-plugin/-/wasm-pack-plugin-1.3.1.tgz",
"integrity": "sha512-8AXgN80fbbLvuROYuNsBow/MiK+VeNCzZ3WyCxwZKMIyISd1WwompVG0pLMypXd4rYnttsRyXvQqW3vDdoXZRg==",
"dev": true,
"requires": {
"chalk": "^2.4.1",
"command-exists": "^1.2.7",
"watchpack": "^1.6.0"
},
"dependencies": {
"ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"requires": {
"color-convert": "^1.9.0"
}
},
"chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"requires": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
}
},
"supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"requires": {
"has-flag": "^3.0.0"
}
}
}
},
"@webassemblyjs/ast": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.9.0.tgz",
@ -3344,6 +3386,12 @@
"delayed-stream": "~1.0.0"
}
},
"command-exists": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz",
"integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==",
"dev": true
},
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
@ -15104,9 +15152,9 @@
}
},
"worker-plugin": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/worker-plugin/-/worker-plugin-4.0.2.tgz",
"integrity": "sha512-V+1zSZMOOKk+uBzKyNIODLQLsx59zSIOaI75J1EMS0iR1qy+KQR3y/pQ3T0vIhvPfDFapGRMsoMvQNEL3okqSA==",
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/worker-plugin/-/worker-plugin-4.0.3.tgz",
"integrity": "sha512-7hFDYWiKcE3yHZvemsoM9lZis/PzurHAEX1ej8PLCu818Rt6QqUAiDdxHPCKZctzmhqzPpcFSgvMCiPbtooqAg==",
"dev": true,
"requires": {
"loader-utils": "^1.1.0"

View File

@ -41,6 +41,7 @@
"@types/twemoji": "^12.1.1",
"@types/websocket": "0.0.40",
"@types/xml-parser": "^1.2.29",
"@wasm-tool/wasm-pack-plugin": "^1.3.1",
"babel-loader": "^8.1.0",
"chunk-manifest-webpack-plugin": "^1.1.2",
"circular-dependency-plugin": "^5.2.0",
@ -78,7 +79,7 @@
"webpack-bundle-analyzer": "^3.6.1",
"webpack-cli": "^3.3.11",
"webpack-svg-sprite-generator": "^1.0.16",
"worker-plugin": "^4.0.2",
"worker-plugin": "^4.0.3",
"xml-parser": "^1.2.1"
},
"repository": {

View File

@ -0,0 +1,35 @@
/* Round-trip timing statistics attached to every executed command result. */
export type MessageTimings = {
    upstream: number;   /* ms the response spent traveling back to the sender */
    downstream: number; /* ms the command spent traveling to the handler */
    handle: number;     /* ms the handler needed to produce the response */
};

/* Result of a successfully handled command. */
export type ExecuteResultSuccess<T> = {
    timings: MessageTimings,
    success: true,
    result: T
}

/* Result of a command which failed, timed out or could not be posted. */
export type ExecuteResultError = {
    timings: MessageTimings,
    success: false,
    error: string
}

/* Discriminated union over the `success` flag. */
export type ExecuteResult<Result = any> = ExecuteResultError | ExecuteResultSuccess<Result>;

/* Map of command name -> command payload type. */
export type GenericCommands = {[key: string]: any};

/* For every command (in either direction) the mapping names the response:
   either a key of the opposite command set or a plain result type. */
export type GenericCommandMapping<CommandsToWorker extends GenericCommands, CommandsFromWorker extends GenericCommands> = {
    [Key in keyof CommandsToWorker | keyof CommandsFromWorker]: any
}

/* Resolves the response payload type of `Command`: when the mapping entry is a
   string literal it is used as a key into the receiving command set, otherwise
   the mapping entry itself is the response type. */
export type CommandResponseType<
    SendCommands extends GenericCommands,
    ReceiveCommands extends GenericCommands,
    Mapping extends GenericCommandMapping<SendCommands, ReceiveCommands>,
    Command extends keyof SendCommands> =
    Mapping[Command] extends string ? ReceiveCommands[Mapping[Command]] : Mapping[Command];

/* Map of notify name -> notify payload type. */
export type GenericNotify = {[key: string]: any};

View File

@ -0,0 +1,43 @@
/* Fields shared by both command response variants. */
type WorkerMessageCommandResponseBase = {
    type: "response";
    token: string;             /* token of the command this response belongs to */
    timestampReceived: number; /* Date.now() when the command arrived at the handler */
    timestampSend: number;     /* Date.now() when the response was posted back */
}

export type WorkerMessageCommandResponseSuccess = WorkerMessageCommandResponseBase & {
    status: "success";
    result: any;
}

export type WorkerMessageCommandResponseError = WorkerMessageCommandResponseBase & {
    status: "error";
    error: string;
}

export type WorkerMessageCommandResponse = WorkerMessageCommandResponseError | WorkerMessageCommandResponseSuccess;

/* A command execution request; answered with a WorkerMessageCommandResponse
   carrying the same token. */
export type WorkerMessageCommand<Commands = {[key: string]: any}> = {
    type: "command";
    token: string;
    command: keyof Commands;
    payload: any;
}

/* One-way notification without any response. */
export type WorkerMessageNotify = {
    type: "notify",
    notify: string,
    payload: any
}

export type WorkerMessage<Commands> = WorkerMessageCommand<Commands> | WorkerMessageCommandResponse | WorkerMessageNotify;

/* Command handler; may return the response directly or as a promise.
   Transferable objects for the response can be appended to context.transferObjects. */
export type MessageHandler<Payload, Response> = (payload: Payload, context: MessageContext) => Response | Promise<Response>;

export type MessageContext = {
    transferObjects: Transferable[]
};

export type NotifyHandler<Payload> = (payload: Payload) => void;

237
shared/js/workers/Worker.ts Normal file
View File

@ -0,0 +1,237 @@
import {CommandResponseType, ExecuteResult, GenericCommandMapping, GenericCommands, GenericNotify} from "./Definitions";
import {
MessageContext,
MessageHandler, NotifyHandler,
WorkerMessage,
WorkerMessageCommand, WorkerMessageCommandResponseError,
WorkerMessageCommandResponseSuccess,
WorkerMessageNotify
} from "./Protocol";
/* Bookkeeping for a command which is awaiting its response. */
type PendingCommand = {
    timeout?: any,           /* handle of the timeout, set when a timeout was requested */
    timestampSend: number,   /* Date.now() at the moment the command was posted */
    callbackResolve: (_: ExecuteResult) => void;
}

export interface WorkerEvents {
    notify_worker_died: {}
}
/**
 * Bidirectional command/notify channel between a worker and its owner.
 *
 * Both endpoints (the owner, see WorkerOwner, and the worker itself, see
 * WorkerHandler) extend this class; only the raw message posting differs and
 * is supplied via the abstract postMessage().
 */
export abstract class WorkerCommunication<
    CommandsSend extends GenericCommands,
    CommandsReceive extends GenericCommands,
    CommandMapping extends GenericCommandMapping<CommandsSend, CommandsReceive>,
    NotifySend extends GenericNotify,
    NotifyReceive extends GenericNotify
> {
    /* Used to generate a unique token for every executed command */
    private tokenIndex = 0;

    protected pendingCommands: {[key: string]: PendingCommand} = {};
    protected messageHandlers: {[key: string]: MessageHandler<any, any>} = {};
    protected notifyHandlers: {[key: string]: NotifyHandler<any>} = {};

    protected constructor() { }

    /* Register the handler invoked when the remote side executes `command`. */
    registerMessageHandler<Command extends keyof CommandsReceive>(command: Command, handler: MessageHandler<CommandsReceive[Command], CommandResponseType<CommandsReceive, CommandsSend, CommandMapping, Command>>) {
        this.messageHandlers[command as any] = handler;
    }

    /* Register the handler invoked when the remote side fires `notify`. */
    registerNotifyHandler<Notify extends keyof NotifyReceive>(notify: Notify, handler: NotifyHandler<NotifyReceive[Notify]>) {
        this.notifyHandlers[notify as any] = handler;
    }

    /**
     * Execute a command on the remote side.
     * Never rejects; failures (handler error, timeout, post failure) are
     * returned as an ExecuteResult with success === false.
     *
     * @param command the command to execute
     * @param data the command payload
     * @param timeout optional timeout in milliseconds
     * @param transfer objects to transfer along with the message
     */
    execute<T extends keyof CommandsSend>(
        command: T,
        data: CommandsSend[T],
        timeout?: number,
        transfer?: Transferable[]
    ) : Promise<ExecuteResult<CommandResponseType<CommandsSend, CommandsReceive, CommandMapping, T>>> {
        return new Promise<ExecuteResult>(resolve => {
            const token = this.tokenIndex++ + "_token";
            this.pendingCommands[token] = {
                timeout: typeof timeout === "number" ? setTimeout(() => {
                    this.pendingCommands[token]?.callbackResolve({
                        success: false,
                        error: "command timed out",
                        timings: { upstream: 0, handle: 0, downstream: 0 }
                    });
                }, timeout) : undefined,
                callbackResolve: result => {
                    /* callbackResolve does the full cleanup so every resolve path
                       (response, timeout, post failure) leaves no stale state behind */
                    clearTimeout(this.pendingCommands[token]?.timeout);
                    delete this.pendingCommands[token];
                    resolve(result);
                },
                timestampSend: Date.now()
            };

            try {
                this.postMessage({
                    command: command,
                    type: "command",
                    payload: data,
                    token: token
                } as WorkerMessageCommand, transfer);
            } catch (error) {
                let message;
                if(typeof error === "string") {
                    message = error;
                } else if(error instanceof Error) {
                    message = error.message;
                } else {
                    console.error("Failed to post a message: %o", error);
                    message = "lookup the console";
                }

                this.pendingCommands[token].callbackResolve({
                    success: false,
                    error: message,
                    timings: {
                        downstream: 0,
                        handle: 0,
                        upstream: 0
                    }
                });
            }
        });
    }

    /* Like execute() but throws the error string instead of returning an error result. */
    async executeThrow<T extends keyof CommandsSend>(
        command: T,
        data: CommandsSend[T],
        timeout?: number,
        transfer?: Transferable[]
    ) : Promise<CommandResponseType<CommandsSend, CommandsReceive, CommandMapping, T>> {
        const response = await this.execute(command, data, timeout, transfer);
        if(response.success === false) {
            throw response.error;
        }

        return response.result;
    }

    /* Fire a one-way notify on the remote side. */
    notify<T extends keyof NotifySend>(notify: T, payload: NotifySend[T], transfer?: Transferable[]) {
        this.postMessage({
            type: "notify",
            notify: notify,
            payload: payload
        } as WorkerMessageNotify, transfer);
    }

    /* Dispatch an incoming message: a notify, a response to one of our
       commands, or a command execution request from the remote side. */
    protected handleMessage(message: WorkerMessage<CommandsReceive>) {
        const timestampReceived = Date.now();
        if(message.type === "notify") {
            const notifyHandler = this.notifyHandlers[message.notify];
            if(typeof notifyHandler !== "function") {
                console.warn("Received unknown notify (%s)", message.notify);
                return;
            }

            notifyHandler(message.payload);
            return;
        } else if(message.type === "response") {
            const request = this.pendingCommands[message.token];
            if(typeof request !== "object") {
                console.warn("Received execute result for unknown token (%s)", message.token);
                return;
            }
            delete this.pendingCommands[message.token];
            clearTimeout(request.timeout);

            /* downstream/handle/upstream are derived from the two remote timestamps
               and our own send/receive times */
            if(message.status === "success") {
                request.callbackResolve({
                    timings: {
                        downstream: message.timestampReceived - request.timestampSend,
                        handle: message.timestampSend - message.timestampReceived,
                        upstream: Date.now() - message.timestampSend
                    },
                    success: true,
                    result: message.result
                });
            } else {
                request.callbackResolve({
                    timings: {
                        downstream: message.timestampReceived - request.timestampSend,
                        handle: message.timestampSend - message.timestampReceived,
                        upstream: Date.now() - message.timestampSend
                    },
                    success: false,
                    error: message.error
                });
            }
        } else if(message.type === "command") {
            const command = message as WorkerMessageCommand;

            /* Fix: route responses through this.postMessage instead of the global
               postMessage. The global only exists inside a worker scope (so command
               handling on the owner side was broken), and the previous
               (message, undefined, transfer) call used the window signature which
               ignores the transfer list inside a worker. */
            const sendExecuteError = error => {
                let errorMessage;
                if(typeof error === "string") {
                    errorMessage = error;
                } else if(error instanceof Error) {
                    console.error("Message handle error: %o", error);
                    errorMessage = error.message;
                } else {
                    console.error("Message handle error: %o", error);
                    errorMessage = "lookup the console";
                }

                this.postMessage({
                    type: "response",
                    status: "error",
                    error: errorMessage,
                    timestampReceived: timestampReceived,
                    timestampSend: Date.now(),
                    token: command.token
                } as WorkerMessageCommandResponseError);
            };

            const sendExecuteResult = (result, transfer) => {
                this.postMessage({
                    type: "response",
                    status: "success",
                    result: result,
                    timestampReceived: timestampReceived,
                    timestampSend: Date.now(),
                    token: command.token
                } as WorkerMessageCommandResponseSuccess, transfer);
            };

            const handler = this.messageHandlers[message.command as any];
            if(!handler) {
                sendExecuteError("unknown command");
                return;
            }

            let context = {
                transferObjects: []
            } as MessageContext;

            let response;
            try {
                response = handler(command.payload, context);
            } catch(error) {
                response = Promise.reject(error);
            }

            (response instanceof Promise ? response : Promise.resolve(response)).then(result => {
                sendExecuteResult(result, context.transferObjects);
            }).catch(error => sendExecuteError(error));
            return;
        } else {
            console.warn("Received unknown message of type %s. This should never happen!", (message as any).type);
            return;
        }
    }

    protected abstract postMessage(message: WorkerMessage<CommandsSend>, transfer?: Transferable[]);
}

View File

@ -0,0 +1,23 @@
import {GenericCommandMapping, GenericCommands, GenericNotify} from "./Definitions";
import {WorkerCommunication} from "tc-shared/workers/Worker";
import {WorkerMessage} from "tc-shared/workers/Protocol";
/**
 * Worker side endpoint of the communication channel.
 * Note that the send/receive type parameters are flipped when passed to the
 * base class: the worker receives CommandsToWorker and sends CommandsFromWorker.
 */
export class WorkerHandler<
    CommandsToWorker extends GenericCommands,
    CommandsFromWorker extends GenericCommands,
    CommandMapping extends GenericCommandMapping<CommandsToWorker, CommandsFromWorker>,
    NotifyToWorker extends GenericNotify = never,
    NotifyFromWorker extends GenericNotify = never
> extends WorkerCommunication<CommandsFromWorker, CommandsToWorker, CommandMapping, NotifyFromWorker, NotifyToWorker> {
    constructor() {
        super();
    }

    /* Start listening for messages posted by the owner. */
    initialize() {
        addEventListener("message", event => this.handleMessage(event.data));
    }

    protected postMessage(message: WorkerMessage<CommandsFromWorker>, transfer?: Transferable[]) {
        /* Fix: DedicatedWorkerGlobalScope.postMessage takes (message, transfer).
           The previous three-argument call (message, undefined, transfer) matched
           the *window* signature, so the transfer list was silently ignored and
           the objects were structured-cloned instead of transferred.
           The cast bridges the DOM (window) typing of the global postMessage. */
        (postMessage as any)(message, transfer);
    }
}

View File

@ -0,0 +1,74 @@
import * as log from "../log";
import {LogCategory, logDebug, logWarn} from "../log";
import {WorkerMessage, WorkerMessageCommand, WorkerMessageNotify} from "./Protocol";
import {CommandResponseType, ExecuteResult, GenericCommandMapping, GenericCommands, GenericNotify} from "./Definitions";
import {tr} from "../i18n/localize";
import {Registry} from "../events";
import {WorkerCommunication} from "tc-shared/workers/Worker";
/* NOTE(review): this type duplicates PendingCommand from "./Worker" and does
   not appear to be referenced within this file — candidate for removal. */
type PendingCommand = {
    timeout?: any,
    timestampSend: number,
    callbackResolve: (_: ExecuteResult) => void;
}

/* Events fired by the WorkerOwner event registry. */
export interface WorkerEvents {
    notify_worker_died: {}
}

/* Factory creating the actual Worker instance (supplied by the platform code). */
export type WorkerFactory = () => Worker;
/**
 * Owner (main thread) side endpoint of the worker communication channel.
 * Wraps the actual Worker instance created via the supplied factory.
 */
export class WorkerOwner<
    CommandsToWorker extends GenericCommands,
    CommandsFromWorker extends GenericCommands,
    CommandMapping extends GenericCommandMapping<CommandsToWorker, CommandsFromWorker>,
    NotifyToWorker extends GenericNotify = never,
    NotifyFromWorker extends GenericNotify = never
> extends WorkerCommunication<CommandsToWorker, CommandsFromWorker, CommandMapping, NotifyToWorker, NotifyFromWorker> {
    readonly events: Registry<WorkerEvents>;
    private readonly factory: WorkerFactory;
    private worker: Worker;  /* undefined while no worker is alive */

    constructor(factory: WorkerFactory) {
        super();

        this.events = new Registry<WorkerEvents>();
        this.factory = factory;
    }

    isAlive() : boolean {
        return !!this.worker;
    }

    /* Create the worker via the factory and hook up its message/error events. */
    async spawnWorker() {
        this.worker = this.factory();
        this.worker.onmessage = event => this.handleWorkerMessage(event.data);
        this.worker.onerror = () => this.handleWorkerError();
    }

    private handleWorkerMessage(message: WorkerMessage<any>) {
        super.handleMessage(message);
    }

    /* Drop the dead worker, fail all still pending commands and
       fire notify_worker_died. */
    private handleWorkerError() {
        logDebug(LogCategory.GENERAL, tr("A worker died. Closing worker."));
        this.worker = undefined;

        for(const token of Object.keys(this.pendingCommands)) {
            this.pendingCommands[token].callbackResolve({
                success: false,
                error: tr("worker terminated with an error"),
                timings: { downstream: 0, handle: 0, upstream: 0}
            });
            /* callbackResolve already removes the entry; this delete is a no-op safety net */
            delete this.pendingCommands[token];
        }

        this.events.fire("notify_worker_died");
    }

    protected postMessage(message: WorkerMessage<CommandsToWorker>, transfer?: Transferable[]) {
        if(!this.worker) {
            throw tr("worker is not alive");
        }

        this.worker.postMessage(message, transfer);
    }
}

2
web/.gitignore vendored
View File

@ -7,3 +7,5 @@ app/**/*.css.map
app/**/*.js
app/**/*.js.map
!app/audio-lib/async_require.js

View File

@ -0,0 +1,35 @@
import {AudioLibrary} from "tc-backend/web/audio-lib/index";
import {tr} from "tc-shared/i18n/localize";
import {LogCategory, logWarn} from "tc-shared/log";
/**
 * Handle to a single remote (worker side) audio decoder instance.
 */
export class AudioClient {
    private readonly handle: AudioLibrary;
    private readonly clientId: number;

    /* Invoked by the owning AudioLibrary for every decoded audio chunk */
    public callback_decoded: (buffer: AudioBuffer) => void;
    /* Invoked when the remote decoder signals the end of the stream */
    public callback_ended: () => void;

    constructor(handle: AudioLibrary, clientId: number) {
        this.handle = handle;
        this.clientId = clientId;
    }

    async initialize() { }

    destroy() {
        this.handle.destroyClient(this.clientId);
    }

    /* Hand an encoded audio packet over to the decode worker.
       The underlying ArrayBuffer is transferred and must not be used afterwards. */
    enqueueBuffer(buffer: Uint8Array, packetId: number, codec: number) {
        this.handle.getWorker().executeThrow("enqueue-audio-packet", {
            clientId: this.clientId,
            codec: codec,
            packetId: packetId,
            buffer: buffer.buffer,
            byteLength: buffer.byteLength,
            byteOffset: buffer.byteOffset,
        }, 5000, [buffer.buffer]).catch(error => {
            logWarn(LogCategory.AUDIO, tr("Failed to enqueue audio buffer for audio client %d: %o"), this.clientId, error);
        });
    }
}

View File

@ -0,0 +1,50 @@
/* from handle to worker */
export interface AWCommand {
    "initialize": {},
    "create-client": {},
    /* One encoded audio packet; `buffer` is transferred to the worker */
    "enqueue-audio-packet": {
        clientId: number,
        packetId: number,
        codec: number,
        buffer: ArrayBuffer,
        byteLength: number,
        byteOffset: number,
    },
    "destroy-client": {
        clientId: number
    }
}

/* from worker to handle */
export interface AWCommandResponse {
    "create-client-result": { clientId: number }
}

/* Maps each command to its response: either a key of the opposite command
   interface or the plain result type. */
export interface AWMessageRelations {
    "initialize": void,
    "create-client": "create-client-result",
    "create-client-result": never,
    "enqueue-audio-packet": void,
    "destroy-client": void
}

/* host to worker notifies */
export interface AWNotifies {}

/* worker to host notifies */
export interface AWNotifiesWorker {
    /* A chunk of decoded PCM data (f32 samples, planar channel layout);
       channelCount === 0 or byteLength === 0 marks the end of the stream */
    "notify-decoded-audio": {
        clientId: number,
        buffer: ArrayBuffer,
        byteLength: number,
        byteOffset: number,
        channelCount: number,
        sampleRate: number
    }
}

View File

@ -0,0 +1,92 @@
import * as loader from "tc-loader";
import {Stage} from "tc-loader";
import {tr} from "tc-shared/i18n/localize";
import {LogCategory, logWarn} from "tc-shared/log";
import {WorkerOwner} from "tc-shared/workers/WorkerOwner";
import {AudioClient} from "tc-backend/web/audio-lib/AudioClient";
import {
    AWCommand,
    AWCommandResponse,
    AWMessageRelations,
    AWNotifies,
    AWNotifiesWorker
} from "tc-backend/web/audio-lib/WorkerMessages";
export type AudioLibraryWorker = WorkerOwner<AWCommand, AWCommandResponse, AWMessageRelations, AWNotifies, AWNotifiesWorker>;

/**
 * Owner side wrapper around the audio decode worker.
 * Spawns the worker, turns incoming "notify-decoded-audio" payloads into
 * AudioBuffers and routes them to the registered AudioClient instances.
 */
export class AudioLibrary {
    private readonly worker: AudioLibraryWorker;
    private registeredClients: {[key: number]: AudioClient} = {};

    constructor() {
        this.worker = new WorkerOwner(() => {
            /*
             * Attention don't use () => new Worker(...).
             * This confuses the worker plugin and will not emit any modules
             */
            return new Worker("./worker/index.ts", { type: "module" });
        });
    }

    async initialize() {
        await this.worker.spawnWorker();
        await this.worker.executeThrow("initialize", {}, 10000);

        this.worker.registerNotifyHandler("notify-decoded-audio", payload => {
            /* An empty buffer (or zero channels) marks the end of the stream */
            if(payload.channelCount === 0 || payload.byteLength === 0) {
                this.registeredClients[payload.clientId]?.callback_ended();
                return;
            }

            /* The payload carries f32 samples in planar layout:
               all samples of channel 0 first, then channel 1, ... */
            let buffer = new Float32Array(payload.buffer, payload.byteOffset, payload.byteLength / 4);
            let audioBuffer = new AudioBuffer({ length: buffer.length / payload.channelCount, numberOfChannels: payload.channelCount, sampleRate: payload.sampleRate });
            for(let channel = 0; channel < payload.channelCount; channel++) {
                /* copyToChannel only copies audioBuffer.length samples from the subarray */
                audioBuffer.copyToChannel(buffer.subarray(channel * audioBuffer.length), channel);
            }

            this.registeredClients[payload.clientId]?.callback_decoded(audioBuffer);
        });
    }

    /* Create a new decoder client within the worker.
       On a failed local initialization the worker side client gets destroyed again. */
    async createClient() {
        const { clientId } = await this.worker.executeThrow("create-client", {}, 5000);
        const wrapper = new AudioClient(this, clientId);
        try {
            await wrapper.initialize();
        } catch (error) {
            this.worker.executeThrow("destroy-client", { clientId: clientId }).catch(error => {
                logWarn(LogCategory.AUDIO, tr("Failed to destroy client after a failed initialialization: %o"), error);
            });
            throw error;
        }
        this.registeredClients[clientId] = wrapper;
        return wrapper;
    }

    /* Unregister the client locally and destroy its worker side counterpart. */
    destroyClient(clientId: number) {
        delete this.registeredClients[clientId];
        this.worker.execute("destroy-client", { clientId: clientId }).then(result => {
            if(result.success === false) {
                logWarn(LogCategory.AUDIO, tr("Failed to destroy audio client %d: %s"), clientId, result.error);
            }
        });
    }

    getWorker() : AudioLibraryWorker {
        return this.worker;
    }
}
/* Singleton instance, created by the loader task below */
let audioLibrary: AudioLibrary;

export function getAudioLibrary() {
    return audioLibrary;
}

/* Spawn and initialize the audio worker during client startup. */
loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
    name: "audio lib init",
    priority: 10,
    function: async () => {
        audioLibrary = new AudioLibrary();
        await audioLibrary.initialize();
    }
});

View File

@ -0,0 +1,7 @@
import * as lib from "../../../audio-lib/pkg/index";
/* Type of the wasm-pack generated audio library module, extended by the
   exported WebAssembly memory so raw pointers can be dereferenced from JS. */
export type AudioLibrary = (typeof lib) & {
    memory: WebAssembly.Memory
}

/* Asynchronously loads the wasm audio library instance. */
export function getAudioLibraryInstance() : Promise<AudioLibrary>;

View File

@ -0,0 +1,62 @@
import {WorkerHandler} from "tc-shared/workers/WorkerHandler";
import {
AWCommand,
AWCommandResponse,
AWMessageRelations,
AWNotifies,
AWNotifiesWorker
} from "tc-backend/web/audio-lib/WorkerMessages";
import {AudioLibrary, getAudioLibraryInstance} from "./async_require";
/*
 * Fix since rust wasm is used to run in normal space, not as worker.
 */
(self as any).Window = (self as any).DedicatedWorkerGlobalScope;

let audioLibrary: AudioLibrary;

/* Load the wasm module and run its global initialization.
   NOTE(review): an initialization error is only logged here, so the
   "initialize" command still reports success to the owner — confirm
   whether it should throw/propagate instead. */
export async function initializeAudioLib() {
    audioLibrary = await getAudioLibraryInstance();
    const error = audioLibrary.initialize();
    if(typeof error === "string") {
        console.error("Failed to initialize the audio lib: %s", error);
    }
}
const workerHandler = new WorkerHandler<AWCommand, AWCommandResponse, AWMessageRelations, AWNotifies, AWNotifiesWorker>();
workerHandler.initialize();

/* Create a new decoder instance within the wasm library and forward every
   chunk of decoded audio to the owner. */
workerHandler.registerMessageHandler("create-client", () => {
    const client = audioLibrary.audio_client_create();
    audioLibrary.audio_client_buffer_callback(client, (ptr, samples, channels) => {
        try {
            /* Copy the samples out of the wasm heap; the heap view must not
               outlive this callback (the wasm memory may grow/move). */
            const sendBuffer = new Uint8Array(samples * channels * 4);
            sendBuffer.set(new Uint8Array(audioLibrary.memory.buffer, ptr, samples * channels * 4));

            /* Fix: pass the buffer as a transfer object so it is moved to the
               owner instead of being structured-cloned a second time.
               sendBuffer is freshly allocated per chunk, so detaching it is safe. */
            workerHandler.notify("notify-decoded-audio", {
                buffer: sendBuffer.buffer,
                byteLength: sendBuffer.byteLength,
                byteOffset: sendBuffer.byteOffset,
                clientId: client,
                sampleRate: 48000,
                channelCount: channels
            }, [sendBuffer.buffer]);
        } catch (error) {
            console.error(error);
        }
    });

    return {
        clientId: client
    }
});

workerHandler.registerMessageHandler("initialize", async () => {
    await initializeAudioLib();
});

/* Hand an encoded audio packet to the wasm decoder. */
workerHandler.registerMessageHandler("enqueue-audio-packet", payload => {
    audioLibrary.audio_client_enqueue_buffer(payload.clientId, new Uint8Array(payload.buffer, payload.byteOffset, payload.byteLength), payload.packetId, payload.codec);
});

View File

@ -1,96 +0,0 @@
import * as log from "tc-shared/log";
import * as aplayer from "../audio/player";
import {LogCategory} from "tc-shared/log";
import {BufferChunk, Codec, CodecClientCache} from "./Codec";
import {AudioResampler} from "../voice/AudioResampler";
/**
 * Rolling average over the last `history_size` samples.
 */
class AVGCalculator {
    /* Maximum number of samples kept for the average */
    history_size: number = 100;
    /* Newest sample first */
    history: number[] = [];

    /* Prepend a sample, evicting the oldest entries so at most history_size
       remain. (Fix: the original trimmed before inserting and therefore kept
       history_size + 1 entries.) */
    push(entry: number) {
        this.history.unshift(entry);
        while(this.history.length > this.history_size)
            this.history.pop();
    }

    /* Arithmetic mean of the recorded samples; 0 for an empty history
       (fix: the original returned NaN from 0 / 0). */
    avg() : number {
        if(this.history.length === 0)
            return 0;

        let sum = 0;
        for(const entry of this.history)
            sum += entry;
        return sum / this.history.length;
    }
}
/**
 * Base class for all codecs.
 * Owns an OfflineAudioContext for building AudioBuffers as well as two
 * resamplers converting between the codec and the playback sample rate.
 */
export abstract class BasicCodec implements Codec {
    protected _audioContext: OfflineAudioContext;
    protected _decodeResampler: AudioResampler;
    protected _encodeResampler: AudioResampler;
    protected _codecSampleRate: number;
    protected _latenz: AVGCalculator = new AVGCalculator();

    /* Invoked for every encoded unit produced by encodeSamples */
    on_encoded_data: (Uint8Array) => void = $ => {};

    channelCount: number = 1;
    samplesPerUnit: number = 960;

    protected constructor(codecSampleRate: number) {
        this.channelCount = 1;
        this.samplesPerUnit = 960;
        this._audioContext = new (window.webkitOfflineAudioContext || window.OfflineAudioContext)(aplayer.destination().channelCount, 1024, aplayer.context().sampleRate);
        this._codecSampleRate = codecSampleRate;
        this._decodeResampler = new AudioResampler(aplayer.context().sampleRate);
        this._encodeResampler = new AudioResampler(codecSampleRate);
    }

    abstract name() : string;
    abstract initialise() : Promise<Boolean>;
    abstract initialized() : boolean;
    abstract deinitialise();
    abstract reset() : boolean;

    protected abstract decode(data: Uint8Array) : Promise<AudioBuffer>;
    protected abstract encode(data: AudioBuffer) : Promise<Uint8Array | string>;

    /* Resample the PCM data to the codec sample rate and encode it.
       Errors are only logged; encoding is fire and forget. */
    encodeSamples(cache: CodecClientCache, pcm: AudioBuffer) {
        this._encodeResampler.resample(pcm)
            .catch(error => log.error(LogCategory.VOICE, tr("Could not resample PCM data for codec. Error: %o"), error))
            .then(buffer => this.encodeSamples0(cache, buffer as any))
            .catch(error => console.error(tr("Could not encode PCM data for codec. Error: %o"), error))
    }

    /* Accumulate resampled chunks in the cache and encode them in units of
       exactly samplesPerUnit samples. */
    private encodeSamples0(cache: CodecClientCache, buffer: AudioBuffer) {
        cache._chunks.push(new BufferChunk(buffer)); //TODO multi channel!

        while(cache.bufferedSamples(this.samplesPerUnit) >= this.samplesPerUnit) {
            let buffer = this._audioContext.createBuffer(this.channelCount, this.samplesPerUnit, this._codecSampleRate);
            let index = 0;
            /* Fill one unit from the front of the chunk queue */
            while(index < this.samplesPerUnit) {
                let buf = cache._chunks[0];
                let cpyBytes = buf.copyRangeTo(buffer, this.samplesPerUnit - index, index);
                index += cpyBytes;
                buf.index += cpyBytes;
                /* NOTE(review): pop_front is not a standard Array method —
                   presumably a project prototype extension; verify it is in scope */
                if(buf.index == buf.buffer.length)
                    cache._chunks.pop_front();
            }

            let encodeBegin = Date.now();
            this.encode(buffer).then(result => {
                if(result instanceof Uint8Array) {
                    let time = Date.now() - encodeBegin;
                    if(time > 20)
                        log.warn(LogCategory.VOICE, tr("Voice buffer stalled in WorkerPipe longer then expected: %d"), time);
                    //if(time > 20)
                    //    chat.serverChat().appendMessage("Required decode time: " + time);
                    this.on_encoded_data(result);
                }
                else log.error(LogCategory.VOICE, "[Codec][" + this.name() + "] Could not encode buffer. Result: " + result); //TODO tr
            });
        }
        return true;
    }

    /* Decode the data and resample the result to the playback sample rate. */
    decodeSamples(cache: CodecClientCache, data: Uint8Array) : Promise<AudioBuffer> {
        return this.decode(data).then(buffer => this._decodeResampler.resample(buffer));
    }
}

View File

@ -1,62 +0,0 @@
/* Constructor signature shared by all codec implementations. */
export interface CodecConstructor {
    new (codecSampleRate: number) : Codec;
}

/* NOTE(review): the numeric ordinals look like wire-level codec ids —
   confirm before reordering or inserting entries. */
export enum CodecType {
    OPUS_VOICE,
    OPUS_MUSIC,

    SPEEX_NARROWBAND,
    SPEEX_WIDEBAND,
    SPEEX_ULTRA_WIDEBAND,
    CELT_MONO
}
/**
 * An AudioBuffer together with a read cursor, allowing partial
 * consumption of the buffered samples.
 */
export class BufferChunk {
    buffer: AudioBuffer;
    index: number;

    constructor(buffer: AudioBuffer) {
        this.buffer = buffer;
        this.index = 0;
    }

    /**
     * Copy up to `maxLength` samples, starting at the read cursor, into
     * `target` at sample offset `offset`.
     * The cursor itself is NOT advanced; the caller updates `index`.
     *
     * @returns the number of samples actually copied per channel
     */
    copyRangeTo(target: AudioBuffer, maxLength: number, offset: number) {
        const sampleCount = Math.min(this.buffer.length - this.index, maxLength);
        const channelCount = Math.min(target.numberOfChannels, this.buffer.numberOfChannels);
        /* TODO: maybe warn if the channel counts are not equal? */
        for(let channel = 0; channel < channelCount; channel++) {
            const source = this.buffer.getChannelData(channel);
            target.getChannelData(channel).set(
                source.subarray(this.index, this.index + sampleCount),
                offset
            );
        }
        return sampleCount;
    }
}
/**
 * Queue of partially consumed audio chunks awaiting encoding.
 */
export class CodecClientCache {
    _chunks: BufferChunk[] = [];

    /**
     * Number of samples still buffered across all chunks.
     * Counting stops as soon as `max` samples have been seen, so the
     * returned value is only exact below that threshold.
     */
    bufferedSamples(max: number = 0) : number {
        let total = 0;
        for(const chunk of this._chunks) {
            if(total >= max)
                break;
            total += chunk.buffer.length - chunk.index;
        }
        return total;
    }
}
/* Contract implemented by every audio codec. */
export interface Codec {
    /* Invoked with each encoded unit produced by encodeSamples */
    on_encoded_data: (Uint8Array) => void;

    channelCount: number;
    samplesPerUnit: number;

    name() : string;
    initialise();
    deinitialise();
    decodeSamples(cache: CodecClientCache, data: Uint8Array) : Promise<AudioBuffer>;
    encodeSamples(cache: CodecClientCache, pcm: AudioBuffer);
    reset() : boolean;
}

View File

@ -1,47 +0,0 @@
import {BasicCodec} from "./BasicCodec";
/**
 * "Codec" passing raw 32 bit float samples through unchanged.
 * Uses the emscripten heap as conversion scratch space.
 * NOTE(review): `Module` is not imported in this file — presumably the
 * emscripten global; verify it is in scope when this class is used.
 */
export class CodecRaw extends BasicCodec {
    converterRaw: any;              // pointer into the emscripten heap
    converter: Uint8Array;          // view onto the allocated scratch region
    bufferSize: number = 4096 * 4;  // scratch size in bytes (4096 f32 samples)

    constructor(codecSampleRate: number){
        super(codecSampleRate);
    }

    name(): string {
        return "raw";
    }

    initialise() : Promise<Boolean> {
        this.converterRaw = Module._malloc(this.bufferSize);
        this.converter = new Uint8Array(Module.HEAPU8.buffer, this.converterRaw, this.bufferSize);
        return new Promise<Boolean>(resolve => resolve());
    }

    initialized(): boolean {
        return true;
    }

    deinitialise() { }

    /* Reinterpret the incoming bytes as f32 samples and wrap them into a
       mono AudioBuffer at the codec sample rate. */
    protected decode(data: Uint8Array): Promise<AudioBuffer> {
        return new Promise<AudioBuffer>((resolve, reject) => {
            this.converter.set(data);
            let buf = Module.HEAPF32.slice(this.converter.byteOffset / 4, (this.converter.byteOffset / 4) + data.length / 4);
            let audioBuf = this._audioContext.createBuffer(1, data.length / 4, this._codecSampleRate);
            audioBuf.copyToChannel(buf, 0);
            resolve(audioBuf);
        });
    }

    /* NOTE(review): new Uint8Array(Float32Array) converts element-wise
       (each float truncated to a byte) rather than reinterpreting the raw
       bytes — confirm this is intended and not a lossy accident. */
    protected encode(data: AudioBuffer): Promise<Uint8Array> {
        return new Promise<Uint8Array>(resolve => resolve(new Uint8Array(data.getChannelData(0))));
    }

    reset() : boolean { return true; }

    processLatency(): number {
        return 0;
    }
}

View File

@ -1,92 +0,0 @@
import {CodecType} from "tc-backend/web/codec/Codec";
/* Successful response to a previously executed command. */
export type CWMessageResponse = {
    type: "success";
    token: string;             /* token of the command this response belongs to */
    response: any;
    timestampReceived: number; /* Date.now() when the command arrived */
    timestampSend: number;     /* Date.now() when the response was posted */
};

/* Failed response to a previously executed command. */
export type CWMessageErrorResponse = {
    type: "error";
    token: string;
    error: string;
    timestampReceived: number;
    timestampSend: number;
}

/* A command execution request; `token` correlates the response. */
export type CWMessageCommand<T = CWCommand | CWCommandResponse> = {
    type: "command";
    token: string;
    command: keyof T;
    payload: any;
}

/* One-way message without any response. */
export type CWMessageNotify = {
    type: "notify";
}

export type CWMessage = CWMessageCommand | CWMessageErrorResponse | CWMessageResponse | CWMessageNotify;

/* from handle to worker */
export interface CWCommand {
    "global-initialize": {},
    "initialise": {
        type: CodecType,
        channelCount: number
    },
    "reset": {}
    "finalize": {},

    /* Encoded payload to decode; answered via "decode-payload-result" */
    "decode-payload": {
        buffer: ArrayBuffer;
        byteLength: number;
        byteOffset: number;
        maxByteLength: number;
    },
    /* Raw PCM payload to encode; answered via "encode-payload-result" */
    "encode-payload": {
        buffer: ArrayBuffer;
        byteLength: number;
        byteOffset: number;
        maxByteLength: number;
    },
}

/* from worker to handle */
export interface CWCommandResponse {
    "decode-payload-result": {
        buffer: ArrayBuffer;
        byteLength: number;
        byteOffset: number;
    },
    "encode-payload-result": {
        buffer: ArrayBuffer;
        byteLength: number;
        byteOffset: number;
    }
}

/* Maps each command to its response command (or plain result type). */
export interface CWMessageRelations {
    "decode-payload": "decode-payload-result",
    "decode-payload-result": never,

    "encode-payload": "encode-payload-result",
    "encode-payload-result": never,

    "global-initialize": void,
    "initialise": void,
    "reset": void,
    "finalize": void
}

/* Resolves the response payload type for a given command key. */
export type CWCommandResponseType<T extends keyof CWCommand | keyof CWCommandResponse> = CWMessageRelations[T] extends string ? CWCommandResponse[CWMessageRelations[T]] : CWMessageRelations[T];

View File

@ -1,289 +0,0 @@
import {BasicCodec} from "./BasicCodec";
import {CodecType} from "./Codec";
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {
CWCommand,
CWCommandResponseType,
CWMessage, CWMessageCommand,
CWMessageErrorResponse,
CWMessageResponse
} from "tc-backend/web/codec/CodecWorkerMessages";
/* Round-trip timing statistics for one executed command. */
type MessageTimings = {
    upstream: number;
    downstream: number;
    handle: number;
};

interface ExecuteResultBase {
    success: boolean;
    timings: MessageTimings
}

interface SuccessExecuteResult<T> extends ExecuteResultBase {
    success: true;
    result: T;
}

interface ErrorExecuteResult extends ExecuteResultBase {
    success: false;
    error: string;
}

/* Discriminated union over the `success` flag. */
type ExecuteResult<T = any> = SuccessExecuteResult<T> | ErrorExecuteResult;
/* Pool of fixed-size scratch buffers reused across worker round trips. */
const cachedBufferSize = 1024 * 8;
let cachedBuffers: ArrayBuffer[] = [];

/* Hand out a pooled scratch buffer, allocating a fresh one when the pool is empty. */
function nextCachedBuffer() : ArrayBuffer {
    return cachedBuffers.pop() ?? new ArrayBuffer(cachedBufferSize);
}

/* Return a buffer to the pool; dropped when the pool is full (more than 32
   entries) or when the buffer is smaller than the standard pool size. */
function freeCachedBuffer(buffer: ArrayBuffer) {
    if(cachedBuffers.length > 32)
        return;
    if(buffer.byteLength < cachedBufferSize)
        return;
    cachedBuffers.push(buffer);
}
/**
 * Codec encoder/decoder which offloads the actual coding work to a dedicated
 * web worker.
 *
 * Payload buffers are taken from the module local buffer pool and passed to the
 * worker as transferables to avoid copies; the worker transfers them back
 * alongside the result so they can be returned to the pool.
 */
export class CodecWrapperWorker extends BasicCodec {
    private _worker: Worker;
    private _initialized: boolean = false;
    private _initialize_promise: Promise<Boolean>;
    /* Monotonically increasing counter used to generate unique request tokens */
    private _token_index: number = 0;
    readonly type: CodecType;

    /* Requests which have been sent to the worker but not yet been answered, keyed by token */
    private pending_executes: {[key: string]: {
        timeout?: any;
        timestampSend: number,
        resolve: (_: ExecuteResult) => void;
    }} = {};

    constructor(type: CodecType) {
        super(48000);
        this.type = type;
        switch (type) {
            case CodecType.OPUS_MUSIC:
                this.channelCount = 2;
                break;
            case CodecType.OPUS_VOICE:
                this.channelCount = 1;
                break;
            default:
                throw "invalid codec type!";
        }
    }

    name(): string {
        return "Worker for " + CodecType[this.type] + " Channels " + this.channelCount;
    }

    /**
     * Spawn the worker (if required) and initialize the codec instance within it.
     * Concurrent callers share the same pending initialization promise.
     */
    async initialise() : Promise<Boolean> {
        if(this._initialized) return true;
        if(this._initialize_promise) {
            return await this._initialize_promise;
        }

        this._initialize_promise = this.spawn_worker().then(() => this.execute("initialise", {
            type: this.type,
            channelCount: this.channelCount,
        })).then(result => {
            if(result.success === true) {
                this._initialized = true;
                return Promise.resolve(true);
            }

            log.error(LogCategory.VOICE, tr("Failed to initialize codec %s: %s"), CodecType[this.type], result.error);
            return Promise.reject(result.error);
        });
        return await this._initialize_promise;
    }

    initialized() : boolean {
        return this._initialized;
    }

    deinitialise() {
        /* Fire and forget; the worker cleans up its own memory */
        this.execute("finalize", {});
        this._initialized = false;
        this._initialize_promise = undefined;
    }

    /**
     * Decode a single codec payload into an audio buffer.
     *
     * The payload is copied into a pooled scratch buffer which is transferred to
     * the worker. The worker answers with the decoded samples in planar layout
     * (all samples of channel 0, then channel 1, ...) as 32 bit floats.
     *
     * @param data the encoded payload
     * @throws a string describing the failure when decoding fails or times out
     */
    async decode(data: Uint8Array): Promise<AudioBuffer> {
        if(!this.initialized()) throw "codec not initialized/initialize failed";

        const cachedBuffer = nextCachedBuffer();
        new Uint8Array(cachedBuffer).set(data);

        const result = await this.execute("decode-payload", {
            byteLength: data.byteLength,
            buffer: cachedBuffer,
            byteOffset: 0,
            maxByteLength: cachedBuffer.byteLength
        }, 5000, [ cachedBuffer ]);

        if(result.timings.downstream > 5 || result.timings.upstream > 5 || result.timings.handle > 5)
            log.warn(LogCategory.VOICE, tr("Worker message stock time: {downstream: %dms, handle: %dms, upstream: %dms}"), result.timings.downstream, result.timings.handle, result.timings.upstream);
        if(result.success === false)
            throw result.error;

        /* bytes per channel; four bytes per 32 bit float sample */
        const chunkLength = result.result.byteLength / this.channelCount;

        const audioBuffer = this._audioContext.createBuffer(this.channelCount, chunkLength / 4, this._codecSampleRate);
        for(let channel = 0; channel < this.channelCount; channel++) {
            const buffer = new Float32Array(result.result.buffer, result.result.byteOffset + chunkLength * channel, chunkLength / 4);
            audioBuffer.copyToChannel(buffer, channel, 0);
        }

        freeCachedBuffer(result.result.buffer);
        return audioBuffer;
    }

    /**
     * Encode an audio buffer.
     *
     * The samples are written planar into a pooled scratch buffer which is
     * transferred to the worker; the encoded payload is copied out of the
     * returned buffer before that buffer is handed back to the pool.
     *
     * @param data the audio buffer (expected to provide `channelCount` channels)
     * @throws a string describing the failure when encoding fails or times out
     */
    async encode(data: AudioBuffer) : Promise<Uint8Array> {
        if(!this.initialized()) throw "codec not initialized/initialize failed";

        const buffer = nextCachedBuffer();
        const f32Buffer = new Float32Array(buffer);
        /*
         * Bugfix: The third argument of copyFromChannel() is the offset within the
         * *source* channel, not within the destination. Passing the planar target
         * offset there meant channels >= 1 were never written into the buffer.
         * The per-channel target offset has to be applied via a subarray view.
         */
        for(let channel = 0; channel < this.channelCount; channel++) {
            data.copyFromChannel(f32Buffer.subarray(data.length * channel, data.length * (channel + 1)), channel, 0);
        }

        /* Transfer the scratch buffer (like decode does) instead of structure-cloning it */
        const result = await this.execute("encode-payload", {
            byteLength: data.length * this.channelCount * 4,
            buffer: buffer,
            byteOffset: 0,
            maxByteLength: buffer.byteLength
        }, 5000, [ buffer ]);

        if(result.timings.downstream > 5 || result.timings.upstream > 5)
            log.warn(LogCategory.VOICE, tr("Worker message stock time: {downstream: %dms, handle: %dms, upstream: %dms}"), result.timings.downstream, result.timings.handle, result.timings.upstream);
        if(result.success === false)
            throw result.error;

        /* copy the payload out of the pooled buffer so the buffer itself can be reused */
        const encodedResult = new Uint8Array(result.result.buffer, result.result.byteOffset, result.result.byteLength).slice(0);
        freeCachedBuffer(result.result.buffer);
        return encodedResult;
    }

    reset() : boolean {
        //TODO: Await result!
        this.execute("reset", {});
        return true;
    }

    /**
     * Dispatch a message received from the worker to the pending request it answers.
     * The message timestamps are used to derive down-/upstream and handle timings.
     */
    private handleWorkerMessage(message: CWMessage) {
        if(message.type === "notify") {
            log.warn(LogCategory.VOICE, tr("Received unknown notify from worker."));
            return;
        } else if(message.type === "error") {
            const request = this.pending_executes[message.token];
            if(typeof request !== "object") {
                log.warn(LogCategory.VOICE, tr("Received worker execute error for unknown token (%s)"), message.token);
                return;
            }
            delete this.pending_executes[message.token];
            clearTimeout(request.timeout);

            const eresponse = message as CWMessageErrorResponse;
            request.resolve({
                success: false,
                timings: {
                    downstream: eresponse.timestampReceived - request.timestampSend,
                    handle: eresponse.timestampSend - eresponse.timestampReceived,
                    upstream: Date.now() - eresponse.timestampSend
                },
                error: eresponse.error
            });
        } else if(message.type === "success") {
            const request = this.pending_executes[message.token];
            if(typeof request !== "object") {
                log.warn(LogCategory.VOICE, tr("Received worker execute result for unknown token (%s)"), message.token);
                return;
            }
            delete this.pending_executes[message.token];
            clearTimeout(request.timeout);

            const response = message as CWMessageResponse;
            request.resolve({
                success: true,
                timings: {
                    downstream: response.timestampReceived - request.timestampSend,
                    handle: response.timestampSend - response.timestampReceived,
                    upstream: Date.now() - response.timestampSend
                },
                result: response.response
            });
        } else if(message.type === "command") {
            log.warn(LogCategory.VOICE, tr("Received command %s from voice worker. This should never happen!"), (message as CWMessageCommand).command);
            return;
        } else {
            log.warn(LogCategory.VOICE, tr("Received unknown message of type %s from voice worker. This should never happen!"), (message as any).type);
            return;
        }
    }

    /**
     * The worker signalled an error. Fail every pending request and drop the worker handle.
     */
    private handleWorkerError() {
        log.debug(LogCategory.VOICE, tr("Received error from codec worker. Closing worker."));
        for(const token of Object.keys(this.pending_executes)) {
            this.pending_executes[token].resolve({
                success: false,
                error: tr("worker terminated with an error"),
                timings: { downstream: 0, handle: 0, upstream: 0}
            });
            delete this.pending_executes[token];
        }

        this._worker = undefined;
    }

    /**
     * Send a command to the worker and await its response.
     *
     * @param command the command to execute
     * @param data the command payload
     * @param timeout optional timeout in milliseconds after which the request resolves with an error
     * @param transfer optional transferables (e.g. the payload buffer) moved to the worker
     */
    private execute<T extends keyof CWCommand>(command: T, data: CWCommand[T], timeout?: number, transfer?: Transferable[]) : Promise<ExecuteResult<CWCommandResponseType<T>>> {
        return new Promise<ExecuteResult>(resolve => {
            if(!this._worker) {
                resolve({
                    success: false,
                    error: tr("worker does not exists"),
                    timings: {
                        downstream: 0,
                        handle: 0,
                        upstream: 0
                    }
                });
                return;
            }

            const token = this._token_index++ + "_token";
            this.pending_executes[token] = {
                timeout: typeof timeout === "number" ? setTimeout(() => {
                    delete this.pending_executes[token];
                    resolve({
                        success: false,
                        error: tr("command timed out"),
                        timings: { upstream: 0, handle: 0, downstream: 0 }
                    })
                }, timeout) : undefined,
                resolve: resolve,
                timestampSend: Date.now()
            };

            this._worker.postMessage({
                command: command,
                type: "command",
                payload: data,

                token: token
            } as CWMessageCommand, transfer);
        });
    }

    /**
     * Spawn the codec worker and run the global initialization within it.
     */
    private async spawn_worker() : Promise<void> {
        this._worker = new Worker("tc-backend/web/workers/codec", { type: "module" });
        this._worker.onmessage = event => this.handleWorkerMessage(event.data);
        this._worker.onerror = () => this.handleWorkerError();

        const result = await this.execute("global-initialize", {}, 15000);
        if(result.success === false)
            throw result.error;
    }
}

View File

@ -2,6 +2,8 @@ import "webrtc-adapter";
import "./index.scss";
import "./FileTransfer";
import "./audio-lib";
import "./hooks/ServerConnection";
import "./hooks/ExternalModal";
import "./hooks/AudioRecorder";

View File

@ -1,47 +1,55 @@
import {LogCategory} from "tc-shared/log";
import * as log from "tc-shared/log";
import {LogCategory, logWarn} from "tc-shared/log";
const OfflineAudioContext = window.webkitOfflineAudioContext || window.OfflineAudioContext;
export class AudioResampler {
readonly targetSampleRate: number;
private _use_promise: boolean;
private readonly targetSampleRate: number;
private readonly isPromiseResponse: boolean;
constructor(targetSampleRate: number){
this.targetSampleRate = targetSampleRate;
if(this.targetSampleRate < 3000 || this.targetSampleRate > 384000) throw tr("The target sample rate is outside the range [3000, 384000].");
this.isPromiseResponse = navigator.browserSpecs.name != 'Safari';
if(this.targetSampleRate < 3000 || this.targetSampleRate > 384000) {
throw tr("The target sample rate is outside the range [3000, 384000].");
}
}
resample(buffer: AudioBuffer) : Promise<AudioBuffer> {
getTargetSampleRate() : number {
return this.targetSampleRate;
}
async resample(buffer: AudioBuffer) : Promise<AudioBuffer> {
if(!buffer) {
log.warn(LogCategory.AUDIO, tr("Received empty buffer as input! Returning empty output!"));
return Promise.resolve(buffer);
logWarn(LogCategory.AUDIO, tr("Received empty buffer as input! Returning empty output!"));
return buffer;
}
if(buffer.sampleRate == this.targetSampleRate)
return Promise.resolve(buffer);
if(buffer.sampleRate == this.targetSampleRate) {
return buffer;
}
let context;
context = new (window.webkitOfflineAudioContext || window.OfflineAudioContext)(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
const context = new OfflineAudioContext(
buffer.numberOfChannels,
Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate),
this.targetSampleRate
);
let source = context.createBufferSource();
source.buffer = buffer;
source.start(0);
source.connect(context.destination);
source.start(0);
if(typeof(this._use_promise) === "undefined") {
this._use_promise = navigator.browserSpecs.name != 'Safari';
}
if(this._use_promise)
return context.startRendering();
else {
return new Promise<AudioBuffer>((resolve, reject) => {
if(this.isPromiseResponse) {
return await context.startRendering();
} else {
return await new Promise<AudioBuffer>((resolve, reject) => {
context.oncomplete = event => resolve(event.renderedBuffer);
try {
context.startRendering();
} catch (ex) {
reject(ex);
}
})
});
}
}
}

View File

@ -1,139 +0,0 @@
import * as loader from "tc-loader";
import * as aplayer from "tc-backend/web/audio/player";
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
import {CodecType} from "tc-backend/web/codec/Codec";
import {VoiceConnection} from "tc-backend/web/voice/VoiceHandler";
import {BasicCodec} from "tc-backend/web/codec/BasicCodec";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
import {CodecWrapperWorker} from "tc-backend/web/codec/CodecWrapperWorker";
class CacheEntry {
instance: BasicCodec;
owner: number;
last_access: number;
}
export function codec_supported(type: CodecType) {
return type == CodecType.OPUS_MUSIC || type == CodecType.OPUS_VOICE;
}
export class CodecPool {
codecIndex: number;
name: string;
type: CodecType;
entries: CacheEntry[] = [];
maxInstances: number = 2;
private _supported: boolean = true;
initialize(cached: number) {
/* test if we're able to use this codec */
const dummy_client_id = 0xFFEF;
this.ownCodec(dummy_client_id, _ => {}).then(codec => {
log.trace(LogCategory.VOICE, tr("Releasing codec instance (%o)"), codec);
this.releaseCodec(dummy_client_id);
}).catch(error => {
if(this._supported) {
log.warn(LogCategory.VOICE, tr("Disabling codec support for "), this.name);
createErrorModal(tr("Could not load codec driver"), tr("Could not load or initialize codec ") + this.name + "<br>" +
"Error: <code>" + JSON.stringify(error) + "</code>").open();
log.error(LogCategory.VOICE, tr("Failed to initialize the opus codec. Error: %o"), error);
} else {
log.debug(LogCategory.VOICE, tr("Failed to initialize already disabled codec. Error: %o"), error);
}
this._supported = false;
});
}
supported() { return this._supported; }
ownCodec?(clientId: number, callback_encoded: (buffer: Uint8Array) => any, create: boolean = true) : Promise<BasicCodec | undefined> {
return new Promise<BasicCodec>((resolve, reject) => {
if(!this._supported) {
reject(tr("unsupported codec!"));
return;
}
let free_slot = 0;
for(let index = 0; index < this.entries.length; index++) {
if(this.entries[index].owner == clientId) {
this.entries[index].last_access = Date.now();
if(this.entries[index].instance.initialized())
resolve(this.entries[index].instance);
else {
this.entries[index].instance.initialise().then((flag) => {
//TODO test success flag
this.ownCodec(clientId, callback_encoded, false).then(resolve).catch(reject);
}).catch(reject);
}
return;
} else if(this.entries[index].owner == 0) {
free_slot = index;
}
}
if(!create) {
resolve(undefined);
return;
}
if(free_slot == 0){
free_slot = this.entries.length;
let entry = new CacheEntry();
entry.instance = new CodecWrapperWorker(this.type);
this.entries.push(entry);
}
this.entries[free_slot].owner = clientId;
this.entries[free_slot].last_access = new Date().getTime();
this.entries[free_slot].instance.on_encoded_data = callback_encoded;
if(this.entries[free_slot].instance.initialized())
this.entries[free_slot].instance.reset();
else {
this.ownCodec(clientId, callback_encoded, false).then(resolve).catch(reject);
return;
}
resolve(this.entries[free_slot].instance);
});
}
releaseCodec(clientId: number) {
for(let index = 0; index < this.entries.length; index++)
if(this.entries[index].owner == clientId) this.entries[index].owner = 0;
}
constructor(index: number, name: string, type: CodecType){
this.codecIndex = index;
this.name = name;
this.type = type;
this._supported = this.type !== undefined && codec_supported(this.type);
}
}
export let codecPool: CodecPool[];
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
priority: 10,
function: async () => {
aplayer.on_ready(() => {
log.info(LogCategory.VOICE, tr("Initializing voice handler after AudioController has been initialized!"));
codecPool = [
new CodecPool(0, tr("Speex Narrowband"), CodecType.SPEEX_NARROWBAND),
new CodecPool(1, tr("Speex Wideband"), CodecType.SPEEX_WIDEBAND),
new CodecPool(2, tr("Speex Ultra Wideband"), CodecType.SPEEX_ULTRA_WIDEBAND),
new CodecPool(3, tr("CELT Mono"), CodecType.CELT_MONO),
new CodecPool(4, tr("Opus Voice"), CodecType.OPUS_VOICE),
new CodecPool(5, tr("Opus Music"), CodecType.OPUS_MUSIC)
];
codecPool[4].initialize(2);
codecPool[5].initialize(2);
});
},
name: "registering codec initialisation"
});

View File

@ -1,8 +1,10 @@
import {CodecClientCache} from "../codec/Codec";
import * as aplayer from "../audio/player";
import {LogCategory} from "tc-shared/log";
import * as log from "tc-shared/log";
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
import {LatencySettings, PlayerState, VoiceClient} from "tc-shared/connection/VoiceConnection";
import {AudioResampler} from "tc-backend/web/voice/AudioResampler";
import {AudioClient} from "tc-backend/web/audio-lib/AudioClient";
import {getAudioLibrary} from "tc-backend/web/audio-lib";
import {VoicePacket} from "tc-backend/web/voice/bridge/VoiceBridge";
export class VoiceClientController implements VoiceClient {
callback_playback: () => any;
@ -10,227 +12,281 @@ export class VoiceClientController implements VoiceClient {
callback_stopped: () => any;
client_id: number;
speakerContext: AudioContext;
private _player_state: PlayerState = PlayerState.STOPPED;
private _codecCache: CodecClientCache[] = [];
private speakerContext: AudioContext;
private gainNode: GainNode;
private _time_index: number = 0;
private _latency_buffer_length: number = 3;
private _buffer_timeout: number;
private playerState: PlayerState = PlayerState.STOPPED;
private _buffered_samples: AudioBuffer[] = [];
private _playing_nodes: AudioBufferSourceNode[] = [];
private currentPlaybackTime: number = 0;
private bufferTimeout: number;
private _volume: number = 1;
allowBuffering: boolean = true;
private bufferQueueTime: number = 0;
private bufferQueue: AudioBuffer[] = [];
private playingNodes: AudioBufferSourceNode[] = [];
private currentVolume: number = 1;
private latencySettings: LatencySettings;
private audioInitializePromise: Promise<void>;
private audioClient: AudioClient;
private resampler: AudioResampler;
constructor(client_id: number) {
this.client_id = client_id;
this.reset_latency_settings();
aplayer.on_ready(() => this.speakerContext = aplayer.context());
this.resampler = new AudioResampler(48000);
aplayer.on_ready(() => {
this.speakerContext = aplayer.context();
this.gainNode = aplayer.context().createGain();
this.gainNode.connect(this.speakerContext.destination);
this.gainNode.gain.value = this.currentVolume;
});
}
public initialize() { }
private initializeAudio() : Promise<void> {
if(this.audioInitializePromise) {
return this.audioInitializePromise;
}
public close(){ }
this.audioInitializePromise = (async () => {
this.audioClient = await getAudioLibrary().createClient();
this.audioClient.callback_decoded = buffer => {
this.resampler.resample(buffer).then(buffer => {
this.playbackAudioBuffer(buffer);
});
}
this.audioClient.callback_ended = () => {
this.stopAudio(false);
};
})();
return this.audioInitializePromise;
}
playback_buffer(buffer: AudioBuffer) {
public enqueuePacket(packet: VoicePacket) {
if(!this.audioClient && packet.payload.length === 0) {
return;
} else {
this.initializeAudio().then(() => {
if(!this.audioClient) {
/* we've already been destroyed */
return;
}
this.audioClient.enqueueBuffer(packet.payload, packet.voiceId, packet.codec);
});
}
}
public destroy() {
this.audioClient?.destroy();
this.audioClient = undefined;
}
playbackAudioBuffer(buffer: AudioBuffer) {
if(!buffer) {
log.warn(LogCategory.VOICE, tr("[AudioController] Got empty or undefined buffer! Dropping it"));
logWarn(LogCategory.VOICE, tr("[AudioController] Got empty or undefined buffer! Dropping it"));
return;
}
if(!this.speakerContext) {
log.warn(LogCategory.VOICE, tr("[AudioController] Failed to replay audio. Global audio context not initialized yet!"));
logWarn(LogCategory.VOICE, tr("[AudioController] Failed to replay audio. Global audio context not initialized yet!"));
return;
}
if (buffer.sampleRate != this.speakerContext.sampleRate)
log.warn(LogCategory.VOICE, tr("[AudioController] Source sample rate isn't equal to playback sample rate! (%o | %o)"), buffer.sampleRate, this.speakerContext.sampleRate);
this.apply_volume_to_buffer(buffer);
this._buffered_samples.push(buffer);
if(this._player_state == PlayerState.STOPPED || this._player_state == PlayerState.STOPPING) {
log.info(LogCategory.VOICE, tr("[Audio] Starting new playback"));
this.set_state(PlayerState.PREBUFFERING);
if (buffer.sampleRate != this.speakerContext.sampleRate) {
logWarn(LogCategory.VOICE, tr("[AudioController] Source sample rate isn't equal to playback sample rate! (%o | %o)"), buffer.sampleRate, this.speakerContext.sampleRate);
}
switch (this._player_state) {
case PlayerState.PREBUFFERING:
case PlayerState.BUFFERING:
this.reset_buffer_timeout(true); //Reset timeout, we got a new buffer
if(this._buffered_samples.length <= this._latency_buffer_length) {
if(this._player_state == PlayerState.BUFFERING) {
if(this.allowBuffering)
break;
} else
break;
if(this.playerState == PlayerState.STOPPED || this.playerState == PlayerState.STOPPING) {
logDebug(LogCategory.VOICE, tr("[Audio] Starting new playback"));
this.setPlayerState(PlayerState.PREBUFFERING);
}
if(this._player_state == PlayerState.PREBUFFERING) {
log.info(LogCategory.VOICE, tr("[Audio] Prebuffering succeeded (Replaying now)"));
if(this.callback_playback)
if(this.playerState === PlayerState.PREBUFFERING || this.playerState === PlayerState.BUFFERING) {
this.resetBufferTimeout(true);
this.bufferQueue.push(buffer);
this.bufferQueueTime += buffer.duration;
if(this.bufferQueueTime <= this.latencySettings.min_buffer / 1000) {
return;
}
/* finished buffering */
if(this.playerState == PlayerState.PREBUFFERING) {
logDebug(LogCategory.VOICE, tr("[Audio] Prebuffering succeeded (Replaying now)"));
if(this.callback_playback) {
this.callback_playback();
} else if(this.allowBuffering) {
log.info(LogCategory.VOICE, tr("[Audio] Buffering succeeded (Replaying now)"));
}
this._player_state = PlayerState.PLAYING;
case PlayerState.PLAYING:
this.replay_queue();
break;
default:
break;
} else {
logDebug(LogCategory.VOICE, tr("[Audio] Buffering succeeded (Replaying now)"));
}
this.replayBufferQueue();
this.setPlayerState(PlayerState.PLAYING);
} else if(this.playerState === PlayerState.PLAYING) {
const latency = this.getCurrentPlaybackLatency();
if(latency > (this.latencySettings.max_buffer / 1000)) {
logWarn(LogCategory.VOICE, tr("Dropping replay buffer for client %d because of too high replay latency. (Current: %f, Max: %f)"),
this.client_id, latency.toFixed(3), (this.latencySettings.max_buffer / 1000).toFixed(3));
return;
}
this.enqueueBufferForPayback(buffer);
} else {
logError(LogCategory.AUDIO, tr("This block should be unreachable!"));
return;
}
}
private replay_queue() {
let buffer: AudioBuffer;
while((buffer = this._buffered_samples.pop_front())) {
if(this._playing_nodes.length >= this._latency_buffer_length * 1.5 + 3) {
log.info(LogCategory.VOICE, tr("Dropping buffer because playing queue grows to much"));
continue; /* drop the data (we're behind) */
getCurrentPlaybackLatency() {
return Math.max(this.currentPlaybackTime - this.speakerContext.currentTime, 0);
}
if(this._time_index < this.speakerContext.currentTime)
this._time_index = this.speakerContext.currentTime;
stopAudio(abortPlayback: boolean) {
if(abortPlayback) {
this.setPlayerState(PlayerState.STOPPED);
this.flush();
if(this.callback_stopped) {
this.callback_stopped();
}
} else {
this.setPlayerState(PlayerState.STOPPING);
/* replay all pending buffers */
this.replayBufferQueue();
/* test if there are any buffers which are currently played, if not the state will change to stopped */
this.testReplayState();
}
}
private replayBufferQueue() {
for(const buffer of this.bufferQueue)
this.enqueueBufferForPayback(buffer);
this.bufferQueue = [];
this.bufferQueueTime = 0;
}
private enqueueBufferForPayback(buffer: AudioBuffer) {
/* advance the playback time index, we seem to be behind a bit */
if(this.currentPlaybackTime < this.speakerContext.currentTime)
this.currentPlaybackTime = this.speakerContext.currentTime;
const player = this.speakerContext.createBufferSource();
player.buffer = buffer;
player.onended = () => this.on_buffer_replay_finished(player);
this._playing_nodes.push(player);
player.onended = () => this.handleBufferPlaybackEnded(player);
this.playingNodes.push(player);
player.connect(aplayer.destination());
player.start(this._time_index);
this._time_index += buffer.duration;
}
player.connect(this.gainNode);
player.start(this.currentPlaybackTime);
this.currentPlaybackTime += buffer.duration;
}
private on_buffer_replay_finished(node: AudioBufferSourceNode) {
this._playing_nodes.remove(node);
this.test_buffer_queue();
private handleBufferPlaybackEnded(node: AudioBufferSourceNode) {
this.playingNodes.remove(node);
this.testReplayState();
}
stopAudio(now: boolean = false) {
this._player_state = PlayerState.STOPPING;
if(now) {
this._player_state = PlayerState.STOPPED;
this._buffered_samples = [];
for(const entry of this._playing_nodes)
entry.stop(0);
this._playing_nodes = [];
if(this.callback_stopped)
this.callback_stopped();
} else {
this.test_buffer_queue(); /* test if we're not already done */
this.replay_queue(); /* flush the queue */
}
private testReplayState() {
if(this.bufferQueue.length > 0 || this.playingNodes.length > 0) {
return;
}
private test_buffer_queue() {
if(this._buffered_samples.length == 0 && this._playing_nodes.length == 0) {
if(this._player_state != PlayerState.STOPPING && this._player_state != PlayerState.STOPPED) {
if(this._player_state == PlayerState.BUFFERING)
return; //We're already buffering
this._player_state = PlayerState.BUFFERING;
if(!this.allowBuffering)
log.warn(LogCategory.VOICE, tr("[Audio] Detected a buffer underflow!"));
this.reset_buffer_timeout(true);
} else {
this._player_state = PlayerState.STOPPED;
if(this.callback_stopped)
if(this.playerState === PlayerState.STOPPING) {
/* All buffers have been replayed successfully */
this.setPlayerState(PlayerState.STOPPED);
if(this.callback_stopped) {
this.callback_stopped();
}
} else if(this.playerState === PlayerState.PLAYING) {
logDebug(LogCategory.VOICE, tr("Client %d has a buffer underflow. Changing state to buffering."), this.client_id);
this.setPlayerState(PlayerState.BUFFERING);
}
}
private reset_buffer_timeout(restart: boolean) {
if(this._buffer_timeout)
clearTimeout(this._buffer_timeout);
/***
* Schedule a new buffer timeout.
* The buffer timeout is used to playback even small amounts of audio, which are less than the min. buffer size.
* @param scheduleNewTimeout
* @private
*/
private resetBufferTimeout(scheduleNewTimeout: boolean) {
clearTimeout(this.bufferTimeout);
if(restart)
this._buffer_timeout = setTimeout(() => {
if(this._player_state == PlayerState.PREBUFFERING || this._player_state == PlayerState.BUFFERING) {
log.warn(LogCategory.VOICE, tr("[Audio] Buffering exceeded timeout. Flushing and stopping replay"));
this.stopAudio();
if(scheduleNewTimeout) {
this.bufferTimeout = setTimeout(() => {
if(this.playerState == PlayerState.PREBUFFERING || this.playerState == PlayerState.BUFFERING) {
logWarn(LogCategory.VOICE, tr("[Audio] Buffering exceeded timeout. Flushing and stopping replay."));
this.stopAudio(false);
}
this._buffer_timeout = undefined;
this.bufferTimeout = undefined;
}, 1000);
}
}
private apply_volume_to_buffer(buffer: AudioBuffer) {
if(this._volume == 1)
private setPlayerState(state: PlayerState) {
if(this.playerState === state) {
return;
for(let channel = 0; channel < buffer.numberOfChannels; channel++) {
let data = buffer.getChannelData(channel);
for(let sample = 0; sample < data.length; sample++) {
let lane = data[sample];
lane *= this._volume;
data[sample] = lane;
}
}
}
private set_state(state: PlayerState) {
if(this._player_state == state)
return;
this._player_state = state;
if(this.callback_state_changed)
this.callback_state_changed(this._player_state);
this.playerState = state;
if(this.callback_state_changed) {
this.callback_state_changed(this.playerState);
}
get_codec_cache(codec: number) : CodecClientCache {
while(this._codecCache.length <= codec)
this._codecCache.push(new CodecClientCache());
return this._codecCache[codec];
}
get_state(): PlayerState {
return this._player_state;
return this.playerState;
}
get_volume(): number {
return this._volume;
return this.currentVolume;
}
set_volume(volume: number): void {
if(this._volume == volume)
if(this.currentVolume == volume)
return;
this._volume = volume;
/* apply the volume to all other buffers */
for(const buffer of this._buffered_samples)
this.apply_volume_to_buffer(buffer);
this.currentVolume = volume;
if(this.gainNode) {
this.gainNode.gain.value = volume;
}
}
abort_replay() {
this.stopAudio(true);
}
latency_settings(settings?: LatencySettings): LatencySettings {
throw "not supported";
}
reset_latency_settings() {
throw "not supported";
}
support_latency_settings(): boolean {
return false;
}
support_flush(): boolean {
return false;
return true;
}
flush() {
throw "not supported";
this.bufferQueue = [];
this.bufferQueueTime = 0;
for(const entry of this.playingNodes) {
entry.stop(0);
}
this.playingNodes = [];
}
latency_settings(settings?: LatencySettings): LatencySettings {
if(typeof settings !== "undefined") {
this.latencySettings = settings;
}
return this.latencySettings;
}
reset_latency_settings() {
this.latencySettings = {
min_buffer: 60,
max_buffer: 400
};
}
support_latency_settings(): boolean {
return true;
}
}

View File

@ -12,7 +12,6 @@ import {
VoiceConnectionStatus,
WhisperSessionInitializer
} from "tc-shared/connection/VoiceConnection";
import {codecPool} from "./CodecConverter";
import {createErrorModal} from "tc-shared/ui/elements/Modal";
import {ServerConnectionEvents} from "tc-shared/connection/ConnectionBase";
import {ConnectionState} from "tc-shared/ConnectionHandler";
@ -33,10 +32,6 @@ const KEY_VOICE_CONNECTION_TYPE: ValuedSettingsKey<number> = {
};
export class VoiceConnection extends AbstractVoiceConnection {
static codecSupported(type: number) : boolean {
return !!codecPool && codecPool.length > type && codecPool[type].supported();
}
readonly connection: ServerConnection;
private readonly serverConnectionStateListener;
@ -240,26 +235,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
return;
}
let codec_pool = codecPool[packet.codec];
if(!codec_pool) {
log.error(LogCategory.VOICE, tr("Could not playback codec %o"), packet.codec);
return;
}
if(packet.payload.length == 0) {
client.stopAudio();
codec_pool.releaseCodec(packet.clientId);
} else {
codec_pool.ownCodec(packet.clientId, () => {
logWarn(LogCategory.VOICE, tr("Received an encoded voice packet even thou we're only decoding!"));
}, true)
.then(decoder => decoder.decodeSamples(client.get_codec_cache(packet.codec), packet.payload))
.then(buffer => client.playback_buffer(buffer)).catch(error => {
log.error(LogCategory.VOICE, tr("Could not playback client's (%o) audio (%o)"), packet.clientId, error);
if(error instanceof Error)
log.error(LogCategory.VOICE, error.stack);
});
}
client.enqueuePacket(packet);
}
private handleRecorderStop() {
@ -335,6 +311,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
if(!(client instanceof VoiceClientController))
throw "Invalid client type";
client.destroy();
this.voiceClients.remove(client);
return Promise.resolve();
}
@ -346,11 +323,11 @@ export class VoiceConnection extends AbstractVoiceConnection {
}
decodingSupported(codec: number): boolean {
return VoiceConnection.codecSupported(codec);
return codec >= 4 && codec <= 5;
}
encodingSupported(codec: number): boolean {
return VoiceConnection.codecSupported(codec);
return codec >= 4 && codec <= 5;
}
getEncoderCodec(): number {

View File

@ -1,183 +0,0 @@
import {CodecType} from "tc-backend/web/codec/Codec";
import {
CWMessageCommand,
CWCommand,
CWMessage,
CWMessageResponse,
CWMessageErrorResponse, CWCommandResponseType
} from "tc-backend/web/codec/CodecWorkerMessages";
/* Log prefix for all console output of this worker. */
const prefix = "[CodecWorker] ";

/* Contract every codec implementation hosted by this worker has to fulfill. */
export interface CodecWorker {
    name();
    /* Returns an error string on failure, otherwise a falsy value. */
    initialise?() : string;
    deinitialise();
    /* Returns the number of bytes written into the response buffer, or an error string. */
    decode(buffer: Uint8Array, responseBuffer: (length: number) => Uint8Array) : number | string;
    encode(buffer: Uint8Array, responseBuffer: (length: number) => Uint8Array) : number | string;
    reset();
}
/* Registry of codec allocators, keyed by CodecType. */
let supported_types = {};
/* Register an allocator for the given codec type; used by the "initialise" command. */
export function register_codec(type: CodecType, allocator: (options?: any) => Promise<CodecWorker>) {
    supported_types[type] = allocator;
}

/* Callback executed once on "global-initialize"; resolves to true or an error string. */
let initialize_callback: () => Promise<true | string>;
export function set_initialize_callback(callback: () => Promise<true | string>) {
    initialize_callback = callback;
}
/* The currently active codec instance (set by the "initialise" command). */
export let codec_instance: CodecWorker;
/* Guard/result memo so the global initialization callback only runs once. */
let globally_initialized = false;
let global_initialize_result;

/* Transferables intended for the next command response (e.g. the shared payload buffer). */
let commandTransferableResponse: Transferable[];

/* Command name -> async handler mapping; handlers resolve with the response payload. */
let messageHandlers: { [T in keyof CWCommand]: (message: CWCommand[T]) => Promise<CWCommandResponseType<T>> } = {} as any;
function registerCommandHandler<T extends keyof CWCommand>(command: T, callback: (message: CWCommand[T]) => Promise<CWCommandResponseType<T>>) {
    messageHandlers[command as any] = callback;
}
/**
 * Entry point for all messages sent by the owner (main thread).
 * Only "command" messages are expected; each is dispatched to its registered
 * handler and answered with a "success" or "error" message carrying the same
 * token plus receive/send timestamps for latency diagnostics.
 */
const handleOwnerMessage = (e: MessageEvent) => {
    const timestampReceived = Date.now();
    const message = e.data as CWMessage;

    if(message.type === "error" || message.type === "success") {
        console.warn("%sReceived a command response within the worker. We're not sending any commands so this should not happen!", prefix);
        return;
    } else if(message.type === "notify") {
        console.warn("%sReceived a notify within the worker. This should not happen!", prefix);
        return;
    } else if(message.type === "command") {
        const command = message as CWMessageCommand;

        /* Map any thrown value to an error string and report it to the owner. */
        const sendExecuteError = error => {
            let errorMessage;
            if(typeof error === "string") {
                errorMessage = error;
            } else if(error instanceof Error) {
                console.error("%sMessage handle error: %o", prefix, error);
                errorMessage = error.message;
            } else {
                console.error("%sMessage handle error: %o", prefix, error);
                errorMessage = "lookup the console";
            }

            /* NOTE(review): worker-scope postMessage takes (message, transfer); the
               three-argument form used here matches window.postMessage — verify the
               transfer list is actually applied in this context. */
            postMessage({
                type: "error",
                error: errorMessage,
                timestampReceived: timestampReceived,
                timestampSend: Date.now(),
                token: command.token
            } as CWMessageErrorResponse, undefined, commandTransferableResponse);
        };

        const sendExecuteResult = result => {
            /* NOTE(review): commandTransferableResponse is not passed here, so response
               buffers appear to be structure-cloned rather than transferred — confirm intended. */
            postMessage({
                type: "success",
                response: result,
                timestampReceived: timestampReceived,
                timestampSend: Date.now(),
                token: command.token
            } as CWMessageResponse, undefined);
        };

        const handler = messageHandlers[message.command as any];
        if(!handler) {
            sendExecuteError("unknown command");
            return;
        }

        handler(command.payload).then(sendExecuteResult).catch(sendExecuteError);
    }
};
addEventListener("message", handleOwnerMessage);
/* command handlers */
registerCommandHandler("global-initialize", async () => {
    /* Execute the one-time initialization only once and cache its result so repeated
       "global-initialize" commands report the original outcome. Previously the cached
       result was never assigned, so a failed first initialization was silently reported
       as success on every retry. */
    const init_result = globally_initialized ? global_initialize_result : await initialize_callback();
    globally_initialized = true;
    global_initialize_result = init_result;

    if(typeof init_result === "string")
        throw init_result;
});
/* Allocate and initialize the codec instance for the requested codec type. */
registerCommandHandler("initialise", async data => {
    console.log(prefix + "Initialize codec worker for codec %s", CodecType[data.type as CodecType]);
    if(!supported_types[data.type])
        throw "type unsupported";

    try {
        codec_instance = await supported_types[data.type](data);
    } catch(ex) {
        console.error("%sFailed to allocate codec: %o", prefix, ex);
        throw typeof ex === "string" ? ex : "failed to allocate codec";
    }

    /* A truthy return value indicates an initialization error. */
    const error = codec_instance.initialise();
    if(error)
        throw error;
});
registerCommandHandler("reset", async () => {
    codec_instance.reset();
});
registerCommandHandler("finalize", async () => {
    /* memory will be cleaned up by its own */
});
/* Holds the buffer handed out by the most recent encode/decode callback. */
let responseBuffer: Uint8Array;

/* Returns the pending response buffer and clears the slot. */
const popResponseBuffer = () => {
    const pendingBuffer = responseBuffer;
    responseBuffer = undefined;
    return pendingBuffer;
};
/* Decodes one codec payload, reusing the transferred input buffer for the response. */
registerCommandHandler("decode-payload", async data => {
    if(!codec_instance)
        throw "codec not initialized/initialize failed";

    const payload = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
    const decodeResult = codec_instance.decode(payload, requiredByteLength => {
        if(requiredByteLength > data.maxByteLength)
            throw "source buffer too small to hold the result";

        /* reuse the transferred input buffer instead of allocating a fresh one */
        return responseBuffer = new Uint8Array(data.buffer, 0, data.maxByteLength);
    });

    const resultBuffer = popResponseBuffer();
    if(typeof decodeResult === "string") {
        /* the codec reported an error message */
        throw decodeResult;
    }

    /* transfer the buffer back to the owner without copying it */
    commandTransferableResponse = [resultBuffer.buffer];
    return {
        buffer: resultBuffer.buffer,
        byteLength: decodeResult,
        byteOffset: 0,
    };
});
/* Encodes one block of raw samples, reusing the transferred input buffer for the response. */
registerCommandHandler("encode-payload", async data => {
    if(!codec_instance)
        throw "codec not initialized/initialize failed";

    const samples = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
    const encodeResult = codec_instance.encode(samples, requiredByteLength => {
        if(requiredByteLength > data.maxByteLength)
            throw "source buffer too small to hold the result";

        /* reuse the transferred input buffer instead of allocating a fresh one */
        return responseBuffer = new Uint8Array(data.buffer, 0, data.maxByteLength);
    });

    const resultBuffer = popResponseBuffer();
    if(typeof encodeResult === "string") {
        /* the codec reported an error message */
        throw encodeResult;
    }

    /* transfer the buffer back to the owner without copying it */
    commandTransferableResponse = [resultBuffer.buffer];
    return {
        buffer: resultBuffer.buffer,
        byteLength: encodeResult,
        byteOffset: 0
    };
});

View File

@ -1,222 +0,0 @@
import * as cworker from "./CodecWorker";
import {CodecType} from "tc-backend/web/codec/Codec";
import {CodecWorker} from "./CodecWorker";
const WASM_ERROR_MESSAGES = [
'no native wasm support detected'
];
/* Shape of the emscripten module object once the native code has been loaded. */
interface OpusModuleType extends EmscriptenModule {
    cwrap: typeof cwrap;
}
/* Filled in by the emscripten module factory in set_initialize_callback below. */
let OpusModule = {} as OpusModuleType;
/* Resolves once the wasm runtime is usable, rejects with a message when loading aborts. */
const runtimeInitializedPromise = new Promise((resolve, reject) => {
    /* Detach both hooks so the promise can only settle once. */
    const cleanup = () => {
        OpusModule['onRuntimeInitialized'] = undefined;
        OpusModule['onAbort'] = undefined;
    };
    OpusModule['onRuntimeInitialized'] = () => {
        cleanup();
        resolve();
    };
    OpusModule['onAbort'] = error => {
        cleanup();
        /* Normalize the abort reason into a printable message. */
        let message;
        if(error instanceof DOMException)
            message = "DOMException (" + error.name + "): " + error.code + " => " + error.message;
        else if(error instanceof Error) {
            message = error.message;
        } else {
            message = error;
        }
        reject(message);
    }
});
/* Filters the emscripten stdout stream before forwarding it to the console. */
OpusModule['print'] = function(...messageParts: any[]) {
    const firstPart = messageParts[0] as string;

    /* Compile errors also get printed to error stream so no need to log them here */
    if(firstPart.startsWith("CompileError: WebAssembly.instantiate(): "))
        return;

    console.log(...messageParts);
};
/* Filters the emscripten stderr stream, rewriting known wasm loader failures
   into friendlier warnings and suppressing messages handled elsewhere. */
OpusModule['printErr'] = function() {
    const message = arguments[0] as string;
    if(message.startsWith("wasm streaming compile failed: ")) {
        /* 31 === "wasm streaming compile failed: ".length */
        const error_message = message.substr(31);
        if(error_message.startsWith("TypeError: Failed to execute 'compile' on 'WebAssembly': ")) {
            /* 57 === length of the prefix stripped above */
            console.warn("Failed to compile opus native code: %s", error_message.substr(57));
        } else {
            console.warn("Failed to prepare opus native code asynchronously: %s", error_message);
        }
        return;
    } else if(message === "falling back to ArrayBuffer instantiation") {
        /*
            We suppress this message, because it comes directly after "wasm streaming compile failed:".
            So if we want to print multiple lines we just have to edit the lines above.
        */
        return;
    } else if(message.startsWith("failed to asynchronously prepare wasm:")) {
        /*
            Will be handled via abort
        */
        return;
    } else if(message.startsWith("CompileError: WebAssembly.instantiate():")) {
        /*
            Will be handled via abort already
        */
        return;
    }
    /* Drop messages which are expected on browsers without wasm support. */
    for(const suppress of WASM_ERROR_MESSAGES)
        if((arguments[0] as string).indexOf(suppress) != -1)
            return;
    console.error(...arguments);
};
/* Suppress duplicate reports for wasm compile aborts which were already logged. */
self.addEventListener("unhandledrejection", event => {
    const reason = event.reason;
    let reasonMessage: string;

    if(reason instanceof Error) {
        /* Only wasm runtime errors are of interest here */
        if(reason.name !== "RuntimeError")
            return;

        reasonMessage = reason.message;
    } else if(typeof reason === "string") {
        reasonMessage = reason;
    } else {
        return;
    }

    if(reasonMessage.startsWith("abort(CompileError: WebAssembly.instantiate():")) {
        /*
            We already handled that error via the Module['printErr'] callback.
        */
        event.preventDefault();
    }
});
/* Opus application profiles; values match the native OPUS_APPLICATION_* constants. */
enum OpusType {
    VOIP = 2048,
    AUDIO = 2049,
    RESTRICTED_LOWDELAY = 2051
}
/* Human readable messages for the libopus error codes; the entry at index 0
   describes error code -1, index 1 describes -2, and so on. */
const OPUS_ERROR_CODES = [
    "One or more invalid/out of range arguments", //-1 (OPUS_BAD_ARG)
    "Not enough bytes allocated in the target buffer", //-2 (OPUS_BUFFER_TOO_SMALL)
    "An internal error was detected", //-3 (OPUS_INTERNAL_ERROR)
    "The compressed data passed is corrupted", //-4 (OPUS_INVALID_PACKET)
    "Invalid/unsupported request number", //-5 (OPUS_UNIMPLEMENTED)
    "An encoder or decoder structure is invalid or already freed", //-6 (OPUS_INVALID_STATE)
    "Memory allocation has failed" //-7 (OPUS_ALLOC_FAIL)
];
/**
 * Codec worker implementation backed by the emscripten compiled libopus.
 * A single scratch buffer inside the wasm heap is shared between the
 * encode and the decode path.
 */
class OpusWorker implements CodecWorker {
    /* Size in bytes of the shared scratch buffer */
    private static readonly kProcessBufferSize = 4096 * 2;

    private readonly channelCount: number;
    private readonly type: OpusType;

    private nativeHandle: any;

    private fn_newHandle: any;
    private fn_decode: any;
    private fn_encode: any;
    private fn_reset: any;

    private nativeBufferPtr: number;
    /* View into the wasm heap at nativeBufferPtr used to exchange data with the native code */
    private processBuffer: Uint8Array;

    constructor(channelCount: number, type: OpusType) {
        this.channelCount = channelCount;
        this.type = type;
    }

    name(): string {
        /* Bugfix: resolve the type name via the OpusType enum. The previous code
           indexed the OpusWorker class itself, which always yielded "undefined". */
        return "Opus (Type: " + OpusType[this.type] + " Channels: " + this.channelCount + ")";
    }

    /* Binds the exported native functions, allocates the native handle and the
       shared scratch buffer. Returns undefined on success or an error string. */
    initialise?() : string {
        this.fn_newHandle = OpusModule.cwrap("codec_opus_createNativeHandle", "number", ["number", "number"]);
        this.fn_decode = OpusModule.cwrap("codec_opus_decode", "number", ["number", "number", "number", "number"]);
        this.fn_encode = OpusModule.cwrap("codec_opus_encode", "number", ["number", "number", "number", "number"]);
        this.fn_reset = OpusModule.cwrap("codec_opus_reset", "number", ["number"]);

        this.nativeHandle = this.fn_newHandle(this.channelCount, this.type);

        this.nativeBufferPtr = OpusModule._malloc(OpusWorker.kProcessBufferSize);
        this.processBuffer = new Uint8Array(OpusModule.HEAPU8.buffer, this.nativeBufferPtr, OpusWorker.kProcessBufferSize);
        return undefined;
    }

    deinitialise() { } //TODO: free the native handle and the malloc'ed buffer (currently leaks)

    /* Decodes one opus packet in place within the shared buffer.
       Returns the decoded byte length or an error string. */
    decode(buffer: Uint8Array, responseBuffer: (length: number) => Uint8Array): number | string {
        if (buffer.byteLength > this.processBuffer.byteLength)
            return "supplied data exceeds internal buffer";

        this.processBuffer.set(buffer);

        let result = this.fn_decode(this.nativeHandle, this.processBuffer.byteOffset, buffer.byteLength, this.processBuffer.byteLength);
        /* Bugfix: error codes start at -1, but index 0 holds the message for -1,
           so the lookup has to be shifted by one. */
        if (result < 0) return OPUS_ERROR_CODES[-result - 1] || "unknown decode error " + result;

        /* result := decoded samples per channel, 4 bytes per sample
           (presumably f32 samples — confirm against the native decoder) */
        const resultByteLength = result * this.channelCount * 4;
        const resultBuffer = responseBuffer(resultByteLength);
        resultBuffer.set(this.processBuffer.subarray(0, resultByteLength), 0);
        return resultByteLength;
    }

    /* Encodes raw samples from the shared buffer into one opus packet.
       Returns the encoded byte length or an error string. */
    encode(buffer: Uint8Array, responseBuffer: (length: number) => Uint8Array): number | string {
        if (buffer.byteLength > this.processBuffer.byteLength)
            return "supplied data exceeds internal buffer";

        this.processBuffer.set(buffer);

        let result = this.fn_encode(this.nativeHandle, this.processBuffer.byteOffset, buffer.byteLength, this.processBuffer.byteLength);
        /* Bugfix: same shifted error code lookup as within decode() */
        if (result < 0) return OPUS_ERROR_CODES[-result - 1] || "unknown encode error " + result;

        const resultBuffer = responseBuffer(result);
        resultBuffer.set(this.processBuffer.subarray(0, result), 0);
        return result;
    }

    reset() {
        this.fn_reset(this.nativeHandle);
    }
}
/* Music uses a stereo encoder, voice a mono one. */
cworker.register_codec(CodecType.OPUS_MUSIC, async () => new OpusWorker(2, OpusType.AUDIO));
cworker.register_codec(CodecType.OPUS_VOICE, async () => new OpusWorker(1, OpusType.VOIP));
/* Loads the emscripten glue script and the wasm binary, then waits for the runtime. */
cworker.set_initialize_callback(async () => {
    try {
        /* could be directly required since it's just a file reference */
        const [ moduleCreator, wasmFile ] = await Promise.all([
            import("tc-backend/web/assembly/TeaWeb-Worker-Codec-Opus.js"),
            // @ts-ignore
            import("tc-backend/web/assembly/TeaWeb-Worker-Codec-Opus.wasm")
        ]);
        /* The factory mutates and returns the very same module object. */
        const module = moduleCreator(Object.assign(OpusModule, {
            locateFile(file: string) {
                return file.endsWith(".wasm") ? wasmFile.default : file;
            }
        }));
        if(module !== OpusModule)
            throw "invalid opus module object";
    } catch (e) {
        /* NOTE(review): the caught error is discarded here; consider logging `e`
           before signalling the abort so load failures are diagnosable. */
        OpusModule['onAbort']("Failed to load native scripts");
    }
    /* Resolves via onRuntimeInitialized, rejects via onAbort (see above). */
    await runtimeInitializedPromise;
    return true;
});

View File

@ -1 +0,0 @@
require("./OpusCodec");

35
web/audio-lib/Cargo.toml Normal file
View File

@ -0,0 +1,35 @@
[package]
# NOTE(review): "audo" looks like a typo for "audio". Renaming changes the
# produced artifact name, so confirm all downstream references before fixing.
name = "teaweb-audo-lib"
version = "0.1.0"
authors = ["WolverinDEV <git@teaspeak.de>"]
edition = "2018"
[lib]
# cdylib: build a loadable library as required by the wasm-bindgen toolchain
crate-type = ["cdylib"]
[dependencies]
wasm-bindgen = "0.2"
wasm-bindgen-futures = "0.4.17"
js-sys = "0.3.44"
wasm-timer = "0.2.4"
futures = "0.3.5"
log = "0.4"
once_cell = "1.4.1"
[dev-dependencies]
tokio = { version = "0.2", features = ["full"] }
tokio-test = "0.2.1"
ntest = "0.7.1"
[target.'cfg(target_arch = "wasm32")'.dependencies]
# NOTE(review): absolute Windows path only resolves on the author's machine;
# a git or registry dependency would let other checkouts build as well.
opus-prebuild-wasm = { path = "D:\\git\\web\\opus-prebuild-wasm" }
console_log = "0.2.0"
console_error_panic_hook = "0.1.6"
# Used for the tests as an alternative (Attention: The opus library version differs!)
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
opus-cmake-sys = "1.0.6"
simple_logger = "1.6.0"
[package.metadata.wasm-pack.profile.release]
wasm-opt = ["-O2", "--enable-mutable-globals"]

184
web/audio-lib/src/audio.rs Normal file
View File

@ -0,0 +1,184 @@
use std::ops::{Add, Sub};
pub mod packet_queue;
pub mod codec;
pub mod decoder;
pub mod converter;
/// A wrapper around an u16 to represent an audio packet id,
/// providing wrap around aware ordering helpers (see the impl below).
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct PacketId {
    /// Raw id as carried by the voice packet; wraps around at u16::MAX
    pub packet_id: u16
}
impl PacketId {
    /// Wraps a raw u16 id.
    pub fn new(packet_id: u16) -> PacketId {
        PacketId{ packet_id }
    }
    /// Wrap around aware "less than" comparison.
    ///
    /// `clipping_window` defines how close an id may sit to the u16 overflow
    /// boundary for the comparison to treat the sequence as wrapped.
    /// With `None` a plain numeric comparison is used.
    /// The exact edge behaviour is pinned by the unit tests in `mod tests`.
    pub fn is_less(&self, other: &Self, clipping_window: Option<u16>) -> bool {
        if let Some(window) = clipping_window {
            if self.packet_id < window {
                /* self sits right after a wrap: ids from just before the wrap
                   do not count as greater */
                self.packet_id < other.packet_id && other.packet_id < 0xFFFF - window
            } else if self.packet_id > 0xFFFF - window {
                /* self sits right before a wrap: small ids within the window
                   (already wrapped) still count as greater */
                self.packet_id < other.packet_id || self.packet_id.wrapping_add(window) >= other.packet_id
            } else {
                self.packet_id < other.packet_id
            }
        } else {
            self.packet_id < other.packet_id
        }
    }
    /// Absolute distance between two ids.
    ///
    /// With a `clipping_window`, ids on opposite sides of the u16 overflow
    /// boundary are measured across the wrap instead of through the whole
    /// number range. The result is symmetric (see unit tests).
    pub fn difference(&self, other: &Self, clipping_window: Option<u16>) -> u16 {
        if let Some(window) = clipping_window {
            if self.packet_id < window {
                return if other.packet_id > 0xFFFF - window {
                    /* distance measured across the wrap boundary */
                    (0xFFFF - other.packet_id) + self.packet_id + 1
                } else if other.packet_id > self.packet_id {
                    other.packet_id - self.packet_id
                } else {
                    self.packet_id - other.packet_id
                }
            } else if other.packet_id < window {
                return if self.packet_id > 0xFFFF - window {
                    /* mirror case of the branch above */
                    (0xFFFF - self.packet_id) + other.packet_id + 1
                } else if self.packet_id > other.packet_id {
                    self.packet_id - other.packet_id
                } else {
                    other.packet_id - self.packet_id
                }
            }
        }
        /* both ids are away from the boundary (or no window): plain distance */
        if self.packet_id > other.packet_id {
            self.packet_id - other.packet_id
        } else {
            other.packet_id - self.packet_id
        }
    }
}
impl Add<u16> for PacketId {
type Output = PacketId;
fn add(self, rhs: u16) -> Self::Output {
PacketId{ packet_id: self.packet_id.wrapping_add(rhs) }
}
}
impl Sub<u16> for PacketId {
type Output = PacketId;
fn sub(self, rhs: u16) -> Self::Output {
PacketId{ packet_id: self.packet_id.wrapping_sub(rhs) }
}
}
/// The voice codecs known to the protocol; discriminants are the wire values.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum Codec {
    /// Speex narrow band, not supported any more
    SpeexNarrow = 0x00,
    /// Speex wide band, not supported any more
    SpeexWide = 0x01,
    /// Speex ultra wide band, not supported any more
    SpeexUltraWide = 0x02,
    /// Celt, not supported any more
    Celt = 0x03,
    /// Opus using the VoIP quality
    Opus = 0x04,
    /// Opus using the stereo music quality
    OpusMusic = 0x05,
    /// A lossless compression codec, currently not yet supported, but planned to
    Flac = 0x10,
    /// The codec is unknown
    Unknown = 0xFF
}

impl Codec {
    /// Maps a wire value onto its codec, yielding `Codec::Unknown`
    /// for every value without a well known meaning.
    pub fn from_u8(value: u8) -> Codec {
        match value {
            0x00 => Codec::SpeexNarrow,
            0x01 => Codec::SpeexWide,
            0x02 => Codec::SpeexUltraWide,
            0x03 => Codec::Celt,
            0x04 => Codec::Opus,
            0x05 => Codec::OpusMusic,
            0x10 => Codec::Flac,
            _ => Codec::Unknown
        }
    }
}
/// One received voice packet with its header fields already parsed.
#[derive(PartialEq, Debug)]
pub struct AudioPacket {
    // Id of the speaking client
    pub client_id: u16,
    // Sequence id used for ordering and loss detection
    pub packet_id: PacketId,
    pub codec: Codec,
    // Encoded audio data; empty for stop packets
    pub payload: Vec<u8>,
}
impl AudioPacket {
    /// An empty payload marks the end of a voice transmission ("stop" packet).
    pub fn is_stop(&self) -> bool {
        self.payload.is_empty()
    }
}
#[cfg(test)]
mod tests {
    use crate::audio::PacketId;
    /// Asserts `a.is_less(&b, clipping_window) == result`.
    fn test_packet_id(a: u16, b: u16, result: bool, clipping_window: Option<u16>) {
        let a = PacketId{ packet_id: a };
        let b = PacketId{ packet_id: b };
        assert_eq!(a.is_less(&b, clipping_window), result);
    }
    /// Asserts the difference matches `expected` and is symmetric.
    fn test_packet_difference(a: u16, b: u16, expected: u16, clipping_window: Option<u16>) {
        let a = PacketId{ packet_id: a };
        let b = PacketId{ packet_id: b };
        assert_eq!(a.difference(&b, clipping_window), expected);
        assert_eq!(b.difference(&a, clipping_window), expected);
    }
    #[test]
    fn packet_id_is_less_basic() {
        test_packet_id(2, 3, true, None);
        test_packet_id(4, 3, false, None);
    }
    #[test]
    fn packet_id_is_less_clipping() {
        /* without a window the wrap boundary is not special */
        test_packet_id(0xFFFF, 0, false, None);
        test_packet_id(0xFFFF, 1, false, None);
        test_packet_id(0xFFFF, 2, false, None);
        /* a window makes ids just after the wrap count as greater */
        test_packet_id(0xFFFF, 2, true, Some(4));
        test_packet_id(0xFFFF, 2, false, Some(2));
        test_packet_id(2, 0xFFFF, false, Some(4));
        for i in 1..0x2Fu16 {
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF1), true, Some(2));
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF5), true, Some(6));
            test_packet_id(i.wrapping_add(0xFFF6), i.wrapping_add(0xFFF0), false, Some(6));
            test_packet_id(i.wrapping_add(0xFFF0), i.wrapping_add(0xFFF6), true, Some(6));
        }
    }
    #[test]
    fn packet_id_difference() {
        test_packet_difference(0, 0, 0, None);
        test_packet_difference(0xFFFF, 0, 0xFFFF, None);
        /* with a window the distance is measured across the wrap */
        test_packet_difference(0xFFFF, 0, 1, Some(1));
        for i in 0..0xFFu16 {
            test_packet_difference(0xFF8F_u16.wrapping_add(i), 0xFF9F_u16.wrapping_add(i), 16, Some(16));
        }
    }
}

View File

@ -0,0 +1 @@
pub mod opus;

View File

@ -0,0 +1,845 @@
// Copyright 2016 Tad Hardesty
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! High-level bindings for libopus.
//!
//! Only brief descriptions are included here. For detailed information, consult
//! the [libopus documentation](https://opus-codec.org/docs/opus_api-1.1.2/).
#![warn(missing_docs)]
#![allow(dead_code)]
#[cfg(target_arch = "wasm32")]
extern crate opus_prebuild_wasm as ffi;
#[cfg(not(target_arch = "wasm32"))]
extern crate opus_cmake_sys as ffi;
use std::marker::PhantomData;
use std::os::raw::c_int;
use std::ffi::CStr;
// ============================================================================
// Constants
// The CTL request numbers below mirror the OPUS_* macros from opus_defines.h;
// the trailing comment on each line notes the argument it carries.
// Generic CTLs
const OPUS_RESET_STATE: c_int = 4028; // void
const OPUS_GET_FINAL_RANGE: c_int = 4031; // out *u32
const OPUS_GET_BANDWIDTH: c_int = 4009; // out *i32
const OPUS_GET_SAMPLE_RATE: c_int = 4029; // out *i32
// Encoder CTLs
const OPUS_SET_BITRATE: c_int = 4002; // in i32
const OPUS_GET_BITRATE: c_int = 4003; // out *i32
const OPUS_SET_VBR: c_int = 4006; // in i32
const OPUS_GET_VBR: c_int = 4007; // out *i32
const OPUS_SET_VBR_CONSTRAINT: c_int = 4020; // in i32
const OPUS_GET_VBR_CONSTRAINT: c_int = 4021; // out *i32
const OPUS_SET_INBAND_FEC: c_int = 4012; // in i32
const OPUS_GET_INBAND_FEC: c_int = 4013; // out *i32
const OPUS_SET_PACKET_LOSS_PERC: c_int = 4014; // in i32
const OPUS_GET_PACKET_LOSS_PERC: c_int = 4015; // out *i32
const OPUS_GET_LOOKAHEAD: c_int = 4027; // out *i32
// Decoder CTLs
const OPUS_SET_GAIN: c_int = 4034; // in i32
const OPUS_GET_GAIN: c_int = 4045; // out *i32
const OPUS_GET_LAST_PACKET_DURATION: c_int = 4039; // out *i32
const OPUS_GET_PITCH: c_int = 4033; // out *i32
// Bitrate
const OPUS_AUTO: c_int = -1000;
const OPUS_BITRATE_MAX: c_int = -1;
/// The possible applications for the codec.
/// Discriminants equal the native OPUS_APPLICATION_* constants.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Application {
    /// Best for most VoIP/videoconference applications where listening quality
    /// and intelligibility matter most.
    Voip = 2048,
    /// Best for broadcast/high-fidelity application where the decoded audio
    /// should be as close as possible to the input.
    Audio = 2049,
    /// Only use when lowest-achievable latency is what matters most.
    LowDelay = 2051,
}
/// The available channel settings.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Channels {
    /// One channel.
    Mono = 1,
    /// Two channels, left and right.
    Stereo = 2,
}
/// The available bandwidth level settings.
/// Discriminants equal the native OPUS_BANDWIDTH_* constants.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Bandwidth {
    /// Auto/default setting.
    Auto = -1000,
    /// 4kHz bandpass.
    Narrowband = 1101,
    /// 6kHz bandpass.
    Mediumband = 1102,
    /// 8kHz bandpass.
    Wideband = 1103,
    /// 12kHz bandpass.
    Superwideband = 1104,
    /// 20kHz bandpass.
    Fullband = 1105,
}
impl Bandwidth {
    /// Maps a native bandwidth constant back onto the enum, `None` if unknown.
    fn from_int(value: i32) -> Option<Bandwidth> {
        Some(match value {
            -1000 => Bandwidth::Auto,
            1101 => Bandwidth::Narrowband,
            1102 => Bandwidth::Mediumband,
            1103 => Bandwidth::Wideband,
            1104 => Bandwidth::Superwideband,
            1105 => Bandwidth::Fullband,
            _ => return None,
        })
    }
    /// Like `from_int`, but turns an unknown value into a `BadArg` error
    /// attributed to `what` (the ffi call which produced the value).
    fn decode(value: i32, what: &'static str) -> Result<Bandwidth> {
        match Bandwidth::from_int(value) {
            Some(bandwidth) => Ok(bandwidth),
            None => Err(Error::bad_arg(what)),
        }
    }
}
/// Possible error codes.
/// Discriminants equal the native OPUS_* error constants.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ErrorCode {
    /// One or more invalid/out of range arguments.
    BadArg = -1,
    /// Not enough bytes allocated in the buffer.
    BufferTooSmall = -2,
    /// An internal error was detected.
    InternalError = -3,
    /// The compressed data passed is corrupted.
    InvalidPacket = -4,
    /// Invalid/unsupported request number.
    Unimplemented = -5,
    /// An encoder or decoder structure is invalid or already freed.
    InvalidState = -6,
    /// Memory allocation has failed.
    AllocFail = -7,
    /// An unknown failure.
    Unknown = -8,
}
impl ErrorCode {
    /// Maps a native return code onto the enum; anything unrecognized
    /// becomes `Unknown`.
    fn from_int(value: c_int) -> ErrorCode {
        use ErrorCode::*;
        match value {
            ffi::OPUS_BAD_ARG => BadArg,
            ffi::OPUS_BUFFER_TOO_SMALL => BufferTooSmall,
            ffi::OPUS_INTERNAL_ERROR => InternalError,
            ffi::OPUS_INVALID_PACKET => InvalidPacket,
            ffi::OPUS_UNIMPLEMENTED => Unimplemented,
            ffi::OPUS_INVALID_STATE => InvalidState,
            ffi::OPUS_ALLOC_FAIL => AllocFail,
            _ => Unknown,
        }
    }
    /// Get a human-readable error string for this error code.
    pub fn description(self) -> &'static str {
        // should always be ASCII and non-null for any input
        unsafe { CStr::from_ptr(ffi::opus_strerror(self as c_int)) }.to_str().unwrap()
    }
}
/// Possible bitrates.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Bitrate {
    /// Explicit bitrate choice (in bits/second).
    Bits(i32),
    /// Maximum bitrate allowed (up to maximum number of bytes for the packet).
    Max,
    /// Default bitrate decided by the encoder (not recommended).
    Auto,
}
/// Get the libopus version string.
///
/// Applications may look for the substring "-fixed" in the version string to
/// determine whether they have a fixed-point or floating-point build at
/// runtime.
pub fn version() -> &'static str {
    // version string should always be ASCII
    unsafe { CStr::from_ptr(ffi::opus_get_version_string()) }.to_str().unwrap()
}
// Invokes an ffi function and early-returns an `Error` for any negative
// return code; otherwise evaluates to the (non-negative) code.
macro_rules! ffi {
    ($f:ident $(, $rest:expr)*) => {
        match unsafe { ffi::$f($($rest),*) } {
            code if code < 0 => return Err(Error::from_code(stringify!($f), code)),
            code => code,
        }
    }
}
// Issues a CTL request against `$this.ptr`, early-returning an `Error`
// (annotated with the CTL name) for negative return codes.
macro_rules! ctl {
    ($f:ident, $this:ident, $ctl:ident, $($rest:expr),*) => {
        match unsafe { ffi::$f($this.ptr, $ctl, $($rest),*) } {
            code if code < 0 => return Err(Error::from_code(
                concat!(stringify!($f), "(", stringify!($ctl), ")"),
                code,
            )),
            _ => (),
        }
    }
}
// ============================================================================
// Encoder
// Encoder flavoured shorthand for the ctl! macro above.
macro_rules! enc_ctl {
    ($this:ident, $ctl:ident $(, $rest:expr)*) => {
        ctl!(opus_encoder_ctl, $this, $ctl, $($rest),*)
    }
}
/// An Opus encoder with associated state.
#[derive(Debug)]
pub struct Encoder {
    // Owned native encoder state, freed in Drop
    ptr: *mut ffi::OpusEncoder,
    channels: Channels,
}
impl Encoder {
/// Create and initialize an encoder.
pub fn new(sample_rate: u32, channels: Channels, mode: Application) -> Result<Encoder> {
let mut error = 0;
let ptr = unsafe { ffi::opus_encoder_create(
sample_rate as i32,
channels as c_int,
mode as c_int,
&mut error) };
if error != ffi::OPUS_OK || ptr.is_null() {
Err(Error::from_code("opus_encoder_create", error))
} else {
Ok(Encoder { ptr: ptr, channels: channels })
}
}
/// Encode an Opus frame.
pub fn encode(&mut self, input: &[i16], output: &mut [u8]) -> Result<usize> {
let len = ffi!(opus_encode, self.ptr,
input.as_ptr(), len(input) / self.channels as c_int,
output.as_mut_ptr(), len(output));
Ok(len as usize)
}
/// Encode an Opus frame from floating point input.
pub fn encode_float(&mut self, input: &[f32], output: &mut [u8]) -> Result<usize> {
let len = ffi!(opus_encode_float, self.ptr,
input.as_ptr(), len(input) / self.channels as c_int,
output.as_mut_ptr(), len(output));
Ok(len as usize)
}
/// Encode an Opus frame to a new buffer.
pub fn encode_vec(&mut self, input: &[i16], max_size: usize) -> Result<Vec<u8>> {
let mut output: Vec<u8> = vec![0; max_size];
let result = self.encode(input, output.as_mut_slice()).unwrap();
output.truncate(result);
Ok(output)
}
/// Encode an Opus frame from floating point input to a new buffer.
pub fn encode_vec_float(&mut self, input: &[f32], max_size: usize) -> Result<Vec<u8>> {
let mut output: Vec<u8> = vec![0; max_size];
let result = self.encode_float(input, output.as_mut_slice()).unwrap();
output.truncate(result);
Ok(output)
}
// ------------
// Generic CTLs
/// Reset the codec state to be equivalent to a freshly initialized state.
pub fn reset_state(&mut self) -> Result<()> {
enc_ctl!(self, OPUS_RESET_STATE);
Ok(())
}
/// Get the final range of the codec's entropy coder.
pub fn get_final_range(&mut self) -> Result<u32> {
let mut value: u32 = 0;
enc_ctl!(self, OPUS_GET_FINAL_RANGE, &mut value);
Ok(value)
}
/// Get the encoder's configured bandpass.
pub fn get_bandwidth(&mut self) -> Result<Bandwidth> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_BANDWIDTH, &mut value);
Bandwidth::decode(value, "opus_encoder_ctl(OPUS_GET_BANDWIDTH)")
}
/// Get the samping rate the encoder was intialized with.
pub fn get_sample_rate(&mut self) -> Result<u32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_SAMPLE_RATE, &mut value);
Ok(value as u32)
}
// ------------
// Encoder CTLs
/// Set the encoder's bitrate.
pub fn set_bitrate(&mut self, value: Bitrate) -> Result<()> {
let value: i32 = match value {
Bitrate::Auto => OPUS_AUTO,
Bitrate::Max => OPUS_BITRATE_MAX,
Bitrate::Bits(b) => b,
};
enc_ctl!(self, OPUS_SET_BITRATE, value);
Ok(())
}
/// Get the encoder's bitrate.
pub fn get_bitrate(&mut self) -> Result<Bitrate> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_BITRATE, &mut value);
Ok(match value {
OPUS_AUTO => Bitrate::Auto,
OPUS_BITRATE_MAX => Bitrate::Max,
_ => Bitrate::Bits(value),
})
}
/// Enable or disable variable bitrate.
pub fn set_vbr(&mut self, vbr: bool) -> Result<()> {
let value: i32 = if vbr { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_VBR, value);
Ok(())
}
/// Determine if variable bitrate is enabled.
pub fn get_vbr(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_VBR, &mut value);
Ok(value != 0)
}
/// Enable or disable constrained VBR.
pub fn set_vbr_constraint(&mut self, vbr: bool) -> Result<()> {
let value: i32 = if vbr { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_VBR_CONSTRAINT, value);
Ok(())
}
/// Determine if constrained VBR is enabled.
pub fn get_vbr_constraint(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_VBR_CONSTRAINT, &mut value);
Ok(value != 0)
}
/// Configures the encoder's use of inband forward error correction (FEC).
pub fn set_inband_fec(&mut self, value: bool) -> Result<()> {
let value: i32 = if value { 1 } else { 0 };
enc_ctl!(self, OPUS_SET_INBAND_FEC, value);
Ok(())
}
/// Gets encoder's configured use of inband forward error correction.
pub fn get_inband_fec(&mut self) -> Result<bool> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_INBAND_FEC, &mut value);
Ok(value != 0)
}
/// Sets the encoder's expected packet loss percentage.
pub fn set_packet_loss_perc(&mut self, value: i32) -> Result<()> {
enc_ctl!(self, OPUS_SET_PACKET_LOSS_PERC, value);
Ok(())
}
/// Gets the encoder's expected packet loss percentage.
pub fn get_packet_loss_perc(&mut self) -> Result<i32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_PACKET_LOSS_PERC, &mut value);
Ok(value)
}
/// Gets the total samples of delay added by the entire codec.
pub fn get_lookahead(&mut self) -> Result<i32> {
let mut value: i32 = 0;
enc_ctl!(self, OPUS_GET_LOOKAHEAD, &mut value);
Ok(value)
}
// TODO: Encoder-specific CTLs
}
impl Drop for Encoder {
    // Frees the native encoder state allocated in Encoder::new.
    fn drop(&mut self) {
        unsafe { ffi::opus_encoder_destroy(self.ptr) }
    }
}
// "A single codec state may only be accessed from a single thread at
// a time and any required locking must be performed by the caller. Separate
// streams must be decoded with separate decoder states and can be decoded
// in parallel unless the library was compiled with NONTHREADSAFE_PSEUDOSTACK
// defined."
//
// In other words, opus states may be moved between threads at will. A special
// compilation mode intended for embedded platforms forbids multithreaded use
// of the library as a whole rather than on a per-state basis, but the opus-sys
// crate does not use this mode.
unsafe impl Send for Encoder {}
// ============================================================================
// Decoder
// Decoder flavoured shorthand for the ctl! macro above.
macro_rules! dec_ctl {
    ($this:ident, $ctl:ident $(, $rest:expr)*) => {
        ctl!(opus_decoder_ctl, $this, $ctl, $($rest),*)
    }
}
/// An Opus decoder with associated state.
#[derive(Debug)]
pub struct Decoder {
    // Owned native decoder state, freed in Drop
    ptr: *mut ffi::OpusDecoder,
    channels: Channels,
}
impl Decoder {
    /// Create and initialize a decoder.
    pub fn new(sample_rate: u32, channels: Channels) -> Result<Decoder> {
        let mut error = 0;
        let ptr = unsafe { ffi::opus_decoder_create(
            sample_rate as i32,
            channels as c_int,
            &mut error) };
        if error != ffi::OPUS_OK || ptr.is_null() {
            Err(Error::from_code("opus_decoder_create", error))
        } else {
            Ok(Decoder { ptr: ptr, channels: channels })
        }
    }
    /// Decode an Opus packet.
    ///
    /// Passing an empty `input` hands a null pointer to libopus, which
    /// treats it as a lost packet and performs loss concealment.
    pub fn decode(&mut self, input: &[u8], output: &mut [i16], fec: bool) -> Result<usize> {
        let ptr = match input.len() {
            0 => std::ptr::null(),
            _ => input.as_ptr(),
        };
        let len = ffi!(opus_decode, self.ptr,
            ptr, len(input),
            output.as_mut_ptr(), len(output) / self.channels as c_int,
            fec as c_int);
        Ok(len as usize)
    }
    /// Decode an Opus packet with floating point output.
    ///
    /// See `decode` for the empty-input (loss concealment) behaviour.
    pub fn decode_float(&mut self, input: &[u8], output: &mut [f32], fec: bool) -> Result<usize> {
        let ptr = match input.len() {
            0 => std::ptr::null(),
            _ => input.as_ptr(),
        };
        let len = ffi!(opus_decode_float, self.ptr,
            ptr, len(input),
            output.as_mut_ptr(), len(output) / self.channels as c_int,
            fec as c_int);
        Ok(len as usize)
    }
    /// Get the number of samples of an Opus packet.
    pub fn get_nb_samples(&self, packet: &[u8]) -> Result<usize> {
        let len = ffi!(opus_decoder_get_nb_samples, self.ptr,
            packet.as_ptr(), packet.len() as i32);
        Ok(len as usize)
    }
    // ------------
    // Generic CTLs
    /// Reset the codec state to be equivalent to a freshly initialized state.
    pub fn reset_state(&mut self) -> Result<()> {
        dec_ctl!(self, OPUS_RESET_STATE);
        Ok(())
    }
    /// Get the final range of the codec's entropy coder.
    pub fn get_final_range(&mut self) -> Result<u32> {
        let mut value: u32 = 0;
        dec_ctl!(self, OPUS_GET_FINAL_RANGE, &mut value);
        Ok(value)
    }
    /// Get the decoder's last bandpass.
    pub fn get_bandwidth(&mut self) -> Result<Bandwidth> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_BANDWIDTH, &mut value);
        Bandwidth::decode(value, "opus_decoder_ctl(OPUS_GET_BANDWIDTH)")
    }
    /// Get the sampling rate the decoder was initialized with.
    pub fn get_sample_rate(&mut self) -> Result<u32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_SAMPLE_RATE, &mut value);
        Ok(value as u32)
    }
    // ------------
    // Decoder CTLs
    /// Configures decoder gain adjustment.
    ///
    /// Scales the decoded output by a factor specified in Q8 dB units. This has
    /// a maximum range of -32768 to 32768 inclusive, and returns `BadArg`
    /// otherwise. The default is zero indicating no adjustment. This setting
    /// survives decoder reset.
    ///
    /// `gain = pow(10, x / (20.0 * 256))`
    pub fn set_gain(&mut self, gain: i32) -> Result<()> {
        dec_ctl!(self, OPUS_SET_GAIN, gain);
        Ok(())
    }
    /// Gets the decoder's configured gain adjustment.
    pub fn get_gain(&mut self) -> Result<i32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_GAIN, &mut value);
        Ok(value)
    }
    /// Gets the duration (in samples) of the last packet successfully decoded
    /// or concealed.
    pub fn get_last_packet_duration(&mut self) -> Result<u32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_LAST_PACKET_DURATION, &mut value);
        Ok(value as u32)
    }
    /// Gets the pitch of the last decoded frame, if available.
    ///
    /// This can be used for any post-processing algorithm requiring the use of
    /// pitch, e.g. time stretching/shortening. If the last frame was not
    /// voiced, or if the pitch was not coded in the frame, then zero is
    /// returned.
    pub fn get_pitch(&mut self) -> Result<i32> {
        let mut value: i32 = 0;
        dec_ctl!(self, OPUS_GET_PITCH, &mut value);
        Ok(value)
    }
}
impl Drop for Decoder {
    // Frees the native decoder state allocated in Decoder::new.
    fn drop(&mut self) {
        unsafe { ffi::opus_decoder_destroy(self.ptr) }
    }
}
// See `unsafe impl Send for Encoder`.
unsafe impl Send for Decoder {}
// ============================================================================
// Packet Analysis
/// Analyze raw Opus packets.
pub mod packet {
use super::*;
use super::ffi;
use std::{ptr, slice};
/// Get the bandwidth of an Opus packet.
pub fn get_bandwidth(packet: &[u8]) -> Result<Bandwidth> {
if packet.len() < 1 {
return Err(Error::bad_arg("opus_packet_get_bandwidth"));
}
let bandwidth = ffi!(opus_packet_get_bandwidth, packet.as_ptr());
Bandwidth::decode(bandwidth, "opus_packet_get_bandwidth")
}
/// Get the number of channels from an Opus packet.
pub fn get_nb_channels(packet: &[u8]) -> Result<Channels> {
if packet.len() < 1 {
return Err(Error::bad_arg("opus_packet_get_nb_channels"));
}
let channels = ffi!(opus_packet_get_nb_channels, packet.as_ptr());
match channels {
1 => Ok(Channels::Mono),
2 => Ok(Channels::Stereo),
_ => Err(Error::bad_arg("opus_packet_get_nb_channels")),
}
}
/// Get the number of frames in an Opus packet.
pub fn get_nb_frames(packet: &[u8]) -> Result<usize> {
let frames = ffi!(opus_packet_get_nb_frames, packet.as_ptr(), len(packet));
Ok(frames as usize)
}
/// Get the number of samples of an Opus packet.
pub fn get_nb_samples(packet: &[u8], sample_rate: u32) -> Result<usize> {
let frames = ffi!(opus_packet_get_nb_samples,
packet.as_ptr(), len(packet),
sample_rate as c_int);
Ok(frames as usize)
}
/// Get the number of samples per frame from an Opus packet.
pub fn get_samples_per_frame(packet: &[u8], sample_rate: u32) -> Result<usize> {
if packet.len() < 1 {
return Err(Error::bad_arg("opus_packet_get_samples_per_frame"))
}
let samples = ffi!(opus_packet_get_samples_per_frame,
packet.as_ptr(), sample_rate as c_int);
Ok(samples as usize)
}
/// Parse an Opus packet into one or more frames.
pub fn parse(packet: &[u8]) -> Result<Packet> {
let mut toc: u8 = 0;
let mut frames = [ptr::null(); 48];
let mut sizes = [0i16; 48];
let mut payload_offset: i32 = 0;
let num_frames = ffi!(opus_packet_parse,
packet.as_ptr(), len(packet),
&mut toc, frames.as_mut_ptr(),
sizes.as_mut_ptr(), &mut payload_offset);
let mut frames_vec = Vec::with_capacity(num_frames as usize);
for i in 0..num_frames as usize {
frames_vec.push(unsafe { slice::from_raw_parts(frames[i], sizes[i] as usize) });
}
Ok(Packet {
toc: toc,
frames: frames_vec,
payload_offset: payload_offset as usize,
})
}
/// A parsed Opus packet, retuned from `parse`.
#[derive(Debug)]
pub struct Packet<'a> {
/// The TOC byte of the packet.
pub toc: u8,
/// The frames contained in the packet.
pub frames: Vec<&'a [u8]>,
/// The offset into the packet at which the payload is located.
pub payload_offset: usize,
}
/// Pad a given Opus packet to a larger size.
///
/// The packet will be extended from the first `prev_len` bytes of the
/// buffer into the rest of the available space.
pub fn pad(packet: &mut [u8], prev_len: usize) -> Result<usize> {
    let padded_len = ffi!(opus_packet_pad, packet.as_mut_ptr(),
        check_len(prev_len), len(packet));
    Ok(padded_len as usize)
}
/// Remove all padding from a given Opus packet and rewrite the TOC sequence
/// to minimize space usage.
pub fn unpad(packet: &mut [u8]) -> Result<usize> {
    let stripped_len = ffi!(opus_packet_unpad, packet.as_mut_ptr(), len(packet));
    Ok(stripped_len as usize)
}
}
// ============================================================================
// Float Soft Clipping
/// Soft-clipping to bring a float signal within the [-1,1] range.
#[derive(Debug)]
pub struct SoftClip {
    // Channel layout the clipper was configured for.
    channels: Channels,
    // Filter state: one entry per channel (stereo at most).
    memory: [f32; 2],
}
impl SoftClip {
    /// Initialize a new soft-clipping state.
    pub fn new(channels: Channels) -> SoftClip {
        SoftClip { channels: channels, memory: [0.0; 2] }
    }
    /// Apply soft-clipping to a float signal in place.
    ///
    /// `signal` is interpreted as interleaved frames; the integer division
    /// below silently ignores a trailing partial frame.
    pub fn apply(&mut self, signal: &mut [f32]) {
        unsafe { ffi::opus_pcm_soft_clip(
            signal.as_mut_ptr(),
            len(signal) / self.channels as c_int,
            self.channels as c_int,
            self.memory.as_mut_ptr()) };
    }
}
// ============================================================================
// Repacketizer
/// A repacketizer used to merge together or split apart multiple Opus packets.
#[derive(Debug)]
pub struct Repacketizer {
    // Owned handle to the native repacketizer state; released in Drop.
    ptr: *mut ffi::OpusRepacketizer,
}
impl Repacketizer {
    /// Create and initialize a repacketizer.
    pub fn new() -> Result<Repacketizer> {
        let ptr = unsafe { ffi::opus_repacketizer_create() };
        if ptr.is_null() {
            // A null pointer from libopus means the allocation failed.
            Err(Error::from_code("opus_repacketizer_create", ffi::OPUS_ALLOC_FAIL))
        } else {
            Ok(Repacketizer { ptr: ptr })
        }
    }
    /// Shortcut to combine several smaller packets into one larger one.
    ///
    /// Errors while adding an input packet (e.g. an invalid packet) are
    /// propagated to the caller instead of panicking (the previous
    /// implementation called `.unwrap()`).
    pub fn combine(&mut self, input: &[&[u8]], output: &mut [u8]) -> Result<usize> {
        let mut state = self.begin();
        for &packet in input {
            state.cat(packet)?;
        }
        state.out(output)
    }
    /// Begin using the repacketizer.
    pub fn begin<'rp, 'buf>(&'rp mut self) -> RepacketizerState<'rp, 'buf> {
        unsafe { ffi::opus_repacketizer_init(self.ptr); }
        RepacketizerState { rp: self, phantom: PhantomData }
    }
}
impl Drop for Repacketizer {
    // Release the native repacketizer state allocated in `new`.
    fn drop(&mut self) {
        unsafe { ffi::opus_repacketizer_destroy(self.ptr) }
    }
}
// See `unsafe impl Send for Encoder`.
// NOTE(review): sound only as long as the native state is accessed from one
// thread at a time, which the `&mut self` receivers enforce.
unsafe impl Send for Repacketizer {}
// To understand why these lifetime bounds are needed, imagine that the
// repacketizer keeps an internal Vec<&'buf [u8]>, which is added to by cat()
// and accessed by get_nb_frames(), out(), and out_range(). To prove that these
// lifetime bounds are correct, a dummy implementation with the same signatures
// but a real Vec<&'buf [u8]> rather than unsafe blocks may be substituted.
/// An in-progress repacketization.
#[derive(Debug)]
pub struct RepacketizerState<'rp, 'buf> {
    // Exclusive borrow of the repacketizer whose native state is mutated.
    rp: &'rp mut Repacketizer,
    // Ties this state to the buffers submitted via `cat`; see the lifetime
    // discussion in the comment block above.
    phantom: PhantomData<&'buf [u8]>,
}
impl<'rp, 'buf> RepacketizerState<'rp, 'buf> {
    /// Add a packet to the current repacketizer state.
    pub fn cat(&mut self, packet: &'buf [u8]) -> Result<()> {
        ffi!(opus_repacketizer_cat, self.rp.ptr,
            packet.as_ptr(), len(packet));
        Ok(())
    }
    /// Add a packet to the current repacketizer state, moving it.
    ///
    /// Errors from the underlying `cat` are propagated instead of
    /// panicking on invalid packets (previously `.unwrap()`).
    #[inline]
    pub fn cat_move<'b2>(self, packet: &'b2 [u8]) -> Result<RepacketizerState<'rp, 'b2>> where 'buf: 'b2 {
        let mut shorter = self;
        shorter.cat(packet)?;
        Ok(shorter)
    }
    /// Get the total number of frames contained in packet data submitted so
    /// far via `cat`.
    pub fn get_nb_frames(&mut self) -> usize {
        unsafe { ffi::opus_repacketizer_get_nb_frames(self.rp.ptr) as usize }
    }
    /// Construct a new packet from data previously submitted via `cat`.
    ///
    /// All previously submitted frames are used.
    pub fn out(&mut self, buffer: &mut [u8]) -> Result<usize> {
        let result = ffi!(opus_repacketizer_out, self.rp.ptr,
            buffer.as_mut_ptr(), len(buffer));
        Ok(result as usize)
    }
    /// Construct a new packet from data previously submitted via `cat`, with
    /// a manually specified subrange.
    ///
    /// The `end` index should not exceed the value of `get_nb_frames()`.
    pub fn out_range(&mut self, begin: usize, end: usize, buffer: &mut [u8]) -> Result<usize> {
        let result = ffi!(opus_repacketizer_out_range, self.rp.ptr,
            check_len(begin), check_len(end),
            buffer.as_mut_ptr(), len(buffer));
        Ok(result as usize)
    }
}
// ============================================================================
// TODO: Multistream API
// ============================================================================
// Error Handling
/// Opus error `Result` alias used by every fallible function in this module.
pub type Result<T> = std::result::Result<T, Error>;
/// An error generated by the Opus library.
#[derive(Debug)]
pub struct Error {
    // Name of the Opus function that produced the error.
    function: &'static str,
    // The underlying Opus error code.
    code: ErrorCode,
}
impl Error {
    // Shorthand for a BadArg error raised by our own argument checks.
    fn bad_arg(what: &'static str) -> Error {
        Error { function: what, code: ErrorCode::BadArg }
    }
    // Wrap a raw Opus error code returned by function `what`.
    fn from_code(what: &'static str, code: c_int) -> Error {
        Error { function: what, code: ErrorCode::from_int(code) }
    }
    /// Get the name of the Opus function from which the error originated.
    #[inline]
    pub fn function(&self) -> &'static str { self.function }
    /// Get a textual description of the error provided by Opus.
    #[inline]
    pub fn description(&self) -> &'static str { self.code.description() }
    /// Get the Opus error code of the error.
    #[inline]
    pub fn code(&self) -> ErrorCode { self.code }
}
impl std::fmt::Display for Error {
    /// Renders as `<function>: <description>`, e.g.
    /// `opus_repacketizer_out: buffer too small`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}: {}", self.function(), self.description())
    }
}
impl std::error::Error for Error {
    // NOTE(review): `Error::description` has been deprecated since Rust 1.42
    // in favor of `Display`; kept here for compatibility with older callers.
    fn description(&self) -> &str {
        self.code.description()
    }
}
/// Narrow a usize length to `c_int`, panicking (same message as before)
/// when the value does not survive the round trip.
fn check_len(val: usize) -> c_int {
    let len = val as c_int;
    assert!(len as usize == val, "length out of range: {}", val);
    len
}
#[inline]
// Slice length as c_int; panics via check_len if it does not fit.
fn len<T>(slice: &[T]) -> c_int {
    check_len(slice.len())
}

View File

@ -0,0 +1,31 @@
#![allow(dead_code)]
/* source and target should not be intersecting! */
/// Convert a sequenced (planar) buffer — all samples of channel 0, then all
/// of channel 1, … — into an interleaved one (c0, c1, c0, c1, …).
///
/// `src` must hold at least `sample_count * channel_count` samples and
/// `dest` must have room for the same amount (panics otherwise).
pub fn sequenced2interleaved(src: &[f32], dest: &mut [f32], sample_count: u32, channel_count: u32) {
    let samples = sample_count as usize;
    let channels = channel_count as usize;
    for channel in 0..channels {
        // The source channel plane is contiguous; scatter it into every
        // `channels`-th slot of the destination.
        let plane = &src[channel * samples..(channel + 1) * samples];
        for (sample, &value) in plane.iter().enumerate() {
            dest[sample * channels + channel] = value;
        }
    }
}
/* source and target should not be intersecting! */
/// Convert an interleaved buffer (c0, c1, c0, c1, …) into a sequenced
/// (planar) one — all samples of channel 0, then all of channel 1, ….
///
/// `src` must hold at least `sample_count * channel_count` samples and
/// `dest` must have room for the same amount (panics otherwise).
pub fn interleaved2sequenced(src: &[f32], dest: &mut [f32], sample_count: u32, channel_count: u32) {
    let samples = sample_count as usize;
    let channels = channel_count as usize;
    for channel in 0..channels {
        // Gather every `channels`-th source sample into the contiguous
        // destination plane of this channel.
        let plane = &mut dest[channel * samples..(channel + 1) * samples];
        for (sample, value) in plane.iter_mut().enumerate() {
            *value = src[sample * channels + channel];
        }
    }
}

View File

@ -0,0 +1,269 @@
use crate::audio::{AudioPacket, Codec};
use crate::audio::codec::opus::{Application, Decoder, Channels};
use std::cell::Cell;
use std::rc::Rc;
use std::cell::RefCell;
use std::fmt::Formatter;
#[derive(Debug, PartialEq)]
// Errors which can occur while decoding an audio packet.
pub enum AudioDecodeError {
    /// The codec id isn't known to the client.
    UnknownCodec,
    /// The codec is known, but no decoder is implemented for it.
    UnsupportedCodec,
    /// The decoder could not be created/initialized.
    DecoderInitializeFailed(String, bool /* just now initialized */),
    /// Decoding was attempted without an initialized decoder.
    DecoderUninitialized,
    /// The packet payload could not be parsed by the codec.
    InvalidPacket,
    /// Any other decoder error, carrying its textual description.
    UnknownDecodeError(String)
}
// Lifecycle state of a lazily created codec decoder.
enum DecoderState {
    // No decoder has been created yet.
    Unset,
    // The decoder has been created and initialized successfully.
    Initialized(Rc<RefCell<dyn AudioCodecDecoder>>),
    // Initialization failed; the message is cached so we don't retry.
    InitializeFailed(String)
}
impl std::fmt::Debug for DecoderState {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
DecoderState::Unset => {
f.write_str("DecoderState::Unset")
}
DecoderState::Initialized(_) => {
f.write_str("DecoderState::Initialized")
}
DecoderState::InitializeFailed(error) => {
f.write_str(&format!("DecoderState::InitializeFailed({:?})", error))
}
}
}
}
// Codec-dispatching decoder holding one lazily initialized decoder per codec.
pub struct AudioDecoder {
    // Decoder for the Opus voice (mono) codec.
    opus_decoder: DecoderState,
    // Decoder for the Opus music (stereo) codec.
    opus_music_decoder: DecoderState,
    // Codec of the last successfully decoded packet; used by decode_lost
    // to pick the decoder that should run FEC.
    last_decoded_codec: Codec,
}
impl AudioDecoder {
    /// Create a decoder with no codec decoders initialized yet.
    pub fn new() -> AudioDecoder {
        AudioDecoder {
            opus_music_decoder: DecoderState::Unset,
            opus_decoder: DecoderState::Unset,
            last_decoded_codec: Codec::Opus,
        }
    }
    /// Map a codec to its decoder state slot, rejecting codecs we know
    /// about but don't support as well as completely unknown ones.
    fn decoder_state(&mut self, codec: Codec) -> Result<&mut DecoderState, AudioDecodeError> {
        match codec {
            Codec::Opus => Ok(&mut self.opus_decoder),
            Codec::OpusMusic => Ok(&mut self.opus_music_decoder),
            Codec::SpeexNarrow | Codec::SpeexWide | Codec::SpeexUltraWide | Codec::Celt | Codec::Flac =>
                Err(AudioDecodeError::UnsupportedCodec),
            _ => Err(AudioDecodeError::UnknownCodec)
        }
    }
    /// Fetch the decoder for `codec`, lazily creating and initializing it
    /// when `initialize` is set. Initialization failures are cached so
    /// subsequent calls fail fast with the same message.
    fn get_decoder(&mut self, codec: Codec, initialize: bool) -> Result<Rc<RefCell<dyn AudioCodecDecoder>>, AudioDecodeError> {
        let decoder_state = self.decoder_state(codec)?;
        match decoder_state {
            DecoderState::Initialized(decoder) => Ok(decoder.clone()),
            DecoderState::InitializeFailed(error) =>
                Err(AudioDecodeError::DecoderInitializeFailed(error.clone(), false)),
            DecoderState::Unset => {
                if !initialize {
                    return Err(AudioDecodeError::DecoderUninitialized);
                }
                // Only Opus/OpusMusic reach this point; decoder_state()
                // already rejected every other codec.
                let decoder: Rc<RefCell<dyn AudioCodecDecoder>> = match codec {
                    Codec::Opus => Rc::new(RefCell::new(decoder::AudioOpusDecoder::new(Channels::Mono))),
                    Codec::OpusMusic => Rc::new(RefCell::new(decoder::AudioOpusDecoder::new(Channels::Stereo))),
                    _ => panic!("This should never be reached")
                };
                if let Err(error) = decoder.borrow_mut().initialize() {
                    *decoder_state = DecoderState::InitializeFailed(error.clone());
                    return Err(AudioDecodeError::DecoderInitializeFailed(error, true));
                }
                *decoder_state = DecoderState::Initialized(decoder.clone());
                Ok(decoder)
            }
        }
    }
    /// Eagerly initialize the decoder for `codec`.
    pub fn initialize_codec(&mut self, codec: Codec) -> Result<(), AudioDecodeError> {
        let _ = self.get_decoder(codec, true)?;
        Ok(())
    }
    /// Decode `packet` into `dest` (float32, interleaved), returning the
    /// decoded sample count per channel and the channel count.
    pub fn decode(&mut self, packet: &AudioPacket, dest: &mut Vec<f32>) -> Result<(usize /* samples */, u8 /* channels */), AudioDecodeError> {
        let audio_decoder = self.get_decoder(packet.codec, true)?;
        let result = audio_decoder.borrow_mut().decode(&packet.payload, dest)?;
        // Remember the codec so decode_lost targets the right decoder.
        self.last_decoded_codec = packet.codec;
        Ok(result)
    }
    /// Notify the active decoder that packets got lost so it can prepare
    /// forward error correction for the next decode.
    pub fn decode_lost(&mut self, _packet_count: usize) -> Result<(), AudioDecodeError> {
        /* if the decoder hasn't been initialized or something similar it's not worth creating one */
        if let Ok(decoder) = self.get_decoder(self.last_decoded_codec, false) {
            decoder.borrow_mut().decode_lost()?;
        }
        Ok(())
    }
}
// Common interface implemented by every codec-specific decoder.
trait AudioCodecDecoder {
    /// Initialize the decoder.
    /// On error occurrence, the error message will be returned
    fn initialize(&mut self) -> Result<(), String>;
    /// Decode the audio packet to float 32 interleaved samples.
    /// Returns the amount of samples decoded (per channel) together with
    /// the channel count of the decoded signal.
    fn decode(&mut self, src: &Vec<u8>, dest: &mut Vec<f32>) -> Result<(usize /* samples */, u8 /* channels */), AudioDecodeError>;
    /// Notify the decoder that a packet was lost so it may run FEC on the
    /// next decode call.
    fn decode_lost(&mut self) -> Result<(), AudioDecodeError>;
}
mod decoder {
    /* the opus implementation */
    use crate::audio::codec::opus::{Application, Decoder, Channels, ErrorCode};
    use crate::audio::decoder::{AudioCodecDecoder, AudioDecodeError};
    use log::warn;
    // Opus-backed implementation of AudioCodecDecoder.
    pub struct AudioOpusDecoder {
        // Channel layout the decoder outputs (Mono for voice, Stereo for music).
        pub channel_count: Channels,
        // Output sample rate in Hz (fixed to 48kHz in new()).
        pub sample_rate: u32,
        // Lazily created native decoder; None until initialize() succeeds.
        decoder: Option<Decoder>,
        /// If it's set it indicates that we have to do some FEC decoding.
        /// The option will hold the packet size, used for the FEC decoding.
        fec_decode: Option<usize>
    }
    impl AudioOpusDecoder {
        /// Create an uninitialized decoder for the given channel layout.
        pub fn new(channels: Channels) -> AudioOpusDecoder {
            AudioOpusDecoder {
                decoder: None,
                channel_count: channels,
                sample_rate: 48_000,
                fec_decode: None
            }
        }
    }
    impl AudioCodecDecoder for AudioOpusDecoder {
        fn initialize(&mut self) -> Result<(), String> {
            let decoder = Decoder::new(self.sample_rate, self.channel_count).map_err(|error| String::from(error.description()))?;
            self.decoder = Some(decoder);
            Ok(())
        }
        fn decode(&mut self, src: &Vec<u8>, dest: &mut Vec<f32>) -> Result<(usize, u8), AudioDecodeError> {
            if let Some(ref mut decoder) = self.decoder {
                // Query the sample count up front so dest can be sized exactly.
                let sample_count = decoder.get_nb_samples(src.as_slice())
                    .map_err(|_error| AudioDecodeError::InvalidPacket)?;
                let mut total_sample_count = 0;
                if let Some(fec_size) = self.fec_decode {
                    self.fec_decode = None;
                    // Reserve room for the FEC-reconstructed samples plus the
                    // regular payload of this packet.
                    dest.resize(
                        fec_size as usize * self.channel_count as usize +
                        sample_count * self.channel_count as usize, 0f32);
                    // First recover the lost packet via forward error correction.
                    match decoder.decode_float(src.as_slice(), &mut dest[0..(fec_size * self.channel_count as usize)], true) {
                        Ok(sample_count) => total_sample_count += sample_count,
                        Err(error) => {
                            // Best effort: a failed FEC pass only loses the
                            // reconstructed samples, not the current packet.
                            // NOTE(review): on failure `dest` keeps the unused
                            // zeroed FEC region; callers must honor the returned
                            // sample count rather than dest.len().
                            warn!("Failed to FEC decode opus packet: {}", error.description());
                        }
                    };
                } else {
                    dest.resize(sample_count * self.channel_count as usize, 0f32);
                }
                // Decode the actual packet behind the (optional) FEC samples.
                match decoder.decode_float(src.as_slice(), &mut dest[(total_sample_count * self.channel_count as usize)..], false) {
                    Ok(sample_count) => Ok((total_sample_count + sample_count, self.channel_count as u8)),
                    Err(error) => match error.code() {
                        ErrorCode::InvalidPacket => {
                            Err(AudioDecodeError::InvalidPacket)
                        }
                        _ => {
                            Err(AudioDecodeError::UnknownDecodeError(String::from(error.description())))
                        }
                    }
                }
            } else {
                Err(AudioDecodeError::DecoderUninitialized)
            }
        }
        fn decode_lost(&mut self) -> Result<(), AudioDecodeError> {
            if let Some(ref mut decoder) = self.decoder {
                /* 960 is the default packet size for TeaSpeak */
                let packet_size = decoder.get_last_packet_duration().unwrap_or(960) as usize;
                // Remember the size so the next decode() runs a FEC pass first.
                self.fec_decode = Some(packet_size);
                Ok(())
            } else {
                Err(AudioDecodeError::DecoderUninitialized)
            }
        }
    }
}
// Gate the test module behind cfg(test) (it was missing, unlike the
// packet_queue test module), so the tests are not compiled into the library.
#[cfg(test)]
mod tests {
    use crate::audio::decoder::{AudioDecoder, AudioDecodeError};
    use crate::audio::{AudioPacket, PacketId, Codec};
    /// Garbage input must yield proper errors instead of panicking.
    #[test]
    fn test_invalid_packet() {
        let mut decoder = AudioDecoder::new();
        let mut buffer: Vec<f32> = Vec::new();
        // An empty opus payload is not a valid packet.
        let packet = AudioPacket {
            codec: Codec::Opus,
            payload: vec![],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        assert_eq!(decoder.decode(&packet, &mut buffer), Err(AudioDecodeError::InvalidPacket));
        // A minimal, structurally valid payload should decode.
        let packet = AudioPacket {
            codec: Codec::Opus,
            payload: vec![0, 0, 1],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        decoder.decode(&packet, &mut buffer).expect("expected a result");
        // Flac is known, but no decoder is implemented for it.
        let packet = AudioPacket {
            codec: Codec::Flac,
            payload: vec![],
            packet_id: PacketId::new(0),
            client_id: 0
        };
        assert_eq!(decoder.decode(&packet, &mut buffer), Err(AudioDecodeError::UnsupportedCodec));
    }
}

View File

@ -0,0 +1,529 @@
#![allow(dead_code)]
use std::task::{Poll, Context, Waker};
use std::collections::VecDeque;
use std::ops::{ Deref };
use std::time::{SystemTime, Duration, UNIX_EPOCH};
use futures::{FutureExt};
use crate::audio::{AudioPacket, Codec, PacketId};
#[derive(Debug, PartialEq)]
// Events emitted by the queue, consumed via poll_event()/pop_event().
pub enum AudioPacketQueueEvent {
    /// The next audio packet which should be replayed.
    AudioPacket(Box<AudioPacket>),
    /// One or more consecutive packets have been declared as lost.
    PacketsLost(
        PacketLostReason /* reason for these packets to be counted as lost */,
        u16 /* first lost packet id */,
        u16 /* lost packets */
    )
}
#[derive(Debug, PartialEq)]
pub enum PacketLostReason {
    /// The packets have failed to be received within a certain timeout
    Timeout,
    /// A packet sequence has been found after this packet.
    /// We've declared this packet as lost
    Sequence,
    /// We have enough newer packets which can be replayed.
    /// This is also the reason used when resetting the sequence.
    ForceEnqueue
}
#[derive(Debug)]
pub struct AudioPacketQueue {
    /// The window size for packet id clipping.
    /// Must be at least 1!
    pub clipping_window: u16,
    /// Number of packets in a sequence to skip ahead to these packets and count the missing pieces as dropped
    pub skip_sequence_length: u32,
    /// Number of buffered packets at which the first packet is forcefully replayed
    pub force_enqueue_buffer_length: u32,
    /// Timeout in milliseconds after which a packet will forcefully be replayed.
    /// The missing slices will be counted as lost
    pub packet_buffer_timeout: u32,
    /// Max size of the event queue
    pub event_queue_max_size: u32,
    /// Timestamp (milliseconds since epoch) of the last handled packet
    last_packet_timestamp: i64,
    /// Last packet which has been handled
    last_packet_id: PacketId,
    /// The event waker will be called as soon as new events have been scheduled.
    event_waker: Option<Waker>,
    /// The event queue contains all audio queue events which need to get polled
    event_queue: VecDeque<AudioPacketQueueEvent>,
    /// Buffer for the out of order packets, kept sorted by packet id.
    /// The buffer should be at least the capacity of force_enqueue_buffer_length + 1 to prevent
    /// unwanted allocations.
    packet_buffer: VecDeque<EnqueuedPacket>,
    /// A timer which is used for processing non sequence packets after a certain timeout
    packet_buffer_timer: wasm_timer::Delay
}
#[derive(Debug, PartialEq)]
// Reasons why enqueue_packet() may reject a packet.
pub enum EnqueueError {
    /// A packet with that id already exists
    PacketAlreadyExists,
    /// The packet is too old
    PacketTooOld,
    /// The packet id is too far ahead of the current sequence;
    /// contains the current sequence packet id
    PacketSequenceMismatch(PacketId),
    /// Event queue is too long (You need to poll some events first)
    EventQueueOverflow
}
// Current wall-clock time in milliseconds since the Unix epoch.
// On wasm32 js_sys::Date::now() is used, since SystemTime::now() is not
// usable on wasm32-unknown-unknown.
fn current_time_millis() -> i64 {
    #[cfg(target_arch = "wasm32")]
    let value = js_sys::Date::now() as i64;
    #[cfg(not(target_arch = "wasm32"))]
    let value = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as i64;
    value
}
#[derive(Debug)]
// A packet waiting in the reorder buffer.
struct EnqueuedPacket {
    /// The actual audio packet
    packet: Box<AudioPacket>,
    /// The timestamp of the enqueueing used for the packet timeout
    enqueue_timestamp: i64
}
impl Deref for EnqueuedPacket {
    type Target = AudioPacket;
    /// Lets an enqueued packet be used wherever an `&AudioPacket` is expected.
    fn deref(&self) -> &Self::Target {
        &*self.packet
    }
}
impl AudioPacketQueue {
    // Default window size for wrap-around aware packet id comparisons.
    const DEFAULT_CLIPPING_WINDOW: u16 = 256;
    pub fn new() -> AudioPacketQueue {
        let instance = AudioPacketQueue {
            clipping_window: AudioPacketQueue::DEFAULT_CLIPPING_WINDOW,
            skip_sequence_length: 3,
            force_enqueue_buffer_length: 5,
            packet_buffer_timeout: 50,
            event_queue_max_size: 64,
            /* A zero id with a zero timestamp means "no sequence yet";
               the first enqueued packet starts a fresh sequence. */
            last_packet_id: PacketId{ packet_id: 0 },
            last_packet_timestamp: 0,
            packet_buffer: VecDeque::with_capacity(30),
            packet_buffer_timer: wasm_timer::Delay::new(Duration::from_millis(0)),
            event_waker: None,
            event_queue: VecDeque::with_capacity(30)
        };
        instance
    }
    /// Enqueue a new audio packet
    pub fn enqueue_packet(&mut self, packet: Box<AudioPacket>) -> Result<(), EnqueueError> {
        let current_time = current_time_millis();
        /* check if we're expecting a sequence (last packet handled less than a second ago) */
        if current_time - self.last_packet_timestamp < 1000 {
            if !self.last_packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
                // At or behind the replay position.
                return Err(EnqueueError::PacketTooOld);
            } else if self.last_packet_id.difference(&packet.packet_id, Some(self.clipping_window)) > 20 {
                // Too far ahead of the current sequence to be plausible.
                return Err(EnqueueError::PacketSequenceMismatch(self.last_packet_id.clone()));
            }
        } else {
            /* we've a new sequence */
            self.last_packet_timestamp = current_time;
            self.last_packet_id = packet.packet_id - 1; /* reduce the last packet id by one so this packet is the next packet */
        }
        // Find the insert position which keeps the buffer sorted by packet id.
        let mut index = 0;
        while index < self.packet_buffer.len() {
            let element = &self.packet_buffer[index];
            if !element.packet_id.is_less(&packet.packet_id, Some(self.clipping_window)) {
                break;
            }
            index += 1;
        }
        let packet = EnqueuedPacket{ packet, enqueue_timestamp: current_time };
        if self.event_queue.len() > self.event_queue_max_size as usize {
            // The consumer stopped polling; refuse instead of growing unboundedly.
            return Err(EnqueueError::EventQueueOverflow);
        }
        if index >= self.packet_buffer.len() {
            self.packet_buffer.push_back(packet);
        } else if self.packet_buffer[index].packet_id == packet.packet_id {
            return Err(EnqueueError::PacketAlreadyExists);
        } else {
            self.packet_buffer.insert(index, packet);
        }
        self.try_assemble_packets();
        Ok(())
    }
    /// Reset the current packet sequence.
    /// If you want to enqueue the pending packet buffer, which sequence hasn't been finished yet,
    /// set the first parameter to false
    pub fn reset_sequence(&mut self, drop_pending_buffers: bool) {
        self.last_packet_id = PacketId{ packet_id: 0 };
        self.last_packet_timestamp = 0;
        if drop_pending_buffers {
            self.clear_buffers();
        } else if !self.packet_buffer.is_empty() {
            // Collect first: advance_last_packet() needs &mut self as well.
            for packet in self.packet_buffer.drain(..).collect::<Vec<EnqueuedPacket>>() {
                self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::ForceEnqueue);
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
            }
            if let Some(waker) = &self.event_waker {
                waker.wake_by_ref();
            }
        }
    }
    /// Advance the last packet id to the target packet id.
    /// If the new packet id isn't consecutive to the current one, a PacketsLost event will be enqueued.
    /// The event waker will not be called.
    fn advance_last_packet(&mut self, packet_id: PacketId, drop_reason: PacketLostReason) {
        if self.last_packet_id + 1 != packet_id {
            self.event_queue.push_back(AudioPacketQueueEvent::PacketsLost(
                drop_reason,
                self.last_packet_id.packet_id.wrapping_add(1),
                self.last_packet_id.difference(&packet_id, Some(self.clipping_window)) - 1
            ));
        }
        self.last_packet_id = packet_id;
    }
    /// Clear all pending audio packets
    fn clear_buffers(&mut self) {
        self.packet_buffer.clear();
    }
    /// Get the number of pending events
    pub fn pending_events(&self) -> usize {
        self.event_queue.len()
    }
    /// Get the next event, mainly used for testing purposes
    pub fn pop_event(&mut self) -> Option<AudioPacketQueueEvent> {
        self.event_queue.pop_front()
    }
    /// Poll for events.
    /// This method should be invoked regularly, else not every packet will be processed properly.
    pub fn poll_event(&mut self, cx: &mut Context<'_>) -> Poll<AudioPacketQueueEvent> {
        // A fired timer means buffered packets may have timed out.
        if let Poll::Ready(_) = self.packet_buffer_timer.poll_unpin(cx) {
            self.update_packet_timeouts(Some(cx));
        }
        if let Some(event) = self.pop_event() {
            Poll::Ready(event)
        } else {
            // Remember the waker so the enqueue/assembly paths can notify us.
            self.event_waker = Some(cx.waker().clone());
            Poll::Pending
        }
    }
    // Replay every packet that became playable: first all directly consecutive
    // head packets, then sequences of skip_sequence_length consecutive ids,
    // then force-flush when the buffer exceeds force_enqueue_buffer_length.
    fn try_assemble_packets(&mut self) {
        while let Some(head) = self.packet_buffer.front() {
            if head.packet_id == self.last_packet_id + 1 {
                /* yeah, we received the next packet in the sequence */
                let packet = self.packet_buffer.pop_front().unwrap();
                self.last_packet_id = packet.packet_id;
                self.last_packet_timestamp = current_time_millis();
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                if let Some(waker) = &self.event_waker {
                    waker.wake_by_ref();
                }
            } else {
                break;
            }
        }
        if self.packet_buffer.is_empty() {
            return;
        }
        /* test if somewhere are more than three packets in a row */
        {
            let mut index = 0;
            let mut sequence_index = 0;
            let mut sequence_count = 0;
            let mut expected_packet_id = self.packet_buffer.front().unwrap().packet_id;
            while index < self.packet_buffer.len() {
                if self.packet_buffer[index].packet_id != expected_packet_id {
                    // Gap found: restart the candidate sequence here.
                    sequence_index = index;
                    sequence_count = 1;
                    expected_packet_id = self.packet_buffer[index].packet_id + 1;
                } else {
                    sequence_count += 1;
                    expected_packet_id = expected_packet_id + 1;
                    if sequence_count == self.skip_sequence_length {
                        break;
                    }
                }
                index += 1;
            }
            if sequence_count == self.skip_sequence_length {
                /* okey we can skip */
                /* include the first packet of the sequence */
                let packets: Vec<EnqueuedPacket> = self.packet_buffer.drain(0..(sequence_index + 1)).collect();
                for packet in packets {
                    self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::Sequence);
                    self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                }
                self.last_packet_timestamp = current_time_millis();
                if let Some(waker) = &self.event_waker {
                    waker.wake_by_ref();
                }
                /* now lets replay the next sequence */
                self.try_assemble_packets();
                return;
            } else {
                /* we've no sequence in a row */
            }
        }
        /* force replay first packet, the a bit seek behind mode */
        {
            if self.packet_buffer.len() > self.force_enqueue_buffer_length as usize {
                let packets: Vec<EnqueuedPacket> = self.packet_buffer.drain(0..(self.packet_buffer.len() - self.force_enqueue_buffer_length as usize)).collect();
                for packet in packets {
                    self.advance_last_packet(packet.packet_id.clone(), PacketLostReason::ForceEnqueue);
                    self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                    self.last_packet_timestamp = current_time_millis();
                }
            }
        }
        self.update_packet_timeouts(None);
    }
    // Flush timed-out head packets and re-arm the timeout timer.
    fn update_packet_timeouts(&mut self, cx: Option<&mut Context<'_>>) {
        // Packets enqueued before this instant have timed out.
        let timeout_time = current_time_millis() - self.packet_buffer_timeout as i64;
        let mut packet_scheduled = false;
        // NOTE(review): the unconditional `break` below means at most ONE
        // timed-out packet is flushed per call — presumably the loop was meant
        // to pop until the head is no longer timed out; confirm the intent.
        while let Some(head) = self.packet_buffer.front() {
            if timeout_time > head.enqueue_timestamp {
                let packet = self.packet_buffer.pop_front().unwrap();
                self.advance_last_packet(packet.packet_id, PacketLostReason::Timeout);
                self.event_queue.push_back(AudioPacketQueueEvent::AudioPacket(packet.packet));
                packet_scheduled = true;
            }
            break;
        }
        if packet_scheduled {
            if let Some(waker) = &self.event_waker {
                waker.wake_by_ref();
            }
        }
        if let Some(head) = self.packet_buffer.front() {
            let current_time = current_time_millis();
            if let Some(cx) = cx {
                let passed_millis = current_time - head.enqueue_timestamp;
                // NOTE(review): this compares an elapsed duration against the
                // absolute `timeout_time` timestamp — it looks like it should be
                // `passed_millis >= self.packet_buffer_timeout as i64`. As
                // written the branch is effectively never taken, and when
                // `passed_millis` exceeds the timeout the subtraction below
                // goes negative and the `as u64` cast wraps to a huge delay.
                // Confirm and fix.
                if passed_millis >= timeout_time {
                    cx.waker().wake_by_ref();
                } else {
                    self.packet_buffer_timer.reset(Duration::from_millis((self.packet_buffer_timeout as i64 - passed_millis) as u64));
                    let _ = self.packet_buffer_timer.poll_unpin(cx);
                }
            }
        }
    }
}
// NOTE(review): manual Send assertion — sound only if every field (including
// `wasm_timer::Delay` and the stored `Waker`) may move across threads; the
// compiler did not derive this automatically, so confirm.
unsafe impl Send for AudioPacketQueue {}
impl Drop for AudioPacketQueue {
    // Drop any packets still waiting in the reorder buffer.
    fn drop(&mut self) {
        self.clear_buffers();
    }
}
#[cfg(test)]
mod tests {
    use super::{ AudioPacketQueue, EnqueueError };
    use crate::audio::packet_queue::{AudioPacketQueueEvent, PacketLostReason};
    use tokio::future::poll_fn;
    use tokio_test::block_on;
    use std::sync::{Arc, Mutex};
    use ntest::timeout;
    use crate::audio::{AudioPacket, PacketId, Codec};
    /// Build and enqueue a dummy opus packet with the given id.
    fn enqueue_packet(queue: &mut AudioPacketQueue, packet_id: u16) -> Result<(), EnqueueError> {
        queue.enqueue_packet(Box::new(AudioPacket {
            packet_id: PacketId{ packet_id },
            client_id: 0,
            codec: Codec::Opus,
            payload: vec![]
        }))
    }
    // Renamed from the typo'd `darin_queued_events`; the parameter was also
    // `_`-prefixed although it is actually used.
    /// Drain all pending events; fails when events occur but none were expected.
    fn drain_queued_events(queue: &mut AudioPacketQueue, expect_events: bool) {
        let mut events_processed = 0;
        while let Some(event) = queue.pop_event() {
            match event {
                AudioPacketQueueEvent::AudioPacket(packet) => {
                    println!("Having an audio packet: {:?}", packet);
                },
                AudioPacketQueueEvent::PacketsLost(reason, first_packet, count) => {
                    println!("{:?} packets got lost due to {:?} (first packet id: {:?})", count, reason, first_packet);
                }
            }
            events_processed += 1;
        }
        if !expect_events && events_processed > 0 {
            assert!(false, "we haven't expected any events but processed {} events", events_processed);
        }
    }
    /// Expect the next event to be an audio packet, optionally with a specific id.
    fn expect_queued_packet_event(queue: &mut AudioPacketQueue, packet_id: Option<u16>) {
        if let Some(event) = queue.pop_event() {
            match event {
                AudioPacketQueueEvent::AudioPacket(packet) => {
                    if let Some(packet_id) = packet_id {
                        assert_eq!(packet_id, packet.packet_id.packet_id);
                    } else {
                        println!("Having an audio packet: {:?}", packet);
                    }
                },
                _ => {
                    assert!(false, "Expected a packet event");
                }
            }
        } else {
            assert!(false, "expected an event, but there wasn't one");
        }
    }
    /// Out-of-order inserts across the u16 wrap-around boundary.
    #[test]
    //#[timeout(3000)]
    fn queue_insert_0() {
        let mut queue = AudioPacketQueue::new();
        enqueue_packet(&mut queue, 0xFFFC).unwrap();
        //enqueue_packet(&mut queue, 0xFFFF).unwrap();
        //enqueue_packet(&mut queue, 0xFFFD).unwrap();
        enqueue_packet(&mut queue, 0xFFFE).unwrap();
        enqueue_packet(&mut queue, 2).unwrap();
        enqueue_packet(&mut queue, 0).unwrap();
        enqueue_packet(&mut queue, 2).expect_err("packet should be already registered");
        enqueue_packet(&mut queue, 1).unwrap();
        enqueue_packet(&mut queue, 2).expect_err("packet should be already registered");
        expect_queued_packet_event(&mut queue, Some(0xFFFC));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 0xFFFD, 1));
        expect_queued_packet_event(&mut queue, Some(0xFFFE));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 0xFFFF, 1));
        expect_queued_packet_event(&mut queue, Some(0));
        expect_queued_packet_event(&mut queue, Some(1));
        expect_queued_packet_event(&mut queue, Some(2));
        drain_queued_events(&mut queue, false);
    }
    /// Force-enqueue once the buffer grows past force_enqueue_buffer_length.
    #[test]
    fn test_queue_force_window() {
        let mut queue = AudioPacketQueue::new();
        queue.force_enqueue_buffer_length = 5;
        queue.skip_sequence_length = 3;
        enqueue_packet(&mut queue, 0).expect("failed to enqueue packet");
        expect_queued_packet_event(&mut queue, Some(0));
        enqueue_packet(&mut queue, 2).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 4).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 6).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 8).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 10).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event(), None);
        enqueue_packet(&mut queue, 12).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::ForceEnqueue, 1, 1));
        expect_queued_packet_event(&mut queue, Some(2));
        enqueue_packet(&mut queue, 13).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::ForceEnqueue, 3, 1));
        expect_queued_packet_event(&mut queue, Some(4));
        enqueue_packet(&mut queue, 14).expect("failed to enqueue packet");
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 5, 1));
        expect_queued_packet_event(&mut queue, Some(6));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 7, 1));
        expect_queued_packet_event(&mut queue, Some(8));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 9, 1));
        expect_queued_packet_event(&mut queue, Some(10));
        assert_eq!(queue.pop_event().unwrap(), AudioPacketQueueEvent::PacketsLost(PacketLostReason::Sequence, 11, 1));
        expect_queued_packet_event(&mut queue, Some(12));
        expect_queued_packet_event(&mut queue, Some(13));
        expect_queued_packet_event(&mut queue, Some(14));
        drain_queued_events(&mut queue, false);
    }
    /// A buffered gap must eventually be reported as a Timeout loss.
    #[test]
    #[timeout(500)]
    fn test_queue_packet_timeout() {
        block_on(async {
            let queue = Arc::new(Mutex::new(AudioPacketQueue::new()));
            {
                let mut queue = queue.lock().unwrap();
                enqueue_packet(&mut queue, 0).expect("failed to enqueue packet");
                expect_queued_packet_event(&mut queue, Some(0));
                drain_queued_events(&mut queue, false);
                enqueue_packet(&mut queue, 2).expect("failed to enqueue packet");
                drain_queued_events(&mut queue, false);
            }
            {
                let queue = queue.clone();
                let next_event = poll_fn(move |cx| { queue.lock().unwrap().poll_event(cx) }).await;
                assert_eq!(next_event, AudioPacketQueueEvent::PacketsLost(PacketLostReason::Timeout, 1, 1));
            }
            {
                let mut queue = queue.lock().unwrap();
                drain_queued_events(&mut queue, true);
            }
        });
    }
}

View File

@ -0,0 +1,150 @@
use wasm_bindgen::prelude::*;
use std::collections::HashMap;
use std::sync::{ Arc, Mutex, MutexGuard };
use std::sync::atomic::{ AtomicU32, Ordering };
use std::cell::RefCell;
use once_cell::sync::Lazy;
use crate::audio::packet_queue::{AudioPacketQueue, AudioPacketQueueEvent, EnqueueError};
use futures::task::Context;
use futures;
use crate::audio::decoder::{AudioDecoder, AudioDecodeError};
use wasm_bindgen_futures::spawn_local;
use futures::future::{ poll_fn };
use crate::audio::{AudioPacket, Codec};
use log::*;
use crate::audio::converter::interleaved2sequenced;
pub type AudioClientId = u32;
// Consumer interface for decoded audio produced by an AudioClient.
pub trait AudioCallback {
    /// Allocate the vector the result should be stored into
    fn callback_buffer(&mut self) -> &mut Vec<f32>;
    // Presumably invoked after decoded samples were written into the
    // callback buffer — confirm at the (not visible) call site.
    fn handle_audio(&mut self, sample_count: usize, channel_count: u8);
    // Invoked when a stop packet ends the current audio stream.
    fn handle_stop(&mut self);
}
// Pairing of a JS callback with its sample buffer.
// NOTE(review): not referenced in this part of the file — confirm it is
// actually used elsewhere, otherwise it is dead code.
struct CallbackData {
    callback: Option<js_sys::Function>,
    buffer: Vec<f32>
}
// A remote audio client: jitter queue plus decoder state.
pub struct AudioClient {
    // Unique id assigned at construction; also the registry key.
    pub client_id: AudioClientId,
    // Reorder/jitter queue for incoming audio packets.
    packet_queue: Mutex<AudioPacketQueue>,
    // Decoder turning queued packets into PCM samples.
    decoder: Mutex<AudioDecoder>,
    // Handle used to cancel the background processing future, if running.
    audio_process_abort_handle: Mutex<Option<futures::future::AbortHandle>>,
    // Consumer of the decoded audio, if any.
    audio_callback: Mutex<Option<Box<dyn AudioCallback>>>,
}
// Global registry of all living audio clients, keyed by client id.
type AudioClientRegistry = Mutex<HashMap<AudioClientId, Arc<AudioClient>>>;
// Monotonic source for unique client ids.
static AUDIO_CLIENT_ID: AtomicU32 = AtomicU32::new(1);
static AUDIO_CLIENT_INSTANCES: Lazy<AudioClientRegistry> = Lazy::new(|| Mutex::new(HashMap::new()));
impl AudioClient {
pub fn find_client(client_id: AudioClientId) -> Option<Arc<AudioClient>> {
AUDIO_CLIENT_INSTANCES.lock().unwrap().get(&client_id).map(|client| client.clone())
}
    // Create a client, assign it a fresh id and register it globally so it
    // can later be resolved via find_client().
    pub fn new() -> Arc<AudioClient> {
        // Relaxed ordering suffices: only uniqueness of the id matters.
        let client_id = AUDIO_CLIENT_ID.fetch_add(1, Ordering::Relaxed);
        let instance = Arc::new(AudioClient {
            client_id,
            packet_queue: Mutex::new(AudioPacketQueue::new()),
            decoder: Mutex::new(AudioDecoder::new()),
            audio_callback: Mutex::new(None),
            audio_process_abort_handle: Mutex::new(None)
        });
        AUDIO_CLIENT_INSTANCES.lock().unwrap().insert(client_id, instance.clone());
        instance
    }
pub fn destroy(&self) {
AUDIO_CLIENT_INSTANCES.lock().unwrap().remove(&self.client_id);
self.abort_audio_processing();
}
pub fn client_id(&self) -> AudioClientId {
self.client_id
}
pub fn enqueue_audio_packet(&self, packet: Box<AudioPacket>) -> Result<(), EnqueueError> {
self.packet_queue.lock().unwrap().enqueue_packet(packet)?;
Ok(())
}
pub fn set_audio_callback(&self, callback: Option<Box<dyn AudioCallback>>) {
*self.audio_callback.lock().unwrap() = callback;
}
pub fn abort_audio_processing(&self) {
let mut handle = &mut *self.audio_process_abort_handle.lock().unwrap();
if let Some(ref abort_handle) = handle {
abort_handle.abort()
}
*handle = None;
}
pub fn is_audio_processing(&self) -> bool {
self.audio_process_abort_handle.lock().unwrap().is_some()
}
pub fn dispatch_processing_in_this_thread(client: Arc<AudioClient>) {
let client_copy = client.clone();
let (future, abort_handle) = futures::future::abortable(async move {
loop {
let client = client_copy.clone();
let packet_event = poll_fn(|cx| client.packet_queue.lock().unwrap().poll_event(cx)).await;
let client = client_copy.clone();
match packet_event {
AudioPacketQueueEvent::PacketsLost(_reason, _first_packet, count) => {
//debug!("{:?} packets got lost due to {:?} (first packet id: {:?})", count, reason, first_packet);
if let Err(error) = client.decoder.lock().unwrap().decode_lost(count.into()) {
error!("Failed to execute decode lost packet: {:?}", error);
};
}
AudioPacketQueueEvent::AudioPacket(packet) => {
if packet.is_stop() {
if let Some(ref mut callback) = *client.audio_callback.lock().unwrap() {
callback.handle_stop();
}
} else {
let mut callback = client.audio_callback.lock().unwrap();
if callback.is_none() {
break;
}
let mut callback = callback.as_mut().unwrap();
let callback_buffer = callback.callback_buffer();
let decode_result = client.decoder.lock().unwrap().decode(&*packet, callback_buffer);
if let Ok(decoded) = decode_result {
callback.handle_audio(decoded.0, decoded.1);
} else {
warn!("Failed to decode audio packet: {:?}", decode_result.unwrap_err());
}
}
}
}
}
});
*client.audio_process_abort_handle.lock().unwrap() = Some(abort_handle);
spawn_local(async { let _ = future.await; });
}
}
/* Ensure the background processing future never outlives its client. */
impl Drop for AudioClient {
    fn drop(&mut self) {
        debug!("Audio client destroyed");
        self.abort_audio_processing();
    }
}
/* SAFETY(review): thread safety is asserted by hand here. All fields are
   Mutex guarded, but the boxed `AudioCallback` (a JS function in practice)
   is not naturally `Send`; presumably acceptable because the wasm module
   runs single threaded — TODO confirm. */
unsafe impl Sync for AudioClient {}
unsafe impl Send for AudioClient {}

147
web/audio-lib/src/lib.rs Normal file
View File

@ -0,0 +1,147 @@
#![feature(c_variadic)]
extern crate wasm_bindgen;
#[cfg(target_arch = "wasm32")]
extern crate console_error_panic_hook;
mod audio;
mod audio_client;
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::{ spawn_local };
use js_sys;
use wasm_timer;
use std::time::Duration;
use log::*;
use audio::packet_queue::AudioPacketQueue;
use crate::audio::codec::opus;
use crate::audio_client::{AudioClientId, AudioClient, AudioCallback};
use crate::audio::{AudioPacket, Codec, PacketId};
use crate::audio::packet_queue::EnqueueError;
use crate::audio::converter::interleaved2sequenced;
use once_cell::unsync::Lazy;
use std::sync::Mutex;
#[cfg(not(target_arch = "wasm32"))]
extern crate simple_logger;
/// Raw browser bindings imported through wasm-bindgen.
#[wasm_bindgen]
extern {
    // console.log(s)
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);

    // window.alert(s)
    #[wasm_bindgen]
    fn alert(s: &str);
}
/// Initialize logging and the panic hook for the library.
/// If the initialization failed, optional result will contain the error.
#[wasm_bindgen]
pub fn initialize() -> Option<String> {
    #[cfg(target_arch = "wasm32")]
    {
        /* Report a failed logger setup instead of discarding the `Result`
           (the original ignored it — `unused_must_use` — and could never
           return the error its doc comment promises). */
        if let Err(error) = console_log::init_with_level(Level::Trace) {
            return Some(format!("failed to initialize the logger: {:?}", error));
        }
        std::panic::set_hook(Box::new(console_error_panic_hook::hook));
    }

    info!("Initializing audio lib with opus version: {}", opus::version());
    None
}
/// Create a new audio client, start its processing loop on the local
/// executor and return the id under which it can be referenced.
#[wasm_bindgen]
pub fn audio_client_create() -> AudioClientId {
    let instance = AudioClient::new();
    let client_id = instance.client_id;
    AudioClient::dispatch_processing_in_this_thread(instance);
    client_id
}
/// Let the audio client say hi (mutable).
/// If an error occurs or the client isn't known an exception will be thrown.
#[wasm_bindgen]
pub fn audio_client_enqueue_buffer(client_id: AudioClientId, buffer: &[u8], packet_id: u16, codec: u8) -> Result<(), JsValue> {
let client = AudioClient::find_client(client_id).ok_or_else(|| JsValue::from_str("missing audio client"))?;
let result = client.enqueue_audio_packet(Box::new(AudioPacket{
client_id: 0,
codec: Codec::from_u8(codec),
packet_id: PacketId{ packet_id },
payload: buffer.to_vec()
}));
if let Err(error) = result {
return Err(match error {
EnqueueError::PacketAlreadyExists => JsValue::from_str("packet already exists"),
EnqueueError::PacketSequenceMismatch(_) => JsValue::from_str("packet belongs to an invalid sequence"),
EnqueueError::PacketTooOld => JsValue::from_str("packet is too old"),
EnqueueError::EventQueueOverflow => JsValue::from_str("event queue overflow")
});
}
Ok(())
}
/// Bridges decoded audio from the processing loop to a JS function.
struct JsAudioCallback {
    callback: js_sys::Function,
}
/* No locking needed, within the web client no multi threading is needed */
/* NOTE(review): `static mut` + `unsync::Lazy` is only sound while the module
   really stays single threaded; the JS side reads the decoded samples straight
   out of these buffers via the raw pointers passed to the callback. */
static mut AUDIO_SEQUENCED_BUFFER: Lazy<Vec<f32>> = Lazy::new(|| Vec::new());
static mut AUDIO_BUFFER: Lazy<Vec<f32>> = Lazy::new(|| Vec::new());
impl AudioCallback for JsAudioCallback {
    /// Hand out the shared decode target buffer; the decoder writes the
    /// interleaved samples into it.
    fn callback_buffer(&mut self) -> &mut Vec<f32> {
        unsafe { &mut *AUDIO_BUFFER }
    }

    /// Forward decoded audio to the JS callback as (buffer ptr, sample count,
    /// channel count). Multi-channel audio is converted from interleaved to
    /// sequenced layout first.
    fn handle_audio(&mut self, sample_count: usize, channel_count: u8) {
        if channel_count > 1 {
            /* The binding is already `&mut`, so `let mut` was unnecessary
               (silences the `unused_mut` warning). */
            let sequenced_buffer = unsafe { &mut *AUDIO_SEQUENCED_BUFFER };
            sequenced_buffer.resize(sample_count * channel_count as usize, 0f32);

            interleaved2sequenced(
                unsafe { &mut *AUDIO_BUFFER }.as_slice(),
                sequenced_buffer.as_mut_slice(),
                sample_count as u32,
                channel_count as u32
            );

            /* NOTE(review): `sample_count as u16` truncates above 65535
               samples per channel — confirm the decoder can never emit that much. */
            let _ = self.callback.call3(
                &JsValue::undefined(),
                &JsValue::from(sequenced_buffer.as_ptr() as u32),
                &JsValue::from(sample_count as u16),
                &JsValue::from(channel_count)
            );
        } else {
            /* Mono: hand the interleaved buffer over directly. */
            let _ = self.callback.call3(
                &JsValue::undefined(),
                &JsValue::from(unsafe { &mut *AUDIO_BUFFER }.as_ptr() as u32),
                &JsValue::from(sample_count as u16),
                &JsValue::from(channel_count)
            );
        }
    }

    /// Signal end-of-stream: an undefined buffer with zero samples/channels.
    fn handle_stop(&mut self) {
        let _ = self.callback.call3(
            &JsValue::undefined(),
            &JsValue::undefined(),
            &JsValue::from(0),
            &JsValue::from(0)
        );
    }
}
/// Register the JS function which receives this client's decoded audio.
/// If the client isn't known an exception will be thrown.
#[wasm_bindgen]
pub fn audio_client_buffer_callback(client_id: AudioClientId, callback: js_sys::Function) -> Result<(), JsValue> {
    let client = AudioClient::find_client(client_id)
        .ok_or_else(|| JsValue::from_str("missing audio client"))?;

    let js_callback = JsAudioCallback{ callback };
    client.set_audio_callback(Some(Box::new(js_callback)));
    Ok(())
}
/// Tear down the audio client with the given id.
/// If the client isn't known an exception will be thrown.
#[wasm_bindgen]
pub fn audio_client_destroy(client_id: AudioClientId) -> Result<(), JsValue> {
    match AudioClient::find_client(client_id) {
        Some(client) => {
            client.destroy();
            debug!("Destroying client");
            Ok(())
        },
        None => Err(JsValue::from_str("missing audio client"))
    }
}

View File

@ -1,5 +1,7 @@
import * as path from "path";
import * as config_base from "./webpack.config";
// @ts-ignore
import * as WasmPackPlugin from "@wasm-tool/wasm-pack-plugin";
export = () => config_base.config("web").then(config => {
Object.assign(config.entry, {
@ -16,5 +18,10 @@ export = () => config_base.config("web").then(config => {
config.node = config.node || {};
config.node["fs"] = "empty";
config.plugins.push(new (WasmPackPlugin as any)({
crateDirectory: path.resolve(__dirname, "web", "audio-lib"),
outName: "index",
//forceMode: "profiling",
outDir: "pkg" }));
return Promise.resolve(config);
});

View File

@ -214,15 +214,6 @@ export const config = async (target: "web" | "client"): Promise<Configuration> =
"./webpack/WatLoader.js"
]
},
{
test: /\.wasm$/,
type: 'javascript/auto',
loader: 'file-loader',
options: {
/* the public path will already be set by emscripten base path */
publicPath: './'
}
},
{
test: /\.svg$/,
loader: 'svg-inline-loader'
@ -234,10 +225,10 @@ export const config = async (target: "web" | "client"): Promise<Configuration> =
esModule: false
}
}
],
},
]
} as any,
resolve: {
extensions: ['.tsx', '.ts', '.js', ".scss", ".css"],
extensions: ['.tsx', '.ts', '.js', ".scss", ".css", ".wasm"],
alias: {
"vendor/xbbcode": path.resolve(__dirname, "vendor/xbbcode/src")
},
@ -249,7 +240,7 @@ export const config = async (target: "web" | "client"): Promise<Configuration> =
filename: isDevelopment ? "[name].[contenthash].js" : "[contenthash].js",
chunkFilename: isDevelopment ? "[name].[contenthash].js" : "[contenthash].js",
path: path.resolve(__dirname, 'dist'),
publicPath: "js/"
publicPath: "/js/"
},
performance: {
hints: false

View File

@ -27,11 +27,13 @@ class ManifestGenerator {
if(!chunk.files.length)
continue;
/*
if(chunk.files.length !== 1) {
console.error("Expected only one file per chunk but got " + chunk.files.length);
chunk.files.forEach(e => console.log(" - %s", e));
throw "expected only one file per chunk";
}
*/
for(const file of chunk.files) {
const extension = path.extname(file);
@ -45,6 +47,8 @@ class ManifestGenerator {
hash: chunk.hash,
file: file
});
} else if(extension === ".wasm") {
/* do nothing */
} else {
throw "Unknown chunk file with extension " + extension;
}