Added Safari support

canary
WolverinDEV 2018-09-26 15:04:56 +02:00
parent 46e5bb4121
commit 90840cf08b
9 changed files with 63 additions and 8 deletions

View File

@ -1,4 +1,7 @@
# Changelog:
* **26.09.18**:
- Added Safari support
* **25.09.18**:
- Added support for token use
- Added support for away messages

View File

@ -492,8 +492,6 @@ class AvatarManager {
else
img.attr("src", "data:image/png;base64," + avatar.base64);
console.debug("Avatar " + client.clientNickName() + " loaded :)");
console.log(avatar.base64);
console.log(avatar.url);
img.css("opacity", 0);
tag.append(img);

View File

@ -18,6 +18,7 @@ class AVGCalculator {
}
}
declare class webkitOfflineAudioContext extends OfflineAudioContext {}
abstract class BasicCodec implements Codec {
protected _audioContext: OfflineAudioContext;
protected _decodeResampler: AudioResampler;
@ -32,7 +33,7 @@ abstract class BasicCodec implements Codec {
constructor(codecSampleRate: number) {
this.channelCount = 1;
this.samplesPerUnit = 960;
this._audioContext = new OfflineAudioContext(AudioController.globalContext.destination.channelCount, 1024,AudioController.globalContext.sampleRate );
this._audioContext = new (webkitOfflineAudioContext || OfflineAudioContext)(AudioController.globalContext.destination.channelCount, 1024,AudioController.globalContext.sampleRate );
this._codecSampleRate = codecSampleRate;
this._decodeResampler = new AudioResampler(AudioController.globalContext.sampleRate);
this._encodeResampler = new AudioResampler(codecSampleRate);

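The declare class webkitOfflineAudioContext line only satisfies the TypeScript compiler; at runtime the expression webkitOfflineAudioContext || OfflineAudioContext still evaluates the bare identifier and throws a ReferenceError in browsers that never shipped the prefixed alias (Firefox, for example). A minimal sketch of a guarded lookup; the helper name and the constructor arguments are illustrative and not part of the commit:

    // Sketch only: resolve the constructor through the window object so browsers
    // without the prefixed alias fall back cleanly instead of throwing.
    function resolve_offline_audio_context(): typeof OfflineAudioContext {
        return (window as any).webkitOfflineAudioContext || OfflineAudioContext;
    }

    // Illustrative usage (channel count, frame count and sample rate are placeholders):
    const offline_context = new (resolve_offline_audio_context())(1, 1024, 44100);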
View File

@ -97,6 +97,8 @@ app.loadedListener.push(() => {
$(document).one('click', event => AudioController.initializeFromGesture());
}
} catch (ex) {
if(ex instanceof ReferenceError)
ex = ex.message + ":<br>" + ex.stack;
displayCriticalError("Failed to invoke main function:<br>" + ex, false);
}
});

View File

@ -211,6 +211,24 @@ class ClientEntry {
}
},
MenuEntry.HR(),
/*
{
type: MenuEntryType.ENTRY,
icon: "client-kick_server",
name: "Add group to client",
invalidPermission: true, //!this.channelTree.client.permissions.neededPermission(PermissionType.I_CLIENT_BAN_MAX_BANTIME).granted(1),
callback: () => {
Modals.spawnBanClient(this.properties.client_nickname, (duration, reason) => {
this.channelTree.client.serverConnection.sendCommand("banclient", {
uid: this.properties.client_unique_identifier,
banreason: reason,
time: duration
});
});
}
},
MenuEntry.HR(),
*/
{
type: MenuEntryType.ENTRY,
icon: "client-volume",

View File

@ -11,6 +11,8 @@ interface Navigator {
webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
}
declare class webkitAudioContext extends AudioContext {}
class AudioController {
private static getUserMediaFunction() {
if(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
@ -30,7 +32,7 @@ class AudioController {
if(this._globalContext && this._globalContext.state != "suspended") return this._globalContext;
if(!this._globalContext)
this._globalContext = new AudioContext();
this._globalContext = new (webkitAudioContext || AudioContext)();
if(this._globalContext.state == "suspended") {
if(!this._globalContextPromise) {
(this._globalContextPromise = this._globalContext.resume()).then(() => {

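webkitAudioContext gets the same treatment for the live context. Because Safari (and Chrome under its autoplay policy) typically creates an AudioContext in the "suspended" state, the context also has to be resumed from a user gesture, which is what the initializeFromGesture() handler registered in main.ts is for. A rough sketch of that shape, assuming a guarded constructor lookup; the names are illustrative and not the commit's code:

    // Sketch only.
    let shared_context: AudioContext | undefined;

    function global_audio_context(): AudioContext {
        if(!shared_context)
            shared_context = new ((window as any).webkitAudioContext || AudioContext)() as AudioContext;
        return shared_context;
    }

    // Call from a click or keydown handler, mirroring initializeFromGesture():
    function resume_audio_from_gesture() {
        const context = global_audio_context();
        if(context.state === "suspended")
            context.resume().catch(error => console.warn("Failed to resume audio context: %o", error));
    }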
View File

@ -1,5 +1,6 @@
class AudioResampler {
targetSampleRate: number;
private _use_promise: boolean;
constructor(targetSampleRate: number = 44100){
this.targetSampleRate = targetSampleRate;
@ -7,18 +8,37 @@ class AudioResampler {
}
resample(buffer: AudioBuffer) : Promise<AudioBuffer> {
if(!buffer) {
console.warn("Received empty buffer as input! Returning empty output!");
return new Promise<AudioBuffer>(resolve => resolve(undefined));
}
//console.log("Encode from %i to %i", buffer.sampleRate, this.targetSampleRate);
if(buffer.sampleRate == this.targetSampleRate)
return new Promise<AudioBuffer>(resolve => resolve(buffer));
let context;
context = new OfflineAudioContext(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
context = new (webkitOfflineAudioContext || OfflineAudioContext)(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
let source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(0);
source.connect(context.destination);
return context.startRendering();
if(typeof(this._use_promise) === "undefined") {
this._use_promise = navigator.browserSpecs.name != 'Safari';
}
if(this._use_promise)
return context.startRendering();
else {
return new Promise<AudioBuffer>((resolve, reject) => {
context.oncomplete = event => resolve(event.renderedBuffer);
try {
context.startRendering();
} catch (ex) {
reject(ex);
}
})
}
}
}

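Older Safari only implements the event-based form of OfflineAudioContext.startRendering(): it returns nothing and signals completion through oncomplete, which is why the resampler now branches on navigator.browserSpecs.name. A sketch of an alternative that feature-detects the return value instead of sniffing the browser name; the helper is made up for the example:

    // Sketch only: promisify startRendering() for both forms of the API.
    function start_rendering(context: OfflineAudioContext): Promise<AudioBuffer> {
        return new Promise<AudioBuffer>((resolve, reject) => {
            context.oncomplete = event => resolve(event.renderedBuffer);
            try {
                const result = context.startRendering();
                if(result && typeof result.then === "function")
                    result.then(resolve, reject); // promise form; resolving twice is harmless
                // otherwise the oncomplete handler above fires once rendering has finished
            } catch (ex) {
                reject(ex);
            }
        });
    }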
View File

@ -163,7 +163,7 @@ class VoiceConnection {
}
native_encoding_supported() : boolean {
if(!AudioContext.prototype.createMediaStreamDestination) return false; //Required, but not available within edge
if(!(webkitAudioContext || AudioContext).prototype.createMediaStreamDestination) return false; //Required, but not available within edge
return true;
}

View File

@ -23,6 +23,17 @@ interface MediaStreamConstraints {
groupId?: string;
}
if(!AudioBuffer.prototype.copyToChannel) { //Webkit does not implement this function
AudioBuffer.prototype.copyToChannel = function (source: Float32Array, channelNumber: number, startInChannel?: number) {
if(!startInChannel) startInChannel = 0;
let destination = this.getChannelData(channelNumber);
for(let index = 0; index < source.length; index++)
if(index + startInChannel < destination.length) //only copy the samples which still fit into the destination channel
destination[index + startInChannel] = source[index];
}
}
class VoiceRecorder {
private static readonly CHANNEL = 0;
private static readonly CHANNELS = 1;
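
The copyToChannel fallback above only needs to write the samples that fit into the destination channel. A quick, illustrative way to verify it on a browser that lacks the native method; the buffer sizes and sample values are made up for the example:

    // Copy a short ramp into channel 0 at offset 4 and read it back.
    const test_context = new ((window as any).webkitOfflineAudioContext || OfflineAudioContext)(1, 16, 44100);
    const test_buffer: AudioBuffer = test_context.createBuffer(1, 16, 44100);
    test_buffer.copyToChannel(new Float32Array([0.25, 0.5, 0.75]), 0, 4);
    console.assert(Math.abs(test_buffer.getChannelData(0)[4] - 0.25) < 1e-6, "copyToChannel fallback failed");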