Added Safari support
This commit is contained in:
parent 46e5bb4121
commit 90840cf08b
9 changed files with 63 additions and 8 deletions
@@ -1,4 +1,7 @@
 # Changelog:
+* **26.09.18**:
+    - Added Safari support
+
 * **25.09.18**:
     - Added support for token use
     - Added support for away messages

@@ -492,8 +492,6 @@ class AvatarManager {
             else
                 img.attr("src", "data:image/png;base64," + avatar.base64);
             console.debug("Avatar " + client.clientNickName() + " loaded :)");
-            console.log(avatar.base64);
-            console.log(avatar.url);

             img.css("opacity", 0);
             tag.append(img);

@@ -18,6 +18,7 @@ class AVGCalculator {
     }
 }

+declare class webkitOfflineAudioContext extends OfflineAudioContext {}
 abstract class BasicCodec implements Codec {
     protected _audioContext: OfflineAudioContext;
     protected _decodeResampler: AudioResampler;

@@ -32,7 +33,7 @@ abstract class BasicCodec implements Codec {
     constructor(codecSampleRate: number) {
         this.channelCount = 1;
         this.samplesPerUnit = 960;
-        this._audioContext = new OfflineAudioContext(AudioController.globalContext.destination.channelCount, 1024, AudioController.globalContext.sampleRate);
+        this._audioContext = new (webkitOfflineAudioContext || OfflineAudioContext)(AudioController.globalContext.destination.channelCount, 1024, AudioController.globalContext.sampleRate);
         this._codecSampleRate = codecSampleRate;
         this._decodeResampler = new AudioResampler(AudioController.globalContext.sampleRate);
         this._encodeResampler = new AudioResampler(codecSampleRate);

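Editor's note: the two hunks above introduce the vendor-prefix fallback that runs through the rest of this commit, picking whichever of `webkitOfflineAudioContext` / `OfflineAudioContext` exists and declaring the prefixed class so TypeScript accepts the name. A minimal sketch of the same idea, resolving the constructor through the global object instead of a bare identifier (names and example values here are illustrative, not the commit's code):

// Sketch only: pick the prefixed constructor when the standard one is missing.
// Looking the names up on `window` avoids a ReferenceError on engines that
// never define the webkit-prefixed class at all.
const OfflineAudioContextImpl: typeof OfflineAudioContext =
    (window as any).OfflineAudioContext || (window as any).webkitOfflineAudioContext;

// Example values (1 channel, 1024 frames at 44.1 kHz), similar to the hunk above.
const offlineContext = new OfflineAudioContextImpl(1, 1024, 44100);
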
@@ -97,6 +97,8 @@ app.loadedListener.push(() => {
             $(document).one('click', event => AudioController.initializeFromGesture());
         }
     } catch (ex) {
+        if(ex instanceof ReferenceError)
+            ex = ex.message + ":<br>" + ex.stack;
         displayCriticalError("Failed to invoke main function:<br>" + ex, false);
     }
 });

@@ -211,6 +211,24 @@ class ClientEntry {
                 }
             },
             MenuEntry.HR(),
+            /*
+            {
+                type: MenuEntryType.ENTRY,
+                icon: "client-kick_server",
+                name: "Add group to client",
+                invalidPermission: true, //!this.channelTree.client.permissions.neededPermission(PermissionType.I_CLIENT_BAN_MAX_BANTIME).granted(1),
+                callback: () => {
+                    Modals.spawnBanClient(this.properties.client_nickname, (duration, reason) => {
+                        this.channelTree.client.serverConnection.sendCommand("banclient", {
+                            uid: this.properties.client_unique_identifier,
+                            banreason: reason,
+                            time: duration
+                        });
+                    });
+                }
+            },
+            MenuEntry.HR(),
+            */
             {
                 type: MenuEntryType.ENTRY,
                 icon: "client-volume",

@@ -11,6 +11,8 @@ interface Navigator {
     webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
 }

+declare class webkitAudioContext extends AudioContext {}
+
 class AudioController {
     private static getUserMediaFunction() {
         if(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)

@@ -30,7 +32,7 @@ class AudioController {
         if(this._globalContext && this._globalContext.state != "suspended") return this._globalContext;

         if(!this._globalContext)
-            this._globalContext = new AudioContext();
+            this._globalContext = new (webkitAudioContext || AudioContext)();
         if(this._globalContext.state == "suspended") {
             if(!this._globalContextPromise) {
                 (this._globalContextPromise = this._globalContext.resume()).then(() => {

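Editor's note: the realtime context gets the same prefix fallback, and the surrounding code resumes a "suspended" context from a user gesture, which Safari (like Chrome) requires under its autoplay policy. A rough sketch of that lazy-create-and-resume flow, assuming a single click is an acceptable gesture (helper names are illustrative, not the project's API):

// Sketch only: lazily create the (possibly prefixed) AudioContext and resume it
// from the first click, since autoplay policies demand a user gesture.
const AudioContextImpl: typeof AudioContext =
    (window as any).AudioContext || (window as any).webkitAudioContext;

let globalContext: AudioContext | undefined;

function acquireAudioContext(): AudioContext {
    if (!globalContext)
        globalContext = new AudioContextImpl();
    if (globalContext.state === "suspended")
        document.addEventListener("click", () => globalContext!.resume(), { once: true });
    return globalContext;
}
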
@@ -1,5 +1,6 @@
 class AudioResampler {
     targetSampleRate: number;
+    private _use_promise: boolean;

     constructor(targetSampleRate: number = 44100){
         this.targetSampleRate = targetSampleRate;

@@ -7,18 +8,37 @@ class AudioResampler {
     }

     resample(buffer: AudioBuffer) : Promise<AudioBuffer> {
+        if(!buffer) {
+            console.warn("Received empty buffer as input! Returning empty output!");
+            return new Promise<AudioBuffer>(resolve => resolve(undefined));
+        }
         //console.log("Encode from %i to %i", buffer.sampleRate, this.targetSampleRate);
         if(buffer.sampleRate == this.targetSampleRate)
             return new Promise<AudioBuffer>(resolve => resolve(buffer));

         let context;
-        context = new OfflineAudioContext(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);
+        context = new (webkitOfflineAudioContext || OfflineAudioContext)(buffer.numberOfChannels, Math.ceil(buffer.length * this.targetSampleRate / buffer.sampleRate), this.targetSampleRate);

         let source = context.createBufferSource();
         source.buffer = buffer;
-        source.connect(context.destination);
         source.start(0);
+        source.connect(context.destination);

-        return context.startRendering();
+        if(typeof(this._use_promise) === "undefined") {
+            this._use_promise = navigator.browserSpecs.name != 'Safari';
+        }
+
+        if(this._use_promise)
+            return context.startRendering();
+        else {
+            return new Promise<AudioBuffer>((resolve, reject) => {
+                context.oncomplete = event => resolve(event.renderedBuffer);
+                try {
+                    context.startRendering();
+                } catch (ex) {
+                    reject(ex);
+                }
+            })
+        }
     }
 }

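Editor's note: this hunk is the core of the Safari work. Safari's OfflineAudioContext.startRendering() did not return a Promise at the time, so the resampler sniffs the browser once (via navigator.browserSpecs, presumably a project-defined helper) and falls back to the oncomplete event. A feature-detecting variant of the same idea, which avoids the browser sniff entirely (sketch only, not the commit's code):

// Sketch only: always hand back a Promise from offline rendering, whether the
// engine implements the promise-based startRendering() or only fires "oncomplete".
function renderOffline(context: OfflineAudioContext): Promise<AudioBuffer> {
    return new Promise<AudioBuffer>((resolve, reject) => {
        // The completion event fires in every implementation, so hook it first.
        context.oncomplete = event => resolve(event.renderedBuffer);
        try {
            const result: any = context.startRendering();
            if (result && typeof result.then === "function")
                result.then(resolve, reject); // modern engines also resolve here
        } catch (error) {
            reject(error);
        }
    });
}
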
@@ -163,7 +163,7 @@ class VoiceConnection {
     }

     native_encoding_supported() : boolean {
-        if(!AudioContext.prototype.createMediaStreamDestination) return false; //Required, but not available within edge
+        if(!(webkitAudioContext || AudioContext).prototype.createMediaStreamDestination) return false; //Required, but not available within edge
         return true;
     }

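Editor's note: the support check now probes the prefixed constructor as well, since createMediaStreamDestination was the missing piece on Edge. A compact feature check along the same lines, again resolving the constructor through the global object (sketch only, identifiers illustrative):

// Sketch only: report whether MediaStream destinations exist on the resolved
// (possibly prefixed) AudioContext constructor.
function nativeEncodingSupported(): boolean {
    const Ctx: typeof AudioContext =
        (window as any).AudioContext || (window as any).webkitAudioContext;
    return !!Ctx && typeof Ctx.prototype.createMediaStreamDestination === "function";
}
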
@@ -23,6 +23,17 @@ interface MediaStreamConstraints {
     groupId?: string;
 }

+if(!AudioBuffer.prototype.copyToChannel) { //Webkit does not implement this function
+    AudioBuffer.prototype.copyToChannel = function (source: Float32Array, channelNumber: number, startInChannel?: number) {
+        if(!startInChannel) startInChannel = 0;
+
+        let destination = this.getChannelData(channelNumber);
+        for(let index = 0; index < source.length; index++)
+            if(destination.length < index + startInChannel)
+                destination[index + startInChannel] = source[index];
+    }
+}
+
 class VoiceRecorder {
     private static readonly CHANNEL = 0;
     private static readonly CHANNELS = 1;

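Editor's note: the polyfill above fills in AudioBuffer.copyToChannel for WebKit, but its guard ("destination.length < index + startInChannel") appears inverted: it only writes when the target index is already past the end of the channel, where the write is a no-op. A bounds-checked variant would look roughly like this (sketch only, not the committed code):

// Sketch only: copy samples while they still fit into the destination channel.
if (!AudioBuffer.prototype.copyToChannel) {
    AudioBuffer.prototype.copyToChannel = function (this: AudioBuffer,
                                                    source: Float32Array,
                                                    channelNumber: number,
                                                    startInChannel: number = 0) {
        const destination = this.getChannelData(channelNumber);
        for (let index = 0; index < source.length && index + startInChannel < destination.length; index++)
            destination[index + startInChannel] = source[index];
    };
}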