Sloppily allowing the user to control the max bandwidth used

WolverinDEV 2021-01-04 21:28:47 +01:00
parent b162668090
commit 2a120987cf
11 changed files with 290 additions and 89 deletions

View file

@@ -77,7 +77,7 @@ export type LocalVideoBroadcastState = {
     state: "broadcasting"
 }
 
-export interface BroadcastConstraints {
+export interface VideoBroadcastConfig {
     /**
      * Ideal and max video width
      */
@@ -94,9 +94,16 @@ export interface BroadcastConstraints {
     dynamicQuality: boolean,
 
     /**
-     * Max bandwidth which should be used (in bits/second)
+     * Max bandwidth which should be used (in bits/second).
+     * `0` indicates no bandwidth limit.
      */
-    maxBandwidth: number,
+    maxBandwidth: number | 0,
+
+    /**
+     * Interval of enforcing keyframes.
+     * Zero means that no keyframes will be enforced.
+     */
+    keyframeInterval: number | 0,
 
     /**
      * Maximal frame rate for the video.
@@ -124,16 +131,14 @@ export interface LocalVideoBroadcast {
      * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
      * @param constraints
      */
-    startBroadcasting(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
+    startBroadcasting(source: VideoSource, constraints: VideoBroadcastConfig) : Promise<void>;
 
     /**
      * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
      * @param constraints
      */
-    changeSource(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
+    changeSource(source: VideoSource, constraints: VideoBroadcastConfig) : Promise<void>;
 
-    getConstraints() : BroadcastConstraints | undefined;
-    applyConstraints(constraints: BroadcastConstraints) : Promise<void>;
+    getConstraints() : VideoBroadcastConfig | undefined;
 
     stopBroadcasting();
 }
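
For orientation, a caller now hands the bandwidth and keyframe limits to the broadcast together with the other constraints. This is only a sketch: the partial-literal cast mirrors the commit's own generateAndApplyDefaultConfig(), while the connection and videoSource variables (and the chosen values) are assumptions, not part of the commit.

    // Sketch only: build a partial config (resolution/framerate fields are filled in elsewhere)
    // and start a camera broadcast with it.
    const config = {
        dynamicQuality: true,
        dynamicFrameRate: true,
        maxBandwidth: 1_600_000,   /* bits/second; 0 would mean no limit */
        keyframeInterval: 0        /* 0 means no enforced keyframes */
    } as VideoBroadcastConfig;

    const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast("camera");
    await broadcast.startBroadcasting(videoSource, config);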

View file

@@ -11,6 +11,7 @@ import {SdpCompressor, SdpProcessor} from "./SdpUtils";
 import {ErrorCode} from "tc-shared/connection/ErrorCode";
 import {WhisperTarget} from "tc-shared/voice/VoiceWhisper";
 import {globalAudioContext} from "tc-backend/audio/player";
+import {VideoBroadcastConfig, VideoBroadcastType} from "tc-shared/connection/VideoConnection";
 
 const kSdpCompressionMode = 1;
@@ -665,37 +666,90 @@ export class RTCConnection {
         return oldTrack;
     }
 
-    /**
-     * @param type
-     * @throws a string on error
-     */
-    public async startTrackBroadcast(type: RTCBroadcastableTrackType) : Promise<void> {
-        if(typeof this.currentTransceiver[type] !== "object") {
-            throw tr("missing transceiver");
-        }
-
-        switch (type) {
-            case "audio":
-                if(!this.audioSupport) {
-                    throw tr("audio support isn't enabled");
-                }
-                break;
-
-            case "video":
-            case "video-screen":
-                break;
-
-            default:
-                throw tr("invalid broadcast type");
-        }
-
-        try {
-            await this.connection.send_command("rtcbroadcast", {
-                type: broadcastableTrackTypeToNumber(type),
-                ssrc: this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver[type].mid)
-            });
-        } catch (error) {
-            logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), type, error);
-            throw tr("failed to signal broadcast start");
-        }
-    }
+    public async startVideoBroadcast(type: VideoBroadcastType, config: VideoBroadcastConfig) {
+        let track: RTCBroadcastableTrackType;
+        let broadcastType: number;
+        switch (type) {
+            case "camera":
+                broadcastType = 0;
+                track = "video";
+                break;
+
+            case "screen":
+                broadcastType = 1;
+                track = "video-screen";
+                break;
+
+            default:
+                throw tr("invalid video broadcast type");
+        }
+
+        let payload = {};
+        payload["broadcast_keyframe_interval"] = config.keyframeInterval;
+        payload["broadcast_bitrate_max"] = config.maxBandwidth;
+        payload["ssrc"] = this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver[track].mid);
+        payload["type"] = broadcastType;
+
+        try {
+            await this.connection.send_command("broadcastvideo", payload);
+        } catch (error) {
+            if(error instanceof CommandResult) {
+                if(error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) {
+                    throw tr("failed on permission") + " " + this.connection.client.permissions.getFailedPermission(error);
+                }
+
+                error = error.formattedMessage();
+            }
+
+            logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), type, error);
+            throw tr("failed to signal broadcast start");
+        }
+    }
+
+    public async changeVideoBroadcastConfig(type: VideoBroadcastType, config: VideoBroadcastConfig) {
+        let track: RTCBroadcastableTrackType;
+        let broadcastType: number;
+        switch (type) {
+            case "camera":
+                broadcastType = 0;
+                track = "video";
+                break;
+
+            case "screen":
+                broadcastType = 1;
+                track = "video-screen";
+                break;
+
+            default:
+                throw tr("invalid video broadcast type");
+        }
+
+        let payload = {};
+        payload["broadcast_keyframe_interval"] = config.keyframeInterval;
+        payload["broadcast_bitrate_max"] = config.maxBandwidth;
+        payload["bt"] = broadcastType;
+
+        try {
+            await this.connection.send_command("broadcastvideoconfigure", payload);
+        } catch (error) {
+            if(error instanceof CommandResult) {
+                if(error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) {
+                    throw tr("failed on permission") + " " + this.connection.client.permissions.getFailedPermission(error);
+                }
+
+                error = error.formattedMessage();
+            }
+
+            logError(LogCategory.WEBRTC, tr("failed to update %s broadcast: %o"), type, error);
+            throw tr("failed to update broadcast config");
+        }
+    }
+
+    public async startAudioBroadcast() {
+        try {
+            await this.connection.send_command("broadcastaudio", {
+                ssrc: this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver["audio"].mid)
+            });
+        } catch (error) {
+            logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), "audio", error);
+            throw tr("failed to signal broadcast start");
+        }
+    }
@@ -727,10 +781,30 @@ export class RTCConnection {
     }
 
     public stopTrackBroadcast(type: RTCBroadcastableTrackType) {
-        this.connection.send_command("rtcbroadcast", {
-            type: broadcastableTrackTypeToNumber(type),
-            ssrc: 0
-        }).catch(error => {
+        let promise: Promise<any>;
+        switch (type) {
+            case "audio":
+                promise = this.connection.send_command("broadcastaudio", {
+                    ssrc: 0
+                });
+                break;
+
+            case "video-screen":
+                promise = this.connection.send_command("broadcastvideo", {
+                    type: 1,
+                    ssrc: 0
+                });
+                break;
+
+            case "video":
+                promise = this.connection.send_command("broadcastvideo", {
+                    type: 0,
+                    ssrc: 0
+                });
+                break;
+        }
+
+        promise.catch(error => {
             logWarn(LogCategory.WEBRTC, tr("Failed to signal track broadcast stop: %o"), error);
         });
     }
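
Net effect of this file: the single rtcbroadcast command is split into broadcastaudio, broadcastvideo and broadcastvideoconfigure, with the new limits carried as broadcast_bitrate_max / broadcast_keyframe_interval. A hedged usage sketch for reconfiguring a running broadcast; rtcConnection and currentConfig stand in for the caller's existing handles and are not part of the commit.

    // Sketch: cap a running camera broadcast at roughly 800 kbit/s while keeping
    // the rest of its configuration. Internally this issues "broadcastvideoconfigure".
    await rtcConnection.changeVideoBroadcastConfig("camera", {
        ...currentConfig,       /* the broadcast's current VideoBroadcastConfig */
        maxBandwidth: 800_000   /* bits/second; 0 disables the limit */
    });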

View file

@@ -1,5 +1,5 @@
 import {
-    BroadcastConstraints,
+    VideoBroadcastConfig,
     LocalVideoBroadcast,
     LocalVideoBroadcastEvents,
     LocalVideoBroadcastState,
@@ -28,7 +28,8 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
     private state: LocalVideoBroadcastState;
     private currentSource: VideoSource;
-    private currentConstrints: BroadcastConstraints;
+    private currentConfig: VideoBroadcastConfig;
+    private signaledConfig: VideoBroadcastConfig | undefined;
 
     private broadcastStartId: number;
     private localStartPromise: Promise<void>;
@@ -72,7 +73,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         return Promise.resolve(undefined);
     }
 
-    async changeSource(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
+    async changeSource(source: VideoSource, constraints: VideoBroadcastConfig): Promise<void> {
         let sourceRef = source.ref();
         try {
             if(this.currentSource !== source) {
@@ -91,7 +92,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
                 }
 
                 /* Apply the constraints to the current source */
-                await this.doApplyConstraints(constraints, source);
+                await this.doApplyLocalConstraints(constraints, source);
 
                 const startId = ++this.broadcastStartId;
                 let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
@@ -108,7 +109,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
                     }
                     this.setCurrentSource(sourceRef);
-                } else if(!_.isEqual(this.currentConstrints, constraints)) {
+                } else if(!_.isEqual(this.currentConfig, constraints)) {
                     console.error("Constraints changed");
                     await this.applyConstraints(constraints);
                 }
@@ -120,12 +121,12 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
     private setCurrentSource(source: VideoSource | undefined) {
         if(this.currentSource) {
             this.currentSource.deref();
-            this.currentConstrints = undefined;
+            this.currentConfig = undefined;
         }
         this.currentSource = source?.ref();
     }
 
-    async startBroadcasting(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
+    async startBroadcasting(source: VideoSource, constraints: VideoBroadcastConfig): Promise<void> {
         const sourceRef = source.ref();
         while(this.localStartPromise) {
             await this.localStartPromise;
@@ -141,7 +142,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         }
     }
 
-    private async doStartBroadcast(source: VideoSource, constraints: BroadcastConstraints) {
+    private async doStartBroadcast(source: VideoSource, constraints: VideoBroadcastConfig) {
         const videoTracks = source.getStream().getVideoTracks();
         if(videoTracks.length === 0) {
             throw tr("missing video stream track");
@@ -157,7 +158,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         }
 
         try {
-            await this.applyConstraints(constraints);
+            await this.doApplyLocalConstraints(constraints, this.currentSource);
         } catch (error) {
             if(this.broadcastStartId !== startId) {
                 /* broadcast start has been canceled */
@@ -194,7 +195,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         }
 
         try {
-            await this.handle.getRTCConnection().startTrackBroadcast(rtcBroadcastType);
+            await this.handle.getRTCConnection().startVideoBroadcast(this.type, this.currentConfig);
         } catch (error) {
             if(this.broadcastStartId !== startId) {
                 /* broadcast start has been canceled */
@@ -210,14 +211,27 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
             return;
         }
 
+        this.signaledConfig = Object.assign({}, this.currentConfig);
         this.setState({ state: "broadcasting" });
     }
 
-    async applyConstraints(constraints: BroadcastConstraints): Promise<void> {
-        await this.doApplyConstraints(constraints, this.currentSource);
+    async applyConstraints(constraints: VideoBroadcastConfig): Promise<void> {
+        await this.doApplyLocalConstraints(constraints, this.currentSource);
+
+        if(this.signaledConfig?.keyframeInterval !== constraints.keyframeInterval ||
+            this.signaledConfig?.maxBandwidth !== constraints.maxBandwidth
+        ) {
+            try {
+                await this.handle.getRTCConnection().changeVideoBroadcastConfig(this.type, constraints);
+                this.signaledConfig = Object.assign({}, constraints);
+            } catch (error) {
+                /* Really rethrow it? */
+                throw error;
+            }
+        }
     }
 
-    private async doApplyConstraints(constraints: BroadcastConstraints, source: VideoSource): Promise<void> {
+    private async doApplyLocalConstraints(constraints: VideoBroadcastConfig, source: VideoSource): Promise<void> {
         const capabilities = source.getCapabilities();
         const videoConstraints: MediaTrackConstraints = {};
@@ -249,9 +263,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         }
 
         await source.getStream().getVideoTracks()[0]?.applyConstraints(constraints);
-        this.currentConstrints = constraints;
-
-        /* TODO: Bandwidth update? */
+        this.currentConfig = constraints;
     }
 
     stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) {
@@ -296,11 +308,10 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
             }
 
             this.setState({ state: "initializing" });
-            let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
             const startId = ++this.broadcastStartId;
             try {
-                await this.handle.getRTCConnection().startTrackBroadcast(rtcBroadcastType);
+                await this.handle.getRTCConnection().startVideoBroadcast(this.type, this.currentConfig);
             } catch (error) {
                 if(this.broadcastStartId !== startId) {
                     /* broadcast start has been canceled */
@@ -313,8 +324,8 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
         })();
     }
 
-    getConstraints(): BroadcastConstraints | undefined {
-        return this.currentConstrints;
+    getConstraints(): VideoBroadcastConfig | undefined {
+        return this.currentConfig;
     }
 }

View file

@@ -193,14 +193,14 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
         }
 
         spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? { mode: "select-quick", defaultDevice: event.defaultDevice } : { mode: "select-default", defaultDevice: event.defaultDevice })
-            .then(async ({ source, constraints }) => {
+            .then(async ({ source, config }) => {
                 if(!source) { return; }
 
                 try {
                     const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType);
                     if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") {
                         console.error("Change source");
-                        broadcast.changeSource(source, constraints).catch(error => {
+                        broadcast.changeSource(source, config).catch(error => {
                             logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error);
                             if(typeof error !== "string") {
                                 error = tr("lookup the console for detail");
@@ -214,7 +214,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
                         });
                     } else {
                         console.error("Start broadcast");
-                        broadcast.startBroadcasting(source, constraints).catch(error => {
+                        broadcast.startBroadcasting(source, config).catch(error => {
                             logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error);
                             if(typeof error !== "string") {
                                 error = tr("lookup the console for detail");
@@ -252,7 +252,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
         }
 
         spawnVideoSourceSelectModal(event.broadcastType, { mode: "edit", source: broadcast.getSource(), broadcastConstraints: Object.assign({}, broadcast.getConstraints()) })
-            .then(async ({ source, constraints }) => {
+            .then(async ({ source, config }) => {
                 if (!source) {
                     return;
                 }
@@ -262,7 +262,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
                     return;
                 }
 
-                await broadcast.changeSource(source, constraints);
+                await broadcast.changeSource(source, config);
             }).catch(error => {
                 logWarn(LogCategory.VIDEO, tr("Failed to edit video broadcast: %o"), error);
                 createErrorModal(tr("Broadcast update failed"), tr("We failed to update the current video broadcast settings.\nThe old settings will be used.")).open();

View file

@@ -555,6 +555,20 @@ export class Settings extends StaticSettings {
         valueType: "number",
     };
 
+    static readonly KEY_VIDEO_DYNAMIC_QUALITY: ValuedSettingsKey<boolean> = {
+        key: 'video_dynamic_quality',
+        defaultValue: true,
+        description: "Dynamically decrease video quality in order to archive a higher framerate.",
+        valueType: "boolean",
+    };
+
+    static readonly KEY_VIDEO_DYNAMIC_FRAME_RATE: ValuedSettingsKey<boolean> = {
+        key: 'video_dynamic_frame_rate',
+        defaultValue: true,
+        description: "Dynamically decrease video framerate to allow higher video resolutions.",
+        valueType: "boolean",
+    };
+
     static readonly FN_LOG_ENABLED: (category: string) => SettingsKey<boolean> = category => {
         return {
             key: "log." + category.toLowerCase() + ".enabled",

View file

@@ -204,13 +204,11 @@ export class ServerEntry extends ChannelTreeEntry<ServerEvents> {
         createServerModal(this, properties => {
             log.info(LogCategory.SERVER, tr("Changing server properties %o"), properties);
             console.log(tr("Changed properties: %o"), properties);
-            if (properties) {
-                if(Object.keys(properties)) {
+            if (Object.keys(properties || {}).length > 0) {
                 return this.channelTree.client.serverConnection.send_command("serveredit", properties).then(() => {
                     this.channelTree.client.sound.play(Sound.SERVER_EDITED_SELF);
                 });
             }
-            }
             return Promise.resolve();
         });
     }

View file

@@ -4,7 +4,7 @@ import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definitions";
 import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer";
 import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource";
 import {LogCategory, logError, logWarn} from "tc-shared/log";
-import {BroadcastConstraints, VideoBroadcastType} from "tc-shared/connection/VideoConnection";
+import {VideoBroadcastConfig, VideoBroadcastType} from "tc-shared/connection/VideoConnection";
 import {Settings, settings} from "tc-shared/settings";
 import {tr} from "tc-shared/i18n/localize";
@@ -19,14 +19,14 @@ export type VideoSourceModalAction = {
 } | {
     mode: "edit",
     source: VideoSource,
-    broadcastConstraints: BroadcastConstraints
+    broadcastConstraints: VideoBroadcastConfig
 };
 
 /**
  * @param type The video type which should be prompted
  * @param mode
  */
-export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, constraints: BroadcastConstraints | undefined }> {
+export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, config: VideoBroadcastConfig | undefined }> {
     const controller = new VideoSourceController(type);
 
     let defaultSelectDevice: string | true;
@@ -41,7 +41,7 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode
                 controller.destroy();
                 return {
                     source: resultSource,
-                    constraints: resultConstraints
+                    config: resultConstraints
                 };
             } else {
                 /* Select failed. We'll open the modal and show the error. */
@@ -91,11 +91,11 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode
     controller.destroy();
     return {
         source: resultSource,
-        constraints: resultConstraints
+        config: resultConstraints
     };
 }
 
-function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: BroadcastConstraints) {
+function updateBroadcastConfigFromSource(source: VideoSource, constraints: VideoBroadcastConfig) {
     const videoTrack = source.getStream().getVideoTracks()[0];
     const trackSettings = videoTrack.getSettings();
@@ -104,7 +104,7 @@ function updateBroadcastConstraintsFromSource(source: VideoSource, constraints:
     constraints.maxFrameRate = trackSettings.frameRate;
 }
 
-async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise<BroadcastConstraints> {
+async function generateAndApplyDefaultConfig(source: VideoSource) : Promise<VideoBroadcastConfig> {
     const videoTrack = source.getStream().getVideoTracks()[0];
 
     let maxHeight = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_HEIGHT);
@@ -116,7 +116,12 @@ async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise
     maxHeight = Math.min(maxHeight, capabilities.maxHeight);
     maxWidth = Math.min(maxWidth, capabilities.maxWidth);
 
-    const broadcastConstraints: BroadcastConstraints = {} as any;
+    /* FIXME: Get these values somewhere else! */
+    const broadcastConstraints: VideoBroadcastConfig = {
+        maxBandwidth: 1_600_000,
+        keyframeInterval: 0
+    } as VideoBroadcastConfig;
 
     {
         let ratio = 1;
@@ -137,23 +142,22 @@ async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise
         }
     }
 
-    broadcastConstraints.dynamicQuality = true;
-    broadcastConstraints.dynamicFrameRate = true;
-    broadcastConstraints.maxBandwidth = 10_000_000;
+    broadcastConstraints.dynamicQuality = settings.static_global(Settings.KEY_VIDEO_DYNAMIC_QUALITY);
+    broadcastConstraints.dynamicFrameRate = settings.static_global(Settings.KEY_VIDEO_DYNAMIC_FRAME_RATE);
 
     try {
-        await applyBroadcastConstraints(source, broadcastConstraints);
+        await applyBroadcastConfig(source, broadcastConstraints);
     } catch (error) {
-        logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast constraints: %o"), error);
+        logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast config: %o"), error);
    }
 
-    updateBroadcastConstraintsFromSource(source, broadcastConstraints);
+    updateBroadcastConfigFromSource(source, broadcastConstraints);
     return broadcastConstraints;
 }
 
 /* May throws an overconstraint error */
-async function applyBroadcastConstraints(source: VideoSource, constraints: BroadcastConstraints) {
+async function applyBroadcastConfig(source: VideoSource, constraints: VideoBroadcastConfig) {
     const videoTrack = source.getStream().getVideoTracks()[0];
     if(!videoTrack) { return; }
@@ -183,7 +187,7 @@ class VideoSourceController {
     private readonly type: VideoBroadcastType;
 
     private currentSource: VideoSource | string;
-    private currentConstraints: BroadcastConstraints;
+    private currentConstraints: VideoBroadcastConfig;
 
     /* preselected current source id */
     private currentSourceId: string;
@@ -204,6 +208,8 @@ class VideoSourceController {
         this.events.on("query_start_button", () => this.notifyStartButton());
         this.events.on("query_setting_dimension", () => this.notifySettingDimension());
         this.events.on("query_setting_framerate", () => this.notifySettingFramerate());
+        this.events.on("query_setting_bitrate_max", () => this.notifySettingBitrate());
+        this.events.on("query_setting_keyframe_sender", () => this.notifySettingKeyframeInterval());
 
         this.events.on("action_request_permissions", () => {
             getVideoDriver().requestPermissions().then(result => {
@@ -289,6 +295,14 @@ class VideoSourceController {
         this.events.on("action_setting_framerate", event => {
             this.currentConstraints.maxFrameRate = event.frameRate;
         });
+
+        this.events.on("action_setting_bitrate_max", event => {
+            this.currentConstraints.maxBandwidth = event.bitrate;
+        });
+
+        this.events.on("action_setting_keyframe_sender", event => {
+            this.currentConstraints.keyframeInterval = event.interval;
+        });
     }
 
     destroy() {
@@ -310,7 +324,7 @@ class VideoSourceController {
         if(this.currentConstraints) {
             try {
                 /* TODO: Automatically scale down resolution if new one isn't capable of supplying our current resolution */
-                await applyBroadcastConstraints(source, this.currentConstraints);
+                await applyBroadcastConfig(source, this.currentConstraints);
             } catch (error) {
                 logWarn(LogCategory.VIDEO, tr("Failed to apply broadcast constraints to new source: %o"), error);
                 this.currentConstraints = undefined;
@@ -318,7 +332,7 @@ class VideoSourceController {
         }
 
         if(!this.currentConstraints) {
-            this.currentConstraints = await generateAndApplyDefaultConstraints(source);
+            this.currentConstraints = await generateAndApplyDefaultConfig(source);
         }
     }
@@ -328,9 +342,11 @@ class VideoSourceController {
         this.notifyCurrentSource();
         this.notifySettingDimension();
         this.notifySettingFramerate();
+        this.notifySettingBitrate();
+        this.notifySettingKeyframeInterval();
     }
 
-    async useSettings(source: VideoSource, constraints: BroadcastConstraints) {
+    async useSettings(source: VideoSource, constraints: VideoBroadcastConfig) {
         if(typeof this.currentSource === "object") {
            this.currentSource.deref();
         }
@@ -342,6 +358,8 @@ class VideoSourceController {
         this.notifyCurrentSource();
         this.notifySettingDimension();
         this.notifySettingFramerate();
+        this.notifySettingBitrate();
+        this.notifySettingKeyframeInterval();
     }
 
     async selectSource(sourceId: string) : Promise<boolean> {
@@ -387,7 +405,7 @@ class VideoSourceController {
         return typeof this.currentSource === "object" ? this.currentSource : undefined;
     }
 
-    getBroadcastConstraints() : BroadcastConstraints {
+    getBroadcastConstraints() : VideoBroadcastConfig {
         return this.currentConstraints;
     }
@@ -528,4 +546,23 @@ class VideoSourceController {
             this.events.fire_react("notify_settings_framerate", { frameRate: undefined });
         }
     };
+
+    private notifySettingBitrate() {
+        if(this.currentConstraints) {
+            this.events.fire_react("notify_setting_bitrate_max", {
+                bitrate: {
+                    allowedBitrate: 0,
+                    bitrate: this.currentConstraints.maxBandwidth
+                }
+            });
+        } else {
+            this.events.fire_react("notify_setting_bitrate_max", undefined);
+        }
+    }
+
+    private notifySettingKeyframeInterval() {
+        this.events.fire_react("notify_settings_keyframe_sender", {
+            interval: this.currentConstraints?.keyframeInterval || 0
+        });
+    }
 }

View file

@@ -51,6 +51,11 @@ export type SettingFrameRate = {
     current: number
 };
 
+export type SettingBitrate = {
+    allowedBitrate: number | -1
+    bitrate: number | 0,
+};
+
 export interface ModalVideoSourceEvents {
     action_cancel: {},
     action_start: {},
@@ -58,6 +63,8 @@ export interface ModalVideoSourceEvents {
     action_select_source: { id: string | undefined },
     action_setting_dimension: { width: number, height: number },
     action_setting_framerate: { frameRate: number },
+    action_setting_bitrate_max: { bitrate: number | 0 },
+    action_setting_keyframe_sender: { interval: number | 0 },
     action_toggle_screen_capture_device_select: { shown: boolean },
     action_preselect_screen_capture_device: { deviceId: string },
@@ -67,6 +74,8 @@ export interface ModalVideoSourceEvents {
     query_start_button: {},
     query_setting_dimension: {},
     query_setting_framerate: {},
+    query_setting_bitrate_max: {},
+    query_setting_keyframe_sender: {},
     query_screen_capture_devices: {}
 
     notify_source: { state: VideoSourceState }
@@ -91,7 +100,13 @@ export interface ModalVideoSourceEvents {
     },
     notify_screen_capture_devices: {
         devices: ScreenCaptureDeviceList
-    }
+    },
+    notify_setting_bitrate_max: {
+        bitrate: SettingBitrate | undefined
+    },
+    notify_settings_keyframe_sender: {
+        interval: number | 0
+    },
 
     notify_destroy: {}
 }

View file

@@ -1,6 +1,8 @@
 @import "../../../../css/static/mixin";
 @import "../../../../css/static/properties";
 
+//#96903a
+
 .container {
     display: flex;
     flex-direction: column;

View file

@@ -2,12 +2,12 @@ import {Registry} from "tc-shared/events";
 import * as React from "react";
 import {
     DeviceListResult,
-    ModalVideoSourceEvents, ScreenCaptureDeviceList, SettingFrameRate,
+    ModalVideoSourceEvents, ScreenCaptureDeviceList, SettingBitrate, SettingFrameRate,
     VideoPreviewStatus, VideoSourceState
 } from "tc-shared/ui/modal/video-source/Definitions";
 import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller";
 import {Translatable, VariadicTranslatable} from "tc-shared/ui/react-elements/i18n";
-import {Select} from "tc-shared/ui/react-elements/InputField";
+import {BoxedInputField, Select} from "tc-shared/ui/react-elements/InputField";
 import {Button} from "tc-shared/ui/react-elements/Button";
 import {useContext, useEffect, useRef, useState} from "react";
 import {VideoBroadcastType} from "tc-shared/connection/VideoConnection";
@@ -16,6 +16,7 @@ import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
 import {Tab, TabEntry} from "tc-shared/ui/react-elements/Tab";
 import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
 import {ScreenCaptureDevice} from "tc-shared/video/VideoSource";
+import {useTr} from "tc-shared/ui/react-elements/Helper";
 
 const cssStyle = require("./Renderer.scss");
 const ModalEvents = React.createContext<Registry<ModalVideoSourceEvents>>(undefined);
@@ -540,6 +541,49 @@ const SettingFramerate = () => {
     );
 }
 
+const SettingBps = () => {
+    const events = useContext(ModalEvents);
+    const [ bitrate, setBitrate ] = useState<SettingBitrate | undefined>(() => {
+        events.fire("query_setting_bitrate_max");
+        return undefined;
+    });
+    events.reactUse("notify_setting_bitrate_max", event => {
+        setBitrate(event.bitrate);
+        setCurrentValue(undefined);
+    });
+
+    const [ currentValue, setCurrentValue ] = useState<string>(undefined);
+
+    const advanced = useContext(AdvancedSettings);
+    if(!advanced) {
+        return null;
+    }
+
+    return (
+        <div className={cssStyle.setting + " " + cssStyle.dimensions}>
+            <div className={cssStyle.title}>
+                <div><Translatable>Bitrate</Translatable></div>
+                <div>{bitrate ? (bitrate.bitrate / 1000).toFixed() + " kbps" : ""}</div>
+            </div>
+            <div className={cssStyle.body}>
+                <BoxedInputField
+                    value={bitrate ? typeof currentValue === "string" ? currentValue : (bitrate.bitrate / 1000).toFixed(0) : " "}
+                    placeholder={tr("loading")}
+                    onChange={value => {
+                        const numValue = (parseInt(value) * 1000) || 0;
+                        bitrate.bitrate = numValue;
+                        events.fire("action_setting_bitrate_max", { bitrate: numValue });
+                        setCurrentValue(undefined);
+                    }}
+                    onInput={value => setCurrentValue(value)}
+                    type={"number"}
+                />
+            </div>
+        </div>
+    );
+}
+
 const calculateBps = (width: number, height: number, frameRate: number) => {
     /* Based on the tables showed here: http://www.lighterra.com/papers/videoencodingh264/ */
     const estimatedBitsPerPixed = 3.9;
@@ -609,6 +653,7 @@ const Settings = React.memo(() => {
             <div className={cssStyle.sectionBody}>
                 <SettingDimension />
                 <SettingFramerate />
+                <SettingBps />
                 <BpsInfo />
             </div>
         </div>

View file

@@ -73,7 +73,7 @@ export class RtpVoiceConnection extends AbstractVoiceConnection {
         const localClientId = this.rtcConnection.getConnection().client.getClientId();
         for(const data of event.arguments) {
             if(parseInt(data["clid"]) === localClientId) {
-                this.rtcConnection.startTrackBroadcast("audio").catch(error => {
+                this.rtcConnection.startAudioBroadcast().catch(error => {
                     logError(LogCategory.VOICE, tr("Failed to start voice audio broadcasting after channel switch: %o"), error);
                     this.localFailedReason = tr("Failed to start audio broadcasting");
                     this.setConnectionState(VoiceConnectionStatus.Failed);
@@ -422,7 +422,7 @@ export class RtpVoiceConnection extends AbstractVoiceConnection {
     private handleRtcConnectionStateChanged(event: RTCConnectionEvents["notify_state_changed"]) {
         switch (event.newState) {
             case RTPConnectionState.CONNECTED:
-                this.rtcConnection.startTrackBroadcast("audio").then(() => {
+                this.rtcConnection.startAudioBroadcast().then(() => {
                     logTrace(LogCategory.VOICE, tr("Local audio broadcasting has been started successfully"));
                     this.setConnectionState(VoiceConnectionStatus.Connected);
                 }).catch(error => {