diff --git a/shared/js/connection/VideoConnection.ts b/shared/js/connection/VideoConnection.ts index bcda07e1..a3d82423 100644 --- a/shared/js/connection/VideoConnection.ts +++ b/shared/js/connection/VideoConnection.ts @@ -77,7 +77,7 @@ export type LocalVideoBroadcastState = { state: "broadcasting" } -export interface BroadcastConstraints { +export interface VideoBroadcastConfig { /** * Ideal and max video width */ @@ -94,9 +94,16 @@ export interface BroadcastConstraints { dynamicQuality: boolean, /** - * Max bandwidth which should be used (in bits/second) + * Max bandwidth which should be used (in bits/second). + * `0` indicates no bandwidth limit. */ - maxBandwidth: number, + maxBandwidth: number | 0, + + /** + * Interval of enforcing keyframes. + * Zero means that no keyframes will be enforced. + */ + keyframeInterval: number | 0, /** * Maximal frame rate for the video. @@ -124,16 +131,14 @@ export interface LocalVideoBroadcast { * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) * @param constraints */ - startBroadcasting(source: VideoSource, constraints: BroadcastConstraints) : Promise; + startBroadcasting(source: VideoSource, constraints: VideoBroadcastConfig) : Promise; /** * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) * @param constraints */ - changeSource(source: VideoSource, constraints: BroadcastConstraints) : Promise; - - getConstraints() : BroadcastConstraints | undefined; - applyConstraints(constraints: BroadcastConstraints) : Promise; + changeSource(source: VideoSource, constraints: VideoBroadcastConfig) : Promise; + getConstraints() : VideoBroadcastConfig | undefined; stopBroadcasting(); } diff --git a/shared/js/connection/rtc/Connection.ts b/shared/js/connection/rtc/Connection.ts index 92c0cc09..eb524239 100644 --- a/shared/js/connection/rtc/Connection.ts +++ b/shared/js/connection/rtc/Connection.ts @@ -11,6 +11,7 @@ import {SdpCompressor, SdpProcessor} from "./SdpUtils"; import {ErrorCode} from "tc-shared/connection/ErrorCode"; import {WhisperTarget} from "tc-shared/voice/VoiceWhisper"; import {globalAudioContext} from "tc-backend/audio/player"; +import {VideoBroadcastConfig, VideoBroadcastType} from "tc-shared/connection/VideoConnection"; const kSdpCompressionMode = 1; @@ -665,37 +666,90 @@ export class RTCConnection { return oldTrack; } - /** - * @param type - * @throws a string on error - */ - public async startTrackBroadcast(type: RTCBroadcastableTrackType) : Promise { - if(typeof this.currentTransceiver[type] !== "object") { - throw tr("missing transceiver"); - } - + public async startVideoBroadcast(type: VideoBroadcastType, config: VideoBroadcastConfig) { + let track: RTCBroadcastableTrackType; + let broadcastType: number; switch (type) { - case "audio": - if(!this.audioSupport) { - throw tr("audio support isn't enabled"); - } + case "camera": + broadcastType = 0; + track = "video"; break; - case "video": - case "video-screen": + case "screen": + broadcastType = 1; + track = "video-screen"; break; default: - throw tr("invalid broadcast type"); + throw tr("invalid video broadcast type"); } + let payload = {}; + payload["broadcast_keyframe_interval"] = config.keyframeInterval; + payload["broadcast_bitrate_max"] = config.maxBandwidth; + payload["ssrc"] = this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver[track].mid); + payload["type"] = broadcastType; + try { - await 
this.connection.send_command("rtcbroadcast", { - type: broadcastableTrackTypeToNumber(type), - ssrc: this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver[type].mid) + await this.connection.send_command("broadcastvideo", payload); + } catch (error) { + if(error instanceof CommandResult) { + if(error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) { + throw tr("failed on permission") + " " + this.connection.client.permissions.getFailedPermission(error); + } + + error = error.formattedMessage(); + } + logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), type, error); + throw tr("failed to signal broadcast start"); + } + } + + public async changeVideoBroadcastConfig(type: VideoBroadcastType, config: VideoBroadcastConfig) { + let track: RTCBroadcastableTrackType; + let broadcastType: number; + switch (type) { + case "camera": + broadcastType = 0; + track = "video"; + break; + + case "screen": + broadcastType = 1; + track = "video-screen"; + break; + + default: + throw tr("invalid video broadcast type"); + } + + let payload = {}; + payload["broadcast_keyframe_interval"] = config.keyframeInterval; + payload["broadcast_bitrate_max"] = config.maxBandwidth; + payload["bt"] = broadcastType; + + try { + await this.connection.send_command("broadcastvideoconfigure", payload); + } catch (error) { + if(error instanceof CommandResult) { + if(error.id === ErrorCode.SERVER_INSUFFICIENT_PERMISSIONS) { + throw tr("failed on permission") + " " + this.connection.client.permissions.getFailedPermission(error); + } + + error = error.formattedMessage(); + } + logError(LogCategory.WEBRTC, tr("failed to update %s broadcast: %o"), type, error); + throw tr("failed to update broadcast config"); + } + } + + public async startAudioBroadcast() { + try { + await this.connection.send_command("broadcastaudio", { + ssrc: this.sdpProcessor.getLocalSsrcFromFromMediaId(this.currentTransceiver["audio"].mid) }); } catch (error) { - logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), type, error); + logError(LogCategory.WEBRTC, tr("failed to start %s broadcast: %o"), "audio", error); throw tr("failed to signal broadcast start"); } } @@ -727,10 +781,30 @@ export class RTCConnection { } public stopTrackBroadcast(type: RTCBroadcastableTrackType) { - this.connection.send_command("rtcbroadcast", { - type: broadcastableTrackTypeToNumber(type), - ssrc: 0 - }).catch(error => { + let promise: Promise; + switch (type) { + case "audio": + promise = this.connection.send_command("broadcastaudio", { + ssrc: 0 + }); + break; + + case "video-screen": + promise = this.connection.send_command("broadcastvideo", { + type: 1, + ssrc: 0 + }); + break; + + case "video": + promise = this.connection.send_command("broadcastvideo", { + type: 0, + ssrc: 0 + }); + break; + } + + promise.catch(error => { logWarn(LogCategory.WEBRTC, tr("Failed to signal track broadcast stop: %o"), error); }); } diff --git a/shared/js/connection/rtc/video/Connection.ts b/shared/js/connection/rtc/video/Connection.ts index 7ee05c5c..de1b38a1 100644 --- a/shared/js/connection/rtc/video/Connection.ts +++ b/shared/js/connection/rtc/video/Connection.ts @@ -1,5 +1,5 @@ import { - BroadcastConstraints, + VideoBroadcastConfig, LocalVideoBroadcast, LocalVideoBroadcastEvents, LocalVideoBroadcastState, @@ -28,7 +28,8 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { private state: LocalVideoBroadcastState; private currentSource: VideoSource; - private currentConstrints: BroadcastConstraints; + private currentConfig: 
VideoBroadcastConfig; + private signaledConfig: VideoBroadcastConfig | undefined; private broadcastStartId: number; private localStartPromise: Promise; @@ -72,7 +73,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { return Promise.resolve(undefined); } - async changeSource(source: VideoSource, constraints: BroadcastConstraints): Promise { + async changeSource(source: VideoSource, constraints: VideoBroadcastConfig): Promise { let sourceRef = source.ref(); try { if(this.currentSource !== source) { @@ -91,7 +92,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } /* Apply the constraints to the current source */ - await this.doApplyConstraints(constraints, source); + await this.doApplyLocalConstraints(constraints, source); const startId = ++this.broadcastStartId; let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; @@ -108,7 +109,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } this.setCurrentSource(sourceRef); - } else if(!_.isEqual(this.currentConstrints, constraints)) { + } else if(!_.isEqual(this.currentConfig, constraints)) { console.error("Constraints changed"); await this.applyConstraints(constraints); } @@ -120,12 +121,12 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { private setCurrentSource(source: VideoSource | undefined) { if(this.currentSource) { this.currentSource.deref(); - this.currentConstrints = undefined; + this.currentConfig = undefined; } this.currentSource = source?.ref(); } - async startBroadcasting(source: VideoSource, constraints: BroadcastConstraints): Promise { + async startBroadcasting(source: VideoSource, constraints: VideoBroadcastConfig): Promise { const sourceRef = source.ref(); while(this.localStartPromise) { await this.localStartPromise; @@ -141,7 +142,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } } - private async doStartBroadcast(source: VideoSource, constraints: BroadcastConstraints) { + private async doStartBroadcast(source: VideoSource, constraints: VideoBroadcastConfig) { const videoTracks = source.getStream().getVideoTracks(); if(videoTracks.length === 0) { throw tr("missing video stream track"); @@ -157,7 +158,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } try { - await this.applyConstraints(constraints); + await this.doApplyLocalConstraints(constraints, this.currentSource); } catch (error) { if(this.broadcastStartId !== startId) { /* broadcast start has been canceled */ @@ -194,7 +195,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } try { - await this.handle.getRTCConnection().startTrackBroadcast(rtcBroadcastType); + await this.handle.getRTCConnection().startVideoBroadcast(this.type, this.currentConfig); } catch (error) { if(this.broadcastStartId !== startId) { /* broadcast start has been canceled */ @@ -210,14 +211,27 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { return; } + this.signaledConfig = Object.assign({}, this.currentConfig); this.setState({ state: "broadcasting" }); } - async applyConstraints(constraints: BroadcastConstraints): Promise { - await this.doApplyConstraints(constraints, this.currentSource); + async applyConstraints(constraints: VideoBroadcastConfig): Promise { + await this.doApplyLocalConstraints(constraints, this.currentSource); + + if(this.signaledConfig?.keyframeInterval !== constraints.keyframeInterval || + this.signaledConfig?.maxBandwidth !== constraints.maxBandwidth + ) { + try { + await 
this.handle.getRTCConnection().changeVideoBroadcastConfig(this.type, constraints); + this.signaledConfig = Object.assign({}, constraints); + } catch (error) { + /* Really rethrow it? */ + throw error; + } + } } - private async doApplyConstraints(constraints: BroadcastConstraints, source: VideoSource): Promise { + private async doApplyLocalConstraints(constraints: VideoBroadcastConfig, source: VideoSource): Promise { const capabilities = source.getCapabilities(); const videoConstraints: MediaTrackConstraints = {}; @@ -249,9 +263,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } await source.getStream().getVideoTracks()[0]?.applyConstraints(constraints); - this.currentConstrints = constraints; - - /* TODO: Bandwidth update? */ + this.currentConfig = constraints; } stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) { @@ -296,11 +308,10 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } this.setState({ state: "initializing" }); - let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; const startId = ++this.broadcastStartId; try { - await this.handle.getRTCConnection().startTrackBroadcast(rtcBroadcastType); + await this.handle.getRTCConnection().startVideoBroadcast(this.type, this.currentConfig); } catch (error) { if(this.broadcastStartId !== startId) { /* broadcast start has been canceled */ @@ -313,8 +324,8 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { })(); } - getConstraints(): BroadcastConstraints | undefined { - return this.currentConstrints; + getConstraints(): VideoBroadcastConfig | undefined { + return this.currentConfig; } } diff --git a/shared/js/events/ClientGlobalControlHandler.ts b/shared/js/events/ClientGlobalControlHandler.ts index 00ba4ed6..5b4fca50 100644 --- a/shared/js/events/ClientGlobalControlHandler.ts +++ b/shared/js/events/ClientGlobalControlHandler.ts @@ -193,14 +193,14 @@ export function initialize(event_registry: Registry) } spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? 
{ mode: "select-quick", defaultDevice: event.defaultDevice } : { mode: "select-default", defaultDevice: event.defaultDevice }) - .then(async ({ source, constraints }) => { + .then(async ({ source, config }) => { if(!source) { return; } try { const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType); if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") { console.error("Change source"); - broadcast.changeSource(source, constraints).catch(error => { + broadcast.changeSource(source, config).catch(error => { logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error); if(typeof error !== "string") { error = tr("lookup the console for detail"); @@ -214,7 +214,7 @@ export function initialize(event_registry: Registry) }); } else { console.error("Start broadcast"); - broadcast.startBroadcasting(source, constraints).catch(error => { + broadcast.startBroadcasting(source, config).catch(error => { logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error); if(typeof error !== "string") { error = tr("lookup the console for detail"); @@ -252,7 +252,7 @@ export function initialize(event_registry: Registry) } spawnVideoSourceSelectModal(event.broadcastType, { mode: "edit", source: broadcast.getSource(), broadcastConstraints: Object.assign({}, broadcast.getConstraints()) }) - .then(async ({ source, constraints }) => { + .then(async ({ source, config }) => { if (!source) { return; } @@ -262,7 +262,7 @@ export function initialize(event_registry: Registry) return; } - await broadcast.changeSource(source, constraints); + await broadcast.changeSource(source, config); }).catch(error => { logWarn(LogCategory.VIDEO, tr("Failed to edit video broadcast: %o"), error); createErrorModal(tr("Broadcast update failed"), tr("We failed to update the current video broadcast settings.\nThe old settings will be used.")).open(); diff --git a/shared/js/settings.ts b/shared/js/settings.ts index 88ee2d44..88c0cebd 100644 --- a/shared/js/settings.ts +++ b/shared/js/settings.ts @@ -555,6 +555,20 @@ export class Settings extends StaticSettings { valueType: "number", }; + static readonly KEY_VIDEO_DYNAMIC_QUALITY: ValuedSettingsKey = { + key: 'video_dynamic_quality', + defaultValue: true, + description: "Dynamically decrease video quality in order to archive a higher framerate.", + valueType: "boolean", + }; + + static readonly KEY_VIDEO_DYNAMIC_FRAME_RATE: ValuedSettingsKey = { + key: 'video_dynamic_frame_rate', + defaultValue: true, + description: "Dynamically decrease video framerate to allow higher video resolutions.", + valueType: "boolean", + }; + static readonly FN_LOG_ENABLED: (category: string) => SettingsKey = category => { return { key: "log." 
+ category.toLowerCase() + ".enabled", diff --git a/shared/js/tree/Server.ts b/shared/js/tree/Server.ts index 6b606ad7..f028e3bf 100644 --- a/shared/js/tree/Server.ts +++ b/shared/js/tree/Server.ts @@ -204,12 +204,10 @@ export class ServerEntry extends ChannelTreeEntry { createServerModal(this, properties => { log.info(LogCategory.SERVER, tr("Changing server properties %o"), properties); console.log(tr("Changed properties: %o"), properties); - if (properties) { - if(Object.keys(properties)) { - return this.channelTree.client.serverConnection.send_command("serveredit", properties).then(() => { - this.channelTree.client.sound.play(Sound.SERVER_EDITED_SELF); - }); - } + if (Object.keys(properties || {}).length > 0) { + return this.channelTree.client.serverConnection.send_command("serveredit", properties).then(() => { + this.channelTree.client.sound.play(Sound.SERVER_EDITED_SELF); + }); } return Promise.resolve(); }); diff --git a/shared/js/ui/modal/video-source/Controller.tsx b/shared/js/ui/modal/video-source/Controller.tsx index 73cc920a..a91fe365 100644 --- a/shared/js/ui/modal/video-source/Controller.tsx +++ b/shared/js/ui/modal/video-source/Controller.tsx @@ -4,7 +4,7 @@ import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definition import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer"; import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource"; import {LogCategory, logError, logWarn} from "tc-shared/log"; -import {BroadcastConstraints, VideoBroadcastType} from "tc-shared/connection/VideoConnection"; +import {VideoBroadcastConfig, VideoBroadcastType} from "tc-shared/connection/VideoConnection"; import {Settings, settings} from "tc-shared/settings"; import {tr} from "tc-shared/i18n/localize"; @@ -19,14 +19,14 @@ export type VideoSourceModalAction = { } | { mode: "edit", source: VideoSource, - broadcastConstraints: BroadcastConstraints + broadcastConstraints: VideoBroadcastConfig }; /** * @param type The video type which should be prompted * @param mode */ -export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, constraints: BroadcastConstraints | undefined }> { +export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, config: VideoBroadcastConfig | undefined }> { const controller = new VideoSourceController(type); let defaultSelectDevice: string | true; @@ -41,7 +41,7 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode controller.destroy(); return { source: resultSource, - constraints: resultConstraints + config: resultConstraints }; } else { /* Select failed. We'll open the modal and show the error. 
*/ @@ -91,11 +91,11 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode controller.destroy(); return { source: resultSource, - constraints: resultConstraints + config: resultConstraints }; } -function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: BroadcastConstraints) { +function updateBroadcastConfigFromSource(source: VideoSource, constraints: VideoBroadcastConfig) { const videoTrack = source.getStream().getVideoTracks()[0]; const trackSettings = videoTrack.getSettings(); @@ -104,7 +104,7 @@ function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: constraints.maxFrameRate = trackSettings.frameRate; } -async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise { +async function generateAndApplyDefaultConfig(source: VideoSource) : Promise { const videoTrack = source.getStream().getVideoTracks()[0]; let maxHeight = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_HEIGHT); @@ -116,7 +116,12 @@ async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise maxHeight = Math.min(maxHeight, capabilities.maxHeight); maxWidth = Math.min(maxWidth, capabilities.maxWidth); - const broadcastConstraints: BroadcastConstraints = {} as any; + /* FIXME: Get these values somewhere else! */ + const broadcastConstraints: VideoBroadcastConfig = { + maxBandwidth: 1_600_000, + keyframeInterval: 0 + } as VideoBroadcastConfig; + { let ratio = 1; @@ -137,23 +142,22 @@ async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise } } - broadcastConstraints.dynamicQuality = true; - broadcastConstraints.dynamicFrameRate = true; - broadcastConstraints.maxBandwidth = 10_000_000; + broadcastConstraints.dynamicQuality = settings.static_global(Settings.KEY_VIDEO_DYNAMIC_QUALITY); + broadcastConstraints.dynamicFrameRate = settings.static_global(Settings.KEY_VIDEO_DYNAMIC_FRAME_RATE); try { - await applyBroadcastConstraints(source, broadcastConstraints); + await applyBroadcastConfig(source, broadcastConstraints); } catch (error) { - logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast constraints: %o"), error); + logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast config: %o"), error); } - updateBroadcastConstraintsFromSource(source, broadcastConstraints); + updateBroadcastConfigFromSource(source, broadcastConstraints); return broadcastConstraints; } /* May throws an overconstraint error */ -async function applyBroadcastConstraints(source: VideoSource, constraints: BroadcastConstraints) { +async function applyBroadcastConfig(source: VideoSource, constraints: VideoBroadcastConfig) { const videoTrack = source.getStream().getVideoTracks()[0]; if(!videoTrack) { return; } @@ -183,7 +187,7 @@ class VideoSourceController { private readonly type: VideoBroadcastType; private currentSource: VideoSource | string; - private currentConstraints: BroadcastConstraints; + private currentConstraints: VideoBroadcastConfig; /* preselected current source id */ private currentSourceId: string; @@ -204,6 +208,8 @@ class VideoSourceController { this.events.on("query_start_button", () => this.notifyStartButton()); this.events.on("query_setting_dimension", () => this.notifySettingDimension()); this.events.on("query_setting_framerate", () => this.notifySettingFramerate()); + this.events.on("query_setting_bitrate_max", () => this.notifySettingBitrate()); + this.events.on("query_setting_keyframe_sender", () => this.notifySettingKeyframeInterval()); 
this.events.on("action_request_permissions", () => { getVideoDriver().requestPermissions().then(result => { @@ -289,6 +295,14 @@ class VideoSourceController { this.events.on("action_setting_framerate", event => { this.currentConstraints.maxFrameRate = event.frameRate; }); + + this.events.on("action_setting_bitrate_max", event => { + this.currentConstraints.maxBandwidth = event.bitrate; + }); + + this.events.on("action_setting_keyframe_sender", event => { + this.currentConstraints.keyframeInterval = event.interval; + }); } destroy() { @@ -310,7 +324,7 @@ class VideoSourceController { if(this.currentConstraints) { try { /* TODO: Automatically scale down resolution if new one isn't capable of supplying our current resolution */ - await applyBroadcastConstraints(source, this.currentConstraints); + await applyBroadcastConfig(source, this.currentConstraints); } catch (error) { logWarn(LogCategory.VIDEO, tr("Failed to apply broadcast constraints to new source: %o"), error); this.currentConstraints = undefined; @@ -318,7 +332,7 @@ class VideoSourceController { } if(!this.currentConstraints) { - this.currentConstraints = await generateAndApplyDefaultConstraints(source); + this.currentConstraints = await generateAndApplyDefaultConfig(source); } } @@ -328,9 +342,11 @@ class VideoSourceController { this.notifyCurrentSource(); this.notifySettingDimension(); this.notifySettingFramerate(); + this.notifySettingBitrate(); + this.notifySettingKeyframeInterval(); } - async useSettings(source: VideoSource, constraints: BroadcastConstraints) { + async useSettings(source: VideoSource, constraints: VideoBroadcastConfig) { if(typeof this.currentSource === "object") { this.currentSource.deref(); } @@ -342,6 +358,8 @@ class VideoSourceController { this.notifyCurrentSource(); this.notifySettingDimension(); this.notifySettingFramerate(); + this.notifySettingBitrate(); + this.notifySettingKeyframeInterval(); } async selectSource(sourceId: string) : Promise { @@ -387,7 +405,7 @@ class VideoSourceController { return typeof this.currentSource === "object" ? 
this.currentSource : undefined; } - getBroadcastConstraints() : BroadcastConstraints { + getBroadcastConstraints() : VideoBroadcastConfig { return this.currentConstraints; } @@ -528,4 +546,23 @@ class VideoSourceController { this.events.fire_react("notify_settings_framerate", { frameRate: undefined }); } }; + + private notifySettingBitrate() { + if(this.currentConstraints) { + this.events.fire_react("notify_setting_bitrate_max", { + bitrate: { + allowedBitrate: 0, + bitrate: this.currentConstraints.maxBandwidth + } + }); + } else { + this.events.fire_react("notify_setting_bitrate_max", undefined); + } + } + + private notifySettingKeyframeInterval() { + this.events.fire_react("notify_settings_keyframe_sender", { + interval: this.currentConstraints?.keyframeInterval || 0 + }); + } } \ No newline at end of file diff --git a/shared/js/ui/modal/video-source/Definitions.ts b/shared/js/ui/modal/video-source/Definitions.ts index 2b8b7bd1..6c346b5f 100644 --- a/shared/js/ui/modal/video-source/Definitions.ts +++ b/shared/js/ui/modal/video-source/Definitions.ts @@ -51,6 +51,11 @@ export type SettingFrameRate = { current: number }; +export type SettingBitrate = { + allowedBitrate: number | -1 + bitrate: number | 0, +}; + export interface ModalVideoSourceEvents { action_cancel: {}, action_start: {}, @@ -58,6 +63,8 @@ export interface ModalVideoSourceEvents { action_select_source: { id: string | undefined }, action_setting_dimension: { width: number, height: number }, action_setting_framerate: { frameRate: number }, + action_setting_bitrate_max: { bitrate: number | 0 }, + action_setting_keyframe_sender: { interval: number | 0 }, action_toggle_screen_capture_device_select: { shown: boolean }, action_preselect_screen_capture_device: { deviceId: string }, @@ -67,7 +74,9 @@ export interface ModalVideoSourceEvents { query_start_button: {}, query_setting_dimension: {}, query_setting_framerate: {}, - query_screen_capture_devices: { } + query_setting_bitrate_max: {}, + query_setting_keyframe_sender: {}, + query_screen_capture_devices: {} notify_source: { state: VideoSourceState } notify_device_list: { status: DeviceListResult }, @@ -91,7 +100,13 @@ export interface ModalVideoSourceEvents { }, notify_screen_capture_devices: { devices: ScreenCaptureDeviceList - } + }, + notify_setting_bitrate_max: { + bitrate: SettingBitrate | undefined + }, + notify_settings_keyframe_sender: { + interval: number | 0 + }, notify_destroy: {} } diff --git a/shared/js/ui/modal/video-source/Renderer.scss b/shared/js/ui/modal/video-source/Renderer.scss index c323934a..5be589c9 100644 --- a/shared/js/ui/modal/video-source/Renderer.scss +++ b/shared/js/ui/modal/video-source/Renderer.scss @@ -1,6 +1,8 @@ @import "../../../../css/static/mixin"; @import "../../../../css/static/properties"; +//#96903a + .container { display: flex; flex-direction: column; diff --git a/shared/js/ui/modal/video-source/Renderer.tsx b/shared/js/ui/modal/video-source/Renderer.tsx index 518c1d36..c07c40aa 100644 --- a/shared/js/ui/modal/video-source/Renderer.tsx +++ b/shared/js/ui/modal/video-source/Renderer.tsx @@ -2,12 +2,12 @@ import {Registry} from "tc-shared/events"; import * as React from "react"; import { DeviceListResult, - ModalVideoSourceEvents, ScreenCaptureDeviceList, SettingFrameRate, + ModalVideoSourceEvents, ScreenCaptureDeviceList, SettingBitrate, SettingFrameRate, VideoPreviewStatus, VideoSourceState } from "tc-shared/ui/modal/video-source/Definitions"; import {InternalModal} from "tc-shared/ui/react-elements/internal-modal/Controller"; import 
{Translatable, VariadicTranslatable} from "tc-shared/ui/react-elements/i18n";
-import {Select} from "tc-shared/ui/react-elements/InputField";
+import {BoxedInputField, Select} from "tc-shared/ui/react-elements/InputField";
 import {Button} from "tc-shared/ui/react-elements/Button";
 import {useContext, useEffect, useRef, useState} from "react";
 import {VideoBroadcastType} from "tc-shared/connection/VideoConnection";
@@ -16,6 +16,7 @@ import {Checkbox} from "tc-shared/ui/react-elements/Checkbox";
 import {Tab, TabEntry} from "tc-shared/ui/react-elements/Tab";
 import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
 import {ScreenCaptureDevice} from "tc-shared/video/VideoSource";
+import {useTr} from "tc-shared/ui/react-elements/Helper";
 
 const cssStyle = require("./Renderer.scss");
 const ModalEvents = React.createContext<Registry<ModalVideoSourceEvents>>(undefined);
@@ -540,6 +541,49 @@ const SettingFramerate = () => {
     );
 }
 
+const SettingBps = () => {
+    const events = useContext(ModalEvents);
+
+    const [ bitrate, setBitrate ] = useState<SettingBitrate>(() => {
+        events.fire("query_setting_bitrate_max");
+        return undefined;
+    });
+    events.reactUse("notify_setting_bitrate_max", event => {
+        setBitrate(event.bitrate);
+        setCurrentValue(undefined);
+    });
+
+    const [ currentValue, setCurrentValue ] = useState(undefined);
+
+    const advanced = useContext(AdvancedSettings);
+    if(!advanced) {
+        return null;
+    }
+
+    return (
+        <div className={cssStyle.setting}>
+            <div className={cssStyle.title}>
+                <div><Translatable>Bitrate</Translatable></div>
+                <div>{bitrate ? (bitrate.bitrate / 1000).toFixed() + " kbps" : ""}</div>
+            </div>
+            <div className={cssStyle.body}>
+                <BoxedInputField
+                    placeholder={useTr("Max. bitrate in kbps")}
+                    value={currentValue}
+                    onChange={value => {
+                        const numValue = (parseInt(value) * 1000) || 0;
+                        bitrate.bitrate = numValue;
+                        events.fire("action_setting_bitrate_max", { bitrate: numValue });
+                        setCurrentValue(undefined);
+                    }}
+                    onInput={value => setCurrentValue(value)}
+                    type={"number"}
+                />
+            </div>
+        </div>
+    );
+}
+
 const calculateBps = (width: number, height: number, frameRate: number) => {
     /* Based on the tables showed here: http://www.lighterra.com/papers/videoencodingh264/ */
     const estimatedBitsPerPixed = 3.9;
@@ -609,6 +653,7 @@ const Settings = React.memo(() => {
+                <SettingBps/>
diff --git a/web/app/voice/Connection.ts b/web/app/voice/Connection.ts index 280c51a3..c09a7887 100644 --- a/web/app/voice/Connection.ts +++ b/web/app/voice/Connection.ts @@ -73,7 +73,7 @@ export class RtpVoiceConnection extends AbstractVoiceConnection { const localClientId = this.rtcConnection.getConnection().client.getClientId(); for(const data of event.arguments) { if(parseInt(data["clid"]) === localClientId) { - this.rtcConnection.startTrackBroadcast("audio").catch(error => { + this.rtcConnection.startAudioBroadcast().catch(error => { logError(LogCategory.VOICE, tr("Failed to start voice audio broadcasting after channel switch: %o"), error); this.localFailedReason = tr("Failed to start audio broadcasting"); this.setConnectionState(VoiceConnectionStatus.Failed); @@ -422,7 +422,7 @@ export class RtpVoiceConnection extends AbstractVoiceConnection { private handleRtcConnectionStateChanged(event: RTCConnectionEvents["notify_state_changed"]) { switch (event.newState) { case RTPConnectionState.CONNECTED: - this.rtcConnection.startTrackBroadcast("audio").then(() => { + this.rtcConnection.startAudioBroadcast().then(() => { logTrace(LogCategory.VOICE, tr("Local audio broadcasting has been started successfully")); this.setConnectionState(VoiceConnectionStatus.Connected); }).catch(error => {
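
Usage sketch (not part of the patch itself): the renamed VideoBroadcastConfig is what callers now pass to LocalVideoBroadcast.startBroadcasting() and changeSource(). A minimal sketch follows; the field list mirrors the interface and usages in this patch, the numeric values are purely illustrative, and the VideoConnection parameter type is assumed to be the interface exported from tc-shared/connection/VideoConnection:

/* Sketch only: start a camera broadcast with an explicit config. */
import {VideoBroadcastConfig, VideoConnection} from "tc-shared/connection/VideoConnection";
import {VideoSource} from "tc-shared/video/VideoSource";

async function startCameraBroadcast(videoConnection: VideoConnection, source: VideoSource) {
    const config: VideoBroadcastConfig = {
        width: 1280,                /* ideal and max video width */
        height: 720,                /* ideal and max video height */
        maxFrameRate: 30,
        dynamicQuality: true,       /* mirrors Settings.KEY_VIDEO_DYNAMIC_QUALITY */
        dynamicFrameRate: true,     /* mirrors Settings.KEY_VIDEO_DYNAMIC_FRAME_RATE */
        maxBandwidth: 1_600_000,    /* bits/second; 0 means no limit */
        keyframeInterval: 0         /* 0 means no keyframes will be enforced */
    };

    const broadcast = videoConnection.getLocalBroadcast("camera");
    await broadcast.startBroadcasting(source, config);
}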
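
The patch also replaces the single "rtcbroadcast" signalling command with per-media commands: "broadcastvideo" / "broadcastvideoconfigure" for video and "broadcastaudio" for voice. A rough sketch of the payloads as RTCConnection now sends them (command and key names are taken from the patch; the connection shape below is a hypothetical stand-in for the real server connection object, and the numeric values are illustrative):

/* Sketch only: minimal shape of the object the sketch sends commands through. */
interface CommandConnection {
    send_command(command: string, payload: object): Promise<unknown>;
}

async function signalVideoBroadcast(connection: CommandConnection, videoSsrc: number) {
    /* Start (or re-signal) a camera broadcast; type 0 = camera, 1 = screen. */
    await connection.send_command("broadcastvideo", {
        type: 0,
        ssrc: videoSsrc,                     /* local SSRC of the "video" transceiver */
        broadcast_bitrate_max: 1_600_000,    /* bits/second; 0 = unlimited */
        broadcast_keyframe_interval: 0       /* 0 = no enforced keyframes */
    });

    /* Reconfigure the running broadcast without restarting it ("bt" = broadcast type). */
    await connection.send_command("broadcastvideoconfigure", {
        bt: 0,
        broadcast_bitrate_max: 800_000,
        broadcast_keyframe_interval: 5
    });

    /* Stop it again by signalling SSRC 0; audio uses "broadcastaudio" the same way. */
    await connection.send_command("broadcastvideo", { type: 0, ssrc: 0 });
}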