diff --git a/shared/js/connection/VideoConnection.ts b/shared/js/connection/VideoConnection.ts index 10a3dfa3..bcda07e1 100644 --- a/shared/js/connection/VideoConnection.ts +++ b/shared/js/connection/VideoConnection.ts @@ -1,6 +1,5 @@ import {VideoSource} from "tc-shared/video/VideoSource"; import {Registry} from "tc-shared/events"; -import {ConnectionStatus} from "tc-shared/ui/frames/footer/StatusDefinitions"; import {ConnectionStatistics} from "tc-shared/connection/ConnectionBase"; export type VideoBroadcastType = "camera" | "screen"; @@ -78,6 +77,39 @@ export type LocalVideoBroadcastState = { state: "broadcasting" } +export interface BroadcastConstraints { + /** + * Ideal and max video width + */ + width: number, + + /** + * Ideal and max video height + */ + height: number, + + /** + * Dynamically change the video quality related to bandwidth constraints. + */ + dynamicQuality: boolean, + + /** + * Max bandwidth which should be used (in bits/second) + */ + maxBandwidth: number, + + /** + * Maximal frame rate for the video. + * This might be ignored by some browsers. + */ + maxFrameRate: number, + + /** + * The maximal + */ + dynamicFrameRate: boolean +} + export interface LocalVideoBroadcast { getEvents() : Registry; @@ -90,13 +122,18 @@ export interface LocalVideoBroadcast { /** * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) + * @param constraints */ - startBroadcasting(source: VideoSource) : Promise; + startBroadcasting(source: VideoSource, constraints: BroadcastConstraints) : Promise; /** * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) 
+ * @param constraints */ - changeSource(source: VideoSource) : Promise; + changeSource(source: VideoSource, constraints: BroadcastConstraints) : Promise; + + getConstraints() : BroadcastConstraints | undefined; + applyConstraints(constraints: BroadcastConstraints) : Promise; stopBroadcasting(); } diff --git a/shared/js/connection/rtc/Connection.ts b/shared/js/connection/rtc/Connection.ts index 7314067a..92c0cc09 100644 --- a/shared/js/connection/rtc/Connection.ts +++ b/shared/js/connection/rtc/Connection.ts @@ -222,7 +222,7 @@ class CommandHandler extends AbstractCommandHandler { }).then(() => this.handle["peer"].createAnswer()) .then(async answer => { if(RTCConnection.kEnableSdpTrace) { - const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", data.mode as string)); + const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", answer.type as string)); gr.collapsed(true); gr.log("%s", answer.sdp); gr.end(); @@ -235,7 +235,7 @@ class CommandHandler extends AbstractCommandHandler { .then(answer => { answer.sdp = SdpCompressor.compressSdp(answer.sdp, kSdpCompressionMode); if(RTCConnection.kEnableSdpTrace) { - const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", data.mode as string)); + const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", answer.type as string)); gr.collapsed(true); gr.log("%s", answer.sdp); gr.end(); @@ -810,7 +810,8 @@ export class RTCConnection { iceServers: [{ urls: ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302"] }] }); - const kAddGenericTransceiver = false; + /* If set to false FF failed: FIXME! 
*/ + const kAddGenericTransceiver = true; if(this.audioSupport) { this.currentTransceiver["audio"] = this.peer.addTransceiver("audio"); @@ -880,19 +881,23 @@ export class RTCConnection { } await this.currentTransceiver[type].sender.replaceTrack(target); - if(target) { - console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection); - this.currentTransceiver[type].direction = "sendrecv"; - } else if(type === "video" || type === "video-screen") { - /* - * We don't need to stop & start the audio transceivers every time we're toggling the stream state. - * This would be a much overall cost than just keeping it going. - * - * The video streams instead are not toggling that much and since they split up the bandwidth between them, - * we've to shut them down if they're no needed. This not only allows the one stream to take full advantage - * of the bandwidth it also reduces resource usage. - */ - //this.currentTransceiver[type].direction = "recvonly"; + + /* Firefox has some crazy issues */ + if(window.detectedBrowser.name !== "firefox") { + if(target) { + console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection); + this.currentTransceiver[type].direction = "sendrecv"; + } else if(type === "video" || type === "video-screen") { + /* + * We don't need to stop & start the audio transceivers every time we're toggling the stream state. + * This would be a much overall cost than just keeping it going. + * + * The video streams instead are not toggling that much and since they split up the bandwidth between them, + * we've to shut them down if they're no needed. This not only allows the one stream to take full advantage + * of the bandwidth it also reduces resource usage. 
+ */ + //this.currentTransceiver[type].direction = "recvonly"; + } } logTrace(LogCategory.WEBRTC, "Replaced track for %o (Fallback: %o)", type, target === fallback); } @@ -1108,8 +1113,9 @@ export class RTCConnection { logWarn(LogCategory.WEBRTC, tr("Received remote audio track %d but audio has been disabled. Dropping track."), ssrc); return; } + const track = new InternalRemoteRTPAudioTrack(ssrc, event.transceiver); - logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %d"), ssrc); + logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %o"), ssrc); if(tempInfo?.info !== undefined) { track.handleAssignment(tempInfo.info); this.events.fire("notify_audio_assignment_changed", { @@ -1123,7 +1129,7 @@ export class RTCConnection { this.remoteAudioTracks[ssrc] = track; } else if(event.track.kind === "video") { const track = new InternalRemoteRTPVideoTrack(ssrc, event.transceiver); - logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %d"), ssrc); + logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %o"), ssrc); if(tempInfo?.info !== undefined) { track.handleAssignment(tempInfo.info); this.events.fire("notify_video_assignment_changed", { diff --git a/shared/js/connection/rtc/video/Connection.ts b/shared/js/connection/rtc/video/Connection.ts index 9e1315a0..7ee05c5c 100644 --- a/shared/js/connection/rtc/video/Connection.ts +++ b/shared/js/connection/rtc/video/Connection.ts @@ -1,4 +1,5 @@ import { + BroadcastConstraints, LocalVideoBroadcast, LocalVideoBroadcastEvents, LocalVideoBroadcastState, @@ -27,6 +28,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { private state: LocalVideoBroadcastState; private currentSource: VideoSource; + private currentConstrints: BroadcastConstraints; private broadcastStartId: number; private localStartPromise: Promise; @@ -70,55 +72,66 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { return Promise.resolve(undefined); } - async 
changeSource(source: VideoSource): Promise { - const videoTracks = source.getStream().getVideoTracks(); - if(videoTracks.length === 0) { - throw tr("missing video stream track"); - } - + async changeSource(source: VideoSource, constraints: BroadcastConstraints): Promise { let sourceRef = source.ref(); - while(this.localStartPromise) { - await this.localStartPromise; - } - - if(this.state.state !== "broadcasting") { - sourceRef.deref(); - throw tr("not broadcasting anything"); - } - - const startId = ++this.broadcastStartId; - let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; try { - await this.handle.getRTCConnection().setTrackSource(rtcBroadcastType, videoTracks[0]); - } catch (error) { - if(this.broadcastStartId !== startId) { - /* broadcast start has been canceled */ - return; + if(this.currentSource !== source) { + console.error("Source changed"); + const videoTracks = source.getStream().getVideoTracks(); + if(videoTracks.length === 0) { + throw tr("missing video stream track"); + } + + while(this.localStartPromise) { + await this.localStartPromise; + } + + if(this.state.state !== "broadcasting") { + throw tr("not broadcasting anything"); + } + + /* Apply the constraints to the current source */ + await this.doApplyConstraints(constraints, source); + + const startId = ++this.broadcastStartId; + let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? 
"video" : "video-screen"; + try { + await this.handle.getRTCConnection().setTrackSource(rtcBroadcastType, videoTracks[0]); + } catch (error) { + if(this.broadcastStartId !== startId) { + /* broadcast start has been canceled */ + return; + } + + logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error); + throw tr("failed to change video track"); + } + + this.setCurrentSource(sourceRef); + } else if(!_.isEqual(this.currentConstrints, constraints)) { + console.error("Constraints changed"); + await this.applyConstraints(constraints); } - + } finally { sourceRef.deref(); - logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error); - throw tr("failed to change video track"); } - - this.setCurrentSource(sourceRef); - sourceRef.deref(); } private setCurrentSource(source: VideoSource | undefined) { if(this.currentSource) { this.currentSource.deref(); + this.currentConstrints = undefined; } this.currentSource = source?.ref(); } - async startBroadcasting(source: VideoSource): Promise { + async startBroadcasting(source: VideoSource, constraints: BroadcastConstraints): Promise { const sourceRef = source.ref(); while(this.localStartPromise) { await this.localStartPromise; } - const promise = this.doStartBroadcast(source); + const promise = this.doStartBroadcast(source, constraints); this.localStartPromise = promise.catch(() => {}); this.localStartPromise.then(() => this.localStartPromise = undefined); try { @@ -128,7 +141,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } } - private async doStartBroadcast(source: VideoSource) { + private async doStartBroadcast(source: VideoSource, constraints: BroadcastConstraints) { const videoTracks = source.getStream().getVideoTracks(); if(videoTracks.length === 0) { throw tr("missing video stream track"); @@ -143,6 +156,23 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { return; } + try { + await 
this.applyConstraints(constraints); + } catch (error) { + if(this.broadcastStartId !== startId) { + /* broadcast start has been canceled */ + return; + } + + logError(LogCategory.WEBRTC, tr("Failed to apply video constraints for broadcast %s: %o"), this.type, error); + this.stopBroadcasting(true, { state: "failed", reason: tr("Failed to apply video constraints") }); + throw tr("Failed to apply video constraints"); + } + if(this.broadcastStartId !== startId) { + /* broadcast start has been canceled */ + return; + } + let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; try { @@ -183,6 +213,47 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { this.setState({ state: "broadcasting" }); } + async applyConstraints(constraints: BroadcastConstraints): Promise { + await this.doApplyConstraints(constraints, this.currentSource); + } + + private async doApplyConstraints(constraints: BroadcastConstraints, source: VideoSource): Promise { + const capabilities = source.getCapabilities(); + const videoConstraints: MediaTrackConstraints = {}; + + if(constraints.dynamicQuality && capabilities) { + videoConstraints.width = { + min: capabilities.minWidth, + max: constraints.width, + ideal: constraints.width + }; + + videoConstraints.height = { + min: capabilities.minHeight, + max: constraints.height, + ideal: constraints.height + }; + } else { + videoConstraints.width = constraints.width; + videoConstraints.height = constraints.height; + } + + if(constraints.dynamicFrameRate && capabilities) { + videoConstraints.frameRate = { + min: capabilities.minFrameRate, + max: constraints.maxFrameRate, + ideal: constraints.maxFrameRate + }; + } else { + videoConstraints.frameRate = constraints.maxFrameRate; + } + + await source.getStream().getVideoTracks()[0]?.applyConstraints(constraints); + this.currentConstrints = constraints; + + /* TODO: Bandwidth update? 
*/ + } + stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) { if(this.state.state === "stopped" && (!stopState || _.isEqual(stopState, this.state))) { return; @@ -241,6 +312,10 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast { } })(); } + + getConstraints(): BroadcastConstraints | undefined { + return this.currentConstrints; + } } export class RtpVideoConnection implements VideoConnection { diff --git a/shared/js/connection/rtc/video/VideoClient.ts b/shared/js/connection/rtc/video/VideoClient.ts index 653beb1c..10483629 100644 --- a/shared/js/connection/rtc/video/VideoClient.ts +++ b/shared/js/connection/rtc/video/VideoClient.ts @@ -78,8 +78,8 @@ export class RtpVideoClient implements VideoClient { throw tr("failed to receive stream"); } }).catch(error => { - this.updateBroadcastState(broadcastType); this.joinedStates[broadcastType] = false; + this.updateBroadcastState(broadcastType); logError(LogCategory.VIDEO, tr("Failed to join video broadcast: %o"), error); throw tr("failed to join broadcast"); }); diff --git a/shared/js/events/ClientGlobalControlHandler.ts b/shared/js/events/ClientGlobalControlHandler.ts index 8368ed96..00ba4ed6 100644 --- a/shared/js/events/ClientGlobalControlHandler.ts +++ b/shared/js/events/ClientGlobalControlHandler.ts @@ -17,8 +17,7 @@ import {spawnModalCssVariableEditor} from "tc-shared/ui/modal/css-editor/Control import {server_connections} from "tc-shared/ConnectionManager"; import {spawnAbout} from "tc-shared/ui/modal/ModalAbout"; import {spawnVideoSourceSelectModal} from "tc-shared/ui/modal/video-source/Controller"; -import {LogCategory, logError} from "tc-shared/log"; -import {getVideoDriver} from "tc-shared/video/VideoSource"; +import {LogCategory, logError, logWarn} from "tc-shared/log"; import {spawnEchoTestModal} from "tc-shared/ui/modal/echo-test/Controller"; /* @@ -193,14 +192,15 @@ export function initialize(event_registry: Registry) return; } - 
spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? "quick" : "default", event.defaultDevice).then(async source => { + spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? { mode: "select-quick", defaultDevice: event.defaultDevice } : { mode: "select-default", defaultDevice: event.defaultDevice }) + .then(async ({ source, constraints }) => { if(!source) { return; } try { const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType); if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") { console.error("Change source"); - broadcast.changeSource(source).catch(error => { + broadcast.changeSource(source, constraints).catch(error => { logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error); if(typeof error !== "string") { error = tr("lookup the console for detail"); @@ -214,7 +214,7 @@ export function initialize(event_registry: Registry) }); } else { console.error("Start broadcast"); - broadcast.startBroadcasting(source).catch(error => { + broadcast.startBroadcasting(source, constraints).catch(error => { logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error); if(typeof error !== "string") { error = tr("lookup the console for detail"); @@ -237,4 +237,35 @@ export function initialize(event_registry: Registry) broadcast.stopBroadcasting(); } }); + + event_registry.on("action_edit_video_broadcasting", event => { + const connection = event.connection; + if(!connection.connected) { + createErrorModal(tr("You're not connected"), tr("You're not connected to any server!")).open(); + return; + } + + const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType); + if(!broadcast || (broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing")) { + createErrorModal(tr("You're not 
broadcasting"), tr("You're not broadcasting any video!")).open(); + return; + } + + spawnVideoSourceSelectModal(event.broadcastType, { mode: "edit", source: broadcast.getSource(), broadcastConstraints: Object.assign({}, broadcast.getConstraints()) }) + .then(async ({ source, constraints }) => { + if (!source) { + return; + } + + if(broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing") { + createErrorModal(tr("Video broadcast has ended"), tr("The video broadcast has ended.\nUpdate failed.")).open(); + return; + } + + await broadcast.changeSource(source, constraints); + }).catch(error => { + logWarn(LogCategory.VIDEO, tr("Failed to edit video broadcast: %o"), error); + createErrorModal(tr("Broadcast update failed"), tr("We failed to update the current video broadcast settings.\nThe old settings will be used.")).open(); + }); + }); } \ No newline at end of file diff --git a/shared/js/media/Video.ts b/shared/js/media/Video.ts index cebc959f..68a1d98a 100644 --- a/shared/js/media/Video.ts +++ b/shared/js/media/Video.ts @@ -4,13 +4,13 @@ import { VideoDriver, VideoDriverEvents, VideoPermissionStatus, - VideoSource + VideoSource, VideoSourceCapabilities, VideoSourceInitialSettings } from "tc-shared/video/VideoSource"; import {Registry} from "tc-shared/events"; import {MediaStreamRequestResult} from "tc-shared/voice/RecorderBase"; import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log"; import {queryMediaPermissions, requestMediaStream, stopMediaStream} from "tc-shared/media/Stream"; -import { tr } from "tc-shared/i18n/localize"; +import {tr} from "tc-shared/i18n/localize"; declare global { interface MediaDevices { @@ -225,7 +225,9 @@ export class WebVideoDriver implements VideoDriver { try { const source = await navigator.mediaDevices.getDisplayMedia({ audio: false, video: true }); const videoTrack = source.getVideoTracks()[0]; - if(!videoTrack) { throw tr("missing video track"); } + if(!videoTrack) { + throw 
tr("missing video track"); + } logDebug(LogCategory.VIDEO, tr("Display media received with settings: %o"), videoTrack.getSettings()); return new WebVideoSource(videoTrack.getSettings().deviceId, tr("Screen"), source); @@ -248,10 +250,19 @@ export class WebVideoSource implements VideoSource { private readonly stream: MediaStream; private referenceCount = 1; + private initialSettings: VideoSourceInitialSettings; + constructor(deviceId: string, displayName: string, stream: MediaStream) { this.deviceId = deviceId; this.displayName = displayName; this.stream = stream; + + const settings = stream.getVideoTracks()[0].getSettings(); + this.initialSettings = { + frameRate: settings.frameRate, + height: settings.height, + width: settings.width + }; } destroy() { @@ -270,6 +281,26 @@ export class WebVideoSource implements VideoSource { return this.stream; } + getInitialSettings(): VideoSourceInitialSettings { + return this.initialSettings; + } + + getCapabilities(): VideoSourceCapabilities { + const videoTrack = this.stream.getVideoTracks()[0]; + const capabilities = "getCapabilities" in videoTrack ? 
videoTrack.getCapabilities() : undefined; + + return { + minWidth: capabilities?.width?.min || 1, + maxWidth: capabilities?.width?.max || this.initialSettings.width, + + minHeight: capabilities?.height?.min || 1, + maxHeight: capabilities?.height?.max || this.initialSettings.height, + + minFrameRate: capabilities?.frameRate?.min || 1, + maxFrameRate: capabilities?.frameRate?.max || this.initialSettings.frameRate + }; + } + deref() { this.referenceCount -= 1; diff --git a/shared/js/settings.ts b/shared/js/settings.ts index 5672e82c..88ee2d44 100644 --- a/shared/js/settings.ts +++ b/shared/js/settings.ts @@ -541,6 +541,20 @@ export class Settings extends StaticSettings { valueType: "number", }; + static readonly KEY_VIDEO_DEFAULT_MAX_WIDTH: ValuedSettingsKey = { + key: 'video_default_max_width', + defaultValue: 1280, + description: "The default maximal width of the video being crated.", + valueType: "number", + }; + + static readonly KEY_VIDEO_DEFAULT_MAX_HEIGHT: ValuedSettingsKey = { + key: 'video_default_max_height', + defaultValue: 720, + description: "The default maximal height of the video being crated.", + valueType: "number", + }; + static readonly FN_LOG_ENABLED: (category: string) => SettingsKey = category => { return { key: "log." 
+ category.toLowerCase() + ".enabled", diff --git a/shared/js/ui/frames/control-bar/Controller.ts b/shared/js/ui/frames/control-bar/Controller.ts index f3f12515..b9b0a20c 100644 --- a/shared/js/ui/frames/control-bar/Controller.ts +++ b/shared/js/ui/frames/control-bar/Controller.ts @@ -418,6 +418,16 @@ export function initializeControlBarController(events: Registry { + if(infoHandler.getCurrentHandler()) { + global_client_actions.fire("action_edit_video_broadcasting", { + connection: infoHandler.getCurrentHandler(), + broadcastType: event.broadcastType + }); + } else { + createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open(); + } + }); return infoHandler; } \ No newline at end of file diff --git a/shared/js/ui/frames/control-bar/Renderer.tsx b/shared/js/ui/frames/control-bar/Renderer.tsx index f2efa595..40e75ed4 100644 --- a/shared/js/ui/frames/control-bar/Renderer.tsx +++ b/shared/js/ui/frames/control-bar/Renderer.tsx @@ -294,7 +294,7 @@ const VideoButton = (props: { type: VideoBroadcastType }) => { diff --git a/shared/js/ui/modal/video-source/Controller.tsx b/shared/js/ui/modal/video-source/Controller.tsx index 2c42a2bf..73cc920a 100644 --- a/shared/js/ui/modal/video-source/Controller.tsx +++ b/shared/js/ui/modal/video-source/Controller.tsx @@ -3,56 +3,74 @@ import {spawnReactModal} from "tc-shared/ui/react-elements/Modal"; import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definitions"; import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer"; import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource"; -import {LogCategory, logError} from "tc-shared/log"; -import {VideoBroadcastType} from "tc-shared/connection/VideoConnection"; +import {LogCategory, logError, logWarn} from "tc-shared/log"; +import {BroadcastConstraints, VideoBroadcastType} from "tc-shared/connection/VideoConnection"; +import {Settings, settings} from 
"tc-shared/settings"; +import {tr} from "tc-shared/i18n/localize"; -type SourceConstraints = { width?: number, height?: number, frameRate?: number }; +export type VideoSourceModalAction = { + mode: "select-quick", + defaultDevice?: string +} | { + mode: "select-default", + defaultDevice?: string +} | { + mode: "new" +} | { + mode: "edit", + source: VideoSource, + broadcastConstraints: BroadcastConstraints +}; /** * @param type The video type which should be prompted - * @param selectMode - * @param defaultDeviceId + * @param mode */ -export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, selectMode: "quick" | "default" | "none", defaultDeviceId?: string) : Promise { +export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, constraints: BroadcastConstraints | undefined }> { const controller = new VideoSourceController(type); - let defaultSelectSource = selectMode === "default"; - if(selectMode === "quick") { + let defaultSelectDevice: string | true; + if(mode.mode === "select-quick") { /* We need the modal itself for the native client in order to present the window selector */ if(type === "camera" || __build.target === "web") { /* Try to get the default device. If we succeeded directly return that */ - if(await controller.selectSource(defaultDeviceId)) { - const source = controller.getCurrentSource()?.ref(); + if(await controller.selectSource(mode.defaultDevice)) { + /* select succeeded */ + const resultSource = controller.getCurrentSource()?.ref(); + const resultConstraints = controller.getBroadcastConstraints(); controller.destroy(); - - return source; + return { + source: resultSource, + constraints: resultConstraints + }; + } else { + /* Select failed. We'll open the modal and show the error. 
*/ } } else { - defaultSelectSource = true; + defaultSelectDevice = mode.defaultDevice || true; } + } else if(mode.mode === "select-default") { + defaultSelectDevice = mode.defaultDevice || true; + } else if(mode.mode === "edit") { + await controller.useSettings(mode.source, mode.broadcastConstraints); } - const modal = spawnReactModal(ModalVideoSource, controller.events, type); + const modal = spawnReactModal(ModalVideoSource, controller.events, type, mode.mode === "edit"); controller.events.on(["action_start", "action_cancel"], () => modal.destroy()); modal.show().then(() => { - if(defaultSelectSource) { + if(defaultSelectDevice) { if(type === "screen" && getVideoDriver().screenQueryAvailable()) { controller.events.fire_react("action_toggle_screen_capture_device_select", { shown: true }); } else { - controller.selectSource(defaultDeviceId); + controller.selectSource(defaultSelectDevice === true ? undefined : defaultSelectDevice); } } }); - let refSource: { source: VideoSource } = { source: undefined }; - controller.events.on("action_start", () => { - refSource.source?.deref(); - refSource.source = controller.getCurrentSource()?.ref(); - }); - await new Promise(resolve => { - if(defaultSelectSource && selectMode === "quick") { + if(mode.mode === "select-quick" && __build.target !== "web") { + /* We need the modal event for quick select */ const callbackRemove = controller.events.on("notify_video_preview", event => { if(event.status.status === "error") { callbackRemove(); @@ -60,8 +78,6 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele if(event.status.status === "preview") { /* we've successfully selected something */ - refSource.source = controller.getCurrentSource()?.ref(); - modal.hide(); modal.destroy(); } }); @@ -70,8 +86,96 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele modal.events.one(["destroy", "close"], resolve); }); + const resultSource = controller.getCurrentSource()?.ref(); + const 
resultConstraints = controller.getBroadcastConstraints(); controller.destroy(); - return refSource.source; + return { + source: resultSource, + constraints: resultConstraints + }; +} + +function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: BroadcastConstraints) { + const videoTrack = source.getStream().getVideoTracks()[0]; + const trackSettings = videoTrack.getSettings(); + + constraints.width = trackSettings.width; + constraints.height = trackSettings.height; + constraints.maxFrameRate = trackSettings.frameRate; +} + +async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise { + const videoTrack = source.getStream().getVideoTracks()[0]; + + let maxHeight = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_HEIGHT); + let maxWidth = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_WIDTH); + + const trackSettings = videoTrack.getSettings(); + const capabilities = source.getCapabilities(); + + maxHeight = Math.min(maxHeight, capabilities.maxHeight); + maxWidth = Math.min(maxWidth, capabilities.maxWidth); + + const broadcastConstraints: BroadcastConstraints = {} as any; + { + let ratio = 1; + + if(trackSettings.height > maxHeight) { + ratio = Math.min(maxHeight / trackSettings.height, ratio); + } + + if(trackSettings.width > maxWidth) { + ratio = Math.min(maxWidth / trackSettings.width, ratio); + } + + if(ratio !== 1) { + broadcastConstraints.width = Math.ceil(ratio * trackSettings.width); + broadcastConstraints.height = Math.ceil(ratio * trackSettings.height); + } else { + broadcastConstraints.width = trackSettings.width; + broadcastConstraints.height = trackSettings.height; + } + } + + broadcastConstraints.dynamicQuality = true; + broadcastConstraints.dynamicFrameRate = true; + broadcastConstraints.maxBandwidth = 10_000_000; + + try { + await applyBroadcastConstraints(source, broadcastConstraints); + } catch (error) { + logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast constraints: %o"), 
error); + } + + updateBroadcastConstraintsFromSource(source, broadcastConstraints); + + return broadcastConstraints; +} + +/* May throws an overconstraint error */ +async function applyBroadcastConstraints(source: VideoSource, constraints: BroadcastConstraints) { + const videoTrack = source.getStream().getVideoTracks()[0]; + if(!videoTrack) { return; } + + await videoTrack.applyConstraints({ + frameRate: constraints.dynamicFrameRate ? { + min: 1, + max: constraints.maxFrameRate, + ideal: constraints.maxFrameRate + } : constraints.maxFrameRate, + + width: constraints.dynamicQuality ? { + min: 1, + max: constraints.width, + ideal: constraints.width + } : constraints.width, + + height: constraints.dynamicQuality ? { + min: 1, + max: constraints.height, + ideal: constraints.height + } : constraints.height + }); } class VideoSourceController { @@ -79,7 +183,7 @@ class VideoSourceController { private readonly type: VideoBroadcastType; private currentSource: VideoSource | string; - private currentConstraints: SourceConstraints; + private currentConstraints: BroadcastConstraints; /* preselected current source id */ private currentSourceId: string; @@ -177,24 +281,13 @@ class VideoSourceController { })); } - const applyConstraints = async () => { - if(typeof this.currentSource === "object") { - const videoTrack = this.currentSource.getStream().getVideoTracks()[0]; - if(!videoTrack) { return; } - - await videoTrack.applyConstraints(this.currentConstraints); - } - }; - this.events.on("action_setting_dimension", event => { this.currentConstraints.height = event.height; this.currentConstraints.width = event.width; - applyConstraints().then(undefined); }); this.events.on("action_setting_framerate", event => { - this.currentConstraints.frameRate = event.frameRate; - applyConstraints().then(undefined); + this.currentConstraints.maxFrameRate = event.frameRate; }); } @@ -208,12 +301,27 @@ class VideoSourceController { this.events.destroy(); } - setCurrentSource(source: VideoSource | 
string | undefined) { + async setCurrentSource(source: VideoSource | string | undefined) { if(typeof this.currentSource === "object") { this.currentSource.deref(); } - this.currentConstraints = {}; + if(typeof source === "object") { + if(this.currentConstraints) { + try { + /* TODO: Automatically scale down resolution if new one isn't capable of supplying our current resolution */ + await applyBroadcastConstraints(source, this.currentConstraints); + } catch (error) { + logWarn(LogCategory.VIDEO, tr("Failed to apply broadcast constraints to new source: %o"), error); + this.currentConstraints = undefined; + } + } + + if(!this.currentConstraints) { + this.currentConstraints = await generateAndApplyDefaultConstraints(source); + } + } + this.currentSource = source; this.notifyVideoPreview(); this.notifyStartButton(); @@ -222,6 +330,20 @@ class VideoSourceController { this.notifySettingFramerate(); } + async useSettings(source: VideoSource, constraints: BroadcastConstraints) { + if(typeof this.currentSource === "object") { + this.currentSource.deref(); + } + + this.currentSource = source.ref(); + this.currentConstraints = constraints; + this.notifyVideoPreview(); + this.notifyStartButton(); + this.notifyCurrentSource(); + this.notifySettingDimension(); + this.notifySettingFramerate(); + } + async selectSource(sourceId: string) : Promise { const driver = getVideoDriver(); @@ -244,17 +366,17 @@ class VideoSourceController { try { const stream = await streamPromise; - this.setCurrentSource(stream); + await this.setCurrentSource(stream); this.fallbackCurrentSourceName = stream?.getName() || tr("No stream"); return !!stream; } catch (error) { this.fallbackCurrentSourceName = tr("failed to attach to device"); if(typeof error === "string") { - this.setCurrentSource(error); + await this.setCurrentSource(error); } else { logError(LogCategory.GENERAL, tr("Failed to open capture device %s: %o"), sourceId, error); - this.setCurrentSource(tr("Failed to open capture device (Lookup the 
console)")); + await this.setCurrentSource(tr("Failed to open capture device (Lookup the console)")); } return false; @@ -265,6 +387,10 @@ class VideoSourceController { return typeof this.currentSource === "object" ? this.currentSource : undefined; } + getBroadcastConstraints() : BroadcastConstraints { + return this.currentConstraints; + } + private notifyStartButton() { this.events.fire_react("notify_start_button", { enabled: typeof this.currentSource === "object" }) } @@ -291,7 +417,7 @@ class VideoSourceController { }); } - private notifyScreenCaptureDevices(){ + private notifyScreenCaptureDevices() { const driver = getVideoDriver(); driver.queryScreenCaptureDevices().then(devices => { this.events.fire_react("notify_screen_capture_devices", { devices: { status: "success", devices: devices }}); @@ -305,7 +431,7 @@ class VideoSourceController { }) } - private notifyVideoPreview(){ + private notifyVideoPreview() { const driver = getVideoDriver(); switch (driver.getPermissionStatus()) { case VideoPermissionStatus.SystemDenied: @@ -333,7 +459,7 @@ class VideoSourceController { } }; - private notifyCurrentSource(){ + private notifyCurrentSource() { if(typeof this.currentSource === "object") { this.events.fire_react("notify_source", { state: { @@ -358,25 +484,25 @@ class VideoSourceController { } } - private notifySettingDimension(){ + private notifySettingDimension() { if(typeof this.currentSource === "object") { - const videoTrack = this.currentSource.getStream().getVideoTracks()[0]; - const settings = videoTrack.getSettings(); - const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined; + const initialSettings = this.currentSource.getInitialSettings(); + const capabilities = this.currentSource.getCapabilities(); + const constraints = this.currentConstraints; this.events.fire_react("notify_setting_dimension", { setting: { - minWidth: capabilities?.width ? capabilities.width.min : 1, - maxWidth: capabilities?.width ? 
capabilities.width.max : settings.width, + minWidth: capabilities.minWidth, + maxWidth: capabilities.maxWidth, - minHeight: capabilities?.height ? capabilities.height.min : 1, - maxHeight: capabilities?.height ? capabilities.height.max : settings.height, + minHeight: capabilities.minHeight, + maxHeight: capabilities.maxHeight, - originalWidth: settings.width, - originalHeight: settings.height, + originalWidth: initialSettings.width, + originalHeight: initialSettings.height, - currentWidth: settings.width, - currentHeight: settings.height + currentWidth: constraints.width, + currentHeight: constraints.height } }); } else { @@ -386,16 +512,16 @@ class VideoSourceController { notifySettingFramerate() { if(typeof this.currentSource === "object") { - const videoTrack = this.currentSource.getStream().getVideoTracks()[0]; - const settings = videoTrack.getSettings(); - const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined; + const initialSettings = this.currentSource.getInitialSettings(); + const capabilities = this.currentSource.getCapabilities(); const round = (value: number) => Math.round(value * 100) / 100; this.events.fire_react("notify_settings_framerate", { frameRate: { - min: round(capabilities?.frameRate ? capabilities.frameRate.min : 1), - max: round(capabilities?.frameRate ? 
capabilities.frameRate.max : settings.frameRate), - original: round(settings.frameRate) + min: round(capabilities.minFrameRate), + max: round(capabilities.maxFrameRate), + original: round(initialSettings.frameRate), + current: round(this.currentConstraints.maxFrameRate) } }); } else { diff --git a/shared/js/ui/modal/video-source/Definitions.ts b/shared/js/ui/modal/video-source/Definitions.ts index 0252a68a..2b8b7bd1 100644 --- a/shared/js/ui/modal/video-source/Definitions.ts +++ b/shared/js/ui/modal/video-source/Definitions.ts @@ -48,6 +48,7 @@ export type SettingFrameRate = { min: number, max: number, original: number, + current: number }; export interface ModalVideoSourceEvents { diff --git a/shared/js/ui/modal/video-source/Renderer.tsx b/shared/js/ui/modal/video-source/Renderer.tsx index b4d442e7..518c1d36 100644 --- a/shared/js/ui/modal/video-source/Renderer.tsx +++ b/shared/js/ui/modal/video-source/Renderer.tsx @@ -233,7 +233,7 @@ const VideoPreview = () => { ); } -const ButtonStart = () => { +const ButtonStart = (props: { editMode: boolean }) => { const events = useContext(ModalEvents); const [ enabled, setEnabled ] = useState(() => { events.fire("query_start_button"); @@ -248,7 +248,7 @@ const ButtonStart = () => { disabled={!enabled} onClick={() => enabled && events.fire("action_start")} > - Start + {props.editMode ? Apply Changes : Start} ); } @@ -317,7 +317,7 @@ const SettingDimension = () => { setHeight(event.setting.currentHeight); refSliderWidth.current?.setState({ value: event.setting.currentWidth }); refSliderHeight.current?.setState({ value: event.setting.currentHeight }); - setSelectValue("original"); + setSelectValue("current"); } else { setSettings(undefined); setSelectValue("no-source"); @@ -419,6 +419,7 @@ const SettingDimension = () => { )} + @@ -486,7 +487,7 @@ const SettingFramerate = () => { setFrameRate(event.frameRate); setCurrentRate(event.frameRate ? 
event.frameRate.original : 1); if(event.frameRate) { - setSelectedValue(event.frameRate.original.toString()); + setSelectedValue(event.frameRate.current.toString()); } else { setSelectedValue("no-source"); } @@ -497,6 +498,9 @@ const SettingFramerate = () => { if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.original) === -1) { FrameRates[frameRate.original.toString()] = frameRate.original; } + if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.current) === -1) { + FrameRates[frameRate.current.toString()] = frameRate.current; + } } return ( @@ -758,12 +762,14 @@ const ScreenCaptureDeviceSelect = React.memo(() => { export class ModalVideoSource extends InternalModal { protected readonly events: Registry; private readonly sourceType: VideoBroadcastType; + private readonly editMode: boolean; - constructor(events: Registry, type: VideoBroadcastType) { + constructor(events: Registry, type: VideoBroadcastType, editMode: boolean) { super(); this.sourceType = type; this.events = events; + this.editMode = editMode; } renderBody(): React.ReactElement { @@ -793,7 +799,7 @@ export class ModalVideoSource extends InternalModal { - + diff --git a/shared/js/video/VideoSource.ts b/shared/js/video/VideoSource.ts index a7d3be18..1f0e738d 100644 --- a/shared/js/video/VideoSource.ts +++ b/shared/js/video/VideoSource.ts @@ -1,11 +1,30 @@ import {Registry} from "tc-shared/events"; import { tr } from "tc-shared/i18n/localize"; +export interface VideoSourceCapabilities { + minWidth: number, + maxWidth: number, + + minHeight: number, + maxHeight: number, + + minFrameRate: number, + maxFrameRate: number +} + +export interface VideoSourceInitialSettings { + width: number, + height: number, + frameRate: number +} + export interface VideoSource { getId() : string; getName() : string; getStream() : MediaStream; + getCapabilities() : VideoSourceCapabilities; + getInitialSettings() : VideoSourceInitialSettings; /** Add a new reference to this stream 
*/ ref() : this;