Fixed some minor bugs and made broadcast settings re-editable

master
WolverinDEV 2020-12-16 22:06:46 +01:00 committed by WolverinDEV
parent 87d5e4697d
commit bd5148e944
13 changed files with 489 additions and 133 deletions

View File

@ -1,6 +1,5 @@
import {VideoSource} from "tc-shared/video/VideoSource";
import {Registry} from "tc-shared/events";
import {ConnectionStatus} from "tc-shared/ui/frames/footer/StatusDefinitions";
import {ConnectionStatistics} from "tc-shared/connection/ConnectionBase";
export type VideoBroadcastType = "camera" | "screen";
@ -78,6 +77,39 @@ export type LocalVideoBroadcastState = {
state: "broadcasting"
}
export interface BroadcastConstraints {
/**
* Ideal and max video width
*/
width: number,
/**
* Ideal and max video height
*/
height: number,
/**
* Dynamically change the video quality related to bandwidth constraints.
*/
dynamicQuality: boolean,
/**
* Max bandwidth which should be used (in bits/second)
*/
maxBandwidth: number,
/**
* Maximal frame rate for the video.
* This might be ignored by some browsers.
*/
maxFrameRate: number,
/**
* Dynamically lower the frame rate when bandwidth/quality requires it
* (a min/ideal/max range is applied to the track instead of a fixed frame rate).
*/
dynamicFrameRate: boolean
}
export interface LocalVideoBroadcast {
getEvents() : Registry<LocalVideoBroadcastEvents>;
@ -90,13 +122,18 @@ export interface LocalVideoBroadcast {
/**
* @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
* @param constraints
*/
startBroadcasting(source: VideoSource) : Promise<void>;
startBroadcasting(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
/**
* @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
* @param constraints
*/
changeSource(source: VideoSource) : Promise<void>;
changeSource(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
getConstraints() : BroadcastConstraints | undefined;
applyConstraints(constraints: BroadcastConstraints) : Promise<void>;
stopBroadcasting();
}

View File

@ -222,7 +222,7 @@ class CommandHandler extends AbstractCommandHandler {
}).then(() => this.handle["peer"].createAnswer())
.then(async answer => {
if(RTCConnection.kEnableSdpTrace) {
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", data.mode as string));
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", answer.type as string));
gr.collapsed(true);
gr.log("%s", answer.sdp);
gr.end();
@ -235,7 +235,7 @@ class CommandHandler extends AbstractCommandHandler {
.then(answer => {
answer.sdp = SdpCompressor.compressSdp(answer.sdp, kSdpCompressionMode);
if(RTCConnection.kEnableSdpTrace) {
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", data.mode as string));
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", answer.type as string));
gr.collapsed(true);
gr.log("%s", answer.sdp);
gr.end();
@ -810,7 +810,8 @@ export class RTCConnection {
iceServers: [{ urls: ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302"] }]
});
const kAddGenericTransceiver = false;
/* If set to false FF failed: FIXME! */
const kAddGenericTransceiver = true;
if(this.audioSupport) {
this.currentTransceiver["audio"] = this.peer.addTransceiver("audio");
@ -880,19 +881,23 @@ export class RTCConnection {
}
await this.currentTransceiver[type].sender.replaceTrack(target);
if(target) {
console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection);
this.currentTransceiver[type].direction = "sendrecv";
} else if(type === "video" || type === "video-screen") {
/*
 * We don't need to stop & start the audio transceivers every time we toggle the stream state.
 * That would incur a much higher overall cost than just keeping them running.
 *
 * The video streams, in contrast, are not toggled that often, and since they split the available
 * bandwidth between them, we have to shut them down when they're not needed. This not only allows
 * the remaining stream to take full advantage of the bandwidth, it also reduces resource usage.
 */
//this.currentTransceiver[type].direction = "recvonly";
/* Firefox has some crazy issues */
if(window.detectedBrowser.name !== "firefox") {
if(target) {
console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection);
this.currentTransceiver[type].direction = "sendrecv";
} else if(type === "video" || type === "video-screen") {
/*
 * We don't need to stop & start the audio transceivers every time we toggle the stream state.
 * That would incur a much higher overall cost than just keeping them running.
 *
 * The video streams, in contrast, are not toggled that often, and since they split the available
 * bandwidth between them, we have to shut them down when they're not needed. This not only allows
 * the remaining stream to take full advantage of the bandwidth, it also reduces resource usage.
 */
//this.currentTransceiver[type].direction = "recvonly";
}
}
logTrace(LogCategory.WEBRTC, "Replaced track for %o (Fallback: %o)", type, target === fallback);
}
@ -1108,8 +1113,9 @@ export class RTCConnection {
logWarn(LogCategory.WEBRTC, tr("Received remote audio track %d but audio has been disabled. Dropping track."), ssrc);
return;
}
const track = new InternalRemoteRTPAudioTrack(ssrc, event.transceiver);
logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %d"), ssrc);
logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %o"), ssrc);
if(tempInfo?.info !== undefined) {
track.handleAssignment(tempInfo.info);
this.events.fire("notify_audio_assignment_changed", {
@ -1123,7 +1129,7 @@ export class RTCConnection {
this.remoteAudioTracks[ssrc] = track;
} else if(event.track.kind === "video") {
const track = new InternalRemoteRTPVideoTrack(ssrc, event.transceiver);
logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %d"), ssrc);
logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %o"), ssrc);
if(tempInfo?.info !== undefined) {
track.handleAssignment(tempInfo.info);
this.events.fire("notify_video_assignment_changed", {

View File

@ -1,4 +1,5 @@
import {
BroadcastConstraints,
LocalVideoBroadcast,
LocalVideoBroadcastEvents,
LocalVideoBroadcastState,
@ -27,6 +28,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
private state: LocalVideoBroadcastState;
private currentSource: VideoSource;
private currentConstrints: BroadcastConstraints;
private broadcastStartId: number;
private localStartPromise: Promise<void>;
@ -70,55 +72,66 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
return Promise.resolve(undefined);
}
async changeSource(source: VideoSource): Promise<void> {
const videoTracks = source.getStream().getVideoTracks();
if(videoTracks.length === 0) {
throw tr("missing video stream track");
}
async changeSource(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
let sourceRef = source.ref();
while(this.localStartPromise) {
await this.localStartPromise;
}
if(this.state.state !== "broadcasting") {
sourceRef.deref();
throw tr("not broadcasting anything");
}
const startId = ++this.broadcastStartId;
let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
try {
await this.handle.getRTCConnection().setTrackSource(rtcBroadcastType, videoTracks[0]);
} catch (error) {
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
if(this.currentSource !== source) {
console.error("Source changed");
const videoTracks = source.getStream().getVideoTracks();
if(videoTracks.length === 0) {
throw tr("missing video stream track");
}
while(this.localStartPromise) {
await this.localStartPromise;
}
if(this.state.state !== "broadcasting") {
throw tr("not broadcasting anything");
}
/* Apply the constraints to the current source */
await this.doApplyConstraints(constraints, source);
const startId = ++this.broadcastStartId;
let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
try {
await this.handle.getRTCConnection().setTrackSource(rtcBroadcastType, videoTracks[0]);
} catch (error) {
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
}
logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error);
throw tr("failed to change video track");
}
this.setCurrentSource(sourceRef);
} else if(!_.isEqual(this.currentConstrints, constraints)) {
console.error("Constraints changed");
await this.applyConstraints(constraints);
}
} finally {
sourceRef.deref();
logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error);
throw tr("failed to change video track");
}
this.setCurrentSource(sourceRef);
sourceRef.deref();
}
/* Replace the currently held broadcast source, managing the reference counts of old and new source. */
private setCurrentSource(source: VideoSource | undefined) {
if(this.currentSource) {
/* Release our reference to the previous source; its constraints no longer apply. */
this.currentSource.deref();
this.currentConstrints = undefined;
}
/* Take our own reference on the new source; the caller keeps (and later releases) its own. */
this.currentSource = source?.ref();
}
async startBroadcasting(source: VideoSource): Promise<void> {
async startBroadcasting(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
const sourceRef = source.ref();
while(this.localStartPromise) {
await this.localStartPromise;
}
const promise = this.doStartBroadcast(source);
const promise = this.doStartBroadcast(source, constraints);
this.localStartPromise = promise.catch(() => {});
this.localStartPromise.then(() => this.localStartPromise = undefined);
try {
@ -128,7 +141,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
}
}
private async doStartBroadcast(source: VideoSource) {
private async doStartBroadcast(source: VideoSource, constraints: BroadcastConstraints) {
const videoTracks = source.getStream().getVideoTracks();
if(videoTracks.length === 0) {
throw tr("missing video stream track");
@ -143,6 +156,23 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
return;
}
try {
await this.applyConstraints(constraints);
} catch (error) {
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
}
logError(LogCategory.WEBRTC, tr("Failed to apply video constraints for broadcast %s: %o"), this.type, error);
this.stopBroadcasting(true, { state: "failed", reason: tr("Failed to apply video constraints") });
throw tr("Failed to apply video constraints");
}
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
}
let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
try {
@ -183,6 +213,47 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
this.setState({ state: "broadcasting" });
}
async applyConstraints(constraints: BroadcastConstraints): Promise<void> {
await this.doApplyConstraints(constraints, this.currentSource);
}
/**
 * Translate the given broadcast constraints into MediaTrackConstraints and apply them
 * to the first video track of the given source.
 *
 * When dynamic quality/frame rate is enabled (and the source reports capabilities),
 * a min/ideal/max range is supplied so the browser may scale down under pressure;
 * otherwise fixed values are used.
 *
 * @param constraints The target broadcast constraints; stored as the current constraints on success.
 * @param source The video source whose track receives the constraints.
 * @throws May reject with an OverconstrainedError from applyConstraints().
 */
private async doApplyConstraints(constraints: BroadcastConstraints, source: VideoSource): Promise<void> {
const capabilities = source.getCapabilities();
const videoConstraints: MediaTrackConstraints = {};

if(constraints.dynamicQuality && capabilities) {
videoConstraints.width = {
min: capabilities.minWidth,
max: constraints.width,
ideal: constraints.width
};

videoConstraints.height = {
min: capabilities.minHeight,
max: constraints.height,
ideal: constraints.height
};
} else {
videoConstraints.width = constraints.width;
videoConstraints.height = constraints.height;
}

if(constraints.dynamicFrameRate && capabilities) {
videoConstraints.frameRate = {
min: capabilities.minFrameRate,
max: constraints.maxFrameRate,
ideal: constraints.maxFrameRate
};
} else {
videoConstraints.frameRate = constraints.maxFrameRate;
}

/* Bug fix: apply the translated MediaTrackConstraints, not the raw BroadcastConstraints object —
 * otherwise the dynamic min/ideal/max ranges computed above were silently discarded. */
await source.getStream().getVideoTracks()[0]?.applyConstraints(videoConstraints);
this.currentConstrints = constraints;

/* TODO: Bandwidth update? */
}
stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) {
if(this.state.state === "stopped" && (!stopState || _.isEqual(stopState, this.state))) {
return;
@ -241,6 +312,10 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
}
})();
}
/* Constraints of the currently running broadcast, or undefined when no source/constraints are set. */
getConstraints(): BroadcastConstraints | undefined {
return this.currentConstrints;
}
}
export class RtpVideoConnection implements VideoConnection {

View File

@ -78,8 +78,8 @@ export class RtpVideoClient implements VideoClient {
throw tr("failed to receive stream");
}
}).catch(error => {
this.updateBroadcastState(broadcastType);
this.joinedStates[broadcastType] = false;
this.updateBroadcastState(broadcastType);
logError(LogCategory.VIDEO, tr("Failed to join video broadcast: %o"), error);
throw tr("failed to join broadcast");
});

View File

@ -17,8 +17,7 @@ import {spawnModalCssVariableEditor} from "tc-shared/ui/modal/css-editor/Control
import {server_connections} from "tc-shared/ConnectionManager";
import {spawnAbout} from "tc-shared/ui/modal/ModalAbout";
import {spawnVideoSourceSelectModal} from "tc-shared/ui/modal/video-source/Controller";
import {LogCategory, logError} from "tc-shared/log";
import {getVideoDriver} from "tc-shared/video/VideoSource";
import {LogCategory, logError, logWarn} from "tc-shared/log";
import {spawnEchoTestModal} from "tc-shared/ui/modal/echo-test/Controller";
/*
@ -193,14 +192,15 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
return;
}
spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? "quick" : "default", event.defaultDevice).then(async source => {
spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? { mode: "select-quick", defaultDevice: event.defaultDevice } : { mode: "select-default", defaultDevice: event.defaultDevice })
.then(async ({ source, constraints }) => {
if(!source) { return; }
try {
const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType);
if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") {
console.error("Change source");
broadcast.changeSource(source).catch(error => {
broadcast.changeSource(source, constraints).catch(error => {
logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error);
if(typeof error !== "string") {
error = tr("lookup the console for detail");
@ -214,7 +214,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
});
} else {
console.error("Start broadcast");
broadcast.startBroadcasting(source).catch(error => {
broadcast.startBroadcasting(source, constraints).catch(error => {
logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error);
if(typeof error !== "string") {
error = tr("lookup the console for detail");
@ -237,4 +237,35 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
broadcast.stopBroadcasting();
}
});
event_registry.on("action_edit_video_broadcasting", event => {
const connection = event.connection;
if(!connection.connected) {
createErrorModal(tr("You're not connected"), tr("You're not connected to any server!")).open();
return;
}
const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType);
if(!broadcast || (broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing")) {
createErrorModal(tr("You're not broadcasting"), tr("You're not broadcasting any video!")).open();
return;
}
spawnVideoSourceSelectModal(event.broadcastType, { mode: "edit", source: broadcast.getSource(), broadcastConstraints: Object.assign({}, broadcast.getConstraints()) })
.then(async ({ source, constraints }) => {
if (!source) {
return;
}
if(broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing") {
createErrorModal(tr("Video broadcast has ended"), tr("The video broadcast has ended.\nUpdate failed.")).open();
return;
}
await broadcast.changeSource(source, constraints);
}).catch(error => {
logWarn(LogCategory.VIDEO, tr("Failed to edit video broadcast: %o"), error);
createErrorModal(tr("Broadcast update failed"), tr("We failed to update the current video broadcast settings.\nThe old settings will be used.")).open();
});
});
}

View File

@ -4,13 +4,13 @@ import {
VideoDriver,
VideoDriverEvents,
VideoPermissionStatus,
VideoSource
VideoSource, VideoSourceCapabilities, VideoSourceInitialSettings
} from "tc-shared/video/VideoSource";
import {Registry} from "tc-shared/events";
import {MediaStreamRequestResult} from "tc-shared/voice/RecorderBase";
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
import {queryMediaPermissions, requestMediaStream, stopMediaStream} from "tc-shared/media/Stream";
import { tr } from "tc-shared/i18n/localize";
import {tr} from "tc-shared/i18n/localize";
declare global {
interface MediaDevices {
@ -225,7 +225,9 @@ export class WebVideoDriver implements VideoDriver {
try {
const source = await navigator.mediaDevices.getDisplayMedia({ audio: false, video: true });
const videoTrack = source.getVideoTracks()[0];
if(!videoTrack) { throw tr("missing video track"); }
if(!videoTrack) {
throw tr("missing video track");
}
logDebug(LogCategory.VIDEO, tr("Display media received with settings: %o"), videoTrack.getSettings());
return new WebVideoSource(videoTrack.getSettings().deviceId, tr("Screen"), source);
@ -248,10 +250,19 @@ export class WebVideoSource implements VideoSource {
private readonly stream: MediaStream;
private referenceCount = 1;
private initialSettings: VideoSourceInitialSettings;
constructor(deviceId: string, displayName: string, stream: MediaStream) {
this.deviceId = deviceId;
this.displayName = displayName;
this.stream = stream;
const settings = stream.getVideoTracks()[0].getSettings();
this.initialSettings = {
frameRate: settings.frameRate,
height: settings.height,
width: settings.width
};
}
destroy() {
@ -270,6 +281,26 @@ export class WebVideoSource implements VideoSource {
return this.stream;
}
getInitialSettings(): VideoSourceInitialSettings {
return this.initialSettings;
}
getCapabilities(): VideoSourceCapabilities {
const videoTrack = this.stream.getVideoTracks()[0];
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined;
return {
minWidth: capabilities?.width?.min || 1,
maxWidth: capabilities?.width?.max || this.initialSettings.width,
minHeight: capabilities?.height?.min || 1,
maxHeight: capabilities?.height?.max || this.initialSettings.height,
minFrameRate: capabilities?.frameRate?.min || 1,
maxFrameRate: capabilities?.frameRate?.max || this.initialSettings.frameRate
};
}
deref() {
this.referenceCount -= 1;

View File

@ -541,6 +541,20 @@ export class Settings extends StaticSettings {
valueType: "number",
};
/* Default upper bound for the broadcast video width (pixels). */
static readonly KEY_VIDEO_DEFAULT_MAX_WIDTH: ValuedSettingsKey<number> = {
key: 'video_default_max_width',
defaultValue: 1280,
/* Fixed typo: "crated" -> "created" */
description: "The default maximal width of the video being created.",
valueType: "number",
};

/* Default upper bound for the broadcast video height (pixels). */
static readonly KEY_VIDEO_DEFAULT_MAX_HEIGHT: ValuedSettingsKey<number> = {
key: 'video_default_max_height',
defaultValue: 720,
/* Fixed typo: "crated" -> "created" */
description: "The default maximal height of the video being created.",
valueType: "number",
};
static readonly FN_LOG_ENABLED: (category: string) => SettingsKey<boolean> = category => {
return {
key: "log." + category.toLowerCase() + ".enabled",

View File

@ -418,6 +418,16 @@ export function initializeControlBarController(events: Registry<ControlBarEvents
createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open();
}
});
events.on("action_manage_video", event => {
if(infoHandler.getCurrentHandler()) {
global_client_actions.fire("action_edit_video_broadcasting", {
connection: infoHandler.getCurrentHandler(),
broadcastType: event.broadcastType
});
} else {
createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open();
}
});
return infoHandler;
}

View File

@ -294,7 +294,7 @@ const VideoButton = (props: { type: VideoBroadcastType }) => {
<Button switched={false} colorTheme={"red"} autoSwitch={false} iconNormal={icon}
onToggle={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})}
tooltip={tooltip} key={"disable"}>
{/* <DropdownEntry icon={icon} text={dropdownTextManage} onClick={() => events.fire("action_manage_video", { broadcastType: props.type })} /> TODO! */}
<DropdownEntry icon={icon} text={dropdownTextManage} onClick={() => events.fire("action_manage_video", { broadcastType: props.type })} />
<DropdownEntry icon={icon} text={dropdownTextStop} onClick={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})} />
{props.type === "camera" ? <VideoDeviceList key={"list"} /> : null}
</Button>

View File

@ -3,56 +3,74 @@ import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definitions";
import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer";
import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource";
import {LogCategory, logError} from "tc-shared/log";
import {VideoBroadcastType} from "tc-shared/connection/VideoConnection";
import {LogCategory, logError, logWarn} from "tc-shared/log";
import {BroadcastConstraints, VideoBroadcastType} from "tc-shared/connection/VideoConnection";
import {Settings, settings} from "tc-shared/settings";
import {tr} from "tc-shared/i18n/localize";
type SourceConstraints = { width?: number, height?: number, frameRate?: number };
export type VideoSourceModalAction = {
mode: "select-quick",
defaultDevice?: string
} | {
mode: "select-default",
defaultDevice?: string
} | {
mode: "new"
} | {
mode: "edit",
source: VideoSource,
broadcastConstraints: BroadcastConstraints
};
/**
 * Prompt the user to select a video source and return it together with the chosen broadcast constraints.
 * @param type The video type which should be prompted
 * @param mode The modal mode: quick/default device selection, a fresh selection, or editing a running broadcast
 */
export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, selectMode: "quick" | "default" | "none", defaultDeviceId?: string) : Promise<VideoSource> {
export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, constraints: BroadcastConstraints | undefined }> {
const controller = new VideoSourceController(type);
let defaultSelectSource = selectMode === "default";
if(selectMode === "quick") {
let defaultSelectDevice: string | true;
if(mode.mode === "select-quick") {
/* We need the modal itself for the native client in order to present the window selector */
if(type === "camera" || __build.target === "web") {
/* Try to get the default device. If we succeeded directly return that */
if(await controller.selectSource(defaultDeviceId)) {
const source = controller.getCurrentSource()?.ref();
if(await controller.selectSource(mode.defaultDevice)) {
/* select succeeded */
const resultSource = controller.getCurrentSource()?.ref();
const resultConstraints = controller.getBroadcastConstraints();
controller.destroy();
return source;
return {
source: resultSource,
constraints: resultConstraints
};
} else {
/* Select failed. We'll open the modal and show the error. */
}
} else {
defaultSelectSource = true;
defaultSelectDevice = mode.defaultDevice || true;
}
} else if(mode.mode === "select-default") {
defaultSelectDevice = mode.defaultDevice || true;
} else if(mode.mode === "edit") {
await controller.useSettings(mode.source, mode.broadcastConstraints);
}
const modal = spawnReactModal(ModalVideoSource, controller.events, type);
const modal = spawnReactModal(ModalVideoSource, controller.events, type, mode.mode === "edit");
controller.events.on(["action_start", "action_cancel"], () => modal.destroy());
modal.show().then(() => {
if(defaultSelectSource) {
if(defaultSelectDevice) {
if(type === "screen" && getVideoDriver().screenQueryAvailable()) {
controller.events.fire_react("action_toggle_screen_capture_device_select", { shown: true });
} else {
controller.selectSource(defaultDeviceId);
controller.selectSource(defaultSelectDevice === true ? undefined : defaultSelectDevice);
}
}
});
let refSource: { source: VideoSource } = { source: undefined };
controller.events.on("action_start", () => {
refSource.source?.deref();
refSource.source = controller.getCurrentSource()?.ref();
});
await new Promise(resolve => {
if(defaultSelectSource && selectMode === "quick") {
if(mode.mode === "select-quick" && __build.target !== "web") {
/* We need the modal event for quick select */
const callbackRemove = controller.events.on("notify_video_preview", event => {
if(event.status.status === "error") {
callbackRemove();
@ -60,8 +78,6 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele
if(event.status.status === "preview") {
/* we've successfully selected something */
refSource.source = controller.getCurrentSource()?.ref();
modal.hide();
modal.destroy();
}
});
@ -70,8 +86,96 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele
modal.events.one(["destroy", "close"], resolve);
});
const resultSource = controller.getCurrentSource()?.ref();
const resultConstraints = controller.getBroadcastConstraints();
controller.destroy();
return refSource.source;
return {
source: resultSource,
constraints: resultConstraints
};
}
/* Sync the broadcast constraints with the dimensions/frame rate the source's video track actually delivers. */
function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: BroadcastConstraints) {
    const [ primaryTrack ] = source.getStream().getVideoTracks();
    const currentSettings = primaryTrack.getSettings();

    constraints.width = currentSettings.width;
    constraints.height = currentSettings.height;
    constraints.maxFrameRate = currentSettings.frameRate;
}
/**
 * Build sensible default broadcast constraints for the given source, best-effort apply them
 * to the track, and then read back what the track actually settled on.
 *
 * The source resolution is scaled down (aspect-ratio preserving) to the configured
 * default maximum width/height, further capped by the source's capabilities.
 */
async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise<BroadcastConstraints> {
    const videoTrack = source.getStream().getVideoTracks()[0];
    const trackSettings = videoTrack.getSettings();
    const capabilities = source.getCapabilities();

    const maxHeight = Math.min(settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_HEIGHT), capabilities.maxHeight);
    const maxWidth = Math.min(settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_WIDTH), capabilities.maxWidth);

    /* Uniform down-scale factor so both dimensions fit within the maxima. */
    let scale = 1;
    if(trackSettings.height > maxHeight) {
        scale = Math.min(scale, maxHeight / trackSettings.height);
    }
    if(trackSettings.width > maxWidth) {
        scale = Math.min(scale, maxWidth / trackSettings.width);
    }

    /* maxFrameRate is intentionally left unset here; it gets filled in from the track below. */
    const broadcastConstraints = {
        width: scale !== 1 ? Math.ceil(scale * trackSettings.width) : trackSettings.width,
        height: scale !== 1 ? Math.ceil(scale * trackSettings.height) : trackSettings.height,
        dynamicQuality: true,
        dynamicFrameRate: true,
        maxBandwidth: 10_000_000
    } as BroadcastConstraints;

    try {
        await applyBroadcastConstraints(source, broadcastConstraints);
    } catch (error) {
        /* Best effort only — fall through and use whatever the track currently provides. */
        logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast constraints: %o"), error);
    }

    updateBroadcastConstraintsFromSource(source, broadcastConstraints);
    return broadcastConstraints;
}
/* May throw an OverconstrainedError from applyConstraints(). */
async function applyBroadcastConstraints(source: VideoSource, constraints: BroadcastConstraints) {
    const videoTrack = source.getStream().getVideoTracks()[0];
    if(!videoTrack) {
        return;
    }

    /* Build a min/ideal/max range capped at the given value. */
    const dynamicRange = (max: number) => ({ min: 1, max: max, ideal: max });

    await videoTrack.applyConstraints({
        frameRate: constraints.dynamicFrameRate ? dynamicRange(constraints.maxFrameRate) : constraints.maxFrameRate,
        width: constraints.dynamicQuality ? dynamicRange(constraints.width) : constraints.width,
        height: constraints.dynamicQuality ? dynamicRange(constraints.height) : constraints.height
    });
}
class VideoSourceController {
@ -79,7 +183,7 @@ class VideoSourceController {
private readonly type: VideoBroadcastType;
private currentSource: VideoSource | string;
private currentConstraints: SourceConstraints;
private currentConstraints: BroadcastConstraints;
/* preselected current source id */
private currentSourceId: string;
@ -177,24 +281,13 @@ class VideoSourceController {
}));
}
const applyConstraints = async () => {
if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0];
if(!videoTrack) { return; }
await videoTrack.applyConstraints(this.currentConstraints);
}
};
this.events.on("action_setting_dimension", event => {
this.currentConstraints.height = event.height;
this.currentConstraints.width = event.width;
applyConstraints().then(undefined);
});
this.events.on("action_setting_framerate", event => {
this.currentConstraints.frameRate = event.frameRate;
applyConstraints().then(undefined);
this.currentConstraints.maxFrameRate = event.frameRate;
});
}
@ -208,12 +301,27 @@ class VideoSourceController {
this.events.destroy();
}
setCurrentSource(source: VideoSource | string | undefined) {
async setCurrentSource(source: VideoSource | string | undefined) {
if(typeof this.currentSource === "object") {
this.currentSource.deref();
}
this.currentConstraints = {};
if(typeof source === "object") {
if(this.currentConstraints) {
try {
/* TODO: Automatically scale down resolution if new one isn't capable of supplying our current resolution */
await applyBroadcastConstraints(source, this.currentConstraints);
} catch (error) {
logWarn(LogCategory.VIDEO, tr("Failed to apply broadcast constraints to new source: %o"), error);
this.currentConstraints = undefined;
}
}
if(!this.currentConstraints) {
this.currentConstraints = await generateAndApplyDefaultConstraints(source);
}
}
this.currentSource = source;
this.notifyVideoPreview();
this.notifyStartButton();
@ -222,6 +330,20 @@ class VideoSourceController {
this.notifySettingFramerate();
}
/* Initialize the controller from an already running broadcast (edit mode): adopt its source and constraints. */
async useSettings(source: VideoSource, constraints: BroadcastConstraints) {
/* Drop the reference to any previously selected source. */
if(typeof this.currentSource === "object") {
this.currentSource.deref();
}
this.currentSource = source.ref();
this.currentConstraints = constraints;
/* Push the adopted state to all UI views. */
this.notifyVideoPreview();
this.notifyStartButton();
this.notifyCurrentSource();
this.notifySettingDimension();
this.notifySettingFramerate();
}
async selectSource(sourceId: string) : Promise<boolean> {
const driver = getVideoDriver();
@ -244,17 +366,17 @@ class VideoSourceController {
try {
const stream = await streamPromise;
this.setCurrentSource(stream);
await this.setCurrentSource(stream);
this.fallbackCurrentSourceName = stream?.getName() || tr("No stream");
return !!stream;
} catch (error) {
this.fallbackCurrentSourceName = tr("failed to attach to device");
if(typeof error === "string") {
this.setCurrentSource(error);
await this.setCurrentSource(error);
} else {
logError(LogCategory.GENERAL, tr("Failed to open capture device %s: %o"), sourceId, error);
this.setCurrentSource(tr("Failed to open capture device (Lookup the console)"));
await this.setCurrentSource(tr("Failed to open capture device (Lookup the console)"));
}
return false;
@ -265,6 +387,10 @@ class VideoSourceController {
return typeof this.currentSource === "object" ? this.currentSource : undefined;
}
/* Constraints currently configured in the modal (may be undefined until a source has been selected). */
getBroadcastConstraints() : BroadcastConstraints {
return this.currentConstraints;
}
private notifyStartButton() {
this.events.fire_react("notify_start_button", { enabled: typeof this.currentSource === "object" })
}
@ -291,7 +417,7 @@ class VideoSourceController {
});
}
private notifyScreenCaptureDevices(){
private notifyScreenCaptureDevices() {
const driver = getVideoDriver();
driver.queryScreenCaptureDevices().then(devices => {
this.events.fire_react("notify_screen_capture_devices", { devices: { status: "success", devices: devices }});
@ -305,7 +431,7 @@ class VideoSourceController {
})
}
private notifyVideoPreview(){
private notifyVideoPreview() {
const driver = getVideoDriver();
switch (driver.getPermissionStatus()) {
case VideoPermissionStatus.SystemDenied:
@ -333,7 +459,7 @@ class VideoSourceController {
}
};
private notifyCurrentSource(){
private notifyCurrentSource() {
if(typeof this.currentSource === "object") {
this.events.fire_react("notify_source", {
state: {
@ -358,25 +484,25 @@ class VideoSourceController {
}
}
private notifySettingDimension(){
private notifySettingDimension() {
if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0];
const settings = videoTrack.getSettings();
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined;
const initialSettings = this.currentSource.getInitialSettings();
const capabilities = this.currentSource.getCapabilities();
const constraints = this.currentConstraints;
this.events.fire_react("notify_setting_dimension", {
setting: {
minWidth: capabilities?.width ? capabilities.width.min : 1,
maxWidth: capabilities?.width ? capabilities.width.max : settings.width,
minWidth: capabilities.minWidth,
maxWidth: capabilities.maxWidth,
minHeight: capabilities?.height ? capabilities.height.min : 1,
maxHeight: capabilities?.height ? capabilities.height.max : settings.height,
minHeight: capabilities.minHeight,
maxHeight: capabilities.maxHeight,
originalWidth: settings.width,
originalHeight: settings.height,
originalWidth: initialSettings.width,
originalHeight: initialSettings.height,
currentWidth: settings.width,
currentHeight: settings.height
currentWidth: constraints.width,
currentHeight: constraints.height
}
});
} else {
@ -386,16 +512,16 @@ class VideoSourceController {
notifySettingFramerate() {
if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0];
const settings = videoTrack.getSettings();
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined;
const initialSettings = this.currentSource.getInitialSettings();
const capabilities = this.currentSource.getCapabilities();
const round = (value: number) => Math.round(value * 100) / 100;
this.events.fire_react("notify_settings_framerate", {
frameRate: {
min: round(capabilities?.frameRate ? capabilities.frameRate.min : 1),
max: round(capabilities?.frameRate ? capabilities.frameRate.max : settings.frameRate),
original: round(settings.frameRate)
min: round(capabilities.minFrameRate),
max: round(capabilities.maxFrameRate),
original: round(initialSettings.frameRate),
current: round(this.currentConstraints.maxFrameRate)
}
});
} else {

View File

@ -48,6 +48,7 @@ export type SettingFrameRate = {
min: number,
max: number,
original: number,
current: number
};
export interface ModalVideoSourceEvents {

View File

@ -233,7 +233,7 @@ const VideoPreview = () => {
);
}
const ButtonStart = () => {
const ButtonStart = (props: { editMode: boolean }) => {
const events = useContext(ModalEvents);
const [ enabled, setEnabled ] = useState(() => {
events.fire("query_start_button");
@ -248,7 +248,7 @@ const ButtonStart = () => {
disabled={!enabled}
onClick={() => enabled && events.fire("action_start")}
>
<Translatable>Start</Translatable>
{props.editMode ? <Translatable key={"edit"}>Apply Changed</Translatable> : <Translatable key={"start"}>Start</Translatable>}
</Button>
);
}
@ -317,7 +317,7 @@ const SettingDimension = () => {
setHeight(event.setting.currentHeight);
refSliderWidth.current?.setState({ value: event.setting.currentWidth });
refSliderHeight.current?.setState({ value: event.setting.currentHeight });
setSelectValue("original");
setSelectValue("current");
} else {
setSettings(undefined);
setSelectValue("no-source");
@ -419,6 +419,7 @@ const SettingDimension = () => {
<option value={dimensionId} key={dimensionId}>{DimensionPresets[dimensionId].name + " (" + boundsString(dimensionId as any) + ")"}</option>
)}
<option value={"original"} key={"original"}>{tr("Default")} ({(settings ? settings.originalWidth + "x" + settings.originalHeight : "0x0")})</option>
<option value={"current"} key={"current"} style={{ display: "none" }}>{width + "x" + height}</option>
<option value={"custom"} key={"custom"} style={{ display: advanced ? undefined : "none" }}>{tr("Custom")}</option>
<option value={"no-source"} key={"no-source"} style={{ display: "none" }}>{tr("No source selected")}</option>
</Select>
@ -486,7 +487,7 @@ const SettingFramerate = () => {
setFrameRate(event.frameRate);
setCurrentRate(event.frameRate ? event.frameRate.original : 1);
if(event.frameRate) {
setSelectedValue(event.frameRate.original.toString());
setSelectedValue(event.frameRate.current.toString());
} else {
setSelectedValue("no-source");
}
@ -497,6 +498,9 @@ const SettingFramerate = () => {
if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.original) === -1) {
FrameRates[frameRate.original.toString()] = frameRate.original;
}
if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.current) === -1) {
FrameRates[frameRate.current.toString()] = frameRate.current;
}
}
return (
@ -758,12 +762,14 @@ const ScreenCaptureDeviceSelect = React.memo(() => {
export class ModalVideoSource extends InternalModal {
protected readonly events: Registry<ModalVideoSourceEvents>;
private readonly sourceType: VideoBroadcastType;
private readonly editMode: boolean;
constructor(events: Registry<ModalVideoSourceEvents>, type: VideoBroadcastType) {
constructor(events: Registry<ModalVideoSourceEvents>, type: VideoBroadcastType, editMode: boolean) {
super();
this.sourceType = type;
this.events = events;
this.editMode = editMode;
}
renderBody(): React.ReactElement {
@ -793,7 +799,7 @@ export class ModalVideoSource extends InternalModal {
<Button type={"small"} color={"red"} onClick={() => this.events.fire("action_cancel")}>
<Translatable>Cancel</Translatable>
</Button>
<ButtonStart />
<ButtonStart editMode={this.editMode} />
</div>
</div>
<ScreenCaptureDeviceSelect />

View File

@ -1,11 +1,30 @@
import {Registry} from "tc-shared/events";
import { tr } from "tc-shared/i18n/localize";
/**
 * Capability bounds reported by the capture device backing a video source.
 * Used by the source-selection UI to clamp the user-configurable
 * resolution and frame rate sliders.
 *
 * NOTE(review): units are presumably pixels for width/height and frames
 * per second for the frame rate — confirm against the driver that
 * populates these values (e.g. MediaStreamTrack.getCapabilities()).
 */
export interface VideoSourceCapabilities {
    // Smallest supported capture width
    minWidth: number,
    // Largest supported capture width
    maxWidth: number,

    // Smallest supported capture height
    minHeight: number,
    // Largest supported capture height
    maxHeight: number,

    // Lowest supported frame rate
    minFrameRate: number,
    // Highest supported frame rate
    maxFrameRate: number
}
/**
 * The track settings observed when the video source was first opened.
 * The source-selection UI presents these as the "Default"/original
 * resolution and frame rate options.
 */
export interface VideoSourceInitialSettings {
    // Initial capture width (presumably pixels — confirm with driver)
    width: number,
    // Initial capture height (presumably pixels — confirm with driver)
    height: number,
    // Initial capture frame rate (presumably fps — confirm with driver)
    frameRate: number
}
export interface VideoSource {
getId() : string;
getName() : string;
getStream() : MediaStream;
getCapabilities() : VideoSourceCapabilities;
getInitialSettings() : VideoSourceInitialSettings;
/** Add a new reference to this stream */
ref() : this;