Fixed some minor bugs and made broadcast settings re-editable

This commit is contained in:
WolverinDEV 2020-12-16 22:06:46 +01:00
parent 2b5bc27471
commit d18701b984
13 changed files with 489 additions and 133 deletions

View file

@ -1,6 +1,5 @@
import {VideoSource} from "tc-shared/video/VideoSource"; import {VideoSource} from "tc-shared/video/VideoSource";
import {Registry} from "tc-shared/events"; import {Registry} from "tc-shared/events";
import {ConnectionStatus} from "tc-shared/ui/frames/footer/StatusDefinitions";
import {ConnectionStatistics} from "tc-shared/connection/ConnectionBase"; import {ConnectionStatistics} from "tc-shared/connection/ConnectionBase";
export type VideoBroadcastType = "camera" | "screen"; export type VideoBroadcastType = "camera" | "screen";
@ -78,6 +77,39 @@ export type LocalVideoBroadcastState = {
state: "broadcasting" state: "broadcasting"
} }
export interface BroadcastConstraints {
/**
* Ideal and max video width
*/
width: number,
/**
* Ideal and max video height
*/
height: number,
/**
* Dynamically change the video quality related to bandwidth constraints.
*/
dynamicQuality: boolean,
/**
* Max bandwidth which should be used (in bits/second)
*/
maxBandwidth: number,
/**
* Maximal frame rate for the video.
* This might be ignored by some browsers.
*/
maxFrameRate: number,
/**
* Dynamically change the frame rate related to bandwidth constraints.
*/
dynamicFrameRate: boolean
}
export interface LocalVideoBroadcast { export interface LocalVideoBroadcast {
getEvents() : Registry<LocalVideoBroadcastEvents>; getEvents() : Registry<LocalVideoBroadcastEvents>;
@ -90,13 +122,18 @@ export interface LocalVideoBroadcast {
/** /**
* @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
* @param constraints
*/ */
startBroadcasting(source: VideoSource) : Promise<void>; startBroadcasting(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
/** /**
* @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!) * @param source The source of the broadcast (No ownership will be taken. The voice connection must ref the source by itself!)
* @param constraints
*/ */
changeSource(source: VideoSource) : Promise<void>; changeSource(source: VideoSource, constraints: BroadcastConstraints) : Promise<void>;
getConstraints() : BroadcastConstraints | undefined;
applyConstraints(constraints: BroadcastConstraints) : Promise<void>;
stopBroadcasting(); stopBroadcasting();
} }

View file

@ -222,7 +222,7 @@ class CommandHandler extends AbstractCommandHandler {
}).then(() => this.handle["peer"].createAnswer()) }).then(() => this.handle["peer"].createAnswer())
.then(async answer => { .then(async answer => {
if(RTCConnection.kEnableSdpTrace) { if(RTCConnection.kEnableSdpTrace) {
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", data.mode as string)); const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Original local SDP ({})", answer.type as string));
gr.collapsed(true); gr.collapsed(true);
gr.log("%s", answer.sdp); gr.log("%s", answer.sdp);
gr.end(); gr.end();
@ -235,7 +235,7 @@ class CommandHandler extends AbstractCommandHandler {
.then(answer => { .then(answer => {
answer.sdp = SdpCompressor.compressSdp(answer.sdp, kSdpCompressionMode); answer.sdp = SdpCompressor.compressSdp(answer.sdp, kSdpCompressionMode);
if(RTCConnection.kEnableSdpTrace) { if(RTCConnection.kEnableSdpTrace) {
const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", data.mode as string)); const gr = logGroupNative(LogType.TRACE, LogCategory.WEBRTC, tra("Patched local SDP ({})", answer.type as string));
gr.collapsed(true); gr.collapsed(true);
gr.log("%s", answer.sdp); gr.log("%s", answer.sdp);
gr.end(); gr.end();
@ -810,7 +810,8 @@ export class RTCConnection {
iceServers: [{ urls: ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302"] }] iceServers: [{ urls: ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302"] }]
}); });
const kAddGenericTransceiver = false; /* If set to false FF failed: FIXME! */
const kAddGenericTransceiver = true;
if(this.audioSupport) { if(this.audioSupport) {
this.currentTransceiver["audio"] = this.peer.addTransceiver("audio"); this.currentTransceiver["audio"] = this.peer.addTransceiver("audio");
@ -880,6 +881,9 @@ export class RTCConnection {
} }
await this.currentTransceiver[type].sender.replaceTrack(target); await this.currentTransceiver[type].sender.replaceTrack(target);
/* Firefox has some crazy issues */
if(window.detectedBrowser.name !== "firefox") {
if(target) { if(target) {
console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection); console.error("Setting sendrecv from %o", this.currentTransceiver[type].direction, this.currentTransceiver[type].currentDirection);
this.currentTransceiver[type].direction = "sendrecv"; this.currentTransceiver[type].direction = "sendrecv";
@ -894,6 +898,7 @@ export class RTCConnection {
*/ */
//this.currentTransceiver[type].direction = "recvonly"; //this.currentTransceiver[type].direction = "recvonly";
} }
}
logTrace(LogCategory.WEBRTC, "Replaced track for %o (Fallback: %o)", type, target === fallback); logTrace(LogCategory.WEBRTC, "Replaced track for %o (Fallback: %o)", type, target === fallback);
} }
} }
@ -1108,8 +1113,9 @@ export class RTCConnection {
logWarn(LogCategory.WEBRTC, tr("Received remote audio track %d but audio has been disabled. Dropping track."), ssrc); logWarn(LogCategory.WEBRTC, tr("Received remote audio track %d but audio has been disabled. Dropping track."), ssrc);
return; return;
} }
const track = new InternalRemoteRTPAudioTrack(ssrc, event.transceiver); const track = new InternalRemoteRTPAudioTrack(ssrc, event.transceiver);
logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %d"), ssrc); logDebug(LogCategory.WEBRTC, tr("Received remote audio track on ssrc %o"), ssrc);
if(tempInfo?.info !== undefined) { if(tempInfo?.info !== undefined) {
track.handleAssignment(tempInfo.info); track.handleAssignment(tempInfo.info);
this.events.fire("notify_audio_assignment_changed", { this.events.fire("notify_audio_assignment_changed", {
@ -1123,7 +1129,7 @@ export class RTCConnection {
this.remoteAudioTracks[ssrc] = track; this.remoteAudioTracks[ssrc] = track;
} else if(event.track.kind === "video") { } else if(event.track.kind === "video") {
const track = new InternalRemoteRTPVideoTrack(ssrc, event.transceiver); const track = new InternalRemoteRTPVideoTrack(ssrc, event.transceiver);
logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %d"), ssrc); logDebug(LogCategory.WEBRTC, tr("Received remote video track on ssrc %o"), ssrc);
if(tempInfo?.info !== undefined) { if(tempInfo?.info !== undefined) {
track.handleAssignment(tempInfo.info); track.handleAssignment(tempInfo.info);
this.events.fire("notify_video_assignment_changed", { this.events.fire("notify_video_assignment_changed", {

View file

@ -1,4 +1,5 @@
import { import {
BroadcastConstraints,
LocalVideoBroadcast, LocalVideoBroadcast,
LocalVideoBroadcastEvents, LocalVideoBroadcastEvents,
LocalVideoBroadcastState, LocalVideoBroadcastState,
@ -27,6 +28,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
private state: LocalVideoBroadcastState; private state: LocalVideoBroadcastState;
private currentSource: VideoSource; private currentSource: VideoSource;
private currentConstrints: BroadcastConstraints;
private broadcastStartId: number; private broadcastStartId: number;
private localStartPromise: Promise<void>; private localStartPromise: Promise<void>;
@ -70,22 +72,27 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
return Promise.resolve(undefined); return Promise.resolve(undefined);
} }
async changeSource(source: VideoSource): Promise<void> { async changeSource(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
let sourceRef = source.ref();
try {
if(this.currentSource !== source) {
console.error("Source changed");
const videoTracks = source.getStream().getVideoTracks(); const videoTracks = source.getStream().getVideoTracks();
if(videoTracks.length === 0) { if(videoTracks.length === 0) {
throw tr("missing video stream track"); throw tr("missing video stream track");
} }
let sourceRef = source.ref();
while(this.localStartPromise) { while(this.localStartPromise) {
await this.localStartPromise; await this.localStartPromise;
} }
if(this.state.state !== "broadcasting") { if(this.state.state !== "broadcasting") {
sourceRef.deref();
throw tr("not broadcasting anything"); throw tr("not broadcasting anything");
} }
/* Apply the constraints to the current source */
await this.doApplyConstraints(constraints, source);
const startId = ++this.broadcastStartId; const startId = ++this.broadcastStartId;
let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
try { try {
@ -96,29 +103,35 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
return; return;
} }
sourceRef.deref();
logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error); logError(LogCategory.WEBRTC, tr("Failed to change video track for broadcast %s: %o"), this.type, error);
throw tr("failed to change video track"); throw tr("failed to change video track");
} }
this.setCurrentSource(sourceRef); this.setCurrentSource(sourceRef);
} else if(!_.isEqual(this.currentConstrints, constraints)) {
console.error("Constraints changed");
await this.applyConstraints(constraints);
}
} finally {
sourceRef.deref(); sourceRef.deref();
} }
}
private setCurrentSource(source: VideoSource | undefined) { private setCurrentSource(source: VideoSource | undefined) {
if(this.currentSource) { if(this.currentSource) {
this.currentSource.deref(); this.currentSource.deref();
this.currentConstrints = undefined;
} }
this.currentSource = source?.ref(); this.currentSource = source?.ref();
} }
async startBroadcasting(source: VideoSource): Promise<void> { async startBroadcasting(source: VideoSource, constraints: BroadcastConstraints): Promise<void> {
const sourceRef = source.ref(); const sourceRef = source.ref();
while(this.localStartPromise) { while(this.localStartPromise) {
await this.localStartPromise; await this.localStartPromise;
} }
const promise = this.doStartBroadcast(source); const promise = this.doStartBroadcast(source, constraints);
this.localStartPromise = promise.catch(() => {}); this.localStartPromise = promise.catch(() => {});
this.localStartPromise.then(() => this.localStartPromise = undefined); this.localStartPromise.then(() => this.localStartPromise = undefined);
try { try {
@ -128,7 +141,7 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
} }
} }
private async doStartBroadcast(source: VideoSource) { private async doStartBroadcast(source: VideoSource, constraints: BroadcastConstraints) {
const videoTracks = source.getStream().getVideoTracks(); const videoTracks = source.getStream().getVideoTracks();
if(videoTracks.length === 0) { if(videoTracks.length === 0) {
throw tr("missing video stream track"); throw tr("missing video stream track");
@ -143,6 +156,23 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
return; return;
} }
try {
await this.applyConstraints(constraints);
} catch (error) {
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
}
logError(LogCategory.WEBRTC, tr("Failed to apply video constraints for broadcast %s: %o"), this.type, error);
this.stopBroadcasting(true, { state: "failed", reason: tr("Failed to apply video constraints") });
throw tr("Failed to apply video constraints");
}
if(this.broadcastStartId !== startId) {
/* broadcast start has been canceled */
return;
}
let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen"; let rtcBroadcastType: RTCBroadcastableTrackType = this.type === "camera" ? "video" : "video-screen";
try { try {
@ -183,6 +213,47 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
this.setState({ state: "broadcasting" }); this.setState({ state: "broadcasting" });
} }
async applyConstraints(constraints: BroadcastConstraints): Promise<void> {
await this.doApplyConstraints(constraints, this.currentSource);
}
private async doApplyConstraints(constraints: BroadcastConstraints, source: VideoSource): Promise<void> {
const capabilities = source.getCapabilities();
const videoConstraints: MediaTrackConstraints = {};
if(constraints.dynamicQuality && capabilities) {
videoConstraints.width = {
min: capabilities.minWidth,
max: constraints.width,
ideal: constraints.width
};
videoConstraints.height = {
min: capabilities.minHeight,
max: constraints.height,
ideal: constraints.height
};
} else {
videoConstraints.width = constraints.width;
videoConstraints.height = constraints.height;
}
if(constraints.dynamicFrameRate && capabilities) {
videoConstraints.frameRate = {
min: capabilities.minFrameRate,
max: constraints.maxFrameRate,
ideal: constraints.maxFrameRate
};
} else {
videoConstraints.frameRate = constraints.maxFrameRate;
}
await source.getStream().getVideoTracks()[0]?.applyConstraints(constraints);
this.currentConstrints = constraints;
/* TODO: Bandwidth update? */
}
stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) { stopBroadcasting(skipRtcStop?: boolean, stopState?: LocalVideoBroadcastState) {
if(this.state.state === "stopped" && (!stopState || _.isEqual(stopState, this.state))) { if(this.state.state === "stopped" && (!stopState || _.isEqual(stopState, this.state))) {
return; return;
@ -241,6 +312,10 @@ class LocalRtpVideoBroadcast implements LocalVideoBroadcast {
} }
})(); })();
} }
getConstraints(): BroadcastConstraints | undefined {
return this.currentConstrints;
}
} }
export class RtpVideoConnection implements VideoConnection { export class RtpVideoConnection implements VideoConnection {

View file

@ -78,8 +78,8 @@ export class RtpVideoClient implements VideoClient {
throw tr("failed to receive stream"); throw tr("failed to receive stream");
} }
}).catch(error => { }).catch(error => {
this.updateBroadcastState(broadcastType);
this.joinedStates[broadcastType] = false; this.joinedStates[broadcastType] = false;
this.updateBroadcastState(broadcastType);
logError(LogCategory.VIDEO, tr("Failed to join video broadcast: %o"), error); logError(LogCategory.VIDEO, tr("Failed to join video broadcast: %o"), error);
throw tr("failed to join broadcast"); throw tr("failed to join broadcast");
}); });

View file

@ -17,8 +17,7 @@ import {spawnModalCssVariableEditor} from "tc-shared/ui/modal/css-editor/Control
import {server_connections} from "tc-shared/ConnectionManager"; import {server_connections} from "tc-shared/ConnectionManager";
import {spawnAbout} from "tc-shared/ui/modal/ModalAbout"; import {spawnAbout} from "tc-shared/ui/modal/ModalAbout";
import {spawnVideoSourceSelectModal} from "tc-shared/ui/modal/video-source/Controller"; import {spawnVideoSourceSelectModal} from "tc-shared/ui/modal/video-source/Controller";
import {LogCategory, logError} from "tc-shared/log"; import {LogCategory, logError, logWarn} from "tc-shared/log";
import {getVideoDriver} from "tc-shared/video/VideoSource";
import {spawnEchoTestModal} from "tc-shared/ui/modal/echo-test/Controller"; import {spawnEchoTestModal} from "tc-shared/ui/modal/echo-test/Controller";
/* /*
@ -193,14 +192,15 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
return; return;
} }
spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? "quick" : "default", event.defaultDevice).then(async source => { spawnVideoSourceSelectModal(event.broadcastType, event.quickSelect ? { mode: "select-quick", defaultDevice: event.defaultDevice } : { mode: "select-default", defaultDevice: event.defaultDevice })
.then(async ({ source, constraints }) => {
if(!source) { return; } if(!source) { return; }
try { try {
const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType); const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType);
if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") { if(broadcast.getState().state === "initializing" || broadcast.getState().state === "broadcasting") {
console.error("Change source"); console.error("Change source");
broadcast.changeSource(source).catch(error => { broadcast.changeSource(source, constraints).catch(error => {
logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error); logError(LogCategory.VIDEO, tr("Failed to change broadcast source: %o"), event.broadcastType, error);
if(typeof error !== "string") { if(typeof error !== "string") {
error = tr("lookup the console for detail"); error = tr("lookup the console for detail");
@ -214,7 +214,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
}); });
} else { } else {
console.error("Start broadcast"); console.error("Start broadcast");
broadcast.startBroadcasting(source).catch(error => { broadcast.startBroadcasting(source, constraints).catch(error => {
logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error); logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error);
if(typeof error !== "string") { if(typeof error !== "string") {
error = tr("lookup the console for detail"); error = tr("lookup the console for detail");
@ -237,4 +237,35 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
broadcast.stopBroadcasting(); broadcast.stopBroadcasting();
} }
}); });
event_registry.on("action_edit_video_broadcasting", event => {
const connection = event.connection;
if(!connection.connected) {
createErrorModal(tr("You're not connected"), tr("You're not connected to any server!")).open();
return;
}
const broadcast = connection.getServerConnection().getVideoConnection().getLocalBroadcast(event.broadcastType);
if(!broadcast || (broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing")) {
createErrorModal(tr("You're not broadcasting"), tr("You're not broadcasting any video!")).open();
return;
}
spawnVideoSourceSelectModal(event.broadcastType, { mode: "edit", source: broadcast.getSource(), broadcastConstraints: Object.assign({}, broadcast.getConstraints()) })
.then(async ({ source, constraints }) => {
if (!source) {
return;
}
if(broadcast.getState().state !== "broadcasting" && broadcast.getState().state !== "initializing") {
createErrorModal(tr("Video broadcast has ended"), tr("The video broadcast has ended.\nUpdate failed.")).open();
return;
}
await broadcast.changeSource(source, constraints);
}).catch(error => {
logWarn(LogCategory.VIDEO, tr("Failed to edit video broadcast: %o"), error);
createErrorModal(tr("Broadcast update failed"), tr("We failed to update the current video broadcast settings.\nThe old settings will be used.")).open();
});
});
} }

View file

@ -4,13 +4,13 @@ import {
VideoDriver, VideoDriver,
VideoDriverEvents, VideoDriverEvents,
VideoPermissionStatus, VideoPermissionStatus,
VideoSource VideoSource, VideoSourceCapabilities, VideoSourceInitialSettings
} from "tc-shared/video/VideoSource"; } from "tc-shared/video/VideoSource";
import {Registry} from "tc-shared/events"; import {Registry} from "tc-shared/events";
import {MediaStreamRequestResult} from "tc-shared/voice/RecorderBase"; import {MediaStreamRequestResult} from "tc-shared/voice/RecorderBase";
import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log"; import {LogCategory, logDebug, logError, logWarn} from "tc-shared/log";
import {queryMediaPermissions, requestMediaStream, stopMediaStream} from "tc-shared/media/Stream"; import {queryMediaPermissions, requestMediaStream, stopMediaStream} from "tc-shared/media/Stream";
import { tr } from "tc-shared/i18n/localize"; import {tr} from "tc-shared/i18n/localize";
declare global { declare global {
interface MediaDevices { interface MediaDevices {
@ -225,7 +225,9 @@ export class WebVideoDriver implements VideoDriver {
try { try {
const source = await navigator.mediaDevices.getDisplayMedia({ audio: false, video: true }); const source = await navigator.mediaDevices.getDisplayMedia({ audio: false, video: true });
const videoTrack = source.getVideoTracks()[0]; const videoTrack = source.getVideoTracks()[0];
if(!videoTrack) { throw tr("missing video track"); } if(!videoTrack) {
throw tr("missing video track");
}
logDebug(LogCategory.VIDEO, tr("Display media received with settings: %o"), videoTrack.getSettings()); logDebug(LogCategory.VIDEO, tr("Display media received with settings: %o"), videoTrack.getSettings());
return new WebVideoSource(videoTrack.getSettings().deviceId, tr("Screen"), source); return new WebVideoSource(videoTrack.getSettings().deviceId, tr("Screen"), source);
@ -248,10 +250,19 @@ export class WebVideoSource implements VideoSource {
private readonly stream: MediaStream; private readonly stream: MediaStream;
private referenceCount = 1; private referenceCount = 1;
private initialSettings: VideoSourceInitialSettings;
constructor(deviceId: string, displayName: string, stream: MediaStream) { constructor(deviceId: string, displayName: string, stream: MediaStream) {
this.deviceId = deviceId; this.deviceId = deviceId;
this.displayName = displayName; this.displayName = displayName;
this.stream = stream; this.stream = stream;
const settings = stream.getVideoTracks()[0].getSettings();
this.initialSettings = {
frameRate: settings.frameRate,
height: settings.height,
width: settings.width
};
} }
destroy() { destroy() {
@ -270,6 +281,26 @@ export class WebVideoSource implements VideoSource {
return this.stream; return this.stream;
} }
getInitialSettings(): VideoSourceInitialSettings {
return this.initialSettings;
}
getCapabilities(): VideoSourceCapabilities {
const videoTrack = this.stream.getVideoTracks()[0];
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined;
return {
minWidth: capabilities?.width?.min || 1,
maxWidth: capabilities?.width?.max || this.initialSettings.width,
minHeight: capabilities?.height?.min || 1,
maxHeight: capabilities?.height?.max || this.initialSettings.height,
minFrameRate: capabilities?.frameRate?.min || 1,
maxFrameRate: capabilities?.frameRate?.max || this.initialSettings.frameRate
};
}
deref() { deref() {
this.referenceCount -= 1; this.referenceCount -= 1;

View file

@ -541,6 +541,20 @@ export class Settings extends StaticSettings {
valueType: "number", valueType: "number",
}; };
static readonly KEY_VIDEO_DEFAULT_MAX_WIDTH: ValuedSettingsKey<number> = {
key: 'video_default_max_width',
defaultValue: 1280,
description: "The default maximal width of the video being created.",
valueType: "number",
};
static readonly KEY_VIDEO_DEFAULT_MAX_HEIGHT: ValuedSettingsKey<number> = {
key: 'video_default_max_height',
defaultValue: 720,
description: "The default maximal height of the video being created.",
valueType: "number",
};
static readonly FN_LOG_ENABLED: (category: string) => SettingsKey<boolean> = category => { static readonly FN_LOG_ENABLED: (category: string) => SettingsKey<boolean> = category => {
return { return {
key: "log." + category.toLowerCase() + ".enabled", key: "log." + category.toLowerCase() + ".enabled",

View file

@ -418,6 +418,16 @@ export function initializeControlBarController(events: Registry<ControlBarEvents
createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open(); createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open();
} }
}); });
events.on("action_manage_video", event => {
if(infoHandler.getCurrentHandler()) {
global_client_actions.fire("action_edit_video_broadcasting", {
connection: infoHandler.getCurrentHandler(),
broadcastType: event.broadcastType
});
} else {
createErrorModal(tr("Missing connection handler"), tr("Cannot start video broadcasting with a missing connection handler")).open();
}
});
return infoHandler; return infoHandler;
} }

View file

@ -294,7 +294,7 @@ const VideoButton = (props: { type: VideoBroadcastType }) => {
<Button switched={false} colorTheme={"red"} autoSwitch={false} iconNormal={icon} <Button switched={false} colorTheme={"red"} autoSwitch={false} iconNormal={icon}
onToggle={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})} onToggle={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})}
tooltip={tooltip} key={"disable"}> tooltip={tooltip} key={"disable"}>
{/* <DropdownEntry icon={icon} text={dropdownTextManage} onClick={() => events.fire("action_manage_video", { broadcastType: props.type })} /> TODO! */} <DropdownEntry icon={icon} text={dropdownTextManage} onClick={() => events.fire("action_manage_video", { broadcastType: props.type })} />
<DropdownEntry icon={icon} text={dropdownTextStop} onClick={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})} /> <DropdownEntry icon={icon} text={dropdownTextStop} onClick={() => events.fire("action_toggle_video", {enable: false, broadcastType: props.type})} />
{props.type === "camera" ? <VideoDeviceList key={"list"} /> : null} {props.type === "camera" ? <VideoDeviceList key={"list"} /> : null}
</Button> </Button>

View file

@ -3,56 +3,74 @@ import {spawnReactModal} from "tc-shared/ui/react-elements/Modal";
import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definitions"; import {ModalVideoSourceEvents} from "tc-shared/ui/modal/video-source/Definitions";
import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer"; import {ModalVideoSource} from "tc-shared/ui/modal/video-source/Renderer";
import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource"; import {getVideoDriver, VideoPermissionStatus, VideoSource} from "tc-shared/video/VideoSource";
import {LogCategory, logError} from "tc-shared/log"; import {LogCategory, logError, logWarn} from "tc-shared/log";
import {VideoBroadcastType} from "tc-shared/connection/VideoConnection"; import {BroadcastConstraints, VideoBroadcastType} from "tc-shared/connection/VideoConnection";
import {Settings, settings} from "tc-shared/settings";
import {tr} from "tc-shared/i18n/localize";
type SourceConstraints = { width?: number, height?: number, frameRate?: number }; export type VideoSourceModalAction = {
mode: "select-quick",
defaultDevice?: string
} | {
mode: "select-default",
defaultDevice?: string
} | {
mode: "new"
} | {
mode: "edit",
source: VideoSource,
broadcastConstraints: BroadcastConstraints
};
/** /**
* @param type The video type which should be prompted * @param type The video type which should be prompted
* @param selectMode * @param mode
* @param defaultDeviceId
*/ */
export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, selectMode: "quick" | "default" | "none", defaultDeviceId?: string) : Promise<VideoSource> { export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, mode: VideoSourceModalAction) : Promise<{ source: VideoSource | undefined, constraints: BroadcastConstraints | undefined }> {
const controller = new VideoSourceController(type); const controller = new VideoSourceController(type);
let defaultSelectSource = selectMode === "default"; let defaultSelectDevice: string | true;
if(selectMode === "quick") { if(mode.mode === "select-quick") {
/* We need the modal itself for the native client in order to present the window selector */ /* We need the modal itself for the native client in order to present the window selector */
if(type === "camera" || __build.target === "web") { if(type === "camera" || __build.target === "web") {
/* Try to get the default device. If we succeeded directly return that */ /* Try to get the default device. If we succeeded directly return that */
if(await controller.selectSource(defaultDeviceId)) { if(await controller.selectSource(mode.defaultDevice)) {
const source = controller.getCurrentSource()?.ref(); /* select succeeded */
const resultSource = controller.getCurrentSource()?.ref();
const resultConstraints = controller.getBroadcastConstraints();
controller.destroy(); controller.destroy();
return {
return source; source: resultSource,
constraints: resultConstraints
};
} else {
/* Select failed. We'll open the modal and show the error. */
} }
} else { } else {
defaultSelectSource = true; defaultSelectDevice = mode.defaultDevice || true;
} }
} else if(mode.mode === "select-default") {
defaultSelectDevice = mode.defaultDevice || true;
} else if(mode.mode === "edit") {
await controller.useSettings(mode.source, mode.broadcastConstraints);
} }
const modal = spawnReactModal(ModalVideoSource, controller.events, type); const modal = spawnReactModal(ModalVideoSource, controller.events, type, mode.mode === "edit");
controller.events.on(["action_start", "action_cancel"], () => modal.destroy()); controller.events.on(["action_start", "action_cancel"], () => modal.destroy());
modal.show().then(() => { modal.show().then(() => {
if(defaultSelectSource) { if(defaultSelectDevice) {
if(type === "screen" && getVideoDriver().screenQueryAvailable()) { if(type === "screen" && getVideoDriver().screenQueryAvailable()) {
controller.events.fire_react("action_toggle_screen_capture_device_select", { shown: true }); controller.events.fire_react("action_toggle_screen_capture_device_select", { shown: true });
} else { } else {
controller.selectSource(defaultDeviceId); controller.selectSource(defaultSelectDevice === true ? undefined : defaultSelectDevice);
} }
} }
}); });
let refSource: { source: VideoSource } = { source: undefined };
controller.events.on("action_start", () => {
refSource.source?.deref();
refSource.source = controller.getCurrentSource()?.ref();
});
await new Promise(resolve => { await new Promise(resolve => {
if(defaultSelectSource && selectMode === "quick") { if(mode.mode === "select-quick" && __build.target !== "web") {
/* We need the modal event for quick select */
const callbackRemove = controller.events.on("notify_video_preview", event => { const callbackRemove = controller.events.on("notify_video_preview", event => {
if(event.status.status === "error") { if(event.status.status === "error") {
callbackRemove(); callbackRemove();
@ -60,8 +78,6 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele
if(event.status.status === "preview") { if(event.status.status === "preview") {
/* we've successfully selected something */ /* we've successfully selected something */
refSource.source = controller.getCurrentSource()?.ref();
modal.hide();
modal.destroy(); modal.destroy();
} }
}); });
@ -70,8 +86,96 @@ export async function spawnVideoSourceSelectModal(type: VideoBroadcastType, sele
modal.events.one(["destroy", "close"], resolve); modal.events.one(["destroy", "close"], resolve);
}); });
const resultSource = controller.getCurrentSource()?.ref();
const resultConstraints = controller.getBroadcastConstraints();
controller.destroy(); controller.destroy();
return refSource.source; return {
source: resultSource,
constraints: resultConstraints
};
}
function updateBroadcastConstraintsFromSource(source: VideoSource, constraints: BroadcastConstraints) {
const videoTrack = source.getStream().getVideoTracks()[0];
const trackSettings = videoTrack.getSettings();
constraints.width = trackSettings.width;
constraints.height = trackSettings.height;
constraints.maxFrameRate = trackSettings.frameRate;
}
async function generateAndApplyDefaultConstraints(source: VideoSource) : Promise<BroadcastConstraints> {
const videoTrack = source.getStream().getVideoTracks()[0];
let maxHeight = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_HEIGHT);
let maxWidth = settings.static_global(Settings.KEY_VIDEO_DEFAULT_MAX_WIDTH);
const trackSettings = videoTrack.getSettings();
const capabilities = source.getCapabilities();
maxHeight = Math.min(maxHeight, capabilities.maxHeight);
maxWidth = Math.min(maxWidth, capabilities.maxWidth);
const broadcastConstraints: BroadcastConstraints = {} as any;
{
let ratio = 1;
if(trackSettings.height > maxHeight) {
ratio = Math.min(maxHeight / trackSettings.height, ratio);
}
if(trackSettings.width > maxWidth) {
ratio = Math.min(maxWidth / trackSettings.width, ratio);
}
if(ratio !== 1) {
broadcastConstraints.width = Math.ceil(ratio * trackSettings.width);
broadcastConstraints.height = Math.ceil(ratio * trackSettings.height);
} else {
broadcastConstraints.width = trackSettings.width;
broadcastConstraints.height = trackSettings.height;
}
}
broadcastConstraints.dynamicQuality = true;
broadcastConstraints.dynamicFrameRate = true;
broadcastConstraints.maxBandwidth = 10_000_000;
try {
await applyBroadcastConstraints(source, broadcastConstraints);
} catch (error) {
logWarn(LogCategory.VIDEO, tr("Failed to apply initial default broadcast constraints: %o"), error);
}
updateBroadcastConstraintsFromSource(source, broadcastConstraints);
return broadcastConstraints;
}
/* May throws an overconstraint error */
async function applyBroadcastConstraints(source: VideoSource, constraints: BroadcastConstraints) {
const videoTrack = source.getStream().getVideoTracks()[0];
if(!videoTrack) { return; }
await videoTrack.applyConstraints({
frameRate: constraints.dynamicFrameRate ? {
min: 1,
max: constraints.maxFrameRate,
ideal: constraints.maxFrameRate
} : constraints.maxFrameRate,
width: constraints.dynamicQuality ? {
min: 1,
max: constraints.width,
ideal: constraints.width
} : constraints.width,
height: constraints.dynamicQuality ? {
min: 1,
max: constraints.height,
ideal: constraints.height
} : constraints.height
});
} }
class VideoSourceController { class VideoSourceController {
@ -79,7 +183,7 @@ class VideoSourceController {
private readonly type: VideoBroadcastType; private readonly type: VideoBroadcastType;
private currentSource: VideoSource | string; private currentSource: VideoSource | string;
private currentConstraints: SourceConstraints; private currentConstraints: BroadcastConstraints;
/* preselected current source id */ /* preselected current source id */
private currentSourceId: string; private currentSourceId: string;
@ -177,24 +281,13 @@ class VideoSourceController {
})); }));
} }
const applyConstraints = async () => {
if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0];
if(!videoTrack) { return; }
await videoTrack.applyConstraints(this.currentConstraints);
}
};
this.events.on("action_setting_dimension", event => { this.events.on("action_setting_dimension", event => {
this.currentConstraints.height = event.height; this.currentConstraints.height = event.height;
this.currentConstraints.width = event.width; this.currentConstraints.width = event.width;
applyConstraints().then(undefined);
}); });
this.events.on("action_setting_framerate", event => { this.events.on("action_setting_framerate", event => {
this.currentConstraints.frameRate = event.frameRate; this.currentConstraints.maxFrameRate = event.frameRate;
applyConstraints().then(undefined);
}); });
} }
@ -208,12 +301,27 @@ class VideoSourceController {
this.events.destroy(); this.events.destroy();
} }
setCurrentSource(source: VideoSource | string | undefined) { async setCurrentSource(source: VideoSource | string | undefined) {
if(typeof this.currentSource === "object") { if(typeof this.currentSource === "object") {
this.currentSource.deref(); this.currentSource.deref();
} }
this.currentConstraints = {}; if(typeof source === "object") {
if(this.currentConstraints) {
try {
/* TODO: Automatically scale down resolution if new one isn't capable of supplying our current resolution */
await applyBroadcastConstraints(source, this.currentConstraints);
} catch (error) {
logWarn(LogCategory.VIDEO, tr("Failed to apply broadcast constraints to new source: %o"), error);
this.currentConstraints = undefined;
}
}
if(!this.currentConstraints) {
this.currentConstraints = await generateAndApplyDefaultConstraints(source);
}
}
this.currentSource = source; this.currentSource = source;
this.notifyVideoPreview(); this.notifyVideoPreview();
this.notifyStartButton(); this.notifyStartButton();
@ -222,6 +330,20 @@ class VideoSourceController {
this.notifySettingFramerate(); this.notifySettingFramerate();
} }
async useSettings(source: VideoSource, constraints: BroadcastConstraints) {
if(typeof this.currentSource === "object") {
this.currentSource.deref();
}
this.currentSource = source.ref();
this.currentConstraints = constraints;
this.notifyVideoPreview();
this.notifyStartButton();
this.notifyCurrentSource();
this.notifySettingDimension();
this.notifySettingFramerate();
}
async selectSource(sourceId: string) : Promise<boolean> { async selectSource(sourceId: string) : Promise<boolean> {
const driver = getVideoDriver(); const driver = getVideoDriver();
@ -244,17 +366,17 @@ class VideoSourceController {
try { try {
const stream = await streamPromise; const stream = await streamPromise;
this.setCurrentSource(stream); await this.setCurrentSource(stream);
this.fallbackCurrentSourceName = stream?.getName() || tr("No stream"); this.fallbackCurrentSourceName = stream?.getName() || tr("No stream");
return !!stream; return !!stream;
} catch (error) { } catch (error) {
this.fallbackCurrentSourceName = tr("failed to attach to device"); this.fallbackCurrentSourceName = tr("failed to attach to device");
if(typeof error === "string") { if(typeof error === "string") {
this.setCurrentSource(error); await this.setCurrentSource(error);
} else { } else {
logError(LogCategory.GENERAL, tr("Failed to open capture device %s: %o"), sourceId, error); logError(LogCategory.GENERAL, tr("Failed to open capture device %s: %o"), sourceId, error);
this.setCurrentSource(tr("Failed to open capture device (Lookup the console)")); await this.setCurrentSource(tr("Failed to open capture device (Lookup the console)"));
} }
return false; return false;
@ -265,6 +387,10 @@ class VideoSourceController {
return typeof this.currentSource === "object" ? this.currentSource : undefined; return typeof this.currentSource === "object" ? this.currentSource : undefined;
} }
getBroadcastConstraints() : BroadcastConstraints {
return this.currentConstraints;
}
private notifyStartButton() { private notifyStartButton() {
this.events.fire_react("notify_start_button", { enabled: typeof this.currentSource === "object" }) this.events.fire_react("notify_start_button", { enabled: typeof this.currentSource === "object" })
} }
@ -291,7 +417,7 @@ class VideoSourceController {
}); });
} }
private notifyScreenCaptureDevices(){ private notifyScreenCaptureDevices() {
const driver = getVideoDriver(); const driver = getVideoDriver();
driver.queryScreenCaptureDevices().then(devices => { driver.queryScreenCaptureDevices().then(devices => {
this.events.fire_react("notify_screen_capture_devices", { devices: { status: "success", devices: devices }}); this.events.fire_react("notify_screen_capture_devices", { devices: { status: "success", devices: devices }});
@ -305,7 +431,7 @@ class VideoSourceController {
}) })
} }
private notifyVideoPreview(){ private notifyVideoPreview() {
const driver = getVideoDriver(); const driver = getVideoDriver();
switch (driver.getPermissionStatus()) { switch (driver.getPermissionStatus()) {
case VideoPermissionStatus.SystemDenied: case VideoPermissionStatus.SystemDenied:
@ -333,7 +459,7 @@ class VideoSourceController {
} }
}; };
private notifyCurrentSource(){ private notifyCurrentSource() {
if(typeof this.currentSource === "object") { if(typeof this.currentSource === "object") {
this.events.fire_react("notify_source", { this.events.fire_react("notify_source", {
state: { state: {
@ -358,25 +484,25 @@ class VideoSourceController {
} }
} }
private notifySettingDimension(){ private notifySettingDimension() {
if(typeof this.currentSource === "object") { if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0]; const initialSettings = this.currentSource.getInitialSettings();
const settings = videoTrack.getSettings(); const capabilities = this.currentSource.getCapabilities();
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined; const constraints = this.currentConstraints;
this.events.fire_react("notify_setting_dimension", { this.events.fire_react("notify_setting_dimension", {
setting: { setting: {
minWidth: capabilities?.width ? capabilities.width.min : 1, minWidth: capabilities.minWidth,
maxWidth: capabilities?.width ? capabilities.width.max : settings.width, maxWidth: capabilities.maxWidth,
minHeight: capabilities?.height ? capabilities.height.min : 1, minHeight: capabilities.minHeight,
maxHeight: capabilities?.height ? capabilities.height.max : settings.height, maxHeight: capabilities.maxHeight,
originalWidth: settings.width, originalWidth: initialSettings.width,
originalHeight: settings.height, originalHeight: initialSettings.height,
currentWidth: settings.width, currentWidth: constraints.width,
currentHeight: settings.height currentHeight: constraints.height
} }
}); });
} else { } else {
@ -386,16 +512,16 @@ class VideoSourceController {
notifySettingFramerate() { notifySettingFramerate() {
if(typeof this.currentSource === "object") { if(typeof this.currentSource === "object") {
const videoTrack = this.currentSource.getStream().getVideoTracks()[0]; const initialSettings = this.currentSource.getInitialSettings();
const settings = videoTrack.getSettings(); const capabilities = this.currentSource.getCapabilities();
const capabilities = "getCapabilities" in videoTrack ? videoTrack.getCapabilities() : undefined;
const round = (value: number) => Math.round(value * 100) / 100; const round = (value: number) => Math.round(value * 100) / 100;
this.events.fire_react("notify_settings_framerate", { this.events.fire_react("notify_settings_framerate", {
frameRate: { frameRate: {
min: round(capabilities?.frameRate ? capabilities.frameRate.min : 1), min: round(capabilities.minFrameRate),
max: round(capabilities?.frameRate ? capabilities.frameRate.max : settings.frameRate), max: round(capabilities.maxFrameRate),
original: round(settings.frameRate) original: round(initialSettings.frameRate),
current: round(this.currentConstraints.maxFrameRate)
} }
}); });
} else { } else {

View file

@ -48,6 +48,7 @@ export type SettingFrameRate = {
min: number, min: number,
max: number, max: number,
original: number, original: number,
current: number
}; };
export interface ModalVideoSourceEvents { export interface ModalVideoSourceEvents {

View file

@ -233,7 +233,7 @@ const VideoPreview = () => {
); );
} }
const ButtonStart = () => { const ButtonStart = (props: { editMode: boolean }) => {
const events = useContext(ModalEvents); const events = useContext(ModalEvents);
const [ enabled, setEnabled ] = useState(() => { const [ enabled, setEnabled ] = useState(() => {
events.fire("query_start_button"); events.fire("query_start_button");
@ -248,7 +248,7 @@ const ButtonStart = () => {
disabled={!enabled} disabled={!enabled}
onClick={() => enabled && events.fire("action_start")} onClick={() => enabled && events.fire("action_start")}
> >
<Translatable>Start</Translatable> {props.editMode ? <Translatable key={"edit"}>Apply Changed</Translatable> : <Translatable key={"start"}>Start</Translatable>}
</Button> </Button>
); );
} }
@ -317,7 +317,7 @@ const SettingDimension = () => {
setHeight(event.setting.currentHeight); setHeight(event.setting.currentHeight);
refSliderWidth.current?.setState({ value: event.setting.currentWidth }); refSliderWidth.current?.setState({ value: event.setting.currentWidth });
refSliderHeight.current?.setState({ value: event.setting.currentHeight }); refSliderHeight.current?.setState({ value: event.setting.currentHeight });
setSelectValue("original"); setSelectValue("current");
} else { } else {
setSettings(undefined); setSettings(undefined);
setSelectValue("no-source"); setSelectValue("no-source");
@ -419,6 +419,7 @@ const SettingDimension = () => {
<option value={dimensionId} key={dimensionId}>{DimensionPresets[dimensionId].name + " (" + boundsString(dimensionId as any) + ")"}</option> <option value={dimensionId} key={dimensionId}>{DimensionPresets[dimensionId].name + " (" + boundsString(dimensionId as any) + ")"}</option>
)} )}
<option value={"original"} key={"original"}>{tr("Default")} ({(settings ? settings.originalWidth + "x" + settings.originalHeight : "0x0")})</option> <option value={"original"} key={"original"}>{tr("Default")} ({(settings ? settings.originalWidth + "x" + settings.originalHeight : "0x0")})</option>
<option value={"current"} key={"current"} style={{ display: "none" }}>{width + "x" + height}</option>
<option value={"custom"} key={"custom"} style={{ display: advanced ? undefined : "none" }}>{tr("Custom")}</option> <option value={"custom"} key={"custom"} style={{ display: advanced ? undefined : "none" }}>{tr("Custom")}</option>
<option value={"no-source"} key={"no-source"} style={{ display: "none" }}>{tr("No source selected")}</option> <option value={"no-source"} key={"no-source"} style={{ display: "none" }}>{tr("No source selected")}</option>
</Select> </Select>
@ -486,7 +487,7 @@ const SettingFramerate = () => {
setFrameRate(event.frameRate); setFrameRate(event.frameRate);
setCurrentRate(event.frameRate ? event.frameRate.original : 1); setCurrentRate(event.frameRate ? event.frameRate.original : 1);
if(event.frameRate) { if(event.frameRate) {
setSelectedValue(event.frameRate.original.toString()); setSelectedValue(event.frameRate.current.toString());
} else { } else {
setSelectedValue("no-source"); setSelectedValue("no-source");
} }
@ -497,6 +498,9 @@ const SettingFramerate = () => {
if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.original) === -1) { if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.original) === -1) {
FrameRates[frameRate.original.toString()] = frameRate.original; FrameRates[frameRate.original.toString()] = frameRate.original;
} }
if(Object.keys(FrameRates).findIndex(key => FrameRates[key] === frameRate.current) === -1) {
FrameRates[frameRate.current.toString()] = frameRate.current;
}
} }
return ( return (
@ -758,12 +762,14 @@ const ScreenCaptureDeviceSelect = React.memo(() => {
export class ModalVideoSource extends InternalModal { export class ModalVideoSource extends InternalModal {
protected readonly events: Registry<ModalVideoSourceEvents>; protected readonly events: Registry<ModalVideoSourceEvents>;
private readonly sourceType: VideoBroadcastType; private readonly sourceType: VideoBroadcastType;
private readonly editMode: boolean;
constructor(events: Registry<ModalVideoSourceEvents>, type: VideoBroadcastType) { constructor(events: Registry<ModalVideoSourceEvents>, type: VideoBroadcastType, editMode: boolean) {
super(); super();
this.sourceType = type; this.sourceType = type;
this.events = events; this.events = events;
this.editMode = editMode;
} }
renderBody(): React.ReactElement { renderBody(): React.ReactElement {
@ -793,7 +799,7 @@ export class ModalVideoSource extends InternalModal {
<Button type={"small"} color={"red"} onClick={() => this.events.fire("action_cancel")}> <Button type={"small"} color={"red"} onClick={() => this.events.fire("action_cancel")}>
<Translatable>Cancel</Translatable> <Translatable>Cancel</Translatable>
</Button> </Button>
<ButtonStart /> <ButtonStart editMode={this.editMode} />
</div> </div>
</div> </div>
<ScreenCaptureDeviceSelect /> <ScreenCaptureDeviceSelect />

View file

@ -1,11 +1,30 @@
import {Registry} from "tc-shared/events"; import {Registry} from "tc-shared/events";
import { tr } from "tc-shared/i18n/localize"; import { tr } from "tc-shared/i18n/localize";
export interface VideoSourceCapabilities {
minWidth: number,
maxWidth: number,
minHeight: number,
maxHeight: number,
minFrameRate: number,
maxFrameRate: number
}
export interface VideoSourceInitialSettings {
width: number,
height: number,
frameRate: number
}
export interface VideoSource { export interface VideoSource {
getId() : string; getId() : string;
getName() : string; getName() : string;
getStream() : MediaStream; getStream() : MediaStream;
getCapabilities() : VideoSourceCapabilities;
getInitialSettings() : VideoSourceInitialSettings;
/** Add a new reference to this stream */ /** Add a new reference to this stream */
ref() : this; ref() : this;