Added the option to mute/unmute remote videos along with some minor bug fixes

canary
WolverinDEV 2020-11-22 19:08:19 +01:00
parent 173ba5b696
commit 02a939da15
9 changed files with 268 additions and 61 deletions
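
At its core the new mute option is a round trip through the channel video event registry: the renderer fires "action_toggle_mute", the controller flips a per-broadcast-type flag, refreshes the rendered video and answers with "notify_video_mute_status"; "query_video_mute_status" lets a freshly mounted video tile ask for the current state. Below is a minimal sketch of the controller half, with plain callbacks standing in for the real Registry and RemoteClientVideoController wiring (assumed simplifications, not the actual classes).

```typescript
type VideoBroadcastType = "camera" | "screen";
type VideoMuteStatus = "muted" | "available" | "unset";

/* Simplified stand-in for the controller-side mute handling added in this commit. */
class MuteStateSketch {
    private mutedState: { [T in VideoBroadcastType]: boolean } = { camera: false, screen: false };

    constructor(
        private hasStream: (type: VideoBroadcastType) => boolean,                        /* getBroadcastStream(type) in the real code */
        private notify: (status: { [T in VideoBroadcastType]: VideoMuteStatus }) => void /* fire_react("notify_video_mute_status", ...) */
    ) {}

    /* Handler for "action_toggle_mute". */
    toggleMuteState(type: VideoBroadcastType, state: boolean) {
        if(this.mutedState[type] === state) { return; }
        this.mutedState[type] = state;
        this.notifyMuteState();
    }

    /* Handler for "query_video_mute_status"; also called after every toggle. */
    notifyMuteState() {
        this.notify({
            camera: this.hasStream("camera") ? (this.mutedState.camera ? "muted" : "available") : "unset",
            screen: this.hasStream("screen") ? (this.mutedState.screen ? "muted" : "available") : "unset"
        });
    }
}
```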

View File

@@ -183,7 +183,7 @@ export function initialize(event_registry: Registry<ClientGlobalControlEvents>)
if(!source) { return; }
try {
event.connection.getServerConnection().getVideoConnection().startBroadcasting("camera", source)
event.connection.getServerConnection().getVideoConnection().startBroadcasting(event.broadcastType, source)
.catch(error => {
logError(LogCategory.VIDEO, tr("Failed to start %s broadcasting: %o"), event.broadcastType, error);
if(typeof error !== "string") {

View File

@@ -13,8 +13,11 @@ const cssStyle = require("./Renderer.scss");
let videoIdIndex = 0;
interface ClientVideoController {
destroy();
toggleMuteState(type: VideoBroadcastType, state: boolean);
notifyVideoInfo();
notifyVideo();
notifyMuteState();
}
class RemoteClientVideoController implements ClientVideoController {
@@ -26,6 +29,11 @@ class RemoteClientVideoController implements ClientVideoController {
protected eventListener: (() => void)[];
protected eventListenerVideoClient: (() => void)[];
protected mutedState: {[T in VideoBroadcastType]: boolean} = {
screen: false,
camera: false
};
private currentBroadcastState: boolean;
constructor(client: ClientEntry, eventRegistry: Registry<ChannelVideoEvents>, videoId?: string) {
@@ -56,7 +64,10 @@ class RemoteClientVideoController implements ClientVideoController {
const videoClient = this.client.getVideoClient();
if(videoClient) {
events.push(videoClient.getEvents().on("notify_broadcast_state_changed", () => this.notifyVideo()));
events.push(videoClient.getEvents().on("notify_broadcast_state_changed", () => {
this.notifyVideo();
this.notifyMuteState();
}));
}
}
@@ -73,6 +84,14 @@ class RemoteClientVideoController implements ClientVideoController {
return videoClient && (videoClient.getVideoState("camera") !== VideoBroadcastState.Stopped || videoClient.getVideoState("screen") !== VideoBroadcastState.Stopped);
}
toggleMuteState(type: VideoBroadcastType, state: boolean) {
if(this.mutedState[type] === state) { return; }
this.mutedState[type] = state;
this.notifyVideo();
this.notifyMuteState();
}
notifyVideoInfo() {
this.events.fire_react("notify_video_info", {
videoId: this.videoId,
@@ -88,31 +107,39 @@ class RemoteClientVideoController implements ClientVideoController {
notifyVideo() {
let broadcasting = false;
if(this.isVideoActive()) {
let streams = [];
let initializing = false;
let cameraStream, desktopStream;
const stateCamera = this.getBroadcastState("camera");
if(stateCamera === VideoBroadcastState.Running) {
streams.push(this.getBroadcastStream("camera"));
cameraStream = this.getBroadcastStream("camera");
if(cameraStream && this.mutedState["camera"]) {
cameraStream = "muted";
}
} else if(stateCamera === VideoBroadcastState.Initializing) {
initializing = true;
}
const stateScreen = this.getBroadcastState("screen");
if(stateScreen === VideoBroadcastState.Running) {
streams.push(this.getBroadcastStream("screen"));
desktopStream = this.getBroadcastStream("screen");
if(desktopStream && this.mutedState["screen"]) {
desktopStream = "muted";
}
} else if(stateScreen === VideoBroadcastState.Initializing) {
initializing = true;
}
if(streams.length > 0) {
if(cameraStream || desktopStream) {
broadcasting = true;
this.events.fire_react("notify_video", {
videoId: this.videoId,
status: {
status: "connected",
desktopStream: streams[1],
cameraStream: streams[0]
desktopStream: desktopStream,
cameraStream: cameraStream,
}
});
} else if(initializing) {
@@ -126,6 +153,7 @@ class RemoteClientVideoController implements ClientVideoController {
videoId: this.videoId,
status: {
status: "connected",
cameraStream: undefined,
desktopStream: undefined
}
@@ -146,6 +174,16 @@ class RemoteClientVideoController implements ClientVideoController {
}
}
notifyMuteState() {
this.events.fire_react("notify_video_mute_status", {
videoId: this.videoId,
status: {
camera: this.getBroadcastStream("camera") ? this.mutedState["camera"] ? "muted" : "available" : "unset",
screen: this.getBroadcastStream("screen") ? this.mutedState["screen"] ? "muted" : "available" : "unset",
}
});
}
protected isVideoActive() : boolean {
return typeof this.client.getVideoClient() !== "undefined";
}
@@ -256,6 +294,16 @@ class ChannelVideoController {
}
});
this.events.on("action_toggle_mute", event => {
const controller = this.findVideoById(event.videoId);
if(!controller) {
logWarn(LogCategory.VIDEO, tr("Tried to toggle video mute state for a non existing video id (%s)."), event.videoId);
return;
}
controller.toggleMuteState(event.broadcastType, event.muted);
});
this.events.on("query_expended", () => this.events.fire_react("notify_expended", { expended: this.expended }));
this.events.on("query_videos", () => this.notifyVideoList());
this.events.on("query_spotlight", () => this.notifySpotlight());
@@ -280,6 +328,16 @@ class ChannelVideoController {
controller.notifyVideo();
});
this.events.on("query_video_mute_status", event => {
const controller = this.findVideoById(event.videoId);
if(!controller) {
logWarn(LogCategory.VIDEO, tr("Tried to query mute state for a non existing video id (%s)."), event.videoId);
return;
}
controller.notifyMuteState();
});
const channelTree = this.connection.channelTree;
events.push(channelTree.events.on("notify_tree_reset", () => {
this.resetClientVideos();

View File

@@ -9,8 +9,9 @@ export type ChannelVideo ={
status: "initializing",
} | {
status: "connected",
cameraStream: MediaStream | undefined,
desktopStream: MediaStream | undefined,
cameraStream: "muted" | MediaStream | undefined,
desktopStream: "muted" | MediaStream | undefined
} | {
status: "error",
message: string
@@ -46,10 +47,19 @@ export type VideoStatistics = {
codec: { name: string, payloadType: number }
};
/**
* "muted": The video has been muted locally
* "unset": The video will be normally played
* "empty": No video available
*/
export type LocalVideoState = "muted" | "unset" | "empty";
export interface ChannelVideoEvents {
action_toggle_expended: { expended: boolean },
action_video_scroll: { direction: "left" | "right" },
action_set_spotlight: { videoId: string | undefined, expend: boolean },
action_set_fullscreen: { videoId: string | undefined },
action_toggle_mute: { videoId: string, broadcastType: VideoBroadcastType, muted: boolean },
query_expended: {},
query_videos: {},
@@ -57,6 +67,7 @@ export interface ChannelVideoEvents {
query_video_info: { videoId: string },
query_video_statistics: { videoId: string, broadcastType: VideoBroadcastType },
query_spotlight: {},
query_video_mute_status: { videoId: string }
notify_expended: { expended: boolean },
notify_videos: {
@@ -85,5 +96,9 @@ export interface ChannelVideoEvents {
videoId: string | undefined,
broadcastType: VideoBroadcastType,
statistics: VideoStatistics
},
notify_video_mute_status: {
videoId: string,
status: {[T in VideoBroadcastType] : "muted" | "available" | "unset"}
}
}
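
Since cameraStream and desktopStream can now carry the literal "muted" in addition to a MediaStream, consumers of ChannelVideo have to narrow that union before attaching anything to a video element. Below is a small, self-contained sketch of the narrowing; the pickStreams helper is illustrative only and not part of the codebase.

```typescript
/* Connected variant of ChannelVideo as declared above. */
type ConnectedVideo = {
    status: "connected",
    cameraStream: "muted" | MediaStream | undefined,
    desktopStream: "muted" | MediaStream | undefined
};

/* Illustrative helper: strip the "muted" sentinel so playback code only sees MediaStream | undefined. */
function pickStreams(video: ConnectedVideo) {
    const camera = video.cameraStream === "muted" ? undefined : video.cameraStream;
    const desktop = video.desktopStream === "muted" ? undefined : video.desktopStream;
    return {
        camera,
        desktop,
        /* true when a broadcast exists but has been muted locally ("Video muted" placeholder in the renderer) */
        muted: video.cameraStream === "muted" || video.desktopStream === "muted"
    };
}
```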

View File

@@ -163,11 +163,6 @@ $small_height: 10em;
&:hover {
background-color: #3c3d3e;
}
.icon {
align-self: center;
font-size: 2em;
}
}
&.right {
@@ -195,7 +190,7 @@ $small_height: 10em;
flex-shrink: 1;
flex-grow: 1;
.videoContainer .requestFullscreen {
.videoContainer .actionIcons {
opacity: .5;
}
}
@@ -225,16 +220,23 @@ $small_height: 10em;
.video {
opacity: 1;
height: 100%;
width: 100%;
align-self: center;
}
.videoPrimary {
height: 100%;
width: 100%;
}
.videoSecondary {
.videoSecondary {
position: absolute;
top: 0;
right: 0;
max-width: 50%;
max-height: 50%;
border-bottom-left-radius: .2em;
}
.text {
@@ -289,7 +291,7 @@ $small_height: 10em;
}
}
.requestFullscreen {
.actionIcons {
position: absolute;
bottom: 0;
@@ -301,33 +303,52 @@ $small_height: 10em;
border-top-left-radius: .2em;
background-color: #353535;
padding: .25em;
padding: .2em .3em;
opacity: 0;
@include transition(all $button_hover_animation_time ease-in-out);
&.hidden {
display: none;
}
.iconContainer {
align-self: center;
display: flex;
padding: .2em;
margin-top: -1px;
margin-bottom: calc(-.1em - 1px);
cursor: pointer;
border-radius: .1em;
border: 1px solid transparent;
@include transition(all $button_hover_animation_time ease-in-out);
&:hover {
background-color: #ffffff1e;
}
&:not(:first-of-type) {
margin-left: .2em;
}
&.toggle {
&.disabled {
background-color: var(--menu-bar-button-background-activated-red);
border-color: var(--menu-bar-button-border-activated-red);
}
}
&.hidden {
display: none;
}
}
.icon {
flex-shrink: 0;
align-self: center;
}
}
&:hover {
.requestFullscreen {
.actionIcons {
opacity: 1;
}
}

View File

@@ -9,6 +9,8 @@ import {LoadingDots} from "tc-shared/ui/react-elements/LoadingDots";
import {ClientTag} from "tc-shared/ui/tree/EntryTags";
import ResizeObserver from "resize-observer-polyfill";
import {LogCategory, logWarn} from "tc-shared/log";
import {spawnContextMenu} from "tc-shared/ui/ContextMenu";
import {VideoBroadcastType} from "tc-shared/connection/VideoConnection";
const EventContext = React.createContext<Registry<ChannelVideoEvents>>(undefined);
const HandlerIdContext = React.createContext<string>(undefined);
@@ -74,7 +76,7 @@ const VideoInfo = React.memo((props: { videoId: string }) => {
);
});
const VideoStreamReplay = React.memo((props: { stream: MediaStream | undefined, className: string }) => {
const VideoStreamReplay = React.memo((props: { stream: MediaStream | undefined, className: string, title: string }) => {
const refVideo = useRef<HTMLVideoElement>();
useEffect(() => {
@@ -83,14 +85,15 @@ const VideoStreamReplay = React.memo((props: { stream: MediaStream | undefined,
video.style.opacity = "1";
video.srcObject = props.stream;
video.autoplay = true;
video.play().then(undefined);
video.muted = true;
video.play().then(undefined).catch(undefined);
} else {
video.style.opacity = "0";
}
}, [ props.stream ]);
return (
<video ref={refVideo} className={cssStyle.video + " " + props.className} />
<video ref={refVideo} className={cssStyle.video + " " + props.className} title={props.title} />
)
});
@@ -125,16 +128,27 @@ const VideoPlayer = React.memo((props: { videoId: string }) => {
</div>
);
} else if(state.status === "connected") {
if(state.desktopStream && state.cameraStream) {
/* TODO: Select primary and secondary and display them */
const desktopStream = state.desktopStream === "muted" ? undefined : state.desktopStream;
const cameraStream = state.cameraStream === "muted" ? undefined : state.cameraStream;
if(desktopStream && cameraStream) {
return (
<VideoStreamReplay stream={state.desktopStream} key={"replay-multi"} className={cssStyle.videoPrimary} />
<React.Fragment key={"replay-multi"}>
<VideoStreamReplay stream={desktopStream} className={cssStyle.videoPrimary} title={tr("Screen")} />
<VideoStreamReplay stream={cameraStream} className={cssStyle.videoSecondary} title={tr("Camera")} />
</React.Fragment>
);
} else {
const stream = state.desktopStream || state.cameraStream;
const stream = desktopStream || cameraStream;
if(stream) {
return (
<VideoStreamReplay stream={stream} key={"replay-single"} className={cssStyle.videoPrimary} />
<VideoStreamReplay stream={stream} key={"replay-single"} className={cssStyle.videoPrimary} title={desktopStream ? tr("Screen") : tr("Camera")} />
);
} else if(state.desktopStream || state.cameraStream) {
return (
<div className={cssStyle.text} key={"video-muted"}>
<div><Translatable>Video muted</Translatable></div>
</div>
);
} else {
return (
@@ -157,11 +171,21 @@ const VideoPlayer = React.memo((props: { videoId: string }) => {
const VideoContainer = React.memo((props: { videoId: string, isSpotlight: boolean }) => {
const events = useContext(EventContext);
const refContainer = useRef<HTMLDivElement>();
const [ isFullscreen, setFullscreen ] = useState(false);
const fullscreenCapable = "requestFullscreen" in HTMLElement.prototype;
const [ isFullscreen, setFullscreen ] = useState(false);
const [ muteState, setMuteState ] = useState<{[T in VideoBroadcastType]: "muted" | "available" | "unset"}>(() => {
events.fire("query_video_mute_status", { videoId: props.videoId });
return { camera: "unset", screen: "unset" };
});
events.reactUse("notify_video_mute_status", event => {
if(event.videoId === props.videoId) {
setMuteState(event.status);
}
});
useEffect(() => {
if(!isFullscreen) { return; }
@@ -180,36 +204,97 @@ const VideoContainer = React.memo((props: { videoId: string, isSpotlight: boolea
return () => document.removeEventListener("fullscreenchange", listener);
}, [ isFullscreen ]);
events.reactUse("action_set_fullscreen", event => {
if(event.videoId === props.videoId) {
if(!refContainer.current) { return; }
refContainer.current.requestFullscreen().then(() => {
setFullscreen(true);
}).catch(error => {
logWarn(LogCategory.GENERAL, tr("Failed to request fullscreen: %o"), error);
});
} else {
if(document.fullscreenElement === refContainer.current) {
document.exitFullscreen().then(undefined);
}
setFullscreen(false);
}
});
const toggleClass = (type: VideoBroadcastType) => {
if(props.videoId === kLocalVideoId || muteState[type] === "unset") {
return cssStyle.hidden;
}
return muteState[type] === "muted" ? cssStyle.disabled : "";
}
return (
<div
className={cssStyle.videoContainer}
onDoubleClick={() => {
if(props.isSpotlight) { return; }
events.fire("action_set_spotlight", { videoId: props.videoId, expend: true });
if(isFullscreen) {
events.fire("action_set_fullscreen", { videoId: undefined });
} else if(props.isSpotlight) {
events.fire("action_set_fullscreen", { videoId: props.videoId });
} else {
events.fire("action_set_spotlight", { videoId: props.videoId, expend: true });
}
}}
onContextMenu={event => {
event.preventDefault()
event.preventDefault();
spawnContextMenu({
pageY: event.pageY,
pageX: event.pageX
}, [
{
type: "normal",
label: isFullscreen ? tr("Release fullscreen") : tr("Show in fullscreen"),
icon: ClientIcon.Fullscreen,
click: () => {
events.fire("action_set_fullscreen", { videoId: isFullscreen ? undefined : props.videoId });
}
},
{
type: "normal",
label: props.isSpotlight ? tr("Release spotlight") : tr("Put client in spotlight"),
icon: ClientIcon.Fullscreen,
click: () => {
events.fire("action_set_spotlight", { videoId: props.isSpotlight ? undefined : props.videoId, expend: true });
}
}
]);
}}
ref={refContainer}
>
<VideoPlayer videoId={props.videoId} />
<VideoInfo videoId={props.videoId} />
<div className={cssStyle.requestFullscreen + " " + (isFullscreen || !fullscreenCapable ? cssStyle.hidden : "")}>
<div className={cssStyle.iconContainer} onClick={() => {
if(props.isSpotlight) {
if(!refContainer.current) { return; }
refContainer.current.requestFullscreen().then(() => {
setFullscreen(true);
}).catch(error => {
logWarn(LogCategory.GENERAL, tr("Failed to request fullscreen: %o"), error);
});
} else {
events.fire("action_set_spotlight", { videoId: props.videoId, expend: true });
}
}}>
<div className={cssStyle.actionIcons}>
<div className={cssStyle.iconContainer + " " + (!fullscreenCapable ? cssStyle.hidden : "")}
onClick={() => {
if(props.isSpotlight) {
events.fire("action_set_fullscreen", { videoId: isFullscreen ? undefined : props.videoId });
} else {
events.fire("action_set_spotlight", { videoId: props.videoId, expend: true });
}
}}
title={props.isSpotlight ? tr("Toggle fullscreen") : tr("Toggle spotlight")}
>
<ClientIconRenderer className={cssStyle.icon} icon={ClientIcon.Fullscreen} />
</div>
<div className={cssStyle.iconContainer + " " + cssStyle.toggle + " " + toggleClass("camera")}
onClick={() => events.fire("action_toggle_mute", { videoId: props.videoId, broadcastType: "camera", muted: muteState.camera === "available" })}
title={muteState["camera"] === "muted" ? tr("Unmute camera video") : tr("Mute camera video")}
>
<ClientIconRenderer className={cssStyle.icon} icon={ClientIcon.VideoMuted} />
</div>
<div className={cssStyle.iconContainer + " " + cssStyle.toggle + " " + toggleClass("screen")}
onClick={() => events.fire("action_toggle_mute", { videoId: props.videoId, broadcastType: "screen", muted: muteState.screen === "available" })}
title={muteState["screen"] === "muted" ? tr("Unmute screen video") : tr("Mute screen video")}
>
<ClientIconRenderer className={cssStyle.icon} icon={ClientIcon.ShareScreen} />
</div>
</div>
</div>
);

View File

@@ -251,12 +251,13 @@ class CommandHandler extends AbstractCommandHandler {
client_id: parseInt(data["sclid"]),
client_database_id: parseInt(data["scldbid"]),
client_name: data["sclname"],
client_unique_id: data["scluid"]
client_unique_id: data["scluid"],
media: parseInt(data["media"])
});
} else {
this.handle["doMapStream"](ssrc, undefined);
}
} else if(command.command === "notifyrtcstateaudio") {
} else if(command.command === "notifyrtcstreamstate") {
const data = command.arguments[0];
const state = parseInt(data["state"]);
const ssrc = parseInt(data["streamid"]) >>> 0;
@@ -269,7 +270,7 @@ class CommandHandler extends AbstractCommandHandler {
client_id: parseInt(data["sclid"]),
client_database_id: parseInt(data["scldbid"]),
client_name: data["sclname"],
client_unique_id: data["scluid"]
client_unique_id: data["scluid"],
});
} else {
logWarn(LogCategory.WEBRTC, tr("Received unknown/invalid rtc track state: %d"), state);
@@ -1001,6 +1002,10 @@ export class RTCConnection {
track.handleStateNotify(state, info);
} else {
let tempStream = this.getOrCreateTempStream(ssrc);
if(typeof info.media === "undefined") {
/* the media will only be sent on stream assignments, not on stream state changes */
info.media = tempStream.info?.media;
}
tempStream.info = info;
tempStream.status = state;
}
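
The media field only arrives with stream assignments, not with the renamed "notifyrtcstreamstate" updates, so the hunk above back-fills info.media from the previously cached temp stream before overwriting it. Below is a reduced sketch of that merge with a simplified temp-stream record (not the real RTCConnection bookkeeping).

```typescript
interface TrackClientInfo {
    media?: number,
    client_id: number,
    client_database_id: number,
    client_name: string,
    client_unique_id: string
}

/* Simplified stand-in for the temp stream entries kept by RTCConnection. */
interface TempStream { info?: TrackClientInfo, status?: number }

/* State changes omit the media id, so keep the one learned from the last assignment. */
function applyStreamState(tempStream: TempStream, state: number, info: TrackClientInfo) {
    if(typeof info.media === "undefined") {
        info.media = tempStream.info?.media;
    }
    tempStream.info = info;
    tempStream.status = state;
}
```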

View File

@@ -5,6 +5,8 @@ import * as aplayer from "tc-backend/web/audio/player";
export interface TrackClientInfo {
media?: number,
client_id: number,
client_database_id: number,
client_unique_id: string,

View File

@@ -1,5 +1,6 @@
import {
VideoBroadcastState, VideoBroadcastStatistics,
VideoBroadcastState,
VideoBroadcastStatistics,
VideoBroadcastType,
VideoClient,
VideoConnection,
@@ -65,9 +66,27 @@ export class RtpVideoConnection {
});
this.listenerRtcStateChanged = this.rtcConnection.getEvents().on("notify_state_changed", event => this.handleRtcConnectionStateChanged(event));
this.rtcConnection.getEvents().on("notify_video_assignment_changed", event => {
if(event.info) {
switch (event.info.media) {
case 2:
this.handleVideoAssignmentChanged("camera", event);
break;
/* TODO: Screen share?! */
this.rtcConnection.getEvents().on("notify_video_assignment_changed", event => this.handleVideoAssignmentChanged("camera", event));
case 3:
this.handleVideoAssignmentChanged("screen", event);
break;
default:
logWarn(LogCategory.WEBRTC, tr("Received video track %o assignment for invalid media: %o"), event.track.getSsrc(), event.info);
return;
}
} else {
/* track has been removed */
this.handleVideoAssignmentChanged("screen", event);
this.handleVideoAssignmentChanged("camera", event);
}
});
}
private setConnectionState(state: VideoConnectionStatus) {
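
The RTP layer identifies the broadcast kind through a numeric media id: per the switch above, 2 maps to a camera broadcast and 3 to a screen broadcast, while a missing info means the track was removed and both assignments are cleared. Below is a compact sketch of that mapping as a standalone helper; illustrative only, the real code dispatches directly inside the event handler.

```typescript
type VideoBroadcastType = "camera" | "screen";

/* Media ids as used in the switch above: 2 = camera, 3 = screen.
   Unknown ids yield undefined so callers can log a warning and bail out. */
function broadcastTypeFromMedia(media: number): VideoBroadcastType | undefined {
    switch(media) {
        case 2: return "camera";
        case 3: return "screen";
        default: return undefined;
    }
}
```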

View File

@@ -56,8 +56,10 @@
}
.entry {
/*padding: 8px 12px;*/
padding-top: 1px;
padding-bottom: 1px;
padding-right: 12px;
cursor: pointer;
list-style-type: none;
transition: all .3s ease;