Quick way to replace matrix JS SDK with LiveKit
parent fb9dd7ff71
commit ee1819a0b6
13 changed files with 177 additions and 800 deletions
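At its core, the VideoTile change below drops the Matrix CallFeed plumbing (connectionState, mediaRef, debugInfo, …) and lets LiveKit attach camera and microphone tracks straight to media elements. The following is a minimal sketch of that pattern, using the same useMediaTrack call the diff introduces; the component and prop names here are illustrative and not part of the commit, and the participant is assumed to come from a livekit-client Room set up elsewhere:

import React, { useRef } from "react";
import { LocalParticipant, RemoteParticipant, Track } from "livekit-client";
import { useMediaTrack } from "@livekit/components-react";

interface TileSketchProps {
  // A local or remote participant obtained from the LiveKit room (assumed).
  participant: LocalParticipant | RemoteParticipant;
}

// Illustrative only: binds the participant's camera and microphone tracks
// to <video>/<audio> elements, mirroring what the new VideoTile does.
export function TileSketch({ participant }: TileSketchProps) {
  const videoEl = useRef<HTMLVideoElement>(null);
  const audioEl = useRef<HTMLAudioElement>(null);

  // useMediaTrack subscribes to the given track source and, because an
  // element ref is passed, attaches/detaches the media stream for us.
  const { isMuted: cameraMuted } = useMediaTrack(Track.Source.Camera, participant, {
    element: videoEl,
  });
  const { isMuted: microphoneMuted } = useMediaTrack(Track.Source.Microphone, participant, {
    element: audioEl,
  });

  return (
    <div>
      <video ref={videoEl} playsInline />
      <audio ref={audioEl} />
      {cameraMuted && <span>camera off</span>}
      {microphoneMuted && <span>mic muted</span>}
    </div>
  );
}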
@@ -18,134 +18,63 @@ import React, { forwardRef } from "react";
import { animated } from "@react-spring/web";
import classNames from "classnames";
import { useTranslation } from "react-i18next";
import { LocalParticipant, RemoteParticipant, Track } from "livekit-client";
import { useMediaTrack } from "@livekit/components-react";

import styles from "./VideoTile.module.css";
import { ReactComponent as MicMutedIcon } from "../icons/MicMuted.svg";
import { ReactComponent as VideoMutedIcon } from "../icons/VideoMuted.svg";
import { AudioButton, FullscreenButton } from "../button/Button";
import { ConnectionState } from "../room/useGroupCall";
import { CallFeedDebugInfo } from "./useCallFeed";
import { useShowInspector } from "../settings/useSetting";

interface Props {
  name: string;
  connectionState: ConnectionState;
  speaking?: boolean;
  audioMuted?: boolean;
  videoMuted?: boolean;
  screenshare?: boolean;
  avatar?: JSX.Element;
  mediaRef?: React.RefObject<MediaElement>;
  onOptionsPress?: () => void;
  localVolume?: number;
  hasAudio?: boolean;
  maximised?: boolean;
  fullscreen?: boolean;
  onFullscreen?: () => void;
  className?: string;
  showOptions?: boolean;
  isLocal?: boolean;
  disableSpeakingIndicator?: boolean;
  debugInfo: CallFeedDebugInfo;
  sfuParticipant: LocalParticipant | RemoteParticipant;
}

export const VideoTile = forwardRef<HTMLDivElement, Props>(
  (
    {
      name,
      connectionState,
      speaking,
      audioMuted,
      videoMuted,
      screenshare,
      avatar,
      mediaRef,
      onOptionsPress,
      localVolume,
      hasAudio,
      maximised,
      fullscreen,
      onFullscreen,
      className,
      showOptions,
      isLocal,
      // TODO: disableSpeakingIndicator is not used atm.
      disableSpeakingIndicator,
      debugInfo,
      ...rest
    },
    ref
  ) => {
    const [showInspector] = useShowInspector();
  ({ name, avatar, maximised, className, sfuParticipant, ...rest }, ref) => {
    const { t } = useTranslation();

    const toolbarButtons: JSX.Element[] = [];
    if (connectionState == ConnectionState.Connected && !isLocal) {
      if (hasAudio) {
        toolbarButtons.push(
          <AudioButton
            key="localVolume"
            className={styles.button}
            volume={localVolume}
            onPress={onOptionsPress}
          />
        );
    const videoEl = React.useRef<HTMLVideoElement>(null);
    const { isMuted: cameraMuted } = useMediaTrack(
      Track.Source.Camera,
      sfuParticipant,
      {
        element: videoEl,
      }
    );

      if (screenshare) {
        toolbarButtons.push(
          <FullscreenButton
            key="fullscreen"
            className={styles.button}
            fullscreen={fullscreen}
            onPress={onFullscreen}
          />
        );
    const audioEl = React.useRef<HTMLAudioElement>(null);
    const { isMuted: microphoneMuted } = useMediaTrack(
      Track.Source.Microphone,
      sfuParticipant,
      {
        element: audioEl,
      }
    }

    let caption: string;
    switch (connectionState) {
      case ConnectionState.EstablishingCall:
        caption = t("{{name}} (Connecting...)", { name });
        break;
      case ConnectionState.WaitMedia:
        // not strictly true, but probably easier to understand than, "Waiting for media"
        caption = t("{{name}} (Waiting for video...)", { name });
        break;
      case ConnectionState.Connected:
        caption = name;
        break;
    }
    );

    return (
      <animated.div
        className={classNames(styles.videoTile, className, {
          [styles.isLocal]: isLocal,
          [styles.speaking]: speaking,
          [styles.muted]: audioMuted,
          [styles.screenshare]: screenshare,
          [styles.isLocal]: sfuParticipant.isLocal,
          [styles.speaking]: sfuParticipant.isSpeaking,
          [styles.muted]: microphoneMuted,
          [styles.screenshare]: false,
          [styles.maximised]: maximised,
        })}
        ref={ref}
        {...rest}
      >
        {showInspector && (
          <div className={classNames(styles.debugInfo)}>
            {JSON.stringify(debugInfo)}
          </div>
        )}
        {toolbarButtons.length > 0 && !maximised && (
          <div className={classNames(styles.toolbar)}>{toolbarButtons}</div>
        )}
        {videoMuted && (
        {cameraMuted && (
          <>
            <div className={styles.videoMutedOverlay} />
            {avatar}
          </>
        )}
        {!maximised &&
          (screenshare ? (
          (sfuParticipant.isScreenShareEnabled ? (
            <div className={styles.presenterLabel}>
              <span>{t("{{name}} is presenting", { name })}</span>
            </div>
@@ -156,13 +85,15 @@ export const VideoTile = forwardRef<HTMLDivElement, Props>(
                Mute state is currently sent over to-device messages, which
                aren't quite real-time, so this is an important kludge to make
                sure no one appears muted when they've clearly begun talking. */
                audioMuted && !videoMuted && !speaking && <MicMutedIcon />
                microphoneMuted &&
                  !cameraMuted &&
                  !sfuParticipant.isSpeaking && <MicMutedIcon />
              }
              {videoMuted && <VideoMutedIcon />}
              <span title={caption}>{caption}</span>
              {cameraMuted && <VideoMutedIcon />}
            </div>
          ))}
        <video ref={mediaRef} playsInline disablePictureInPicture />
        <video ref={videoEl} />
        <audio ref={audioEl} />
      </animated.div>
    );
  }
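The guard that the second hunk rewrites is easier to read when pulled out of the interleaved diff: the mic-muted icon is only shown while the participant is not detected as speaking, because mute state (previously carried over Matrix to-device messages) can lag behind actual speech. A condensed sketch of that check, assuming the muted flags come from useMediaTrack as above; the helper name is illustrative and not part of the commit:

import { LocalParticipant, RemoteParticipant } from "livekit-client";

// Mute state can lag behind real speech, so suppress the muted-mic icon
// whenever the participant is detected as speaking. The !cameraMuted clause
// carries over the original !videoMuted check unchanged.
export function showMicMutedIcon(
  participant: LocalParticipant | RemoteParticipant,
  microphoneMuted: boolean,
  cameraMuted: boolean
): boolean {
  return microphoneMuted && !cameraMuted && !participant.isSpeaking;
}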