Merge branch 'main' into vu-animation

Robin Townsend 2022-06-01 10:31:04 -04:00
commit ddac2ba5ef
15 changed files with 337 additions and 117 deletions

View file

@@ -33,19 +33,6 @@ export function GroupCallView({
   roomId,
   groupCall,
 }) {
-  const [showInspector, setShowInspector] = useState(
-    () => !!localStorage.getItem("matrix-group-call-inspector")
-  );
-  const onChangeShowInspector = useCallback((show) => {
-    setShowInspector(show);
-    if (show) {
-      localStorage.setItem("matrix-group-call-inspector", "true");
-    } else {
-      localStorage.removeItem("matrix-group-call-inspector");
-    }
-  }, []);
-
   const {
     state,
     error,
@@ -104,8 +91,6 @@ export function GroupCallView({
         participants={participants}
         userMediaFeeds={userMediaFeeds}
         onLeave={onLeave}
-        setShowInspector={onChangeShowInspector}
-        showInspector={showInspector}
       />
     );
   } else {
@@ -126,8 +111,6 @@ export function GroupCallView({
         isScreensharing={isScreensharing}
         localScreenshareFeed={localScreenshareFeed}
         screenshareFeeds={screenshareFeeds}
-        setShowInspector={onChangeShowInspector}
-        showInspector={showInspector}
         roomId={roomId}
       />
     );
@@ -156,8 +139,6 @@ export function GroupCallView({
         localVideoMuted={localVideoMuted}
         toggleLocalVideoMuted={toggleLocalVideoMuted}
         toggleMicrophoneMuted={toggleMicrophoneMuted}
-        setShowInspector={onChangeShowInspector}
-        showInspector={showInspector}
         roomId={roomId}
       />
     );

View file

@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-import React, { useCallback, useMemo } from "react";
+import React, { useCallback, useMemo, useRef } from "react";
 import styles from "./InCallView.module.css";
 import {
   HangupButton,
@@ -34,6 +34,7 @@ import { useRageshakeRequestModal } from "../settings/submit-rageshake";
 import { RageshakeRequestModal } from "./RageshakeRequestModal";
 import { usePreventScroll } from "@react-aria/overlays";
 import { useMediaHandler } from "../settings/useMediaHandler";
+import { useShowInspector } from "../settings/useSetting";
 import { useModalTriggerState } from "../Modal";
 
 const canScreenshare = "getDisplayMedia" in navigator.mediaDevices;
@@ -57,14 +58,16 @@ export function InCallView({
   toggleScreensharing,
   isScreensharing,
   screenshareFeeds,
-  setShowInspector,
-  showInspector,
   roomId,
 }) {
   usePreventScroll();
   const [layout, setLayout] = useVideoGridLayout(screenshareFeeds.length > 0);
   const { audioOutput } = useMediaHandler();
+  const [showInspector] = useShowInspector();
+
+  const audioContext = useRef();
+  if (!audioContext.current) audioContext.current = new AudioContext();
 
   const { modalState: feedbackModalState, modalProps: feedbackModalProps } =
     useModalTriggerState();
@@ -151,6 +154,7 @@ export function InCallView({
             getAvatar={renderAvatar}
             showName={items.length > 2 || item.focused}
             audioOutputDevice={audioOutput}
+            audioContext={audioContext.current}
             disableSpeakingIndicator={items.length < 3}
             {...rest}
           />
@@ -169,8 +173,6 @@ export function InCallView({
         <OverflowMenu
           inCall
           roomId={roomId}
-          setShowInspector={setShowInspector}
-          showInspector={showInspector}
           client={client}
           groupCall={groupCall}
           showInvite={true}

View file

@@ -41,8 +41,6 @@ export function LobbyView({
   localVideoMuted,
   toggleLocalVideoMuted,
   toggleMicrophoneMuted,
-  setShowInspector,
-  showInspector,
   roomId,
 }) {
   const { stream } = useCallFeed(localCallFeed);
@@ -101,8 +99,6 @@ export function LobbyView({
           localVideoMuted={localVideoMuted}
           toggleLocalVideoMuted={toggleLocalVideoMuted}
           toggleMicrophoneMuted={toggleMicrophoneMuted}
-          setShowInspector={setShowInspector}
-          showInspector={showInspector}
           stream={stream}
           audioOutput={audioOutput}
         />

View file

@@ -31,8 +31,6 @@ import { FeedbackModal } from "./FeedbackModal";
 
 export function OverflowMenu({
   roomId,
-  setShowInspector,
-  showInspector,
   inCall,
   groupCall,
   showInvite,
@@ -88,13 +86,7 @@ export function OverflowMenu({
           </Menu>
         )}
       </PopoverMenuTrigger>
-      {settingsModalState.isOpen && (
-        <SettingsModal
-          {...settingsModalProps}
-          setShowInspector={setShowInspector}
-          showInspector={showInspector}
-        />
-      )}
+      {settingsModalState.isOpen && <SettingsModal {...settingsModalProps} />}
       {inviteModalState.isOpen && (
         <InviteModal roomId={roomId} {...inviteModalProps} />
       )}

View file

@@ -9,10 +9,12 @@
   background-color: #21262c;
   position: relative;
   padding: 0;
+  cursor: pointer;
 }
 
 .talking {
   background-color: #0dbd8b;
+  cursor: unset;
 }
 
 .error {

View file

@@ -44,8 +44,11 @@ function getPromptText(
   activeSpeakerIsLocalUser: boolean,
   talkOverEnabled: boolean,
   activeSpeakerUserId: string,
-  activeSpeakerDisplayName: string
+  activeSpeakerDisplayName: string,
+  connected: boolean
 ): string {
+  if (!connected) return "Connection Lost";
+
   const isTouchScreen = Boolean(window.ontouchstart !== undefined);
 
   if (showTalkOverError) {
@@ -84,8 +87,6 @@ interface Props {
   participants: RoomMember[];
   userMediaFeeds: CallFeed[];
   onLeave: () => void;
-  setShowInspector: (boolean) => void;
-  showInspector: boolean;
 }
 
 export const PTTCallView: React.FC<Props> = ({
@@ -97,8 +98,6 @@ export const PTTCallView: React.FC<Props> = ({
   participants,
   userMediaFeeds,
   onLeave,
-  setShowInspector,
-  showInspector,
 }) => {
   const { modalState: inviteModalState, modalProps: inviteModalProps } =
     useModalTriggerState();
@@ -128,6 +127,7 @@ export const PTTCallView: React.FC<Props> = ({
     startTalking,
     stopTalking,
     transmitBlocked,
+    connected,
   } = usePTT(
     client,
     groupCall,
@@ -190,8 +190,6 @@ export const PTTCallView: React.FC<Props> = ({
           <OverflowMenu
             inCall
             roomId={roomId}
-            setShowInspector={setShowInspector}
-            showInspector={showInspector}
             client={client}
             groupCall={groupCall}
             showInvite={false}
@@ -236,7 +234,8 @@ export const PTTCallView: React.FC<Props> = ({
             activeSpeakerIsLocalUser,
             talkOverEnabled,
             activeSpeakerUserId,
-            activeSpeakerDisplayName
+            activeSpeakerDisplayName,
+            connected
           )}
         </p>
         {userMediaFeeds.map((callFeed) => (

View file

@@ -35,8 +35,6 @@ export function VideoPreview({
   localVideoMuted,
   toggleLocalVideoMuted,
   toggleMicrophoneMuted,
-  setShowInspector,
-  showInspector,
   audioOutput,
   stream,
 }) {
@@ -83,8 +81,6 @@ export function VideoPreview({
         />
         <OverflowMenu
           roomId={roomId}
-          setShowInspector={setShowInspector}
-          showInspector={showInspector}
           client={client}
           feedbackModalState={feedbackModalState}
           feedbackModalProps={feedbackModalProps}

View file

@@ -18,10 +18,57 @@ import { useCallback, useEffect, useState } from "react";
 import {
   GroupCallEvent,
   GroupCallState,
+  GroupCall,
 } from "matrix-js-sdk/src/webrtc/groupCall";
+import { MatrixCall } from "matrix-js-sdk/src/webrtc/call";
+import { CallFeed } from "matrix-js-sdk/src/webrtc/callFeed";
+import { RoomMember } from "matrix-js-sdk/src/models/room-member";
 import { usePageUnload } from "./usePageUnload";
 
-export function useGroupCall(groupCall) {
+export interface UseGroupCallType {
+  state: GroupCallState;
+  calls: MatrixCall[];
+  localCallFeed: CallFeed;
+  activeSpeaker: string;
+  userMediaFeeds: CallFeed[];
+  microphoneMuted: boolean;
+  localVideoMuted: boolean;
+  error: Error;
+  initLocalCallFeed: () => void;
+  enter: () => void;
+  leave: () => void;
+  toggleLocalVideoMuted: () => void;
+  toggleMicrophoneMuted: () => void;
+  toggleScreensharing: () => void;
+  requestingScreenshare: boolean;
+  isScreensharing: boolean;
+  screenshareFeeds: CallFeed[];
+  localScreenshareFeed: CallFeed;
+  localDesktopCapturerSourceId: string;
+  participants: RoomMember[];
+  hasLocalParticipant: boolean;
+}
+
+interface State {
+  state: GroupCallState;
+  calls: MatrixCall[];
+  localCallFeed: CallFeed;
+  activeSpeaker: string;
+  userMediaFeeds: CallFeed[];
+  error: Error;
+  microphoneMuted: boolean;
+  localVideoMuted: boolean;
+  screenshareFeeds: CallFeed[];
+  localScreenshareFeed: CallFeed;
+  localDesktopCapturerSourceId: string;
+  isScreensharing: boolean;
+  requestingScreenshare: boolean;
+  participants: RoomMember[];
+  hasLocalParticipant: boolean;
+}
+
+export function useGroupCall(groupCall: GroupCall): UseGroupCallType {
   const [
     {
       state,
@@ -41,20 +88,25 @@ export function useGroupCall(groupCall) {
       requestingScreenshare,
     },
     setState,
-  ] = useState({
+  ] = useState<State>({
     state: GroupCallState.LocalCallFeedUninitialized,
     calls: [],
+    localCallFeed: null,
+    activeSpeaker: null,
     userMediaFeeds: [],
+    error: null,
     microphoneMuted: false,
     localVideoMuted: false,
-    screenshareFeeds: [],
     isScreensharing: false,
+    screenshareFeeds: [],
+    localScreenshareFeed: null,
+    localDesktopCapturerSourceId: null,
     requestingScreenshare: false,
     participants: [],
     hasLocalParticipant: false,
   });
 
-  const updateState = (state) =>
+  const updateState = (state: Partial<State>) =>
     setState((prevState) => ({ ...prevState, ...state }));
 
   useEffect(() => {
@@ -75,25 +127,28 @@ export function useGroupCall(groupCall) {
       });
     }
 
-    function onUserMediaFeedsChanged(userMediaFeeds) {
+    function onUserMediaFeedsChanged(userMediaFeeds: CallFeed[]): void {
       updateState({
         userMediaFeeds: [...userMediaFeeds],
       });
     }
 
-    function onScreenshareFeedsChanged(screenshareFeeds) {
+    function onScreenshareFeedsChanged(screenshareFeeds: CallFeed[]): void {
       updateState({
         screenshareFeeds: [...screenshareFeeds],
       });
     }
 
-    function onActiveSpeakerChanged(activeSpeaker) {
+    function onActiveSpeakerChanged(activeSpeaker: string): void {
       updateState({
         activeSpeaker: activeSpeaker,
       });
     }
 
-    function onLocalMuteStateChanged(microphoneMuted, localVideoMuted) {
+    function onLocalMuteStateChanged(
+      microphoneMuted: boolean,
+      localVideoMuted: boolean
+    ): void {
       updateState({
         microphoneMuted,
         localVideoMuted,
@@ -101,10 +156,10 @@ export function useGroupCall(groupCall) {
     }
 
     function onLocalScreenshareStateChanged(
-      isScreensharing,
-      localScreenshareFeed,
-      localDesktopCapturerSourceId
-    ) {
+      isScreensharing: boolean,
+      localScreenshareFeed: CallFeed,
+      localDesktopCapturerSourceId: string
+    ): void {
       updateState({
         isScreensharing,
         localScreenshareFeed,
@@ -112,13 +167,13 @@ export function useGroupCall(groupCall) {
       });
     }
 
-    function onCallsChanged(calls) {
+    function onCallsChanged(calls: MatrixCall[]): void {
       updateState({
         calls: [...calls],
       });
     }
 
-    function onParticipantsChanged(participants) {
+    function onParticipantsChanged(participants: RoomMember[]): void {
       updateState({
         participants: [...participants],
         hasLocalParticipant: groupCall.hasLocalParticipant(),

View file

@@ -15,10 +15,11 @@ limitations under the License.
 */
 
 import { useCallback, useEffect, useState } from "react";
-import { MatrixClient } from "matrix-js-sdk/src/client";
+import { MatrixClient, ClientEvent } from "matrix-js-sdk/src/client";
 import { GroupCall } from "matrix-js-sdk/src/webrtc/groupCall";
 import { CallFeed, CallFeedEvent } from "matrix-js-sdk/src/webrtc/callFeed";
 import { logger } from "matrix-js-sdk/src/logger";
+import { SyncState } from "matrix-js-sdk/src/sync";
 
 import { PlayClipFunction, PTTClipID } from "../sound/usePttSounds";
 
@@ -30,6 +31,21 @@ function getActiveSpeakerFeed(
 ): CallFeed | null {
   const activeSpeakerFeeds = feeds.filter((f) => !f.isAudioMuted());
 
+  // make sure the feeds are in a deterministic order so every client picks
+  // the same one as the active speaker. The custom sort function sorts
+  // by user ID, so needs a collator of some kind to compare. We make a
+  // specific one to help ensure every client sorts the same way
+  // (although of course user IDs shouldn't contain accented characters etc.
+  // anyway).
+  const collator = new Intl.Collator("en", {
+    sensitivity: "variant",
+    usage: "sort",
+    ignorePunctuation: false,
+  });
+  activeSpeakerFeeds.sort((a: CallFeed, b: CallFeed): number =>
+    collator.compare(a.userId, b.userId)
+  );
+
   let activeSpeakerFeed = null;
   let highestPowerLevel = null;
   for (const feed of activeSpeakerFeeds) {
@@ -53,6 +69,11 @@ export interface PTTState {
   startTalking: () => void;
   stopTalking: () => void;
   transmitBlocked: boolean;
+  // connected is actually an indication of whether we're connected to the HS
+  // (ie. the client's syncing state) rather than media connection, since
+  // it's peer to peer so we can't really say which peer is 'disconnected' if
+  // there's only one other person in the call and they've lost Internet.
+  connected: boolean;
 }
 
 export const usePTT = (
@@ -226,6 +247,17 @@ export const usePTT = (
     setMicMuteWrapper(true);
   }, [setMicMuteWrapper]);
 
+  // separate state for connected: we set it separately from other things
+  // in the client sync callback
+  const [connected, setConnected] = useState(true);
+
+  const onClientSync = useCallback(
+    (syncState: SyncState) => {
+      setConnected(syncState !== SyncState.Error);
+    },
+    [setConnected]
+  );
+
   useEffect(() => {
     function onKeyDown(event: KeyboardEvent): void {
       if (event.code === "Space") {
@@ -275,8 +307,18 @@ export const usePTT = (
     pttButtonHeld,
     enablePTTButton,
     setMicMuteWrapper,
+    client,
+    onClientSync,
   ]);
 
+  useEffect(() => {
+    client.on(ClientEvent.Sync, onClientSync);
+
+    return () => {
+      client.removeListener(ClientEvent.Sync, onClientSync);
+    };
+  }, [client, onClientSync]);
+
   const setTalkOverEnabled = useCallback((talkOverEnabled) => {
     setState((prevState) => ({
       ...prevState,
@@ -294,5 +336,6 @@ export const usePTT = (
     startTalking,
     stopTalking,
     transmitBlocked,
+    connected,
   };
 };
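
A note on the tie-break above: Intl.Collator with a fixed locale gives every client the same ordering regardless of the local user's locale settings. A minimal illustration (the user IDs here are made up, not taken from the diff):

const collator = new Intl.Collator("en", {
  sensitivity: "variant",
  usage: "sort",
  ignorePunctuation: false,
});
const ids = ["@bob:example.org", "@alice:example.org"];
ids.sort((a, b) => collator.compare(a, b));
// => ["@alice:example.org", "@bob:example.org"] on every client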

View file

@@ -24,12 +24,13 @@ import { ReactComponent as DeveloperIcon } from "../icons/Developer.svg";
 import { SelectInput } from "../input/SelectInput";
 import { Item } from "@react-stately/collections";
 import { useMediaHandler } from "./useMediaHandler";
+import { useSpatialAudio, useShowInspector } from "./useSetting";
 import { FieldRow, InputField } from "../input/Input";
 import { Button } from "../button";
 import { useDownloadDebugLog } from "./submit-rageshake";
 import { Body } from "../typography/Typography";
 
-export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
+export const SettingsModal = (props) => {
   const {
     audioInput,
     audioInputs,
@@ -41,6 +42,8 @@ export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
     audioOutputs,
     setAudioOutput,
   } = useMediaHandler();
+  const [spatialAudio, setSpatialAudio] = useSpatialAudio();
+  const [showInspector, setShowInspector] = useShowInspector();
 
   const downloadDebugLog = useDownloadDebugLog();
 
@@ -50,7 +53,7 @@ export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
       isDismissable
       mobileFullScreen
       className={styles.settingsModal}
-      {...rest}
+      {...props}
     >
       <TabContainer className={styles.tabContainer}>
         <TabItem
@@ -81,6 +84,15 @@ export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
             ))}
           </SelectInput>
         )}
+        <FieldRow>
+          <InputField
+            id="spatialAudio"
+            label="Spatial audio (experimental)"
+            type="checkbox"
+            checked={spatialAudio}
+            onChange={(e) => setSpatialAudio(e.target.checked)}
+          />
+        </FieldRow>
       </TabItem>
       <TabItem
         title={
@@ -130,4 +142,4 @@ export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
       </TabContainer>
     </Modal>
   );
-}
+};

View file

@@ -0,0 +1,56 @@
+/*
+Copyright 2022 New Vector Ltd
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { EventEmitter } from "events";
+import { useMemo, useState, useEffect, useCallback } from "react";
+
+// Bus to notify other useSetting consumers when a setting is changed
+const settingsBus = new EventEmitter();
+
+// Like useState, but reads from and persists the value to localStorage
+const useSetting = <T>(
+  name: string,
+  defaultValue: T
+): [T, (value: T) => void] => {
+  const key = useMemo(() => `matrix-setting-${name}`, [name]);
+
+  const [value, setValue] = useState<T>(() => {
+    const item = localStorage.getItem(key);
+    return item == null ? defaultValue : JSON.parse(item);
+  });
+
+  useEffect(() => {
+    settingsBus.on(name, setValue);
+    return () => {
+      settingsBus.off(name, setValue);
+    };
+  }, [name, setValue]);
+
+  return [
+    value,
+    useCallback(
+      (newValue: T) => {
+        setValue(newValue);
+        localStorage.setItem(key, JSON.stringify(newValue));
+        settingsBus.emit(name, newValue);
+      },
+      [name, key, setValue]
+    ),
+  ];
+};
+
+export const useSpatialAudio = () => useSetting("spatial-audio", false);
+export const useShowInspector = () => useSetting("show-inspector", false);
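
A minimal sketch of consuming the new hook (the component name is hypothetical); it mirrors the checkbox wired up in the SettingsModal change above. Because the setter re-emits on settingsBus, every mounted consumer of useSpatialAudio() re-renders with the new value, and the value persists across reloads via localStorage:

import React from "react";
import { useSpatialAudio } from "./useSetting";

export function SpatialAudioToggle() {
  const [spatialAudio, setSpatialAudio] = useSpatialAudio();
  return (
    <input
      type="checkbox"
      checked={spatialAudio}
      onChange={(e) => setSpatialAudio(e.target.checked)}
    />
  );
}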

View file

@@ -14,14 +14,16 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-import React from "react";
+import React, { forwardRef } from "react";
 import { animated } from "@react-spring/web";
 import classNames from "classnames";
 import styles from "./VideoTile.module.css";
 import { ReactComponent as MicMutedIcon } from "../icons/MicMuted.svg";
 import { ReactComponent as VideoMutedIcon } from "../icons/VideoMuted.svg";
 
-export function VideoTile({
+export const VideoTile = forwardRef(
+  (
+    {
       className,
       isLocal,
       speaking,
@@ -34,7 +36,9 @@ export function VideoTile({
       showName,
       mediaRef,
       ...rest
-    }) {
+    },
+    ref
+  ) => {
     return (
       <animated.div
         className={classNames(styles.videoTile, className, {
@@ -43,6 +47,7 @@ export function VideoTile({
           [styles.muted]: audioMuted,
           [styles.screenshare]: screenshare,
         })}
+        ref={ref}
         {...rest}
       >
         {(videoMuted || noVideo) && (
@@ -68,3 +73,4 @@ export function VideoTile({
       </animated.div>
     );
   }
+);

View file

@@ -5,6 +5,10 @@
   overflow: hidden;
   cursor: pointer;
   touch-action: none;
+  /* HACK: This has no visual effect due to the short duration, but allows the
+  JS to detect movement via the transform property for audio spatialization */
+  transition: transform 0.000000001s;
 }
 
 .videoTile * {
View file

@@ -17,7 +17,7 @@ limitations under the License.
 
 import { SDPStreamMetadataPurpose } from "matrix-js-sdk/src/webrtc/callEventTypes";
 import React from "react";
 import { useCallFeed } from "./useCallFeed";
-import { useMediaStream } from "./useMediaStream";
+import { useSpatialMediaStream } from "./useMediaStream";
 import { useRoomMemberName } from "./useRoomMemberName";
 import { VideoTile } from "./VideoTile";
 
@@ -28,6 +28,7 @@ export function VideoTileContainer({
   getAvatar,
   showName,
   audioOutputDevice,
+  audioContext,
   disableSpeakingIndicator,
   ...rest
 }) {
@@ -42,7 +43,12 @@ export function VideoTileContainer({
     member,
   } = useCallFeed(item.callFeed);
   const { rawDisplayName } = useRoomMemberName(member);
-  const mediaRef = useMediaStream(stream, audioOutputDevice, isLocal);
+  const [tileRef, mediaRef] = useSpatialMediaStream(
+    stream,
+    audioOutputDevice,
+    audioContext,
+    isLocal
+  );
 
   // Firefox doesn't respect the disablePictureInPicture attribute
   // https://bugzilla.mozilla.org/show_bug.cgi?id=1611831
@@ -57,6 +63,7 @@ export function VideoTileContainer({
       screenshare={purpose === SDPStreamMetadataPurpose.Screenshare}
       name={rawDisplayName}
       showName={showName}
+      ref={tileRef}
       mediaRef={mediaRef}
       avatar={getAvatar && getAvatar(member, width, height)}
       {...rest}

View file

@@ -16,6 +16,8 @@ limitations under the License.
 
 import { useRef, useEffect } from "react";
 
+import { useSpatialAudio } from "../settings/useSetting";
+
 export function useMediaStream(stream, audioOutputDevice, mute = false) {
   const mediaRef = useRef();
 
@@ -55,7 +57,8 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
       mediaRef.current !== undefined
     ) {
       console.log(`useMediaStream setSinkId ${audioOutputDevice}`);
-      mediaRef.current.setSinkId(audioOutputDevice);
+      // Chrome for Android doesn't support this
+      mediaRef.current.setSinkId?.(audioOutputDevice);
     }
   }, [audioOutputDevice]);
 
@@ -73,3 +76,69 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
 
   return mediaRef;
 }
+
+export const useSpatialMediaStream = (
+  stream,
+  audioOutputDevice,
+  audioContext,
+  mute = false
+) => {
+  const tileRef = useRef();
+  const [spatialAudio] = useSpatialAudio();
+  // If spatial audio is enabled, we handle audio separately from the video element
+  const mediaRef = useMediaStream(
+    stream,
+    audioOutputDevice,
+    spatialAudio || mute
+  );
+
+  const pannerNodeRef = useRef();
+  if (!pannerNodeRef.current) {
+    pannerNodeRef.current = new PannerNode(audioContext, {
+      panningModel: "HRTF",
+      refDistance: 3,
+    });
+  }
+
+  const sourceRef = useRef();
+
+  useEffect(() => {
+    if (spatialAudio && tileRef.current && !mute) {
+      if (!sourceRef.current) {
+        sourceRef.current = audioContext.createMediaStreamSource(stream);
+      }
+
+      const tile = tileRef.current;
+      const source = sourceRef.current;
+      const pannerNode = pannerNodeRef.current;
+
+      const updatePosition = () => {
+        const bounds = tile.getBoundingClientRect();
+        const windowSize = Math.max(window.innerWidth, window.innerHeight);
+        // Position the source relative to its placement in the window
+        pannerNodeRef.current.positionX.value =
+          (bounds.x + bounds.width / 2) / windowSize - 0.5;
+        pannerNodeRef.current.positionY.value =
+          (bounds.y + bounds.height / 2) / windowSize - 0.5;
+        // Put the source in front of the listener
+        pannerNodeRef.current.positionZ.value = -2;
+      };
+
+      updatePosition();
+      source.connect(pannerNode);
+      pannerNode.connect(audioContext.destination);
+      // HACK: We abuse the CSS transitionrun event to detect when the tile
+      // moves, because useMeasure, IntersectionObserver, etc. all have no
+      // ability to track changes in the CSS transform property
+      tile.addEventListener("transitionrun", updatePosition);
+
+      return () => {
+        tile.removeEventListener("transitionrun", updatePosition);
+        source.disconnect();
+        pannerNode.disconnect();
+      };
+    }
+  }, [stream, spatialAudio, audioContext, mute]);
+
+  return [tileRef, mediaRef];
+};
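
Worked example of the panner positioning above (numbers assumed for illustration): for a tile centred at (960, 540) in a 1920x1080 window, windowSize = max(1920, 1080) = 1920, so

const positionX = 960 / 1920 - 0.5; // 0        -> horizontally centred
const positionY = 540 / 1920 - 0.5; // -0.21875 -> slightly above the listener
const positionZ = -2;               // always placed in front of the listener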