-            {audioMuted && !(videoMuted && !noVideo) && <MicMutedIcon />}
-            {videoMuted && !noVideo && <VideoMutedIcon />}
-            {showName && <span>{name}</span>}
+export const VideoTile = forwardRef(
+  (
+    {
+      className,
+      isLocal,
+      speaking,
+      audioMuted,
+      noVideo,
+      videoMuted,
+      screenshare,
+      avatar,
+      name,
+      showName,
+      mediaRef,
+      ...rest
+    },
+    ref
+  ) => {
+    return (
+      <animated.div
+        className={classNames(styles.videoTile, className, {
+          [styles.isLocal]: isLocal,
+          [styles.speaking]: speaking,
+          [styles.muted]: audioMuted,
+          [styles.screenshare]: screenshare,
+        })}
+        ref={ref}
+        {...rest}
+      >
+        {(videoMuted || noVideo) && (
+          <>
+            <div className={styles.videoMutedOverlay} />
+            {avatar}
+          </>
+        )}
+        {screenshare ? (
+          <div className={styles.presenterLabel}>
+            <span>{`${name} is presenting`}</span>
+          </div>
-          )
-        )}
-        <video ref={mediaRef} playsInline disablePictureInPicture />
-      </animated.div>
-    );
-}
+        ) : (
+          (showName || audioMuted || (videoMuted && !noVideo)) && (
+            <div className={styles.memberName}>
+              {audioMuted && !(videoMuted && !noVideo) && <MicMutedIcon />}
+              {videoMuted && !noVideo && <VideoMutedIcon />}
+              {showName && <span>{name}</span>}
+            </div>
+          )
+        )}
+        <video ref={mediaRef} playsInline disablePictureInPicture />
+      </animated.div>
+    );
+  }
+);
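
Note on the hunk above: VideoTile becomes a forwardRef component so that a ref to the tile's root element can be attached from outside, independently of mediaRef (which targets the inner <video>). The spatial audio hook added further down relies on that root ref to measure the tile's on-screen position. A sketch of where the two refs end up (illustrative only, not part of the patch):

// tileRef  -> the tile's root <animated.div>, measured via getBoundingClientRect()
// mediaRef -> the inner <video>, which useMediaStream attaches the stream to
const [tileRef, mediaRef] = useSpatialMediaStream(
  stream,
  audioOutputDevice,
  audioContext,
  isLocal
);
return <VideoTile ref={tileRef} mediaRef={mediaRef} name={name} showName />;
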
diff --git a/src/video-grid/VideoTile.module.css b/src/video-grid/VideoTile.module.css
index f5e2d11..0948488 100644
--- a/src/video-grid/VideoTile.module.css
+++ b/src/video-grid/VideoTile.module.css
@@ -5,6 +5,10 @@
overflow: hidden;
cursor: pointer;
touch-action: none;
+
+ /* HACK: This has no visual effect due to the short duration, but allows the
+ JS to detect movement via the transform property for audio spatialization */
+ transition: transform 0.000000001s;
}
.videoTile * {
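
The near-zero transition added above is what the spatial audio code (see useMediaStream.js below) hooks into: any JS-driven change to the tile's transform now starts a transition, which fires a transitionrun event even though the duration is far too short to be visible. A standalone sketch of the mechanism, outside React (the selector and values are illustrative assumptions):

// Assumes an element styled with `transition: transform 0.000000001s`.
const tile = document.querySelector(".videoTile"); // hypothetical selector
tile.addEventListener("transitionrun", (event) => {
  if (event.propertyName === "transform") {
    // The tile has started moving; re-read its on-screen position.
    console.log(tile.getBoundingClientRect());
  }
});
tile.style.transform = "translate(120px, 80px)"; // fires "transitionrun"
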
diff --git a/src/video-grid/VideoTileContainer.jsx b/src/video-grid/VideoTileContainer.jsx
index dddba38..a4cf400 100644
--- a/src/video-grid/VideoTileContainer.jsx
+++ b/src/video-grid/VideoTileContainer.jsx
@@ -17,7 +17,7 @@ limitations under the License.
import { SDPStreamMetadataPurpose } from "matrix-js-sdk/src/webrtc/callEventTypes";
import React from "react";
import { useCallFeed } from "./useCallFeed";
-import { useMediaStream } from "./useMediaStream";
+import { useSpatialMediaStream } from "./useMediaStream";
import { useRoomMemberName } from "./useRoomMemberName";
import { VideoTile } from "./VideoTile";
@@ -28,6 +28,7 @@ export function VideoTileContainer({
getAvatar,
showName,
audioOutputDevice,
+ audioContext,
disableSpeakingIndicator,
...rest
}) {
@@ -42,7 +43,12 @@ export function VideoTileContainer({
member,
} = useCallFeed(item.callFeed);
const { rawDisplayName } = useRoomMemberName(member);
- const mediaRef = useMediaStream(stream, audioOutputDevice, isLocal);
+ const [tileRef, mediaRef] = useSpatialMediaStream(
+ stream,
+ audioOutputDevice,
+ audioContext,
+ isLocal
+ );
// Firefox doesn't respect the disablePictureInPicture attribute
// https://bugzilla.mozilla.org/show_bug.cgi?id=1611831
@@ -57,6 +63,7 @@ export function VideoTileContainer({
screenshare={purpose === SDPStreamMetadataPurpose.Screenshare}
name={rawDisplayName}
showName={showName}
+ ref={tileRef}
mediaRef={mediaRef}
avatar={getAvatar && getAvatar(member, width, height)}
{...rest}
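
For context, the new audioContext prop is presumably a single AudioContext shared by every tile, so that all panned sources mix into one destination. A sketch of how a parent might supply it (the component, props, and `item.id` key below are illustrative assumptions, not part of the patch):

import React, { useState, useEffect } from "react";
import { VideoTileContainer } from "./VideoTileContainer";

function CallViewSketch({ items, audioOutputDevice, showName }) {
  // One AudioContext for the whole call, created once and closed on unmount
  const [audioContext] = useState(() => new AudioContext());
  useEffect(() => () => audioContext.close(), [audioContext]);

  return items.map((item) => (
    <VideoTileContainer
      key={item.id}
      item={item}
      audioContext={audioContext}
      audioOutputDevice={audioOutputDevice}
      showName={showName}
    />
  ));
}
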
diff --git a/src/video-grid/useMediaStream.js b/src/video-grid/useMediaStream.js
index 2432a73..774c6dd 100644
--- a/src/video-grid/useMediaStream.js
+++ b/src/video-grid/useMediaStream.js
@@ -16,6 +16,8 @@ limitations under the License.
import { useRef, useEffect } from "react";
+import { useSpatialAudio } from "../settings/useSetting";
+
export function useMediaStream(stream, audioOutputDevice, mute = false) {
const mediaRef = useRef();
@@ -74,3 +76,69 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
return mediaRef;
}
+
+export const useSpatialMediaStream = (
+ stream,
+ audioOutputDevice,
+ audioContext,
+ mute = false
+) => {
+ const tileRef = useRef();
+ const [spatialAudio] = useSpatialAudio();
+ // If spatial audio is enabled, we handle audio separately from the video element
+ const mediaRef = useMediaStream(
+ stream,
+ audioOutputDevice,
+ spatialAudio || mute
+ );
+
+ const pannerNodeRef = useRef();
+ if (!pannerNodeRef.current) {
+ pannerNodeRef.current = new PannerNode(audioContext, {
+ panningModel: "HRTF",
+ refDistance: 3,
+ });
+ }
+
+ const sourceRef = useRef();
+
+ useEffect(() => {
+ if (spatialAudio && tileRef.current && !mute) {
+ if (!sourceRef.current) {
+ sourceRef.current = audioContext.createMediaStreamSource(stream);
+ }
+
+ const tile = tileRef.current;
+ const source = sourceRef.current;
+ const pannerNode = pannerNodeRef.current;
+
+ const updatePosition = () => {
+ const bounds = tile.getBoundingClientRect();
+ const windowSize = Math.max(window.innerWidth, window.innerHeight);
+ // Position the source relative to its placement in the window
+ pannerNodeRef.current.positionX.value =
+ (bounds.x + bounds.width / 2) / windowSize - 0.5;
+ pannerNodeRef.current.positionY.value =
+ (bounds.y + bounds.height / 2) / windowSize - 0.5;
+ // Put the source in front of the listener
+ pannerNodeRef.current.positionZ.value = -2;
+ };
+
+ updatePosition();
+ source.connect(pannerNode);
+ pannerNode.connect(audioContext.destination);
+ // HACK: We abuse the CSS transitionrun event to detect when the tile
+ // moves, because useMeasure, IntersectionObserver, etc. all have no
+ // ability to track changes in the CSS transform property
+ tile.addEventListener("transitionrun", updatePosition);
+
+ return () => {
+ tile.removeEventListener("transitionrun", updatePosition);
+ source.disconnect();
+ pannerNode.disconnect();
+ };
+ }
+ }, [stream, spatialAudio, audioContext, mute]);
+
+ return [tileRef, mediaRef];
+};
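
One non-obvious consequence of the PannerNode settings above: with X/Y confined to roughly [-0.5, 0.5] and Z fixed at -2, every source stays closer to the listener (which sits at the Web Audio default origin) than refDistance, so under the default "inverse" distance model the panner applies no volume falloff at all; only the HRTF directional cues vary between tiles. A quick check using the spec's gain formula, with rolloffFactor left at its default of 1:

// Inverse distance model (Web Audio spec):
// gain = refDistance / (refDistance + rolloffFactor * (max(distance, refDistance) - refDistance))
const refDistance = 3;
const rolloffFactor = 1; // PannerNode default

// Worst case: a tile in a window corner, i.e. roughly (±0.5, ±0.5, -2).
const distance = Math.hypot(0.5, 0.5, 2); // ≈ 2.12, still under refDistance

const clamped = Math.max(distance, refDistance); // 3
const gain =
  refDistance / (refDistance + rolloffFactor * (clamped - refDistance));
console.log(gain); // 1 — no distance attenuation for any tile position
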