Add spatial audio capabilities
parent 9444f43c72
commit c6b90803f8

12 changed files with 205 additions and 99 deletions
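The change routes each participant's audio through a Web Audio PannerNode whose position follows the tile's location on screen, so remote voices appear to come from where their video tile is rendered. The standalone sketch below shows that routing outside of React; the `videoEl` lookup and the fixed coordinates are illustrative assumptions, not code from this commit.

// Minimal sketch of the per-tile audio graph this commit builds (assumptions:
// a <video> element already playing a remote stream, and a shared AudioContext).
const audioContext = new AudioContext(); // a real app must resume this after a user gesture
const videoEl = document.querySelector("video"); // hypothetical element lookup

// Route the element's audio through an HRTF panner instead of playing it directly
const source = audioContext.createMediaElementSource(videoEl);
const panner = new PannerNode(audioContext, { panningModel: "HRTF" });
source.connect(panner);
panner.connect(audioContext.destination);

// Place the source slightly to the left of and in front of the listener
panner.positionX.value = -0.3;
panner.positionY.value = 0;
panner.positionZ.value = -2;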
VideoTile component:

@@ -14,57 +14,63 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-import React from "react";
+import React, { forwardRef } from "react";
 import { animated } from "@react-spring/web";
 import classNames from "classnames";
 import styles from "./VideoTile.module.css";
 import { ReactComponent as MicMutedIcon } from "../icons/MicMuted.svg";
 import { ReactComponent as VideoMutedIcon } from "../icons/VideoMuted.svg";
 
-export function VideoTile({
-  className,
-  isLocal,
-  speaking,
-  audioMuted,
-  noVideo,
-  videoMuted,
-  screenshare,
-  avatar,
-  name,
-  showName,
-  mediaRef,
-  ...rest
-}) {
-  return (
-    <animated.div
-      className={classNames(styles.videoTile, className, {
-        [styles.isLocal]: isLocal,
-        [styles.speaking]: speaking,
-        [styles.muted]: audioMuted,
-        [styles.screenshare]: screenshare,
-      })}
-      {...rest}
-    >
-      {(videoMuted || noVideo) && (
-        <>
-          <div className={styles.videoMutedOverlay} />
-          {avatar}
-        </>
-      )}
-      {screenshare ? (
-        <div className={styles.presenterLabel}>
-          <span>{`${name} is presenting`}</span>
-        </div>
-      ) : (
-        (showName || audioMuted || (videoMuted && !noVideo)) && (
-          <div className={styles.memberName}>
-            {audioMuted && !(videoMuted && !noVideo) && <MicMutedIcon />}
-            {videoMuted && !noVideo && <VideoMutedIcon />}
-            {showName && <span title={name}>{name}</span>}
-          </div>
-        )
-      )}
-      <video ref={mediaRef} playsInline disablePictureInPicture />
-    </animated.div>
-  );
-}
+export const VideoTile = forwardRef(
+  (
+    {
+      className,
+      isLocal,
+      speaking,
+      audioMuted,
+      noVideo,
+      videoMuted,
+      screenshare,
+      avatar,
+      name,
+      showName,
+      mediaRef,
+      ...rest
+    },
+    ref
+  ) => {
+    return (
+      <animated.div
+        className={classNames(styles.videoTile, className, {
+          [styles.isLocal]: isLocal,
+          [styles.speaking]: speaking,
+          [styles.muted]: audioMuted,
+          [styles.screenshare]: screenshare,
+        })}
+        ref={ref}
+        {...rest}
+      >
+        {(videoMuted || noVideo) && (
+          <>
+            <div className={styles.videoMutedOverlay} />
+            {avatar}
+          </>
+        )}
+        {screenshare ? (
+          <div className={styles.presenterLabel}>
+            <span>{`${name} is presenting`}</span>
+          </div>
+        ) : (
+          (showName || audioMuted || (videoMuted && !noVideo)) && (
+            <div className={styles.memberName}>
+              {audioMuted && !(videoMuted && !noVideo) && <MicMutedIcon />}
+              {videoMuted && !noVideo && <VideoMutedIcon />}
+              {showName && <span title={name}>{name}</span>}
+            </div>
+          )
+        )}
+        <video ref={mediaRef} playsInline disablePictureInPicture />
+      </animated.div>
+    );
+  }
+);
VideoTile.module.css:

@@ -5,6 +5,10 @@
   overflow: hidden;
   cursor: pointer;
   touch-action: none;
+
+  /* HACK: This has no visual effect due to the short duration, but allows the
+  JS to detect movement via the transform property for audio spatialization */
+  transition: transform 0.000000001s;
 }
 
 .videoTile * {
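To see why the near-zero transition is useful: browsers fire a transitionrun event whenever a transition starts on a property, even one far too short to be visible, so any change to the tile's transform (as the spring animation makes when tiles move) produces an event the audio code can listen for. A minimal sketch of that detection outside React, with a hypothetical tile element:

// Assumes an element styled with `transition: transform 0.000000001s`
const tile = document.querySelector(".videoTile"); // hypothetical selector

tile.addEventListener("transitionrun", (event) => {
  if (event.propertyName === "transform") {
    const bounds = tile.getBoundingClientRect();
    console.log("tile moved to", bounds.x, bounds.y);
  }
});

// Any transform change now fires the listener, even though nothing visibly animates
tile.style.transform = "translate(120px, 40px)";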
VideoTileContainer component:

@@ -17,7 +17,7 @@ limitations under the License.
 import { SDPStreamMetadataPurpose } from "matrix-js-sdk/src/webrtc/callEventTypes";
 import React from "react";
 import { useCallFeed } from "./useCallFeed";
-import { useMediaStream } from "./useMediaStream";
+import { useSpatialMediaStream } from "./useMediaStream";
 import { useRoomMemberName } from "./useRoomMemberName";
 import { VideoTile } from "./VideoTile";

@@ -28,6 +28,7 @@ export function VideoTileContainer({
   getAvatar,
   showName,
   audioOutputDevice,
+  audioContext,
   disableSpeakingIndicator,
   ...rest
 }) {

@@ -42,7 +43,12 @@ export function VideoTileContainer({
     member,
   } = useCallFeed(item.callFeed);
   const { rawDisplayName } = useRoomMemberName(member);
-  const mediaRef = useMediaStream(stream, audioOutputDevice, isLocal);
+  const [tileRef, mediaRef] = useSpatialMediaStream(
+    stream,
+    audioOutputDevice,
+    audioContext,
+    isLocal
+  );
 
   // Firefox doesn't respect the disablePictureInPicture attribute
   // https://bugzilla.mozilla.org/show_bug.cgi?id=1611831

@@ -57,6 +63,7 @@ export function VideoTileContainer({
       screenshare={purpose === SDPStreamMetadataPurpose.Screenshare}
       name={rawDisplayName}
       showName={showName}
+      ref={tileRef}
       mediaRef={mediaRef}
       avatar={getAvatar && getAvatar(member, width, height)}
       {...rest}
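VideoTileContainer now expects an audioContext prop so that every tile shares a single Web Audio graph. The parent that renders the tiles is not part of this diff; the following is only a rough sketch of how such a parent could create and share a context. The component name, the items prop, and the import path are illustrative assumptions.

import React, { useEffect, useRef } from "react";

import { VideoTileContainer } from "./VideoTileContainer"; // assumed path

// Hypothetical parent component, not code from this commit
export function VideoGrid({ items, audioOutputDevice }) {
  // One AudioContext shared by all tiles, created once per mount
  const audioContextRef = useRef();
  if (!audioContextRef.current) {
    audioContextRef.current = new AudioContext();
  }

  // Close the context when the grid unmounts to release audio resources
  useEffect(() => {
    const audioContext = audioContextRef.current;
    return () => {
      audioContext.close();
    };
  }, []);

  return (
    <>
      {items.map((item) => (
        <VideoTileContainer
          key={item.id} // assumed item shape
          item={item}
          audioOutputDevice={audioOutputDevice}
          audioContext={audioContextRef.current}
        />
      ))}
    </>
  );
}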
useMediaStream hook:

@@ -16,6 +16,8 @@ limitations under the License.
 
 import { useRef, useEffect } from "react";
 
+import { useSpatialAudio } from "../settings/useSetting";
+
 export function useMediaStream(stream, audioOutputDevice, mute = false) {
   const mediaRef = useRef();
 
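useSpatialAudio comes from the app's settings hooks and, judging from its use below, returns a tuple whose first element is whether spatial audio is enabled. Its implementation is not part of this diff; the sketch below is only a hypothetical stand-in with that shape, and the storage key and setter are assumptions.

import { useState, useCallback } from "react";

// Hypothetical stand-in for ../settings/useSetting's useSpatialAudio:
// a boolean setting persisted to localStorage, returned as [value, setValue].
export function useSpatialAudio() {
  const [enabled, setEnabled] = useState(
    () => localStorage.getItem("spatial-audio") === "true" // assumed storage key
  );

  const setSpatialAudio = useCallback((value) => {
    localStorage.setItem("spatial-audio", value ? "true" : "false");
    setEnabled(value);
  }, []);

  return [enabled, setSpatialAudio];
}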
@@ -73,3 +75,61 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
 
   return mediaRef;
 }
+
+export const useSpatialMediaStream = (
+  stream,
+  audioOutputDevice,
+  audioContext,
+  mute = false
+) => {
+  const tileRef = useRef();
+  const [spatialAudio] = useSpatialAudio();
+  // If spatial audio is enabled, we handle mute state separately from the video element
+  const mediaRef = useMediaStream(
+    stream,
+    audioOutputDevice,
+    spatialAudio || mute
+  );
+
+  const pannerNodeRef = useRef();
+  if (!pannerNodeRef.current) {
+    pannerNodeRef.current = new PannerNode(audioContext, {
+      panningModel: "HRTF",
+    });
+  }
+
+  useEffect(() => {
+    if (spatialAudio && tileRef.current && mediaRef.current && !mute) {
+      const tile = tileRef.current;
+      const pannerNode = pannerNodeRef.current;
+
+      const source = audioContext.createMediaElementSource(mediaRef.current);
+      const updatePosition = () => {
+        const bounds = tile.getBoundingClientRect();
+        const windowSize = Math.max(window.innerWidth, window.innerHeight);
+        // Position the source relative to its placement in the window
+        pannerNodeRef.current.positionX.value =
+          (bounds.x + bounds.width / 2) / windowSize - 0.5;
+        pannerNodeRef.current.positionY.value =
+          (bounds.y + bounds.height / 2) / windowSize - 0.5;
+        // Put the source in front of the listener
+        pannerNodeRef.current.positionZ.value = -2;
+      };
+
+      source.connect(pannerNode);
+      pannerNode.connect(audioContext.destination);
+      // HACK: We abuse the CSS transitionrun event to detect when the tile
+      // moves, because useMeasure, IntersectionObserver, etc. all have no
+      // ability to track changes in the CSS transform property
+      tile.addEventListener("transitionrun", updatePosition);
+
+      return () => {
+        tile.removeEventListener("transitionrun", updatePosition);
+        source.disconnect();
+        pannerNode.disconnect();
+      };
+    }
+  }, [spatialAudio, audioContext, mediaRef, mute]);
+
+  return [tileRef, mediaRef];
+};
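To make the normalization in updatePosition concrete, here is the panner position it would compute for two tiles, assuming a 1920x1080 window (so windowSize is 1920); the tile sizes and coordinates are illustrative only.

// Assumed window: 1920x1080, so windowSize = Math.max(1920, 1080) = 1920

// Tile centred in the window: bounds { x: 860, y: 440, width: 200, height: 200 }
// positionX = (860 + 200 / 2) / 1920 - 0.5 = 960 / 1920 - 0.5 =  0
// positionY = (440 + 200 / 2) / 1920 - 0.5 = 540 / 1920 - 0.5 = -0.21875

// Tile at the right edge: bounds { x: 1720, y: 440, width: 200, height: 200 }
// positionX = (1720 + 200 / 2) / 1920 - 0.5 = 1820 / 1920 - 0.5 ≈ 0.448
// positionY = (440 + 200 / 2) / 1920 - 0.5 =  540 / 1920 - 0.5 = -0.21875

// Both tiles sit at positionZ = -2, i.e. two units in front of the listener,
// so the right-hand tile is heard noticeably to the listener's right.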