Merge pull request #512 from vector-im/SimonBrandner/fix/audio

This commit is contained in:
Šimon Brandner 2022-08-16 10:07:55 +02:00 committed by GitHub
commit fc0a3f38ac
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 158 additions and 35 deletions

View file

@ -46,10 +46,12 @@ import { UserMenuContainer } from "../UserMenuContainer";
import { useRageshakeRequestModal } from "../settings/submit-rageshake";
import { RageshakeRequestModal } from "./RageshakeRequestModal";
import { useMediaHandler } from "../settings/useMediaHandler";
import { useShowInspector } from "../settings/useSetting";
import { useShowInspector, useSpatialAudio } from "../settings/useSetting";
import { useModalTriggerState } from "../Modal";
import { useAudioContext } from "../video-grid/useMediaStream";
import { useFullscreen } from "../video-grid/useFullscreen";
import { AudioContainer } from "../video-grid/AudioContainer";
import { useAudioOutputDevice } from "../video-grid/useAudioOutputDevice";
const canScreenshare = "getDisplayMedia" in (navigator.mediaDevices ?? {});
// There is currently a bug in Safari or our code with cloning and sending MediaStreams
@ -109,6 +111,8 @@ export function InCallView({
const { layout, setLayout } = useVideoGridLayout(screenshareFeeds.length > 0);
const { toggleFullscreen, fullscreenParticipant } = useFullscreen(elementRef);
const [spatialAudio] = useSpatialAudio();
const [audioContext, audioDestination, audioRef] = useAudioContext();
const { audioOutput } = useMediaHandler();
const [showInspector] = useShowInspector();
@ -116,6 +120,8 @@ export function InCallView({
const { modalState: feedbackModalState, modalProps: feedbackModalProps } =
useModalTriggerState();
useAudioOutputDevice(audioRef, audioOutput);
const items = useMemo(() => {
const participants: Participant[] = [];
@ -185,7 +191,6 @@ export function InCallView({
key={fullscreenParticipant.id}
item={fullscreenParticipant}
getAvatar={renderAvatar}
audioOutputDevice={audioOutput}
audioContext={audioContext}
audioDestination={audioDestination}
disableSpeakingIndicator={true}
@ -202,7 +207,6 @@ export function InCallView({
key={item.id}
item={item}
getAvatar={renderAvatar}
audioOutputDevice={audioOutput}
audioContext={audioContext}
audioDestination={audioDestination}
disableSpeakingIndicator={items.length < 3}
@ -218,7 +222,6 @@ export function InCallView({
items,
audioContext,
audioDestination,
audioOutput,
layout,
renderAvatar,
toggleFullscreen,
@ -236,6 +239,13 @@ export function InCallView({
return (
<div className={styles.inRoom} ref={elementRef}>
<audio ref={audioRef} />
{(!spatialAudio || fullscreenParticipant) && (
<AudioContainer
items={items}
audioContext={audioContext}
audioDestination={audioDestination}
/>
)}
{!fullscreenParticipant && (
<Header>
<LeftNav>

View file

@ -0,0 +1,97 @@
/*
Copyright 2022 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import React, { useEffect, useRef } from "react";
import { Participant } from "../room/InCallView";
import { useCallFeed } from "./useCallFeed";
import { useMediaStreamTrackCount } from "./useMediaStream";
// XXX: These in fact do not render anything but to my knowledge this is the
// only way to use a hook on an array
interface AudioForParticipantProps {
  item: Participant;
  audioContext: AudioContext;
  audioDestination: AudioNode;
}

/**
 * Wires one remote participant's audio into the shared Web Audio graph
 * (MediaStreamSource -> GainNode -> audioDestination) so that per-participant
 * volume (localVolume) can be applied. Renders nothing to the DOM.
 *
 * @param item - the participant whose call feed should be played
 * @param audioContext - the shared AudioContext owned by the call view
 * @param audioDestination - the node all participant audio is mixed into
 * @returns always `null` — this component exists only for its effect
 */
export function AudioForParticipant({
  item,
  audioContext,
  audioDestination,
}: AudioForParticipantProps): JSX.Element | null {
  const { stream, localVolume, audioMuted } = useCallFeed(item.callFeed);
  const [audioTrackCount] = useMediaStreamTrackCount(stream);

  const gainNodeRef = useRef<GainNode>();
  const sourceRef = useRef<MediaStreamAudioSourceNode>();

  useEffect(() => {
    // Only connect remote, unmuted feeds that actually carry an audio track;
    // local audio must never be played back to the user.
    if (!item.isLocal && audioContext && !audioMuted && audioTrackCount > 0) {
      // Create the nodes lazily, once; subsequent runs only reconnect them.
      // NOTE(review): sourceRef is never recreated, so if `stream` is replaced
      // with a different MediaStream the old source keeps playing — confirm
      // streams are stable for a feed's lifetime.
      if (!gainNodeRef.current) {
        gainNodeRef.current = new GainNode(audioContext, {
          gain: localVolume,
        });
      }
      if (!sourceRef.current) {
        sourceRef.current = audioContext.createMediaStreamSource(stream);
      }

      const source = sourceRef.current;
      const gainNode = gainNodeRef.current;

      // Keep the gain in sync with the user's per-participant volume setting.
      gainNode.gain.value = localVolume;
      source.connect(gainNode).connect(audioDestination);

      return () => {
        source.disconnect();
        gainNode.disconnect();
      };
    }
  }, [
    item,
    audioContext,
    audioDestination,
    stream,
    localVolume,
    audioMuted,
    audioTrackCount,
  ]);

  return null;
}
interface AudioContainerProps {
  items: Participant[];
  audioContext: AudioContext;
  audioDestination: AudioNode;
}

/**
 * Mounts one (invisible) AudioForParticipant per remote participant, so that
 * every remote feed gets routed into the shared audio graph.
 */
export function AudioContainer({
  items,
  audioContext,
  audioDestination,
}: AudioContainerProps): JSX.Element {
  const remoteItems = items.filter((item) => !item.isLocal);

  return (
    <>
      {remoteItems.map((item) => (
        <AudioForParticipant
          key={item.id}
          item={item}
          audioContext={audioContext}
          audioDestination={audioDestination}
        />
      ))}
    </>
  );
}

View file

@ -36,7 +36,6 @@ interface Props {
width: number,
height: number
) => JSX.Element;
audioOutputDevice: string;
audioContext: AudioContext;
audioDestination: AudioNode;
disableSpeakingIndicator: boolean;
@ -48,7 +47,6 @@ export function VideoTileContainer({
width,
height,
getAvatar,
audioOutputDevice,
audioContext,
audioDestination,
disableSpeakingIndicator,
@ -69,7 +67,6 @@ export function VideoTileContainer({
const { rawDisplayName } = useRoomMemberName(member);
const [tileRef, mediaRef] = useSpatialMediaStream(
stream,
audioOutputDevice,
audioContext,
audioDestination,
isLocal,

View file

@ -0,0 +1,40 @@
/*
Copyright 2022 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { RefObject, useEffect } from "react";
/**
 * Hook which applies the selected audio output device (sink ID) to the media
 * element held in `mediaRef`, re-applying whenever the element or the device
 * changes. No-ops on browsers without `setSinkId` support (e.g. Chrome for
 * Android).
 *
 * @param mediaRef - ref to the media element whose output sink should be set
 * @param audioOutputDevice - the device ID to route audio to; falsy to skip
 */
export function useAudioOutputDevice(
  mediaRef: RefObject<MediaElement>,
  audioOutputDevice: string
): void {
  useEffect(() => {
    const media = mediaRef.current;
    // The original also re-checked `!== undefined`, which truthiness already
    // covers — a single guard suffices.
    if (!media || !audioOutputDevice) return;

    if (media.setSinkId) {
      console.log(
        `useMediaStream setting output setSinkId ${audioOutputDevice}`
      );
      // Chrome for Android doesn't support this
      // setSinkId returns a promise which can reject (e.g. unknown device
      // ID); catch it so we don't surface an unhandled rejection.
      Promise.resolve(media.setSinkId(audioOutputDevice)).catch((e) => {
        console.warn(`Failed to set audio output device: ${e}`);
      });
    } else {
      console.log("Can't set output - no setsinkid");
    }
  }, [mediaRef, audioOutputDevice]);
}

View file

@ -23,6 +23,7 @@ import {
import { useSpatialAudio } from "../settings/useSetting";
import { useEventTarget } from "../useEvents";
import { useAudioOutputDevice } from "./useAudioOutputDevice";
declare global {
interface Window {
@ -60,6 +61,8 @@ export const useMediaStream = (
): RefObject<MediaElement> => {
const mediaRef = useRef<MediaElement>();
useAudioOutputDevice(mediaRef, audioOutputDevice);
useEffect(() => {
console.log(
`useMediaStream update stream mediaRef.current ${!!mediaRef.current} stream ${
@ -89,24 +92,6 @@ export const useMediaStream = (
}
}, [stream, mute]);
useEffect(() => {
if (
mediaRef.current &&
audioOutputDevice &&
mediaRef.current !== undefined
) {
if (mediaRef.current.setSinkId) {
console.log(
`useMediaStream setting output setSinkId ${audioOutputDevice}`
);
// Chrome for Android doesn't support this
mediaRef.current.setSinkId(audioOutputDevice);
} else {
console.log("Can't set output - no setsinkid");
}
}
}, [audioOutputDevice]);
useEffect(() => {
if (!mediaRef.current) return;
if (localVolume === null || localVolume === undefined) return;
@ -178,11 +163,11 @@ const createLoopback = async (stream: MediaStream): Promise<MediaStream> => {
export const useAudioContext = (): [
AudioContext,
AudioNode,
RefObject<HTMLAudioElement>
RefObject<MediaElement>
] => {
const context = useRef<AudioContext>();
const destination = useRef<AudioNode>();
const audioRef = useRef<HTMLAudioElement>();
const audioRef = useRef<MediaElement>();
useEffect(() => {
if (audioRef.current && !context.current) {
@ -214,7 +199,6 @@ export const useAudioContext = (): [
export const useSpatialMediaStream = (
stream: MediaStream,
audioOutputDevice: string,
audioContext: AudioContext,
audioDestination: AudioNode,
mute = false,
@ -222,13 +206,8 @@ export const useSpatialMediaStream = (
): [RefObject<HTMLDivElement>, RefObject<MediaElement>] => {
const tileRef = useRef<HTMLDivElement>();
const [spatialAudio] = useSpatialAudio();
// If spatial audio is enabled, we handle audio separately from the video element
const mediaRef = useMediaStream(
stream,
audioOutputDevice,
spatialAudio || mute,
localVolume
);
// We always handle audio separately from the video element
const mediaRef = useMediaStream(stream, undefined, true, undefined);
const [audioTrackCount] = useMediaStreamTrackCount(stream);
const gainNodeRef = useRef<GainNode>();