Make AEC work with spatial audio on Chrome
parent 939398b277
commit ee43fcc91f
5 changed files with 97 additions and 13 deletions
package.json
@@ -50,6 +50,7 @@
   "react-router-dom": "^5.2.0",
   "react-use-clipboard": "^1.0.7",
   "react-use-measure": "^2.1.1",
+  "sdp-transform": "^2.14.1",
   "unique-names-generator": "^4.6.0"
 },
 "devDependencies": {
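The new sdp-transform dependency is what the loopback hack below uses to rewrite the Opus answer SDP for stereo. As a rough illustration of the parse/modify/write round trip (a standalone sketch, not code from this commit; the SDP snippet is invented):

import { parse, write } from "sdp-transform";

// A minimal (invented) audio m-section with an Opus fmtp line
const sdp =
  [
    "v=0",
    "o=- 0 0 IN IP4 127.0.0.1",
    "s=-",
    "t=0 0",
    "m=audio 9 UDP/TLS/RTP/SAVPF 111",
    "c=IN IP4 0.0.0.0",
    "a=rtpmap:111 opus/48000/2",
    "a=fmtp:111 minptime=10;useinbandfec=1",
  ].join("\r\n") + "\r\n";

const session = parse(sdp);
// Append the same parameters the commit adds: stereo Opus, variable bitrate
session.media.forEach((m) =>
  m.fmtp.forEach((f) => (f.config += ";stereo=1;cbr=0;maxaveragebitrate=510000"))
);
console.log(write(session));
// The fmtp line now reads:
// a=fmtp:111 minptime=10;useinbandfec=1;stereo=1;cbr=0;maxaveragebitrate=510000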
InCallView.jsx
@@ -42,6 +42,7 @@ import { usePreventScroll } from "@react-aria/overlays";
 import { useMediaHandler } from "../settings/useMediaHandler";
 import { useShowInspector } from "../settings/useSetting";
 import { useModalTriggerState } from "../Modal";
+import { useAudioContext } from "../video-grid/useMediaStream";

 const canScreenshare = "getDisplayMedia" in navigator.mediaDevices;
 // There is currently a bug in Safari or our code with cloning and sending MediaStreams
@@ -70,12 +71,10 @@ export function InCallView({
   usePreventScroll();
   const [layout, setLayout] = useVideoGridLayout(screenshareFeeds.length > 0);

+  const [audioContext, audioDestination, audioRef] = useAudioContext();
   const { audioOutput } = useMediaHandler();
   const [showInspector] = useShowInspector();

-  const audioContext = useRef();
-  if (!audioContext.current) audioContext.current = new AudioContext();
-
   const { modalState: feedbackModalState, modalProps: feedbackModalProps } =
     useModalTriggerState();
@@ -139,6 +138,7 @@ export function InCallView({

   return (
     <div className={styles.inRoom}>
+      <audio ref={audioRef} />
       <Header>
         <LeftNav>
           <RoomHeaderInfo roomName={roomName} avatarUrl={avatarUrl} />
@@ -165,7 +165,8 @@ export function InCallView({
   getAvatar={renderAvatar}
   showName={items.length > 2 || item.focused}
   audioOutputDevice={audioOutput}
-  audioContext={audioContext.current}
+  audioContext={audioContext}
+  audioDestination={audioDestination}
   disableSpeakingIndicator={items.length < 3}
   {...rest}
 />
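Read together, the InCallView changes swap the ad-hoc per-view AudioContext for the shared useAudioContext hook and thread its destination down to every tile. Schematically (a sketch of the resulting wiring, not literal code from the file):

// Inside InCallView (sketch):
const [audioContext, audioDestination, audioRef] = useAudioContext();
// - audioContext:     the one shared AudioContext for all tiles
// - audioDestination: the node tiles mix into; on Chrome this is a
//                     MediaStreamAudioDestinationNode feeding the loopback
// - audioRef:         attached to the <audio> element that plays the
//                     loopback stream back to the user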
VideoTileContainer.jsx
@@ -29,6 +29,7 @@ export function VideoTileContainer({
   showName,
   audioOutputDevice,
   audioContext,
+  audioDestination,
   disableSpeakingIndicator,
   ...rest
 }) {
@@ -47,6 +48,7 @@ export function VideoTileContainer({
   stream,
   audioOutputDevice,
   audioContext,
+  audioDestination,
   isLocal
 );
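VideoTileContainer just forwards the new audioDestination argument into useSpatialMediaStream; note that it passes isLocal as the trailing mute flag, so your own tile isn't played back to you. For reference, the hook's contract is a pair of refs. A hypothetical consumer sketch (component name and markup are illustrative):

function SpatialTile({ stream, audioOutputDevice, audioContext, audioDestination }) {
  // tileRef marks the DOM node whose on-screen position drives panning;
  // mediaRef is attached to the underlying media element
  const [tileRef, mediaRef] = useSpatialMediaStream(
    stream,
    audioOutputDevice,
    audioContext,
    audioDestination
  );
  return (
    <div ref={tileRef}>
      <video ref={mediaRef} playsInline />
    </div>
  );
}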
@ -15,6 +15,7 @@ limitations under the License.
|
|||
*/
|
||||
|
||||
import { useRef, useEffect } from "react";
|
||||
import { parse as parseSdp, write as writeSdp } from "sdp-transform";
|
||||
|
||||
import { useSpatialAudio } from "../settings/useSetting";
|
||||
|
||||
|
useMediaStream.js
@@ -15,6 +15,7 @@ limitations under the License.
 */

 import { useRef, useEffect } from "react";
+import { parse as parseSdp, write as writeSdp } from "sdp-transform";

 import { useSpatialAudio } from "../settings/useSetting";

@@ -77,10 +78,85 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
   return mediaRef;
 }

+// Loops the given audio stream back through a local peer connection, to make
+// AEC work with Web Audio streams on Chrome. The resulting stream should be
+// played through an audio element.
+// This hack can be removed once the following bug is resolved:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=687574
+const createLoopback = async (stream) => {
+  // Prepare our local peer connections
+  const conn = new RTCPeerConnection();
+  const loopbackConn = new RTCPeerConnection();
+  const loopbackStream = new MediaStream();
+
+  conn.addEventListener("icecandidate", ({ candidate }) => {
+    if (candidate) loopbackConn.addIceCandidate(new RTCIceCandidate(candidate));
+  });
+  loopbackConn.addEventListener("icecandidate", ({ candidate }) => {
+    if (candidate) conn.addIceCandidate(new RTCIceCandidate(candidate));
+  });
+  loopbackConn.addEventListener("track", ({ track }) =>
+    loopbackStream.addTrack(track)
+  );
+
+  // Hook the connections together
+  stream.getTracks().forEach((track) => conn.addTrack(track));
+  const offer = await conn.createOffer({
+    offerVideo: false,
+    offerAudio: true,
+    offerToReceiveAudio: false,
+    offerToReceiveVideo: false,
+  });
+  await conn.setLocalDescription(offer);
+
+  await loopbackConn.setRemoteDescription(offer);
+  const answer = await loopbackConn.createAnswer();
+  // Rewrite SDP to be stereo and (variable) max bitrate
+  const parsedSdp = parseSdp(answer.sdp);
+  parsedSdp.media.forEach((m) =>
+    m.fmtp.forEach((f) => (f.config += `;stereo=1;cbr=0;maxaveragebitrate=510000;`))
+  );
+  answer.sdp = writeSdp(parsedSdp);
+
+  await loopbackConn.setLocalDescription(answer);
+  await conn.setRemoteDescription(answer);
+
+  return loopbackStream;
+};
+
+export const useAudioContext = () => {
+  const context = useRef();
+  const destination = useRef();
+  const audioRef = useRef();
+
+  useEffect(() => {
+    if (audioRef.current && !context.current) {
+      context.current = new AudioContext();
+
+      if (window.chrome) {
+        // We're in Chrome, which needs a loopback hack applied to enable AEC
+        destination.current = context.current.createMediaStreamDestination();
+
+        const audioEl = audioRef.current;
+        (async () => {
+          audioEl.srcObject = await createLoopback(destination.current.stream);
+          await audioEl.play();
+        })();
+        return () => {
+          audioEl.srcObject = null;
+        };
+      } else {
+        destination.current = context.current.destination;
+      }
+    }
+  }, []);
+
+  return [context.current, destination.current, audioRef];
+};
+
 export const useSpatialMediaStream = (
   stream,
   audioOutputDevice,
   audioContext,
+  audioDestination,
   mute = false
 ) => {
   const tileRef = useRef();
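To see the loopback hack in isolation: all Web Audio output is mixed into a MediaStreamAudioDestinationNode, pushed through the local RTCPeerConnection pair (where Chrome's echo canceller can see it), and the far end of the loop is played by a plain audio element. A minimal standalone sketch using the commit's createLoopback, runnable from a browser console (the oscillator stands in for real call audio):

const ctx = new AudioContext();
const destination = ctx.createMediaStreamDestination();

// Anything audible is routed through the destination node...
const osc = ctx.createOscillator();
osc.connect(destination);
osc.start();

// ...and the destination's stream is looped back and played via an
// audio element, so Chrome applies echo cancellation to it.
const audioEl = new Audio();
audioEl.srcObject = await createLoopback(destination.stream);
await audioEl.play();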
@@ -93,17 +169,16 @@ export const useSpatialMediaStream = (
   );

+  const pannerNodeRef = useRef();
   const sourceRef = useRef();

   useEffect(() => {
     if (spatialAudio && tileRef.current && !mute) {
+      if (!pannerNodeRef.current) {
+        pannerNodeRef.current = new PannerNode(audioContext, {
+          panningModel: "HRTF",
+          refDistance: 3,
+        });
+      }
       if (!sourceRef.current) {
         sourceRef.current = audioContext.createMediaStreamSource(stream);
       }
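The panner is now created once per tile and kept in a ref across effect runs rather than rebuilt each time. For intuition: a PannerNode with the HRTF model spatializes its input based on the node's position relative to the context's AudioListener, and refDistance sets the distance at which volume starts to fall off. A toy sketch (not from this commit; it connects to ctx.destination for simplicity, where the commit connects to audioDestination):

const ctx = new AudioContext();
const panner = new PannerNode(ctx, { panningModel: "HRTF", refDistance: 3 });

// Listener sits at the origin; place the source 3 units to the left.
// Within refDistance there is no attenuation; beyond it, volume falls off.
panner.positionX.value = -3;
panner.positionY.value = 0;
panner.positionZ.value = 0;

const osc = ctx.createOscillator();
osc.connect(panner);
panner.connect(ctx.destination);
osc.start();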
@@ -126,7 +201,7 @@ export const useSpatialMediaStream = (

       updatePosition();
       source.connect(pannerNode);
-      pannerNode.connect(audioContext.destination);
+      pannerNode.connect(audioDestination);
       // HACK: We abuse the CSS transitionrun event to detect when the tile
       // moves, because useMeasure, IntersectionObserver, etc. all have no
       // ability to track changes in the CSS transform property
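The updatePosition body isn't visible in this hunk; conceptually it reads the tile's bounding rect and maps it onto the panner's position so the audio follows the tile around the grid. A hypothetical sketch of that pattern, including the transitionrun listener the comment describes (the names and the coordinate mapping are illustrative, not the commit's actual code):

const updatePosition = () => {
  const bounds = tile.getBoundingClientRect();
  // Map the tile's center from screen space into a rough audio space,
  // with x and y normalized to [-1, 1] (illustrative mapping)
  pannerNode.positionX.value =
    ((bounds.x + bounds.width / 2) / window.innerWidth) * 2 - 1;
  pannerNode.positionY.value =
    -(((bounds.y + bounds.height / 2) / window.innerHeight) * 2 - 1);
};

// CSS transitions fire transitionrun when the tile's transform animates,
// which lets the hook detect tile movement without polling
tile.addEventListener("transitionrun", updatePosition);
// ...and in the effect's cleanup:
// tile.removeEventListener("transitionrun", updatePosition);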
@@ -138,7 +213,7 @@ export const useSpatialMediaStream = (
         pannerNode.disconnect();
       };
     }
-  }, [stream, spatialAudio, audioContext, mute]);
+  }, [stream, spatialAudio, audioContext, audioDestination, mute]);

   return [tileRef, mediaRef];
 };
yarn.lock
@@ -11046,6 +11046,11 @@ schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1:
     ajv "^6.12.5"
     ajv-keywords "^3.5.2"

+sdp-transform@^2.14.1:
+  version "2.14.1"
+  resolved "https://registry.yarnpkg.com/sdp-transform/-/sdp-transform-2.14.1.tgz#2bb443583d478dee217df4caa284c46b870d5827"
+  integrity sha512-RjZyX3nVwJyCuTo5tGPx+PZWkDMCg7oOLpSlhjDdZfwUoNqG1mM8nyj31IGHyaPWXhjbP7cdK3qZ2bmkJ1GzRw==
+
 "semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.6.0:
   version "5.7.1"
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"