Make AEC work with spatial audio on Chrome

parent 939398b277
commit ee43fcc91f

5 changed files with 97 additions and 13 deletions
@@ -29,6 +29,7 @@ export function VideoTileContainer({
  showName,
  audioOutputDevice,
  audioContext,
  audioDestination,
  disableSpeakingIndicator,
  ...rest
}) {
@@ -47,6 +48,7 @@ export function VideoTileContainer({
    stream,
    audioOutputDevice,
    audioContext,
    audioDestination,
    isLocal
  );
@@ -15,6 +15,7 @@ limitations under the License.
*/

import { useRef, useEffect } from "react";
import { parse as parseSdp, write as writeSdp } from "sdp-transform";

import { useSpatialAudio } from "../settings/useSetting";
@@ -77,10 +78,85 @@ export function useMediaStream(stream, audioOutputDevice, mute = false) {
  return mediaRef;
}

// Loops the given audio stream back through a local peer connection, to make
// AEC work with Web Audio streams on Chrome. The resulting stream should be
// played through an audio element.
// This hack can be removed once the following bug is resolved:
// https://bugs.chromium.org/p/chromium/issues/detail?id=687574
const createLoopback = async (stream) => {
  // Prepare our local peer connections
  const conn = new RTCPeerConnection();
  const loopbackConn = new RTCPeerConnection();
  const loopbackStream = new MediaStream();

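  // Exchange ICE candidates directly between the two connections; both ends
  // live in the same page, so no signalling channel is needed.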
  conn.addEventListener("icecandidate", ({ candidate }) => {
    if (candidate) loopbackConn.addIceCandidate(new RTCIceCandidate(candidate));
  });
  loopbackConn.addEventListener("icecandidate", ({ candidate }) => {
    if (candidate) conn.addIceCandidate(new RTCIceCandidate(candidate));
  });
  loopbackConn.addEventListener("track", ({ track }) =>
    loopbackStream.addTrack(track)
  );

  // Hook the connections together
  stream.getTracks().forEach((track) => conn.addTrack(track));
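  // (Note: only offerToReceiveAudio/offerToReceiveVideo are standard
  // RTCOfferOptions; offerVideo/offerAudio are effectively ignored.)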
  const offer = await conn.createOffer({
    offerVideo: false,
    offerAudio: true,
    offerToReceiveAudio: false,
    offerToReceiveVideo: false,
  });
  await conn.setLocalDescription(offer);

  await loopbackConn.setRemoteDescription(offer);
  const answer = await loopbackConn.createAnswer();
  // Rewrite SDP to be stereo and (variable) max bitrate
  const parsedSdp = parseSdp(answer.sdp);
  parsedSdp.media.forEach((m) =>
    m.fmtp.forEach(
      (f) => (f.config += `;stereo=1;cbr=0;maxaveragebitrate=510000;`)
    )
  );
  answer.sdp = writeSdp(parsedSdp);
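  // stereo=1 marks the receiver as stereo-capable, cbr=0 allows variable
  // bitrate, and 510000 is the maximum average bitrate Opus permits
  // (RFC 7587), so the spatialised mix survives the loopback with minimal
  // quality loss.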

  await loopbackConn.setLocalDescription(answer);
  await conn.setRemoteDescription(answer);

  return loopbackStream;
};

export const useAudioContext = () => {
  const context = useRef();
  const destination = useRef();
  const audioRef = useRef();

  useEffect(() => {
    if (audioRef.current && !context.current) {
      context.current = new AudioContext();

      if (window.chrome) {
        // We're in Chrome, which needs a loopback hack applied to enable AEC
        destination.current = context.current.createMediaStreamDestination();

        const audioEl = audioRef.current;
        (async () => {
          audioEl.srcObject = await createLoopback(destination.current.stream);
          await audioEl.play();
        })();
        return () => {
          audioEl.srcObject = null;
        };
      } else {
        destination.current = context.current.destination;
      }
    }
  }, []);

  return [context.current, destination.current, audioRef];
};

export const useSpatialMediaStream = (
  stream,
  audioOutputDevice,
  audioContext,
  audioDestination,
  mute = false
) => {
  const tileRef = useRef();
@@ -93,17 +169,16 @@ export const useSpatialMediaStream = (
  );

  const pannerNodeRef = useRef();
  if (!pannerNodeRef.current) {
    pannerNodeRef.current = new PannerNode(audioContext, {
      panningModel: "HRTF",
      refDistance: 3,
    });
  }

  const sourceRef = useRef();

  useEffect(() => {
    if (spatialAudio && tileRef.current && !mute) {
      if (!pannerNodeRef.current) {
        pannerNodeRef.current = new PannerNode(audioContext, {
          panningModel: "HRTF",
          refDistance: 3,
        });
      }
      if (!sourceRef.current) {
        sourceRef.current = audioContext.createMediaStreamSource(stream);
      }
@@ -126,7 +201,7 @@ export const useSpatialMediaStream = (

      updatePosition();
      source.connect(pannerNode);
-     pannerNode.connect(audioContext.destination);
+     pannerNode.connect(audioDestination);
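      // Route through the shared destination node rather than straight to
      // the context's output: on Chrome that node feeds the AEC loopback
      // stream, so the panned audio gets echo-cancelled.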
      // HACK: We abuse the CSS transitionrun event to detect when the tile
      // moves, because useMeasure, IntersectionObserver, etc. all have no
      // ability to track changes in the CSS transform property
@@ -138,7 +213,7 @@ export const useSpatialMediaStream = (
        pannerNode.disconnect();
      };
    }
-  }, [stream, spatialAudio, audioContext, mute]);
+  }, [stream, spatialAudio, audioContext, audioDestination, mute]);

  return [tileRef, mediaRef];
};
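For context, a minimal sketch of how these hooks are meant to compose. The component names (Room, SpatialTile), the import path, and the tiles prop are illustrative assumptions, not part of this commit:

import { useAudioContext, useSpatialMediaStream } from "./useMediaStream"; // hypothetical path

// Hypothetical room component: owns the shared AudioContext and the <audio>
// element that plays the Chrome loopback stream.
function Room({ tiles, audioOutputDevice }) {
  const [audioContext, audioDestination, audioRef] = useAudioContext();

  return (
    <>
      <audio ref={audioRef} />
      {tiles.map((tile) => (
        <SpatialTile
          key={tile.id}
          stream={tile.stream}
          audioOutputDevice={audioOutputDevice}
          audioContext={audioContext}
          audioDestination={audioDestination}
        />
      ))}
    </>
  );
}

// Hypothetical tile component: tileRef tracks the tile's on-screen position
// for panning, and mediaRef attaches the stream to the <video> element.
function SpatialTile({ stream, audioOutputDevice, audioContext, audioDestination }) {
  const [tileRef, mediaRef] = useSpatialMediaStream(
    stream,
    audioOutputDevice,
    audioContext,
    audioDestination
  );

  return (
    <div ref={tileRef}>
      <video ref={mediaRef} />
    </div>
  );
}

The point of threading audioDestination through the tree is that every tile's PannerNode feeds one shared node: the context's plain destination on browsers with working Web Audio AEC, or the MediaStreamDestination that drives the loopback hack on Chrome.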