/*
Copyright 2022 Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { useRef, useEffect, RefObject } from "react";

import { parse as parseSdp, write as writeSdp } from "sdp-transform";
import {
  acquireContext,
  releaseContext,
} from "matrix-js-sdk/src/webrtc/audioContext";

import { useSpatialAudio } from "../settings/useSetting";
declare global {
|
|
|
|
interface Window {
|
|
|
|
// For detecting whether this browser is Chrome or not
|
|
|
|
chrome?: unknown;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
export const useMediaStream = (
|
|
|
|
stream: MediaStream,
|
|
|
|
audioOutputDevice: string,
|
2022-07-14 16:18:10 +02:00
|
|
|
mute = false,
|
2022-08-02 00:46:16 +02:00
|
|
|
localVolume?: number
|
2022-06-16 10:01:52 -04:00
|
|
|
): RefObject<MediaElement> => {
|
2022-06-13 17:24:25 -04:00
|
|
|
const mediaRef = useRef<MediaElement>();
|
2022-04-07 14:22:36 -07:00
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
console.log(
|
|
|
|
`useMediaStream update stream mediaRef.current ${!!mediaRef.current} stream ${
|
|
|
|
stream && stream.id
|
|
|
|
}`
|
|
|
|
);
|
|
|
|
|
|
|
|
if (mediaRef.current) {
|
2022-05-23 09:16:40 -04:00
|
|
|
const mediaEl = mediaRef.current;
|
|
|
|
|
2022-04-07 14:22:36 -07:00
|
|
|
if (stream) {
|
2022-05-23 09:16:40 -04:00
|
|
|
mediaEl.muted = mute;
|
|
|
|
mediaEl.srcObject = stream;
|
|
|
|
mediaEl.play();
|
|
|
|
|
|
|
|
// Unmuting the tab in Safari causes all video elements to be individually
|
|
|
|
// unmuted, so we need to reset the mute state here to prevent audio loops
|
|
|
|
const onVolumeChange = () => {
|
|
|
|
mediaEl.muted = mute;
|
|
|
|
};
|
|
|
|
mediaEl.addEventListener("volumechange", onVolumeChange);
|
|
|
|
return () =>
|
|
|
|
mediaEl.removeEventListener("volumechange", onVolumeChange);
|
2022-04-07 14:22:36 -07:00
|
|
|
} else {
|
|
|
|
mediaRef.current.srcObject = null;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}, [stream, mute]);
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
if (
|
|
|
|
mediaRef.current &&
|
|
|
|
audioOutputDevice &&
|
|
|
|
mediaRef.current !== undefined
|
|
|
|
) {
|
2022-06-15 21:37:42 +01:00
|
|
|
if (mediaRef.current.setSinkId) {
|
|
|
|
console.log(
|
|
|
|
`useMediaStream setting output setSinkId ${audioOutputDevice}`
|
|
|
|
);
|
2022-06-28 15:12:59 +01:00
|
|
|
// Chrome for Android doesn't support this
|
|
|
|
mediaRef.current.setSinkId(audioOutputDevice);
|
2022-06-15 21:37:42 +01:00
|
|
|
} else {
|
|
|
|
console.log("Can't set output - no setsinkid");
|
|
|
|
}
|
2022-04-07 14:22:36 -07:00
|
|
|
}
|
|
|
|
}, [audioOutputDevice]);
|
|
|
|
|
2022-07-14 16:18:10 +02:00
|
|
|
useEffect(() => {
|
|
|
|
if (!mediaRef.current) return;
|
|
|
|
if (localVolume === null || localVolume === undefined) return;
|
|
|
|
|
|
|
|
mediaRef.current.volume = localVolume;
|
2022-07-15 11:22:13 +02:00
|
|
|
}, [localVolume]);
|
2022-07-14 16:18:10 +02:00
|
|
|
|
2022-04-07 14:22:36 -07:00
|
|
|
useEffect(() => {
|
|
|
|
const mediaEl = mediaRef.current;
|
|
|
|
|
|
|
|
return () => {
|
|
|
|
if (mediaEl) {
|
|
|
|
// Ensure we set srcObject to null before unmounting to prevent memory leak
|
|
|
|
// https://webrtchacks.com/srcobject-intervention/
|
|
|
|
mediaEl.srcObject = null;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}, []);
|
|
|
|
|
|
|
|
return mediaRef;
|
2022-06-13 17:24:25 -04:00
|
|
|
};
|
2022-05-31 10:43:05 -04:00
|
|
|
|
2022-06-13 13:31:44 -04:00
|
|
|
// Loops the given audio stream back through a local peer connection, to make
|
|
|
|
// AEC work with Web Audio streams on Chrome. The resulting stream should be
|
|
|
|
// played through an audio element.
|
|
|
|
// This hack can be removed once the following bug is resolved:
|
|
|
|
// https://bugs.chromium.org/p/chromium/issues/detail?id=687574
|
2022-06-13 17:24:25 -04:00
|
|
|
const createLoopback = async (stream: MediaStream): Promise<MediaStream> => {
|
2022-06-13 13:31:44 -04:00
|
|
|
// Prepare our local peer connections
|
|
|
|
const conn = new RTCPeerConnection();
|
|
|
|
const loopbackConn = new RTCPeerConnection();
|
|
|
|
const loopbackStream = new MediaStream();
|
|
|
|
|
|
|
|
conn.addEventListener("icecandidate", ({ candidate }) => {
|
|
|
|
if (candidate) loopbackConn.addIceCandidate(new RTCIceCandidate(candidate));
|
|
|
|
});
|
|
|
|
loopbackConn.addEventListener("icecandidate", ({ candidate }) => {
|
|
|
|
if (candidate) conn.addIceCandidate(new RTCIceCandidate(candidate));
|
|
|
|
});
|
|
|
|
loopbackConn.addEventListener("track", ({ track }) =>
|
2022-06-13 13:34:45 -04:00
|
|
|
loopbackStream.addTrack(track)
|
2022-06-13 13:31:44 -04:00
|
|
|
);
|
|
|
|
|
|
|
|
// Hook the connections together
|
|
|
|
stream.getTracks().forEach((track) => conn.addTrack(track));
|
|
|
|
const offer = await conn.createOffer({
|
|
|
|
offerToReceiveAudio: false,
|
|
|
|
offerToReceiveVideo: false,
|
|
|
|
});
|
|
|
|
await conn.setLocalDescription(offer);
|
|
|
|
|
|
|
|
await loopbackConn.setRemoteDescription(offer);
|
|
|
|
const answer = await loopbackConn.createAnswer();
|
|
|
|
// Rewrite SDP to be stereo and (variable) max bitrate
|
|
|
|
const parsedSdp = parseSdp(answer.sdp);
|
2022-06-13 13:34:45 -04:00
|
|
|
parsedSdp.media.forEach((m) =>
|
|
|
|
m.fmtp.forEach(
|
|
|
|
(f) => (f.config += `;stereo=1;cbr=0;maxaveragebitrate=510000;`)
|
|
|
|
)
|
2022-06-13 13:31:44 -04:00
|
|
|
);
|
|
|
|
answer.sdp = writeSdp(parsedSdp);
|
|
|
|
|
|
|
|
await loopbackConn.setLocalDescription(answer);
|
|
|
|
await conn.setRemoteDescription(answer);
|
|
|
|
|
|
|
|
return loopbackStream;
|
|
|
|
};
|
|
|
|
|
2022-06-13 17:24:25 -04:00
|
|
|
export const useAudioContext = (): [
|
|
|
|
AudioContext,
|
|
|
|
AudioNode,
|
|
|
|
RefObject<HTMLAudioElement>
|
|
|
|
] => {
|
|
|
|
const context = useRef<AudioContext>();
|
|
|
|
const destination = useRef<AudioNode>();
|
|
|
|
const audioRef = useRef<HTMLAudioElement>();
|
2022-06-13 13:31:44 -04:00
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
if (audioRef.current && !context.current) {
|
2022-07-01 12:08:15 -04:00
|
|
|
context.current = acquireContext();
|
2022-06-13 13:31:44 -04:00
|
|
|
|
|
|
|
if (window.chrome) {
|
|
|
|
// We're in Chrome, which needs a loopback hack applied to enable AEC
|
2022-06-13 17:24:25 -04:00
|
|
|
const streamDest = context.current.createMediaStreamDestination();
|
|
|
|
destination.current = streamDest;
|
2022-06-13 13:31:44 -04:00
|
|
|
|
|
|
|
const audioEl = audioRef.current;
|
|
|
|
(async () => {
|
2022-06-13 17:24:25 -04:00
|
|
|
audioEl.srcObject = await createLoopback(streamDest.stream);
|
2022-06-13 13:31:44 -04:00
|
|
|
await audioEl.play();
|
|
|
|
})();
|
2022-06-13 13:34:45 -04:00
|
|
|
return () => {
|
|
|
|
audioEl.srcObject = null;
|
2022-07-01 12:08:15 -04:00
|
|
|
releaseContext();
|
2022-06-13 13:34:45 -04:00
|
|
|
};
|
2022-06-13 13:31:44 -04:00
|
|
|
} else {
|
|
|
|
destination.current = context.current.destination;
|
2022-07-01 12:08:15 -04:00
|
|
|
return releaseContext;
|
2022-06-13 13:31:44 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}, []);
|
|
|
|
|
|
|
|
return [context.current, destination.current, audioRef];
|
|
|
|
};
|
|
|
|
|
2022-05-31 10:43:05 -04:00
|
|
|
export const useSpatialMediaStream = (
|
2022-06-13 17:24:25 -04:00
|
|
|
stream: MediaStream,
|
|
|
|
audioOutputDevice: string,
|
|
|
|
audioContext: AudioContext,
|
|
|
|
audioDestination: AudioNode,
|
2022-07-14 16:18:10 +02:00
|
|
|
mute = false,
|
2022-08-02 00:46:16 +02:00
|
|
|
localVolume?: number
|
2022-06-16 10:01:52 -04:00
|
|
|
): [RefObject<Element>, RefObject<MediaElement>] => {
|
2022-06-13 17:24:25 -04:00
|
|
|
const tileRef = useRef<Element>();
|
2022-05-31 10:43:05 -04:00
|
|
|
const [spatialAudio] = useSpatialAudio();
|
2022-05-31 16:11:39 -04:00
|
|
|
// If spatial audio is enabled, we handle audio separately from the video element
|
2022-05-31 10:43:05 -04:00
|
|
|
const mediaRef = useMediaStream(
|
|
|
|
stream,
|
|
|
|
audioOutputDevice,
|
2022-07-14 16:18:10 +02:00
|
|
|
spatialAudio || mute,
|
2022-07-15 11:22:13 +02:00
|
|
|
localVolume
|
2022-05-31 10:43:05 -04:00
|
|
|
);
|
|
|
|
|
2022-07-28 09:16:49 +02:00
|
|
|
const gainNodeRef = useRef<GainNode>();
|
2022-06-13 17:24:25 -04:00
|
|
|
const pannerNodeRef = useRef<PannerNode>();
|
|
|
|
const sourceRef = useRef<MediaStreamAudioSourceNode>();
|
2022-05-31 16:11:39 -04:00
|
|
|
|
2022-05-31 10:43:05 -04:00
|
|
|
useEffect(() => {
|
2022-07-20 20:49:07 +01:00
|
|
|
if (
|
|
|
|
spatialAudio &&
|
|
|
|
tileRef.current &&
|
|
|
|
!mute &&
|
|
|
|
stream.getAudioTracks().length > 0
|
|
|
|
) {
|
2022-06-13 13:31:44 -04:00
|
|
|
if (!pannerNodeRef.current) {
|
|
|
|
pannerNodeRef.current = new PannerNode(audioContext, {
|
|
|
|
panningModel: "HRTF",
|
|
|
|
refDistance: 3,
|
|
|
|
});
|
|
|
|
}
|
2022-07-28 09:16:49 +02:00
|
|
|
if (!gainNodeRef.current) {
|
|
|
|
gainNodeRef.current = new GainNode(audioContext, {
|
|
|
|
gain: localVolume,
|
|
|
|
});
|
|
|
|
}
|
2022-05-31 16:11:39 -04:00
|
|
|
if (!sourceRef.current) {
|
|
|
|
sourceRef.current = audioContext.createMediaStreamSource(stream);
|
|
|
|
}
|
|
|
|
|
2022-05-31 10:43:05 -04:00
|
|
|
const tile = tileRef.current;
|
2022-05-31 16:11:39 -04:00
|
|
|
const source = sourceRef.current;
|
2022-07-28 09:16:49 +02:00
|
|
|
const gainNode = gainNodeRef.current;
|
2022-05-31 10:43:05 -04:00
|
|
|
const pannerNode = pannerNodeRef.current;
|
|
|
|
|
|
|
|
const updatePosition = () => {
|
|
|
|
const bounds = tile.getBoundingClientRect();
|
|
|
|
const windowSize = Math.max(window.innerWidth, window.innerHeight);
|
|
|
|
// Position the source relative to its placement in the window
|
|
|
|
pannerNodeRef.current.positionX.value =
|
|
|
|
(bounds.x + bounds.width / 2) / windowSize - 0.5;
|
|
|
|
pannerNodeRef.current.positionY.value =
|
|
|
|
(bounds.y + bounds.height / 2) / windowSize - 0.5;
|
|
|
|
// Put the source in front of the listener
|
|
|
|
pannerNodeRef.current.positionZ.value = -2;
|
|
|
|
};
|
|
|
|
|
2022-07-28 09:16:49 +02:00
|
|
|
gainNode.gain.value = localVolume;
|
2022-05-31 16:11:39 -04:00
|
|
|
updatePosition();
|
2022-07-28 09:16:49 +02:00
|
|
|
source.connect(gainNode).connect(pannerNode).connect(audioDestination);
|
2022-05-31 10:43:05 -04:00
|
|
|
// HACK: We abuse the CSS transitionrun event to detect when the tile
|
|
|
|
// moves, because useMeasure, IntersectionObserver, etc. all have no
|
|
|
|
// ability to track changes in the CSS transform property
|
|
|
|
tile.addEventListener("transitionrun", updatePosition);
|
|
|
|
|
|
|
|
return () => {
|
|
|
|
tile.removeEventListener("transitionrun", updatePosition);
|
|
|
|
source.disconnect();
|
2022-07-28 09:16:49 +02:00
|
|
|
gainNode.disconnect();
|
2022-05-31 10:43:05 -04:00
|
|
|
pannerNode.disconnect();
|
|
|
|
};
|
|
|
|
}
|
2022-07-28 09:16:49 +02:00
|
|
|
}, [stream, spatialAudio, audioContext, audioDestination, mute, localVolume]);
|
2022-05-31 10:43:05 -04:00
|
|
|
|
|
|
|
return [tileRef, mediaRef];
|
|
|
|
};
|