Merge branch 'main' into develop-js-sdk

This commit is contained in:
Robin Townsend 2022-11-08 16:10:43 -05:00
commit 15bb710394
6 changed files with 74 additions and 56 deletions

View file

@ -117,7 +117,7 @@
"This call already exists, would you like to join?": "This call already exists, would you like to join?",
"This site is protected by ReCAPTCHA and the Google <2>Privacy Policy</2> and <6>Terms of Service</6> apply.<9></9>By clicking \"Register\", you agree to our <12>Terms and conditions</12>": "This site is protected by ReCAPTCHA and the Google <2>Privacy Policy</2> and <6>Terms of Service</6> apply.<9></9>By clicking \"Register\", you agree to our <12>Terms and conditions</12>",
"This will make a speaker's audio seem as if it is coming from where their tile is positioned on screen. (Experimental feature: this may impact the stability of audio.)": "This will make a speaker's audio seem as if it is coming from where their tile is positioned on screen. (Experimental feature: this may impact the stability of audio.)",
"This will send anonymized data (such as the duration of a call and the number of participants) to the element call team to help us optimise the application based on how it is used.": "This will send anonymized data (such as the duration of a call and the number of participants) to the element call team to help us optimise the application based on how it is used.",
"This will send anonymised data (such as the duration of a call and the number of participants) to the Element Call team to help us optimise the application based on how it is used.": "This will send anonymised data (such as the duration of a call and the number of participants) to the Element Call team to help us optimise the application based on how it is used.",
"Turn off camera": "Turn off camera",
"Turn on camera": "Turn on camera",
"Unmute microphone": "Unmute microphone",

View file

@ -7,7 +7,6 @@ export VITE_PRODUCT_NAME="Element Call"
git clone https://github.com/matrix-org/matrix-js-sdk.git
cd matrix-js-sdk
git checkout robertlong/group-call
yarn install
yarn run build
yarn link

View file

@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import React, { ChangeEvent, FC, forwardRef, ReactNode } from "react";
import React, { ChangeEvent, FC, forwardRef, ReactNode, useId } from "react";
import classNames from "classnames";
import styles from "./Input.module.css";
@ -96,6 +96,8 @@ export const InputField = forwardRef<
},
ref
) => {
const descriptionId = useId();
return (
<Field
className={classNames(
@ -113,6 +115,7 @@ export const InputField = forwardRef<
id={id}
ref={ref as React.ForwardedRef<HTMLTextAreaElement>}
disabled={disabled}
aria-describedby={descriptionId}
{...rest}
/>
) : (
@ -122,6 +125,7 @@ export const InputField = forwardRef<
type={type}
checked={checked}
disabled={disabled}
aria-describedby={descriptionId}
{...rest}
/>
)}
@ -135,7 +139,11 @@ export const InputField = forwardRef<
{label}
</label>
{suffix && <span>{suffix}</span>}
{description && <p className={styles.description}>{description}</p>}
{description && (
<p id={descriptionId} className={styles.description}>
{description}
</p>
)}
</Field>
);
}

View file

@ -159,7 +159,7 @@ export const SettingsModal = (props: Props) => {
type="checkbox"
checked={optInAnalytics}
description={t(
"This will send anonymized data (such as the duration of a call and the number of participants) to the element call team to help us optimise the application based on how it is used."
"This will send anonymised data (such as the duration of a call and the number of participants) to the Element Call team to help us optimise the application based on how it is used."
)}
onChange={(event: React.ChangeEvent<HTMLInputElement>) =>
setOptInAnalytics(event.target.checked)

View file

@ -71,8 +71,8 @@ export function VideoTileContainer({
stream ?? null,
audioContext,
audioDestination,
isLocal,
localVolume
localVolume,
isLocal || maximised
);
const {
modalState: videoTileSettingsModalState,

View file

@ -20,6 +20,7 @@ import {
acquireContext,
releaseContext,
} from "matrix-js-sdk/src/webrtc/audioContext";
import { logger } from "matrix-js-sdk/src/logger";
import { useSpatialAudio } from "../settings/useSetting";
import { useEventTarget } from "../useEvents";
@ -213,10 +214,10 @@ export const useSpatialMediaStream = (
stream: MediaStream | null,
audioContext: AudioContext,
audioDestination: AudioNode,
mute = false,
localVolume?: number
localVolume: number,
mute = false
): [RefObject<HTMLDivElement>, RefObject<MediaElement>] => {
const tileRef = useRef<HTMLDivElement>();
const tileRef = useRef<HTMLDivElement | null>(null);
const [spatialAudio] = useSpatialAudio();
// We always handle audio separately from the video element
const mediaRef = useMediaStream(stream, null, true);
@ -227,53 +228,63 @@ export const useSpatialMediaStream = (
const sourceRef = useRef<MediaStreamAudioSourceNode>();
useEffect(() => {
if (spatialAudio && tileRef.current && !mute && audioTrackCount > 0) {
if (!pannerNodeRef.current) {
pannerNodeRef.current = new PannerNode(audioContext, {
panningModel: "HRTF",
refDistance: 3,
});
if (spatialAudio) {
if (tileRef.current && !mute && audioTrackCount > 0) {
logger.debug(`Rendering spatial audio for ${stream!.id}`);
if (!pannerNodeRef.current) {
pannerNodeRef.current = new PannerNode(audioContext, {
panningModel: "HRTF",
refDistance: 3,
});
}
if (!gainNodeRef.current) {
gainNodeRef.current = new GainNode(audioContext, {
gain: localVolume,
});
}
if (!sourceRef.current || sourceRef.current.mediaStream !== stream!) {
sourceRef.current = audioContext.createMediaStreamSource(stream!);
}
const tile = tileRef.current;
const source = sourceRef.current;
const gainNode = gainNodeRef.current;
const pannerNode = pannerNodeRef.current;
const updatePosition = () => {
const bounds = tile.getBoundingClientRect();
const windowSize = Math.max(window.innerWidth, window.innerHeight);
// Position the source relative to its placement in the window
pannerNodeRef.current!.positionX.value =
(bounds.x + bounds.width / 2) / windowSize - 0.5;
pannerNodeRef.current!.positionY.value =
(bounds.y + bounds.height / 2) / windowSize - 0.5;
// Put the source in front of the listener
pannerNodeRef.current!.positionZ.value = -2;
};
gainNode.gain.value = localVolume;
updatePosition();
source.connect(gainNode).connect(pannerNode).connect(audioDestination);
// HACK: We abuse the CSS transitionrun event to detect when the tile
// moves, because useMeasure, IntersectionObserver, etc. all have no
// ability to track changes in the CSS transform property
tile.addEventListener("transitionrun", updatePosition);
return () => {
tile.removeEventListener("transitionrun", updatePosition);
source.disconnect();
gainNode.disconnect();
pannerNode.disconnect();
};
} else if (stream) {
logger.debug(
`Not rendering spatial audio for ${stream.id} (tile ref ${Boolean(
tileRef.current
)}, mute ${mute}, track count ${audioTrackCount})`
);
}
if (!gainNodeRef.current) {
gainNodeRef.current = new GainNode(audioContext, {
gain: localVolume,
});
}
if (!sourceRef.current) {
sourceRef.current = audioContext.createMediaStreamSource(stream!);
}
const tile = tileRef.current;
const source = sourceRef.current;
const gainNode = gainNodeRef.current;
const pannerNode = pannerNodeRef.current;
const updatePosition = () => {
const bounds = tile.getBoundingClientRect();
const windowSize = Math.max(window.innerWidth, window.innerHeight);
// Position the source relative to its placement in the window
pannerNodeRef.current!.positionX.value =
(bounds.x + bounds.width / 2) / windowSize - 0.5;
pannerNodeRef.current!.positionY.value =
(bounds.y + bounds.height / 2) / windowSize - 0.5;
// Put the source in front of the listener
pannerNodeRef.current!.positionZ.value = -2;
};
gainNode.gain.value = localVolume;
updatePosition();
source.connect(gainNode).connect(pannerNode).connect(audioDestination);
// HACK: We abuse the CSS transitionrun event to detect when the tile
// moves, because useMeasure, IntersectionObserver, etc. all have no
// ability to track changes in the CSS transform property
tile.addEventListener("transitionrun", updatePosition);
return () => {
tile.removeEventListener("transitionrun", updatePosition);
source.disconnect();
gainNode.disconnect();
pannerNode.disconnect();
};
}
}, [
stream,