From 4469240d28e1a0189a215e33a9b24a87bd184a67 Mon Sep 17 00:00:00 2001
From: Delirium
Date: Wed, 2 Aug 2023 10:56:54 +0200
Subject: [PATCH] fix(ui): merge audio tracks on node chain for recording (#1433)

* fix(ui): merge audio tracks on node chain for recording

* fix(ui): fix
---
 .../AssistActions/AssistActions.tsx           |  59 +++++-----
 .../app/components/Session/LivePlayer.tsx     |   2 +
 frontend/app/player/web/assist/LocalStream.ts | 108 +++++++++---------
 frontend/app/utils/screenRecorder.ts          |  64 +++++++++--
 4 files changed, 146 insertions(+), 87 deletions(-)

diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
index 71a117868..29d2c42a4 100644
--- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
+++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
@@ -3,12 +3,7 @@ import { Button, Tooltip } from 'UI';
 import { connect } from 'react-redux';
 import cn from 'classnames';
 import ChatWindow from '../../ChatWindow';
-import {
-  CallingState,
-  ConnectionStatus,
-  RemoteControlStatus,
-  RequestLocalStream,
-} from 'Player';
+import { CallingState, ConnectionStatus, RemoteControlStatus, RequestLocalStream } from 'Player';
 import type { LocalStream } from 'Player';
 import { PlayerContext, ILivePlayerContext } from 'App/components/Session/playerContext';
 import { observer } from 'mobx-react-lite';
@@ -16,12 +11,14 @@ import { toast } from 'react-toastify';
 import { confirm } from 'UI';
 import stl from './AassistActions.module.css';
 import ScreenRecorder from 'App/components/Session_/ScreenRecorder/ScreenRecorder';
+import { audioContextManager } from 'App/utils/screenRecorder';
 
 function onReject() {
   toast.info(`Call was rejected.`);
 }
+
 function onControlReject() {
-  toast.info('Remote control request was rejected by user')
+  toast.info('Remote control request was rejected by user');
 }
 
 function onError(e: any) {
@@ -47,7 +44,7 @@ function AssistActions({
   userDisplayName,
 }: Props) {
   // @ts-ignore ???
-  const { player, store } = React.useContext(PlayerContext)
+  const { player, store } = React.useContext(PlayerContext);
 
   const {
     assistManager: {
@@ -55,17 +52,17 @@ function AssistActions({
       onCall,
       setCallArgs,
       requestReleaseRemoteControl,
       toggleAnnotation,
-      setRemoteControlCallbacks
+      setRemoteControlCallbacks,
     },
-    toggleUserName,
-  } = player
+    toggleUserName,
+  } = player;
   const {
     calling,
     annotating,
     peerConnectionStatus,
     remoteControl: remoteControlStatus,
     livePlay,
-  } = store.get()
+  } = store.get();
   const [isPrestart, setPrestart] = useState(false);
   const [incomeStream, setIncomeStream] = useState([]);
@@ -121,8 +118,9 @@ function AssistActions({
 
   const addIncomeStream = (stream: MediaStream) => {
     setIncomeStream((oldState) => {
-      if (oldState === null) return [stream]
+      if (oldState === null) return [stream];
       if (!oldState.find((existingStream) => existingStream.id === stream.id)) {
+        audioContextManager.mergeAudioStreams(stream);
         return [...oldState, stream];
       }
       return oldState;
@@ -133,7 +131,16 @@ function AssistActions({
     RequestLocalStream()
      .then((lStream) => {
        setLocalStream(lStream);
-        setCallArgs(lStream, addIncomeStream, lStream.stop.bind(lStream), onReject, onError);
+        audioContextManager.mergeAudioStreams(lStream.stream);
+        setCallArgs(
+          lStream,
+          addIncomeStream,
+          () => {
+            lStream.stop();
+          },
+          onReject,
+          onError
+        );
        setCallObject(callPeer());
        if (additionalAgentIds) {
          callPeer(additionalAgentIds);
@@ -157,7 +164,7 @@ function AssistActions({
   };
 
   const requestControl = () => {
-    setRemoteControlCallbacks({ onReject: onControlReject })
+    setRemoteControlCallbacks({ onReject: onControlReject });
     if (callRequesting || remoteRequesting) return;
     requestReleaseRemoteControl();
   };
@@ -249,17 +256,13 @@ function AssistActions({
   );
 }
 
-const con = connect(
-  (state: any) => {
-    const permissions = state.getIn(['user', 'account', 'permissions']) || [];
-    return {
-      hasPermission: permissions.includes('ASSIST_CALL'),
-      isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
-      userDisplayName: state.getIn(['sessions', 'current']).userDisplayName,
-    };
-  }
-);
+const con = connect((state: any) => {
+  const permissions = state.getIn(['user', 'account', 'permissions']) || [];
+  return {
+    hasPermission: permissions.includes('ASSIST_CALL'),
+    isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
+    userDisplayName: state.getIn(['sessions', 'current']).userDisplayName,
+  };
+});
 
-export default con(
-  observer(AssistActions)
-);
+export default con(observer(AssistActions));
diff --git a/frontend/app/components/Session/LivePlayer.tsx b/frontend/app/components/Session/LivePlayer.tsx
index d4913766b..65c9a8536 100644
--- a/frontend/app/components/Session/LivePlayer.tsx
+++ b/frontend/app/components/Session/LivePlayer.tsx
@@ -1,3 +1,4 @@
+import {audioContextManager} from "App/utils/screenRecorder";
 import React from 'react';
 import { useEffect, useState } from 'react';
 import { connect } from 'react-redux';
@@ -72,6 +73,7 @@ function LivePlayer({
     return () => {
       if (!location.pathname.includes('multiview') || !location.pathname.includes(usedSession.sessionId)) {
         console.debug('unmount', usedSession.sessionId)
+        audioContextManager.clear();
         playerInst?.clean?.();
         // @ts-ignore default empty
         setContextValue(defaultContextValue)
diff --git a/frontend/app/player/web/assist/LocalStream.ts b/frontend/app/player/web/assist/LocalStream.ts
index e7d3bae15..42021c1b7 100644
--- a/frontend/app/player/web/assist/LocalStream.ts
+++ b/frontend/app/player/web/assist/LocalStream.ts
@@ -1,16 +1,18 @@
+import { audioContextManager } from 'App/utils/screenRecorder';
+
 declare global {
-    interface HTMLCanvasElement {
-        captureStream(frameRate?: number): MediaStream;
+  interface HTMLCanvasElement {
+    captureStream(frameRate?: number): MediaStream;
   }
 }
 
-function dummyTrack(): MediaStreamTrack {
-  const canvas = document.createElement("canvas")//, { width: 0, height: 0})
-  canvas.width=canvas.height=2 // Doesn't work when 1 (?!)
+function dummyTrack(): MediaStreamTrack {
+  const canvas = document.createElement('canvas'); //, { width: 0, height: 0})
+  canvas.width = canvas.height = 2; // Doesn't work when 1 (?!)
   const ctx = canvas.getContext('2d');
   ctx?.fillRect(0, 0, canvas.width, canvas.height);
-  requestAnimationFrame(function draw(){
-    ctx?.fillRect(0,0, canvas.width, canvas.height)
+  requestAnimationFrame(function draw() {
+    ctx?.fillRect(0, 0, canvas.width, canvas.height);
     requestAnimationFrame(draw);
   });
   // Also works. Probably it should be done once connected.
@@ -19,68 +21,72 @@ function dummyTrack(): MediaStreamTrack {
 }
 
 export function RequestLocalStream(): Promise<LocalStream> {
-  return navigator.mediaDevices.getUserMedia({ audio:true })
-    .then(aStream => {
-      const aTrack = aStream.getAudioTracks()[0]
-      if (!aTrack) { throw new Error("No audio tracks provided") }
-      return new _LocalStream(aTrack)
-    })
+  return navigator.mediaDevices.getUserMedia({ audio: true }).then((aStream) => {
+    const aTrack = aStream.getAudioTracks()[0];
+    if (!aTrack) {
+      throw new Error('No audio tracks provided');
+    }
+    return new _LocalStream(aTrack);
+  });
 }
 
 class _LocalStream {
-  private mediaRequested: boolean = false
-  readonly stream: MediaStream
-  private readonly vdTrack: MediaStreamTrack
+  private mediaRequested: boolean = false;
+  readonly stream: MediaStream;
+  private readonly vdTrack: MediaStreamTrack;
+
   constructor(aTrack: MediaStreamTrack) {
-    this.vdTrack = dummyTrack()
-    this.stream = new MediaStream([ aTrack, this.vdTrack ])
+    this.vdTrack = dummyTrack();
+    this.stream = new MediaStream([aTrack, this.vdTrack]);
   }
 
   toggleVideo(): Promise<boolean> {
     if (!this.mediaRequested) {
-      return navigator.mediaDevices.getUserMedia({video:true})
-        .then(vStream => {
-          const vTrack = vStream.getVideoTracks()[0]
-          if (!vTrack) {
-            throw new Error("No video track provided")
-          }
-          this.stream.addTrack(vTrack)
-          this.stream.removeTrack(this.vdTrack)
-          this.mediaRequested = true
-          if (this.onVideoTrackCb) {
-            this.onVideoTrackCb(vTrack)
-          }
-          return true
-        })
-        .catch(e => {
-          // TODO: log
-          console.error(e)
-          return false
-        })
+      return navigator.mediaDevices
+        .getUserMedia({ video: true })
+        .then((vStream) => {
+          const vTrack = vStream.getVideoTracks()[0];
+          if (!vTrack) {
+            throw new Error('No video track provided');
+          }
+          this.stream.addTrack(vTrack);
+          this.stream.removeTrack(this.vdTrack);
+          this.mediaRequested = true;
+          if (this.onVideoTrackCb) {
+            this.onVideoTrackCb(vTrack);
+          }
+          return true;
+        })
+        .catch((e) => {
+          // TODO: log
+          console.error(e);
+          return false;
+        });
     }
-    let enabled = true
-    this.stream.getVideoTracks().forEach(track => {
-      track.enabled = enabled = enabled && !track.enabled
-    })
-    return Promise.resolve(enabled)
+    let enabled = true;
+    this.stream.getVideoTracks().forEach((track) => {
+      track.enabled = enabled = enabled && !track.enabled;
+    });
+    return Promise.resolve(enabled);
   }
 
   toggleAudio(): boolean {
-    let enabled = true
-    this.stream.getAudioTracks().forEach(track => {
-      track.enabled = enabled = enabled && !track.enabled
-    })
-    return enabled
+    let enabled = true;
+    this.stream.getAudioTracks().forEach((track) => {
+      track.enabled = enabled = enabled && !track.enabled;
+    });
+    return enabled;
   }
 
-  private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null
+  private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null;
+
   onVideoTrack(cb: (t: MediaStreamTrack) => void) {
-    this.onVideoTrackCb = cb
+    this.onVideoTrackCb = cb;
   }
 
   stop() {
-    this.stream.getTracks().forEach(t => t.stop())
+    this.stream.getTracks().forEach((t) => t.stop());
   }
 }
 
-export type LocalStream = InstanceType<typeof _LocalStream>
+export type LocalStream = InstanceType<typeof _LocalStream>;
diff --git a/frontend/app/utils/screenRecorder.ts b/frontend/app/utils/screenRecorder.ts
index 3905d7b05..4ac6aab62 100644
--- a/frontend/app/utils/screenRecorder.ts
+++ b/frontend/app/utils/screenRecorder.ts
@@ -1,5 +1,29 @@
 import { toast } from 'react-toastify';
 
+class AudioContextManager {
+  context = new AudioContext();
+  destination = this.context.createMediaStreamDestination();
+
+  getAllTracks() {
+    return this.destination.stream.getAudioTracks() || [];
+  }
+
+  mergeAudioStreams(stream: MediaStream) {
+    const source = this.context.createMediaStreamSource(stream);
+    const gain = this.context.createGain();
+    gain.gain.value = 0.7;
+    return source.connect(gain).connect(this.destination);
+  }
+
+  clear() {
+    // when everything is removed, tracks will be stopped automatically (hopefully)
+    this.context = new AudioContext();
+    this.destination = this.context.createMediaStreamDestination();
+  }
+}
+
+export const audioContextManager = new AudioContextManager();
+
 const FILE_TYPE = 'video/webm';
 const FRAME_RATE = 30;
 
@@ -16,7 +40,7 @@ function createFileRecorder(
   let recordedChunks: BlobPart[] = [];
   const SAVE_INTERVAL_MS = 200;
 
-  const mediaRecorder = new MediaRecorder(stream);
+  const mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/webm; codecs=vp8,opus' });
 
   mediaRecorder.ondataavailable = function (e) {
     if (e.data.size > 0) {
@@ -29,7 +53,7 @@ function createFileRecorder(
     ended = true;
 
     saveFile(recordedChunks, mimeType, start, recName, sessionId, saveCb);
-    onStop()
+    onStop();
     recordedChunks = [];
   }
 
@@ -74,13 +98,24 @@ function saveFile(
 }
 
 async function recordScreen() {
-  return await navigator.mediaDevices.getDisplayMedia({
-    audio: true,
+  const desktopStreams = await navigator.mediaDevices.getDisplayMedia({
+    audio: {
+      // @ts-ignore
+      restrictOwnAudio: false,
+      echoCancellation: true,
+      noiseSuppression: false,
+      sampleRate: 44100,
+    },
     video: { frameRate: FRAME_RATE },
     // potential chrome hack
     // @ts-ignore
     preferCurrentTab: true,
   });
+  audioContextManager.mergeAudioStreams(desktopStreams);
+  return new MediaStream([
+    ...desktopStreams.getVideoTracks(),
+    ...audioContextManager.getAllTracks(),
+  ]);
 }
 
 /**
  * Starts recording the screen
  *
  * @returns a promise that resolves to a function that stops the recording
  */
-export async function screenRecorder(recName: string, sessionId: string, saveCb: (saveObj: { name: string; duration: number }, blob: Blob) => void, onStop: () => void) {
+export async function screenRecorder(
+  recName: string,
+  sessionId: string,
+  saveCb: (
+    saveObj: {
+      name: string;
+      duration: number;
+    },
+    blob: Blob
+  ) => void,
+  onStop: () => void
+) {
   try {
     const stream = await recordScreen();
     const mediaRecorder = createFileRecorder(stream, FILE_TYPE, recName, sessionId, saveCb, onStop);
 
     return () => {
       if (mediaRecorder.state !== 'inactive') {
         mediaRecorder.stop();
-        onStop()
+        onStop();
       }
-    }
+    };
   } catch (e) {
-    toast.error('Screen recording is not permitted by your system and/or browser. Make sure to enable it in your browser as well as in your system settings.');
+    toast.error(
+      'Screen recording is not permitted by your system and/or browser. Make sure to enable it in your browser as well as in your system settings.'
+    );
     throw new Error('OpenReplay recording: ' + e);
   }
 }
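
Note, not part of the patch itself: the AudioContextManager introduced above merges audio by routing every incoming MediaStream through the same Web Audio node chain (MediaStreamAudioSourceNode -> GainNode -> MediaStreamAudioDestinationNode), so the recorder only ever consumes the destination's tracks. A minimal standalone sketch of that pattern, with illustrative names (AudioMergerSketch, addStream) that do not exist in the codebase:

// Sketch only: every source stream is piped into one shared
// MediaStreamAudioDestinationNode, whose stream carries the mixed audio.
class AudioMergerSketch {
  private context = new AudioContext();
  private destination = this.context.createMediaStreamDestination();

  // Route one stream's audio through a per-source gain node into the mix.
  addStream(stream: MediaStream, volume = 0.7): void {
    const source = this.context.createMediaStreamSource(stream);
    const gain = this.context.createGain();
    gain.gain.value = volume; // 0.7 mirrors the default used in the patch
    source.connect(gain).connect(this.destination);
  }

  // Mixed audio tracks to hand to a MediaRecorder alongside video tracks.
  getMixedTracks(): MediaStreamTrack[] {
    return this.destination.stream.getAudioTracks();
  }
}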
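
A hedged usage sketch of feeding the merged tracks into a recorder, roughly what recordScreen and createFileRecorder do after this patch. The isTypeSupported fallback and the empty-audio guard are defensive additions for the example, not changes the patch makes:

// Sketch only: combine captured video with the mixed audio and record it.
async function recordTabSketch(merger: AudioMergerSketch): Promise<MediaRecorder> {
  const display = await navigator.mediaDevices.getDisplayMedia({
    audio: true,
    video: { frameRate: 30 },
  });
  // getDisplayMedia may return no audio track; only merge when one exists,
  // since createMediaStreamSource throws on an audio-less stream.
  if (display.getAudioTracks().length > 0) {
    merger.addStream(display);
  }
  const combined = new MediaStream([...display.getVideoTracks(), ...merger.getMixedTracks()]);

  // Prefer vp8/opus as in the patch, fall back if the browser rejects it.
  const preferred = 'video/webm; codecs=vp8,opus';
  const mimeType = MediaRecorder.isTypeSupported(preferred) ? preferred : 'video/webm';
  const recorder = new MediaRecorder(combined, { mimeType });
  recorder.start(200); // emit a chunk every 200 ms, like SAVE_INTERVAL_MS above
  return recorder;
}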