openreplay/frontend/app/player/web/assist/LocalStream.ts
Delirium cc34356294
[wip] user testing ui/tracker (#1520)
* feat(ui): some design mocks

* fix(ui): some fixes for stuff

* feat(ui): test overview page layout

* feat(ui): fix placeholder

* feat(ui): answers table modal

* feat(tracker): user testing module in tracker

* fix(tracker): add "thank you" section, refactor file to make it readable

* fix(tracker): naming

* fix(tracker): some refactorings for the user testing module

* feat(tracker): export assist version to the window object, add recorder manager for user testing

* feat(tracker): refactor UT file

* feat(tracker): add recording delay for UT module

* feat(tracker): dnd for UT widget

* fix(tracker): changelog for assist

* fix(tracker): cover ut with tests

* fix(tracker): update package scripts to include testing before releasing packages

* fix(ui): fix uxt routes

* feat(ui): uxt store

* feat(ui): uxt store connection

* feat(ui): some api connections for uxt

* feat(ui): api connections

* feat(ui): uxt components for replay

* feat(ui): make events shared

* feat(ui): final fixes
2023-11-29 12:22:30 +01:00

declare global {
  interface HTMLCanvasElement {
    captureStream(frameRate?: number): MediaStream;
  }
}

// Captures a tiny, continuously redrawn canvas as a placeholder video track, so the
// local stream always carries a video track that can later be swapped for the camera.
function dummyTrack(): MediaStreamTrack {
  const canvas = document.createElement('canvas'); //, { width: 0, height: 0})
  canvas.width = canvas.height = 2; // Doesn't work when 1 (?!)
  const ctx = canvas.getContext('2d');
  ctx?.fillRect(0, 0, canvas.width, canvas.height);
  requestAnimationFrame(function draw() {
    ctx?.fillRect(0, 0, canvas.width, canvas.height);
    requestAnimationFrame(draw);
  });
  // Also works. Probably it should be done once connected.
  //setTimeout(() => { ctx?.fillRect(0,0, canvas.width, canvas.height) }, 4000)
  return canvas.captureStream(60).getTracks()[0];
}
// Requests microphone access and wraps the audio track, together with the
// placeholder video track, in a LocalStream.
export function RequestLocalStream(): Promise<LocalStream> {
  return navigator.mediaDevices.getUserMedia({ audio: true }).then((aStream) => {
    const aTrack = aStream.getAudioTracks()[0];
    if (!aTrack) {
      throw new Error('No audio tracks provided');
    }
    return new _LocalStream(aTrack);
  });
}
class _LocalStream {
  private mediaRequested: boolean = false;
  readonly stream: MediaStream;
  private readonly vdTrack: MediaStreamTrack;

  constructor(aTrack: MediaStreamTrack) {
    this.vdTrack = dummyTrack();
    this.stream = new MediaStream([aTrack, this.vdTrack]);
  }

  // On the first call, requests camera access, replaces the dummy track with the real
  // one and notifies the onVideoTrack subscriber. Subsequent calls just flip the
  // `enabled` flag on the existing video tracks.
  toggleVideo(): Promise<boolean> {
    if (!this.mediaRequested) {
      return navigator.mediaDevices
        .getUserMedia({ video: true })
        .then((vStream) => {
          const vTrack = vStream.getVideoTracks()[0];
          if (!vTrack) {
            throw new Error('No video track provided');
          }
          this.stream.addTrack(vTrack);
          this.stream.removeTrack(this.vdTrack);
          this.mediaRequested = true;
          if (this.onVideoTrackCb) {
            this.onVideoTrackCb(vTrack);
          }
          return true;
        })
        .catch((e) => {
          // TODO: log
          console.error(e);
          return false;
        });
    }
    let enabled = true;
    this.stream.getVideoTracks().forEach((track) => {
      track.enabled = enabled = enabled && !track.enabled;
    });
    return Promise.resolve(enabled);
  }

  // Mutes or unmutes all audio tracks; returns the resulting enabled state.
  toggleAudio(): boolean {
    let enabled = true;
    this.stream.getAudioTracks().forEach((track) => {
      track.enabled = enabled = enabled && !track.enabled;
    });
    return enabled;
  }

  private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null;

  // Registers a callback fired once the real camera track becomes available.
  onVideoTrack(cb: (t: MediaStreamTrack) => void) {
    this.onVideoTrackCb = cb;
  }

  stop() {
    this.stream.getTracks().forEach((t) => t.stop());
  }
}

export type LocalStream = InstanceType<typeof _LocalStream>;
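
Usage sketch (illustrative, not part of LocalStream.ts): it assumes the module's exports are imported and that tracks are forwarded to a bare RTCPeerConnection; the assist layer's actual transport may differ.

// import { RequestLocalStream, LocalStream } from './LocalStream'; // illustrative relative path

const pc = new RTCPeerConnection(); // assumed transport for this sketch

RequestLocalStream().then((local: LocalStream) => {
  // Publish the initial tracks: microphone audio plus the dummy video placeholder.
  local.stream.getTracks().forEach((t) => pc.addTrack(t, local.stream));

  // Forward the real camera track whenever toggleVideo() acquires it.
  local.onVideoTrack((vTrack) => pc.addTrack(vTrack, local.stream));

  // Synchronous mic mute/unmute.
  const audioOn = local.toggleAudio();

  // First call prompts for the camera; later calls only flip `enabled`.
  local.toggleVideo().then((videoOn) => console.log({ audioOn, videoOn }));

  // local.stop(); // tear down all tracks when the call ends
});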