Merge pull request #218 from openreplay/240ce27681f6dddb7

change(ui) - assist changes cherry pick from 240ce27681
This commit is contained in:
Shekar Siri 2021-11-20 00:12:41 +05:30 committed by GitHub
commit a7bd5c3cf9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 707 additions and 212 deletions

View file

@ -2,27 +2,26 @@ import React, { useState } from 'react'
import stl from './ChatControls.css'
import cn from 'classnames'
import { Button, Icon } from 'UI'
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
interface Props {
stream: MediaStream | null,
stream: LocalStream | null,
endCall: () => void
}
function ChatControls({ stream, endCall } : Props) {
const [audioEnabled, setAudioEnabled] = useState(true)
const [videoEnabled, setVideoEnabled] = useState(true)
const [videoEnabled, setVideoEnabled] = useState(false)
const toggleAudio = () => {
if (!stream) { return; }
const aEn = !audioEnabled
stream.getAudioTracks().forEach(track => track.enabled = aEn);
setAudioEnabled(aEn);
setAudioEnabled(stream.toggleAudio());
}
const toggleVideo = () => {
if (!stream) { return; }
const vEn = !videoEnabled;
stream.getVideoTracks().forEach(track => track.enabled = vEn);
setVideoEnabled(vEn)
stream.toggleVideo()
.then(setVideoEnabled)
}
return (

View file

@ -6,10 +6,12 @@ import Counter from 'App/components/shared/SessionItem/Counter'
import stl from './chatWindow.css'
import ChatControls from '../ChatControls/ChatControls'
import Draggable from 'react-draggable';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
export interface Props {
incomeStream: MediaStream | null,
localStream: MediaStream | null,
localStream: LocalStream | null,
userId: String,
endCall: () => void
}
@ -30,7 +32,7 @@ const ChatWindow: FC<Props> = function ChatWindow({ userId, incomeStream, localS
<div className={cn(stl.videoWrapper, {'hidden' : minimize}, 'relative')}>
<VideoContainer stream={ incomeStream } />
<div className="absolute bottom-0 right-0 z-50">
<VideoContainer stream={ localStream } muted width={50} />
<VideoContainer stream={ localStream ? localStream.stream : null } muted width={50} />
</div>
</div>
<ChatControls stream={localStream} endCall={endCall} />

View file

@ -7,6 +7,9 @@ import { connectPlayer } from 'Player/store';
import ChatWindow from '../../ChatWindow';
import { callPeer } from 'Player'
import { CallingState, ConnectionStatus } from 'Player/MessageDistributor/managers/AssistManager';
import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
import { toast } from 'react-toastify';
import { confirm } from 'UI/Confirmation';
import stl from './AassistActions.css'
@ -33,7 +36,7 @@ interface Props {
function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus }: Props) {
const [ incomeStream, setIncomeStream ] = useState<MediaStream | null>(null);
const [ localStream, setLocalStream ] = useState<MediaStream | null>(null);
const [ localStream, setLocalStream ] = useState<LocalStream | null>(null);
const [ endCall, setEndCall ] = useState<()=>void>(()=>{});
useEffect(() => {
@ -46,24 +49,18 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
}
}, [peerConnectionStatus])
function onCallConnect(lStream) {
function call() {
RequestLocalStream().then(lStream => {
setLocalStream(lStream);
setEndCall(() => callPeer(
lStream,
setIncomeStream,
onClose.bind(null, lStream),
lStream.stop.bind(lStream),
onReject,
onError
));
}
function call() {
navigator.mediaDevices.getUserMedia({video:true, audio:true})
.then(onCallConnect).catch(error => { // TODO retry only if specific error
navigator.mediaDevices.getUserMedia({audio:true})
.then(onCallConnect)
.catch(onError)
});
}).catch(onError)
}
const confirmCall = async () => {

View file

@ -5,6 +5,7 @@ import type { TimedMessage } from '../Timed';
import type { Message } from '../messages'
import { ID_TP_MAP } from '../messages';
import store from 'App/store';
import type { LocalStream } from './LocalStream';
import { update, getState } from '../../store';
@ -356,13 +357,14 @@ export default class AssistManager {
}
private localCallData: {
localStream: MediaStream,
localStream: LocalStream,
onStream: (s: MediaStream)=>void,
onCallEnd: () => void,
onReject: () => void,
onError?: ()=> void
} | null = null
call(localStream: MediaStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function {
call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function {
this.localCallData = {
localStream,
onStream,
@ -387,7 +389,17 @@ export default class AssistManager {
//console.log('calling...', this.localCallData.localStream)
const call = this.peer.call(this.peerID, this.localCallData.localStream);
const call = this.peer.call(this.peerID, this.localCallData.localStream.stream);
this.localCallData.localStream.onVideoTrack(vTrack => {
const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
if (!sender) {
//logger.warn("No video sender found")
return
}
//logger.log("sender found:", sender)
sender.replaceTrack(vTrack)
})
call.on('stream', stream => {
update({ calling: CallingState.True });
this.localCallData && this.localCallData.onStream(stream);
@ -396,8 +408,8 @@ export default class AssistManager {
});
this.md.overlay.addEventListener("mousemove", this.onMouseMove)
this.md.overlay.addEventListener("click", this.onMouseClick)
});
//call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
call.on("close", this.localCallData.onCallEnd);
call.on("error", (e) => {

View file

@ -0,0 +1,85 @@
// Ambient augmentation: HTMLCanvasElement.captureStream() is missing
// from some TypeScript DOM lib versions.
declare global {
interface HTMLCanvasElement {
captureStream(frameRate?: number): MediaStream;
}
}
// Creates a placeholder video track from a tiny, continuously repainted
// canvas, so the outgoing stream always contains a video track (the real
// camera track replaces it later via onVideoTrack/replaceTrack).
function dummyTrack(): MediaStreamTrack {
const canvas = document.createElement("canvas")//, { width: 0, height: 0})
canvas.width=canvas.height=2 // Doesn't work when 1 (?!)
const ctx = canvas.getContext('2d');
ctx?.fillRect(0, 0, canvas.width, canvas.height);
// Keep repainting so captureStream keeps producing frames.
// NOTE(review): this rAF loop is never cancelled, even after the stream
// is stopped — confirm whether that is intended.
requestAnimationFrame(function draw(){
ctx?.fillRect(0,0, canvas.width, canvas.height)
requestAnimationFrame(draw);
});
// Also works. Probably it should be done once connected.
//setTimeout(() => { ctx?.fillRect(0,0, canvas.width, canvas.height) }, 4000)
return canvas.captureStream(60).getTracks()[0];
}
/**
 * Requests microphone access and wraps the captured audio track into a
 * LocalStream. The stream starts with a dummy canvas video track; real
 * camera video is requested lazily via LocalStream.toggleVideo().
 * Rejects when getUserMedia fails or returns a stream without audio.
 */
export default async function RequestLocalStream(): Promise<LocalStream> {
  const audioOnly = await navigator.mediaDevices.getUserMedia({ audio: true })
  const [track] = audioOnly.getAudioTracks()
  if (!track) { throw new Error("No audio tracks provided") }
  return new _LocalStream(track)
}
// Wrapper around the user's local MediaStream. Always keeps a video track
// present: a dummy canvas track until the camera is requested, then the
// real camera track (swapped in lazily on first toggleVideo()).
class _LocalStream {
// True once real camera video has been obtained via getUserMedia.
private mediaRequested: boolean = false
// The combined audio+video stream handed to the peer connection.
readonly stream: MediaStream
// Placeholder video track generated from a canvas (see dummyTrack()).
private readonly vdTrack: MediaStreamTrack
constructor(aTrack: MediaStreamTrack) {
this.vdTrack = dummyTrack()
this.stream = new MediaStream([ aTrack, this.vdTrack ])
}
// Toggles camera video. On first call it requests the camera, replaces
// the dummy track with the real one and notifies the onVideoTrack
// subscriber. Resolves to the resulting enabled state; resolves to false
// if the camera request fails.
toggleVideo(): Promise<boolean> {
if (!this.mediaRequested) {
return navigator.mediaDevices.getUserMedia({video:true})
.then(vStream => {
const vTrack = vStream.getVideoTracks()[0]
if (!vTrack) {
throw new Error("No video track provided")
}
this.stream.addTrack(vTrack)
this.stream.removeTrack(this.vdTrack)
this.mediaRequested = true
if (this.onVideoTrackCb) {
this.onVideoTrackCb(vTrack)
}
return true
})
.catch(e => {
// TODO: log
return false
})
}
// Flip every video track; ends up enabled only if all tracks were
// previously disabled (chained &&-assignment).
let enabled = true
this.stream.getVideoTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return Promise.resolve(enabled)
}
// Synchronously flips audio tracks; returns the new enabled state.
toggleAudio(): boolean {
let enabled = true
this.stream.getAudioTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return enabled
}
private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null
// Registers a callback fired when the real camera track is added
// (callers use it to replaceTrack on the RTCRtpSender).
onVideoTrack(cb: (t: MediaStreamTrack) => void) {
this.onVideoTrackCb = cb
}
// Stops every track. NOTE(review): the dummy track's rAF repaint loop
// is not cancelled here — confirm whether that matters.
stop() {
this.stream.getTracks().forEach(t => t.stop())
}
}
// Public alias so consumers depend on the type, not the private class.
export type LocalStream = InstanceType<typeof _LocalStream>

View file

@ -3,3 +3,4 @@ tsconfig-cjs.json
tsconfig.json
.prettierrc.json
.cache
layout

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,86 @@
/* Styles for the OpenReplay Assist call widget (rendered inside an iframe). */

/* "Connecting..." placeholder: hidden by default, shown while the
   enclosing section carries the .status-connecting class. */
.connecting-message {
margin-top: 50%;
font-size: 20px;
color: #aaa;
text-align: center;
display: none;
}
.status-connecting .connecting-message {
display: block;
}
/* Hide the call card until the connection is established. */
.status-connecting .card {
display: none;
}
/* Main call card dimensions. */
.card{
min-width: 324px;
width: 350px;
max-width: 800px;
/*min-height: 220px;*/
max-height: 450px;
/*resize: both;
overflow: auto;*/
}
/* The card header doubles as the drag handle. */
.card .card-header{
cursor: move;
}
#agent-name, #duration{
cursor:default;
}
/* Both video areas are hidden until the matching .local/.remote class
   appears on #video-container (toggled from CallWindow). */
#local-stream, #remote-stream {
display:none;
}
#video-container.remote #remote-stream {
display: block;
}
#video-container.local {
min-height: 100px;
}
#video-container.local #local-stream {
display: block;
}
/* Local preview: small overlay pinned to the bottom-right corner. */
#local-stream{
width: 35%;
position: absolute;
z-index: 99;
bottom: 5px;
right: 5px;
border: thin solid rgba(255,255,255, .3);
}
/* Audio button: swaps mic/mic-mute icons and its text label
   depending on the .muted class. */
#audio-btn .bi-mic-mute {
display:none;
}
#audio-btn:after {
content: 'Mute'
}
#audio-btn.muted .bi-mic-mute {
display: inline-block;
}
#audio-btn.muted .bi-mic {
display:none;
}
#audio-btn.muted:after {
content: 'Unmute'
}
/* Video button: same pattern, driven by the .off class. */
#video-btn .bi-camera-video-off {
display:none;
}
#video-btn.off:after {
content: 'Start Video'
}
#video-btn.off .bi-camera-video-off {
display: inline-block;
}
#video-btn.off .bi-camera-video {
display:none;
}
#video-btn:after {
content: 'Stop Video'
}

View file

@ -0,0 +1,168 @@
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>OpenReplay | Assist</title>
<!--CSS -->
<!-- <link href="css/styles.css" rel="stylesheet"> -->
<!-- Inline styles: .status-connecting drives the connecting/ready states;
     .muted / .off classes drive the audio/video button appearance -->
<style>
.connecting-message {
margin-top: 50%;
font-size: 20px;
color: #aaa;
text-align: center;
display: none;
font-family: sans-serif;
}
.status-connecting .connecting-message {
display: block;
}
.status-connecting .card {
display: none;
}
.card{
min-width: 324px;
width: 350px;
max-width: 800px;
/*min-height: 220px;*/
max-height: 450px;
/*resize: both;
overflow: auto;*/
}
.card .card-header{
cursor: move;
}
#agent-name, #duration{
cursor:default;
}
#local-stream, #remote-stream {
display:none;
}
#video-container.remote #remote-stream {
display: block;
}
#video-container.local {
min-height: 100px;
}
#video-container.local #local-stream {
display: block;
}
#local-stream{
width: 35%;
position: absolute;
z-index: 99;
bottom: 5px;
right: 5px;
border: thin solid rgba(255,255,255, .3);
}
#audio-btn .bi-mic-mute {
display:none;
}
#audio-btn:after {
text-transform: capitalize;
content: 'Mute'
}
#audio-btn.muted .bi-mic-mute {
display: inline-block;
}
#audio-btn.muted .bi-mic {
display:none;
}
#audio-btn.muted:after {
content: 'Unmute'
}
#video-btn .bi-camera-video-off {
display:none;
}
#video-btn:after {
text-transform: capitalize;
content: 'Stop Video'
}
#video-btn.off:after {
content: 'Start Video'
}
#video-btn.off .bi-camera-video-off {
display: inline-block;
}
#video-btn.off .bi-camera-video {
display:none;
}
</style>
<link href="css/bootstrap.min.css" rel="stylesheet">
</head>
<body>
<!-- Assist call widget. Starts in the "connecting" state; the script that
     loads this document removes .status-connecting once the call is set up. -->
<section id="or-assist" class="status-connecting">
<div class="connecting-message"> Connecting... </div>
<div class="card border-dark shadow">
<div class="drag-area card-header d-flex justify-content-between">
<div class="user-info">
<span>Call with</span>
<!-- User Name -->
<span id="agent-name" class="person-name fw-light" >Support Agent</span>
</div>
<div class="call-duration">
<!--Call Duration. -->
<span id="duration" class="card-subtitle mb-2 text-muted fw-light" data-bs-toggle="tooltip" data-bs-placement="bottom" title="Duration">00:00</span>
</div>
</div>
<!-- Video area: .local / .remote classes on #video-container reveal the streams -->
<div id="video-container" class="card-body bg-dark p-0 d-flex align-items-center position-relative">
<div id="local-stream" class="ratio ratio-4x3 rounded m-0 p-0 shadow">
<p class="text-white m-auto text-center">Starting video...</p>
<video id="video-local" autoplay muted></video>
</div>
<div id="remote-stream" class="ratio ratio-4x3 m-0 p-0">
<p id="remote-stream-placeholder" class="text-white m-auto text-center">Starting video...</p>
<video id="video-remote" autoplay></video>
</div>
</div>
<div class="card-footer bg-transparent d-flex justify-content-between">
<div class="assist-controls">
<a href="#" id="audio-btn" class="btn btn-light btn-sm text-uppercase me-2"><i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic" viewBox="0 0 16 16">
<path d="M3.5 6.5A.5.5 0 0 1 4 7v1a4 4 0 0 0 8 0V7a.5.5 0 0 1 1 0v1a5 5 0 0 1-4.5 4.975V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 .5-.5z"/>
<path d="M10 8a2 2 0 1 1-4 0V3a2 2 0 1 1 4 0v5zM8 0a3 3 0 0 0-3 3v5a3 3 0 0 0 6 0V3a3 3 0 0 0-3-3z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic-mute" viewBox="0 0 16 16">
<path d="M13 8c0 .564-.094 1.107-.266 1.613l-.814-.814A4.02 4.02 0 0 0 12 8V7a.5.5 0 0 1 1 0v1zm-5 4c.818 0 1.578-.245 2.212-.667l.718.719a4.973 4.973 0 0 1-2.43.923V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 1 0v1a4 4 0 0 0 4 4zm3-9v4.879l-1-1V3a2 2 0 0 0-3.997-.118l-.845-.845A3.001 3.001 0 0 1 11 3z"/>
<path d="m9.486 10.607-.748-.748A2 2 0 0 1 6 8v-.878l-1-1V8a3 3 0 0 0 4.486 2.607zm-7.84-9.253 12 12 .708-.708-12-12-.708.708z"/>
</svg>
</i></a>
<!-- Add class .mute to #audio-btn when user mutes audio -->
<a href="#" id="video-btn" class="off btn btn-light btn-sm text-uppercase ms-2"><i >
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M0 5a2 2 0 0 1 2-2h7.5a2 2 0 0 1 1.983 1.738l3.11-1.382A1 1 0 0 1 16 4.269v7.462a1 1 0 0 1-1.406.913l-3.111-1.382A2 2 0 0 1 9.5 13H2a2 2 0 0 1-2-2V5zm11.5 5.175 3.5 1.556V4.269l-3.5 1.556v4.35zM2 4a1 1 0 0 0-1 1v6a1 1 0 0 0 1 1h7.5a1 1 0 0 0 1-1V5a1 1 0 0 0-1-1H2z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video-off" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M10.961 12.365a1.99 1.99 0 0 0 .522-1.103l3.11 1.382A1 1 0 0 0 16 11.731V4.269a1 1 0 0 0-1.406-.913l-3.111 1.382A2 2 0 0 0 9.5 3H4.272l.714 1H9.5a1 1 0 0 1 1 1v6a1 1 0 0 1-.144.518l.605.847zM1.428 4.18A.999.999 0 0 0 1 5v6a1 1 0 0 0 1 1h5.014l.714 1H2a2 2 0 0 1-2-2V5c0-.675.334-1.272.847-1.634l.58.814zM15 11.73l-3.5-1.555v-4.35L15 4.269v7.462zm-4.407 3.56-10-14 .814-.58 10 14-.814.58z"/>
</svg>
</i></a>
<!--Add class .off to #video-btn when user stops video -->
</div>
<div class="assist-end">
<a id="end-call-btn" href="#" class="btn btn-danger btn-sm text-uppercase">End</a>
</div>
</div>
</div>
</section>
</body>
</html>

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-assist",
"description": "Tracker plugin for screen assistance through the WebRTC",
"version": "3.2.0",
"version": "3.4.7",
"keywords": [
"WebRTC",
"assistance",

View file

@ -1,16 +1,23 @@
import type { LocalStream } from './LocalStream';
const SS_START_TS_KEY = "__openreplay_assist_call_start_ts"
export default class CallWindow {
private iframe: HTMLIFrameElement;
private vRemote: HTMLVideoElement | null = null;
private vLocal: HTMLVideoElement | null = null;
private audioBtn: HTMLAnchorElement | null = null;
private videoBtn: HTMLAnchorElement | null = null;
private userNameSpan: HTMLSpanElement | null = null;
private vPlaceholder: HTMLParagraphElement | null = null;
private iframe: HTMLIFrameElement
private vRemote: HTMLVideoElement | null = null
private vLocal: HTMLVideoElement | null = null
private audioBtn: HTMLElement | null = null
private videoBtn: HTMLElement | null = null
private endCallBtn: HTMLElement | null = null
private agentNameElem: HTMLElement | null = null
private videoContainer: HTMLElement | null = null
private vPlaceholder: HTMLElement | null = null
private tsInterval: ReturnType<typeof setInterval>;
constructor(endCall: () => void) {
private tsInterval: ReturnType<typeof setInterval>
private load: Promise<void>
constructor() {
const iframe = this.iframe = document.createElement('iframe');
Object.assign(iframe.style, {
position: "fixed",
@ -31,189 +38,221 @@ export default class CallWindow {
console.error("OpenReplay: CallWindow iframe document is not reachable.")
return;
}
fetch("https://static.openreplay.com/tracker-assist/index.html")
//fetch("file:///Users/shikhu/work/asayer-tester/dist/assist/index.html")
//const baseHref = "https://static.openreplay.com/tracker-assist/test"
const baseHref = "https://static.openreplay.com/tracker-assist/3.4.4"
this.load = fetch(baseHref + "/index.html")
.then(r => r.text())
.then((text) => {
iframe.onload = () => {
doc.body.removeChild(doc.body.children[0]); //?!!>R#
const assistSection = doc.getElementById("or-assist")
assistSection && assistSection.removeAttribute("style");
iframe.style.height = doc.body.scrollHeight + 'px';
iframe.style.width = doc.body.scrollWidth + 'px';
assistSection?.classList.remove("status-connecting")
//iframe.style.height = doc.body.scrollHeight + 'px';
//iframe.style.width = doc.body.scrollWidth + 'px';
this.adjustIframeSize()
iframe.onload = null;
}
text = text.replace(/href="css/g, "href=\"https://static.openreplay.com/tracker-assist/css")
// ?
text = text.replace(/href="css/g, `href="${baseHref}/css`)
doc.open();
doc.write(text);
doc.close();
this.vLocal = doc.getElementById("video-local") as HTMLVideoElement;
this.vRemote = doc.getElementById("video-remote") as HTMLVideoElement;
this.vLocal = doc.getElementById("video-local") as (HTMLVideoElement | null);
this.vRemote = doc.getElementById("video-remote") as (HTMLVideoElement | null);
this.videoContainer = doc.getElementById("video-container");
//
this.vLocal.parentElement && this.vLocal.parentElement.classList.add("d-none");
this.audioBtn = doc.getElementById("audio-btn") as HTMLAnchorElement;
this.audioBtn = doc.getElementById("audio-btn");
if (this.audioBtn) {
this.audioBtn.onclick = () => this.toggleAudio();
this.videoBtn = doc.getElementById("video-btn") as HTMLAnchorElement;
}
this.videoBtn = doc.getElementById("video-btn");
if (this.videoBtn) {
this.videoBtn.onclick = () => this.toggleVideo();
}
this.endCallBtn = doc.getElementById("end-call-btn");
this.userNameSpan = doc.getElementById("username") as HTMLSpanElement;
this.agentNameElem = doc.getElementById("agent-name");
this.vPlaceholder = doc.querySelector("#remote-stream p")
this._trySetAssistentName();
this._trySetStreams();
const endCallBtn = doc.getElementById("end-call-btn") as HTMLAnchorElement;
endCallBtn.onclick = endCall;
const tsText = doc.getElementById("time-stamp");
const startTs = Date.now();
if (tsText) {
const tsElem = doc.getElementById("duration");
if (tsElem) {
const startTs = Number(sessionStorage.getItem(SS_START_TS_KEY)) || Date.now()
sessionStorage.setItem(SS_START_TS_KEY, startTs.toString())
this.tsInterval = setInterval(() => {
const ellapsed = Date.now() - startTs;
const secsFull = ~~(ellapsed / 1000);
const mins = ~~(secsFull / 60);
const ellapsed = Date.now() - startTs
const secsFull = ~~(ellapsed / 1000)
const mins = ~~(secsFull / 60)
const secs = secsFull - mins * 60
tsText.innerText = `${mins}:${secs < 10 ? 0 : ''}${secs}`;
tsElem.innerText = `${mins}:${secs < 10 ? 0 : ''}${secs}`
}, 500);
}
// TODO: better D'n'D
// mb set cursor:move here?
doc.body.setAttribute("draggable", "true");
doc.body.ondragstart = (e) => {
if (!e.dataTransfer || !e.target) { return; }
//@ts-ignore
if (!e.target.classList || !e.target.classList.contains("card-header")) { return; }
if (!e.target.classList || !e.target.classList.contains("drag-area")) { return; }
e.dataTransfer.setDragImage(doc.body, e.clientX, e.clientY);
};
doc.body.ondragend = e => {
Object.assign(iframe.style, {
left: `${e.clientX}px`, // TODO: fix in case e is inside the iframe
left: `${e.clientX}px`, // TODO: fix the case when the event coordinates are inside the iframe
top: `${e.clientY}px`,
bottom: 'auto',
right: 'auto',
})
}
});
//this.toggleVideoUI(false)
//this.toggleRemoteVideoUI(false)
}
// TODO: load(): Promise
private adjustIframeSize() {
const doc = this.iframe.contentDocument
if (!doc) { return }
this.iframe.style.height = doc.body.scrollHeight + 'px';
this.iframe.style.width = doc.body.scrollWidth + 'px';
}
setCallEndAction(endCall: () => void) {
this.load.then(() => {
if (this.endCallBtn) {
this.endCallBtn.onclick = endCall
}
})
}
private aRemote: HTMLAudioElement | null = null;
private localStream: MediaStream | null = null;
private remoteStream: MediaStream | null = null;
private setLocalVideoStream: (MediaStream) => void = () => {};
private videoRequested: boolean = true; // TODO: green camera light
private _trySetStreams() {
if (this.vRemote && !this.vRemote.srcObject && this.remoteStream) {
this.vRemote.srcObject = this.remoteStream;
private checkRemoteVideoInterval: ReturnType<typeof setInterval>
setRemoteStream(rStream: MediaStream) {
this.load.then(() => {
if (this.vRemote && !this.vRemote.srcObject) {
this.vRemote.srcObject = rStream;
if (this.vPlaceholder) {
this.vPlaceholder.innerText = "Video has been paused. Click anywhere to resume.";
}
// Hack for audio (doesen't work in iframe because of some magical reasons (check if it is connected to autoplay?))
// Hack for audio. Doesn't work inside the iframe for unclear reasons (check whether it is related to the autoplay policy?)
this.aRemote = document.createElement("audio");
this.aRemote.autoplay = true;
this.aRemote.style.display = "none"
this.aRemote.srcObject = this.remoteStream;
this.aRemote.srcObject = rStream;
document.body.appendChild(this.aRemote)
}
if (this.vLocal && !this.vLocal.srcObject && this.localStream) {
this.vLocal.srcObject = this.localStream;
// Hack to determine if the remote video is enabled
if (this.checkRemoteVideoInterval) { clearInterval(this.checkRemoteVideoInterval) } // just in case
let enable = false
this.checkRemoteVideoInterval = setInterval(() => {
const settings = rStream.getVideoTracks()[0]?.getSettings()
//console.log(settings)
const isDummyVideoTrack = !!settings && (settings.width === 2 || settings.frameRate === 0)
const shouldEnable = !isDummyVideoTrack
if (enable !== shouldEnable) {
this.toggleRemoteVideoUI(enable=shouldEnable)
}
}, 1000)
})
}
setRemoteStream(rStream: MediaStream) {
this.remoteStream = rStream;
this._trySetStreams();
toggleRemoteVideoUI(enable: boolean) {
this.load.then(() => {
if (this.videoContainer) {
if (enable) {
this.videoContainer.classList.add("remote")
} else {
this.videoContainer.classList.remove("remote")
}
setLocalStream(lStream: MediaStream, setLocalVideoStream: (MediaStream) => void) {
lStream.getVideoTracks().forEach(track => {
track.enabled = false;
});
this.localStream = lStream;
this.setLocalVideoStream = setLocalVideoStream;
this._trySetStreams();
this.adjustIframeSize()
}
})
}
private localStream: LocalStream | null = null;
// TODO: on construction?
setLocalStream(lStream: LocalStream) {
this.localStream = lStream
}
playRemote() {
this.vRemote && this.vRemote.play()
}
// TODO: determined workflow
_trySetAssistentName() {
if (this.userNameSpan && this.assistentName) {
this.userNameSpan.innerText = this.assistentName;
}
}
private assistentName: string = "";
setAssistentName(name: string) {
this.assistentName = name;
this._trySetAssistentName();
this.load.then(() => {
if (this.agentNameElem) {
this.agentNameElem.innerText = name
}
})
}
toggleAudio() {
let enabled = true;
this.localStream?.getAudioTracks().forEach(track => {
enabled = enabled && !track.enabled;
track.enabled = enabled;
});
const cList = this.audioBtn?.classList;
private toggleAudioUI(enabled: boolean) {
if (!this.audioBtn) { return; }
if (enabled) {
this.audioBtn.classList.remove("muted");
this.audioBtn.childNodes[1].textContent = "Mute";
this.audioBtn.classList.remove("muted")
} else {
this.audioBtn.classList.add("muted");
this.audioBtn.childNodes[1].textContent = "Unmute";
this.audioBtn.classList.add("muted")
}
}
private _toggleVideoUI(enabled) {
if (!this.videoBtn || !this.vLocal || !this.vLocal.parentElement) { return; }
private toggleAudio() {
const enabled = this.localStream?.toggleAudio() || false
this.toggleAudioUI(enabled)
// if (!this.audioBtn) { return; }
// if (enabled) {
// this.audioBtn.classList.remove("muted");
// this.audioBtn.childNodes[1].textContent = "Mute";
// } else {
// this.audioBtn.classList.add("muted");
// this.audioBtn.childNodes[1].textContent = "Unmute";
// }
}
private toggleVideoUI(enabled: boolean) {
if (!this.videoBtn || !this.videoContainer) { return; }
if (enabled) {
this.vLocal.parentElement.classList.remove("d-none");
this.videoContainer.classList.add("local")
this.videoBtn.classList.remove("off");
this.videoBtn.childNodes[1].textContent = "Stop Video";
} else {
this.vLocal.parentElement.classList.add("d-none");
this.videoContainer.classList.remove("local")
this.videoBtn.classList.add("off");
this.videoBtn.childNodes[1].textContent = "Start Video";
}
this.adjustIframeSize()
}
toggleVideo() {
if (!this.videoRequested) {
navigator.mediaDevices.getUserMedia({video:true, audio:false}).then(vd => {
this.videoRequested = true;
this.setLocalVideoStream(vd);
this._toggleVideoUI(true);
this.localStream?.getVideoTracks().forEach(track => {
track.enabled = true;
private videoRequested: boolean = false
private toggleVideo() {
this.localStream?.toggleVideo()
.then(enabled => {
this.toggleVideoUI(enabled)
this.load.then(() => {
if (this.vLocal && this.localStream && !this.vLocal.srcObject) {
this.vLocal.srcObject = this.localStream.stream
}
})
})
});
return;
}
let enabled = true;
this.localStream?.getVideoTracks().forEach(track => {
enabled = enabled && !track.enabled;
track.enabled = enabled;
});
this._toggleVideoUI(enabled);
}
remove() {
clearInterval(this.tsInterval);
this.localStream?.stop()
clearInterval(this.tsInterval)
clearInterval(this.checkRemoteVideoInterval)
if (this.iframe.parentElement) {
document.body.removeChild(this.iframe);
document.body.removeChild(this.iframe)
}
if (this.aRemote && this.aRemote.parentElement) {
document.body.removeChild(this.aRemote);
document.body.removeChild(this.aRemote)
}
sessionStorage.removeItem(SS_START_TS_KEY)
}
}

View file

@ -0,0 +1,85 @@
// Ambient augmentation: captureStream() on canvas elements is absent
// from some TypeScript DOM lib versions.
declare global {
interface HTMLCanvasElement {
captureStream(frameRate?: number): MediaStream;
}
}
// Builds a stand-in video track by capturing a continuously repainted
// 2x2 canvas. Keeps a video track present in the stream until the real
// camera track becomes available.
function dummyTrack(): MediaStreamTrack {
  const cnv = document.createElement("canvas")
  cnv.width = 2
  cnv.height = 2 // a 1x1 canvas does not work here (?!)
  const context = cnv.getContext('2d')
  context?.fillRect(0, 0, cnv.width, cnv.height)
  // Repaint on every animation frame so capture keeps producing frames.
  // (A single delayed fillRect once connected also works.)
  const repaint = () => {
    context?.fillRect(0, 0, cnv.width, cnv.height)
    requestAnimationFrame(repaint)
  }
  requestAnimationFrame(repaint)
  return cnv.captureStream(60).getTracks()[0]
}
// Entry point: requests microphone access and wraps the captured audio
// track into a LocalStream (video starts out as a dummy canvas track).
export default function RequestLocalStream(): Promise<LocalStream> {
return navigator.mediaDevices.getUserMedia({ audio:true })
.then(aStream => {
const aTrack = aStream.getAudioTracks()[0]
// Reject if the browser handed back a stream without audio.
if (!aTrack) { throw new Error("No audio tracks provided") }
return new _LocalStream(aTrack)
})
}
/**
 * Wraps the user's media into a single MediaStream whose video track
 * starts out as a dummy (canvas-based) one. Real camera video is
 * requested lazily on the first toggleVideo() call and swapped in,
 * notifying the onVideoTrack subscriber so the sender can replaceTrack.
 */
class _LocalStream {
  readonly stream: MediaStream
  private readonly vdTrack: MediaStreamTrack
  private mediaRequested: boolean = false
  private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null

  constructor(aTrack: MediaStreamTrack) {
    this.vdTrack = dummyTrack()
    this.stream = new MediaStream([ aTrack, this.vdTrack ])
  }

  /**
   * Toggles camera video. First call requests the camera, replaces the
   * dummy track with the real one and fires the onVideoTrack callback.
   * Resolves to the resulting enabled state (false on camera failure).
   */
  toggleVideo(): Promise<boolean> {
    if (this.mediaRequested) {
      // Flip every video track; ends up enabled only if all tracks
      // were previously disabled.
      let allOn = true
      for (const track of this.stream.getVideoTracks()) {
        const next = allOn && !track.enabled
        track.enabled = next
        allOn = next
      }
      return Promise.resolve(allOn)
    }
    return navigator.mediaDevices.getUserMedia({video:true})
      .then(vStream => {
        const vTrack = vStream.getVideoTracks()[0]
        if (!vTrack) {
          throw new Error("No video track provided")
        }
        this.stream.addTrack(vTrack)
        this.stream.removeTrack(this.vdTrack)
        this.mediaRequested = true
        if (this.onVideoTrackCb) {
          this.onVideoTrackCb(vTrack)
        }
        return true
      })
      .catch(() => false) // TODO: log

  }

  /** Synchronously flips audio tracks; returns the new enabled state. */
  toggleAudio(): boolean {
    let allOn = true
    for (const track of this.stream.getAudioTracks()) {
      const next = allOn && !track.enabled
      track.enabled = next
      allOn = next
    }
    return allOn
  }

  /** Registers a callback fired when the real camera track is added. */
  onVideoTrack(cb: (t: MediaStreamTrack) => void) {
    this.onVideoTrackCb = cb
  }

  /** Stops every track of the underlying stream. */
  stop() {
    this.stream.getTracks().forEach(t => t.stop())
  }
}
// Public alias so consumers depend on the type, not the private class.
export type LocalStream = InstanceType<typeof _LocalStream>

View file

@ -1,7 +1,7 @@
/**
* Hach for the issue of peerjs compilation on angular
* Mor info here: https://github.com/peers/peerjs/issues/552
* Hack for the peerjs compilation on angular
* About this issue: https://github.com/peers/peerjs/issues/552
*/
// @ts-ignore

View file

@ -7,7 +7,7 @@ import type Message from '@openreplay/tracker';
import Mouse from './Mouse';
import CallWindow from './CallWindow';
import ConfirmWindow from './ConfirmWindow';
import RequestLocalStream from './LocalStream';
export interface Options {
confirmText: string,
@ -16,7 +16,6 @@ export interface Options {
config: Object
}
enum CallingState {
Requesting,
True,
@ -39,8 +38,21 @@ export default function(opts: Partial<Options> = {}) {
return;
}
let assistDemandedRestart = false;
let peer : Peer | null = null;
function log(...args) {
// TODO: use centralised warn/log from tracker (?)
appOptions.__debug_log && console.log("OpenReplay Assist. ", ...args)
}
function warn(...args) {
appOptions.__debug_log && console.warn("OpenReplay Assist. ", ...args)
}
let assistDemandedRestart = false
let peer : Peer | null = null
const openDataConnections: Record<string, BufferingConnection> = {}
app.addCommitCallback(function(messages) {
Object.values(openDataConnections).forEach(buffConn => buffConn.send(messages))
})
app.attachStopCallback(function() {
if (assistDemandedRestart) { return; }
@ -104,12 +116,14 @@ export default function(opts: Partial<Options> = {}) {
let callingState: CallingState = CallingState.False;
peer.on('call', function(call) {
if (!peer) { return; }
const dataConn: DataConnection | undefined = peer
.connections[call.peer].find(c => c.type === 'data');
if (callingState !== CallingState.False || !dataConn) {
call.close();
warn("Call closed instantly: ", callingState, dataConn, dataConn.open)
return;
}
@ -135,74 +149,59 @@ export default function(opts: Partial<Options> = {}) {
setCallingState(CallingState.Requesting);
const confirm = new ConfirmWindow(options.confirmText, options.confirmStyle);
confirmAnswer = confirm.mount();
dataConn.on('data', (data) => { // if call closed by a caller before confirm
dataConn.on('data', (data) => { // if call cancelled by a caller before confirmation
if (data === "call_end") {
//console.log('OpenReplay tracker-assist: receiving callend onconfirm')
setCallingState(CallingState.False);
log("Received call_end during confirm window opened")
confirm.remove();
setCallingState(CallingState.False);
}
});
}
confirmAnswer.then(agreed => {
if (!agreed || !dataConn.open) {
call.close();
notifyCallEnd();
setCallingState(CallingState.False);
return;
!dataConn.open && warn("Call cancelled because data connection is closed.")
call.close()
notifyCallEnd()
setCallingState(CallingState.False)
return
}
const mouse = new Mouse();
let callUI;
const mouse = new Mouse()
let callUI = new CallWindow()
const onCallConnect = lStream => {
const onCallEnd = () => {
mouse.remove();
callUI?.remove();
lStream.getTracks().forEach(t => t.stop());
callUI.remove();
setCallingState(CallingState.False);
}
const initiateCallEnd = () => {
//console.log("callend initiated")
log("initiateCallEnd")
call.close()
notifyCallEnd();
onCallEnd();
}
call.answer(lStream);
setCallingState(CallingState.True)
dataConn.on("close", onCallEnd);
RequestLocalStream().then(lStream => {
dataConn.on("close", onCallEnd); // For what case?
//call.on('close', onClose); // Works from time to time (peerjs bug)
const intervalID = setInterval(() => {
const checkConnInterval = setInterval(() => {
if (!dataConn.open) {
initiateCallEnd();
clearInterval(intervalID);
clearInterval(checkConnInterval);
}
if (!call.open) {
onCallEnd();
clearInterval(intervalID);
clearInterval(checkConnInterval);
}
}, 3000);
call.on('error', initiateCallEnd);
callUI = new CallWindow(initiateCallEnd);
callUI.setLocalStream(lStream, (stream) => {
//let videoTrack = stream.getVideoTracks()[0];
//lStream.addTrack(videoTrack);
//call.peerConnection.addTrack(videoTrack);
// call.peerConnection.getSenders()
// var sender = call.peerConnection.getSenders().find(function(s) {
// return s.track .kind == videoTrack.kind;
// });
//sender.replaceTrack(videoTrack);
call.on('error', e => {
warn("Call error:", e)
initiateCallEnd()
});
call.on('stream', function(rStream) {
callUI.setRemoteStream(rStream);
const onInteraction = () => {
const onInteraction = () => { // only if hidden?
callUI.playRemote()
document.removeEventListener("click", onInteraction)
}
@ -214,6 +213,10 @@ export default function(opts: Partial<Options> = {}) {
onCallEnd();
return;
}
// if (data && typeof data.video === 'boolean') {
// log('Recieved video toggle signal: ', data.video)
// callUI.toggleRemoteVideo(data.video)
// }
if (data && typeof data.name === 'string') {
//console.log("name",data)
callUI.setAssistentName(data.name);
@ -222,14 +225,25 @@ export default function(opts: Partial<Options> = {}) {
mouse.move(data);
}
});
}
navigator.mediaDevices.getUserMedia({video:true, audio:true})
.then(onCallConnect)
.catch(_ => { // TODO retry only if specific error
navigator.mediaDevices.getUserMedia({audio:true}) // in case there is no camera on device
.then(onCallConnect)
.catch(e => console.log("OpenReplay tracker-assist: cant reach media devices. ", e));
lStream.onVideoTrack(vTrack => {
const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
if (!sender) {
warn("No video sender found")
return
}
log("sender found:", sender)
sender.replaceTrack(vTrack)
})
callUI.setCallEndAction(initiateCallEnd)
callUI.setLocalStream(lStream)
call.answer(lStream.stream)
setCallingState(CallingState.True)
})
.catch(e => {
warn("Audio mediadevice request error:", e)
onCallEnd()
});
}).catch(); // in case of Confirm.remove() without any confirmation
});