feat(utilities): utilities as a WS standalone server
parent cbec34db3e
commit 154caec63b

10 changed files with 67 additions and 1166 deletions
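
In short: the utilities service drops its plain-HTTP half (the Express app on port 9000 that served PeerJS signalling and the sourcemaps reader) and keeps only the socket.io app, which now runs standalone on port 9001. The S3_KEY path segment moves from the individual routes up into the two app-level mounts, so the external URLs keep their shape, and the shared helpers (extractPeerId, request_logger, the MaxMind reader) are extracted into utilities/utils.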

utilities/package-lock.json (generated): 891 changes
File diff suppressed because it is too large

utilities/package.json

@@ -19,11 +19,8 @@
   "homepage": "https://github.com/openreplay/openreplay#readme",
   "dependencies": {
     "@maxmind/geoip2-node": "^3.4.0",
-    "aws-sdk": "^2.992.0",
     "express": "^4.17.1",
-    "peer": "^0.6.1",
     "socket.io": "^4.4.1",
-    "source-map": "^0.7.3",
     "ua-parser-js": "^1.0.2"
   }
 }
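
The three dropped dependencies line up with the servers deleted below: aws-sdk and source-map served only the sourcemaps reader, and peer only the PeerJS signalling. express, socket.io, @maxmind/geoip2-node and ua-parser-js are all still required by the remaining WebSocket code.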

utilities/app.js

@@ -1,57 +1,22 @@
 const dumps = require('./utils/HeapSnapshot');
-const sourcemapsReaderServer = require('./servers/sourcemaps-server');
-const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server');
 const express = require('express');
-const {ExpressPeerServer} = require('peer');
 const socket = require("./servers/websocket");
+const {request_logger} = require("./utils/helper");
 
 const HOST = '0.0.0.0';
-const PORT = 9000;
+const PORT = 9001;
 
-const app = express();
 const wsapp = express();
-let debug = process.env.debug === "1" || false;
-const request_logger = (identity) => {
-    return (req, res, next) => {
-        debug && console.log(identity, new Date().toTimeString(), 'REQUEST', req.method, req.originalUrl);
-        res.on('finish', function () {
-            if (this.statusCode !== 200 || debug) {
-                console.log(new Date().toTimeString(), 'RESPONSE', req.method, req.originalUrl, this.statusCode);
-            }
-        })
-
-        next();
-    }
-};
-app.use(request_logger("[app]"));
 wsapp.use(request_logger("[wsapp]"));
 
-app.use('/sourcemaps', sourcemapsReaderServer);
-app.use('/assist', peerRouter);
-wsapp.use('/assist', socket.wsRouter);
+wsapp.use(`/assist/${process.env.S3_KEY}`, socket.wsRouter);
+wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router);
 
-app.use('/heapdump', dumps.router);
-
-const server = app.listen(PORT, HOST, () => {
-    console.log(`App listening on http://${HOST}:${PORT}`);
+const wsserver = wsapp.listen(PORT, HOST, () => {
+    console.log(`WS App listening on http://${HOST}:${PORT}`);
     console.log('Press Ctrl+C to quit.');
 });
-const wsserver = wsapp.listen(PORT + 1, HOST, () => {
-    console.log(`WS App listening on http://${HOST}:${PORT + 1}`);
-    console.log('Press Ctrl+C to quit.');
-});
-const peerServer = ExpressPeerServer(server, {
-    debug: true,
-    path: '/',
-    proxied: true,
-    allow_discovery: false
-});
-peerServer.on('connection', peerConnection);
-peerServer.on('disconnect', peerDisconnect);
-peerServer.on('error', peerError);
-app.use('/', peerServer);
-app.enable('trust proxy');
 wsapp.enable('trust proxy');
 socket.start(wsserver);
-module.exports = {wsserver, server};
+module.exports = {wsserver};
 console.log(`Heapdump enabled. Send a request to "/heapdump" to download a heapdump,\nor "/heapdump/save" to only generate a heapdump.`);
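
With only one listener left, a quick way to confirm the standalone app is up is to hit one of its keyed mounts. A minimal sketch, assuming app.js is running locally with the same S3_KEY in the environment (the path comes from the mounts above; the server binds 0.0.0.0, so localhost reaches it):

    // smoke-test.js: checks that the heapdump router answers under its key
    const http = require('http');
    const key = process.env.S3_KEY;
    http.get(`http://127.0.0.1:9001/heapdump/${key}/status`, (res) => {
        console.log('status:', res.statusCode); // expect 200 from getHeapSnapshotStatus
    });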

utilities/servers/peerjs-server.js (deleted)

@@ -1,76 +0,0 @@
-var express = require('express');
-var peerRouter = express.Router();
-
-let PROJECT_KEY_LENGTH = parseInt(process.env.PROJECT_KEY_LENGTH) || 20;
-let debug = process.env.debug === "1" || false;
-const extractPeerId = (peerId) => {
-    let splited = peerId.split("-");
-    if (splited.length !== 2) {
-        debug && console.error(`cannot split peerId: ${peerId}`);
-        return {};
-    }
-    if (PROJECT_KEY_LENGTH > 0 && splited[0].length !== PROJECT_KEY_LENGTH) {
-        debug && console.error(`wrong project key length for peerId: ${peerId}`);
-        return {};
-    }
-    return {projectKey: splited[0], sessionId: splited[1]};
-};
-const connectedPeers = {};
-
-const peerConnection = (client) => {
-    debug && console.log(`initiating ${client.id}`);
-    const {projectKey, sessionId} = extractPeerId(client.id);
-    if (projectKey === undefined || sessionId === undefined) {
-        return;
-    }
-    connectedPeers[projectKey] = connectedPeers[projectKey] || [];
-    if (connectedPeers[projectKey].indexOf(sessionId) === -1) {
-        debug && console.log(`new connexion ${client.id}`);
-        connectedPeers[projectKey].push(sessionId);
-    } else {
-        debug && console.log(`reconnecting peer ${client.id}`);
-    }
-
-
-};
-const peerDisconnect = (client) => {
-    debug && console.log(`disconnect ${client.id}`);
-    const {projectKey, sessionId} = extractPeerId(client.id);
-    if (projectKey === undefined || sessionId === undefined) {
-        return;
-    }
-    const i = (connectedPeers[projectKey] || []).indexOf(sessionId);
-    if (i === -1) {
-        debug && console.log(`session not found ${client.id}`);
-    } else {
-        connectedPeers[projectKey].splice(i, 1);
-    }
-}
-
-const peerError = (error) => {
-    console.error('error fired');
-    console.error(error);
-}
-
-
-peerRouter.get(`/${process.env.S3_KEY}/peers`, function (req, res) {
-    debug && console.log("looking for all available sessions");
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": connectedPeers}));
-});
-peerRouter.get(`/${process.env.S3_KEY}/peers/:projectKey`, function (req, res) {
-    debug && console.log(`looking for available sessions for ${req.params.projectKey}`);
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": connectedPeers[req.params.projectKey] || []}));
-});
-
-
-module.exports = {
-    peerRouter,
-    peerConnection,
-    peerDisconnect,
-    peerError,
-    extractPeerId
-};
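
Of this file only extractPeerId survives: it moves verbatim into utilities/utils/helper.js (added below), and servers/websocket.js switches its import accordingly. The signalling itself (ExpressPeerServer plus the connection/disconnect/error handlers and the /peers endpoints) leaves this service entirely.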

utilities/servers/sourcemaps-handler.js (deleted)

@@ -1,104 +0,0 @@
-'use strict';
-const sourceMap = require('source-map');
-const AWS = require('aws-sdk');
-const sourceMapVersion = require('../package.json').dependencies["source-map"];
-const URL = require('url');
-const getVersion = version => version.replace(/[\^\$\=\~]/, "");
-
-module.exports.sourcemapReader = async event => {
-    sourceMap.SourceMapConsumer.initialize({
-        "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm`
-    });
-    let s3;
-    if (event.S3_HOST) {
-        s3 = new AWS.S3({
-            endpoint: event.S3_HOST,
-            accessKeyId: event.S3_KEY,
-            secretAccessKey: event.S3_SECRET,
-            region: event.region,
-            s3ForcePathStyle: true, // needed with minio?
-            signatureVersion: 'v4'
-        });
-    } else if (process.env.S3_HOST) {
-        s3 = new AWS.S3({
-            endpoint: process.env.S3_HOST,
-            accessKeyId: process.env.S3_KEY,
-            secretAccessKey: process.env.S3_SECRET,
-            s3ForcePathStyle: true, // needed with minio?
-            signatureVersion: 'v4'
-        });
-    } else {
-        s3 = new AWS.S3({
-            'AccessKeyID': process.env.aws_access_key_id,
-            'SecretAccessKey': process.env.aws_secret_access_key,
-            'Region': process.env.aws_region
-        });
-    }
-
-    var options = {
-        Bucket: event.bucket,
-        Key: event.key
-    };
-    return new Promise(function (resolve, reject) {
-        s3.getObject(options, (err, data) => {
-            if (err) {
-                console.log("Get S3 object failed");
-                console.log(err);
-                return reject(err);
-            }
-            let sourcemap = data.Body.toString();
-
-            return new sourceMap.SourceMapConsumer(sourcemap)
-                .then(consumer => {
-                    let results = [];
-                    for (let i = 0; i < event.positions.length; i++) {
-                        let original = consumer.originalPositionFor({
-                            line: event.positions[i].line,
-                            column: event.positions[i].column
-                        });
-                        let url = URL.parse("");
-                        let preview = [];
-                        if (original.source) {
-                            preview = consumer.sourceContentFor(original.source, true);
-                            if (preview !== null) {
-                                preview = preview.split("\n")
-                                    .map((line, i) => [i + 1, line]);
-                                if (event.padding) {
-                                    let start = original.line < event.padding ? 0 : original.line - event.padding;
-                                    preview = preview.slice(start, original.line + event.padding);
-                                }
-                            } else {
-                                console.log("source not found, null preview for:");
-                                console.log(original.source);
-                                preview = []
-                            }
-                            url = URL.parse(original.source);
-                        } else {
-                            console.log("couldn't find original position of:");
-                            console.log({
-                                line: event.positions[i].line,
-                                column: event.positions[i].column
-                            });
-                        }
-                        let result = {
-                            "absPath": url.href,
-                            "filename": url.pathname,
-                            "lineNo": original.line,
-                            "colNo": original.column,
-                            "function": original.name,
-                            "context": preview
-                        };
-                        // console.log(result);
-                        results.push(result);
-                    }
-                    consumer = undefined;
-                    // Use this code if you don't use the http event with the LAMBDA-PROXY integration
-                    return resolve(results);
-                })
-                .finally(() => {
-                    sourcemap = undefined;
-                })
-
-        });
-    });
-};
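
For reference, the request shape this handler consumed, reconstructed from the field accesses above (the values below are invented):

    // what a caller POSTed to the sourcemaps endpoint
    const examplePayload = {
        bucket: "sourcemaps-bucket",            // S3 bucket holding the uploaded map (invented name)
        key: "1/app.js.map",                    // S3 object key of the sourcemap (invented)
        padding: 5,                             // optional: context lines kept around each frame
        positions: [                            // generated positions to resolve
            {line: 1, column: 1043}
        ]
    };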

utilities/servers/sourcemaps-server.js (deleted)

@@ -1,30 +0,0 @@
-var express = require('express');
-var handler = require('./sourcemaps-handler');
-var router = express.Router();
-
-router.post('/', (req, res) => {
-    let data = '';
-    req.on('data', chunk => {
-        data += chunk;
-    });
-    req.on('end', function () {
-        data = JSON.parse(data);
-        console.log("Starting parser for: " + data.key);
-        // process.env = {...process.env, ...data.bucket_config};
-        handler.sourcemapReader(data)
-            .then((results) => {
-                res.statusCode = 200;
-                res.setHeader('Content-Type', 'application/json');
-                res.end(JSON.stringify(results));
-            })
-            .catch((e) => {
-                console.error("Something went wrong");
-                console.error(e);
-                res.statusCode(500);
-                res.end(e);
-            });
-    })
-
-});
-
-module.exports = router;
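
With the handler and this router both deleted, the /sourcemaps mount disappears from app.js and nothing in the remaining service references sourcemaps. (Note the pre-existing res.statusCode(500) in the error branch: statusCode is a property, not a function, so that call would have thrown; it goes away with the file.)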

utilities/servers/websocket.js

@@ -1,8 +1,8 @@
 const _io = require('socket.io');
 const express = require('express');
 const uaParser = require('ua-parser-js');
-const geoip2Reader = require('@maxmind/geoip2-node').Reader;
-const {extractPeerId} = require('./peerjs-server');
+const {extractPeerId} = require('../utils/helper');
+const {geoip} = require('../utils/geoIP');
 const wsRouter = express.Router();
 const UPDATE_EVENT = "UPDATE_SESSION";
 const IDENTITIES = {agent: 'agent', session: 'session'};

@@ -79,7 +79,7 @@ const socketsList = async function (req, res) {
     }
     respond(res, liveSessions);
 }
-wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
+wsRouter.get(`/sockets-list`, socketsList);
 
 const socketsListByProject = async function (req, res) {
     debug && console.log("[WS]looking for available sessions");

@@ -105,7 +105,7 @@ const socketsListByProject = async function (req, res) {
     }
     respond(res, liveSessions[_projectKey] || []);
 }
-wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
+wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
 
 const socketsLive = async function (req, res) {
     debug && console.log("[WS]looking for all available LIVE sessions");

@@ -113,7 +113,7 @@ const socketsLive = async function (req, res) {
     let liveSessions = {};
     let rooms = await getAvailableRooms();
     for (let peerId of rooms) {
-        let {projectKey, sessionId} = extractPeerId(peerId);
+        let {projectKey} = extractPeerId(peerId);
         if (projectKey !== undefined) {
             let connected_sockets = await io.in(peerId).fetchSockets();
             for (let item of connected_sockets) {

@@ -132,7 +132,7 @@ const socketsLive = async function (req, res) {
     }
     respond(res, liveSessions);
 }
-wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
+wsRouter.get(`/sockets-live`, socketsLive);
 
 const socketsLiveByProject = async function (req, res) {
     debug && console.log("[WS]looking for available LIVE sessions");

@@ -141,7 +141,7 @@ const socketsLiveByProject = async function (req, res) {
     let liveSessions = {};
     let rooms = await getAvailableRooms();
     for (let peerId of rooms) {
-        let {projectKey, sessionId} = extractPeerId(peerId);
+        let {projectKey} = extractPeerId(peerId);
         if (projectKey === _projectKey) {
             let connected_sockets = await io.in(peerId).fetchSockets();
             for (let item of connected_sockets) {

@@ -160,7 +160,7 @@ const socketsLiveByProject = async function (req, res) {
     }
     respond(res, liveSessions[_projectKey] || []);
 }
-wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
+wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
 
 const findSessionSocketId = async (io, peerId) => {
     const connected_sockets = await io.in(peerId).fetchSockets();

@@ -204,15 +204,6 @@ async function get_all_agents_ids(io, socket) {
     return agents;
 }
 
-let geoip = null;
-geoip2Reader.open(process.env.MAXMINDDB_FILE, {})
-    .then(reader => {
-        geoip = reader;
-    })
-    .catch(error => {
-        console.log("Error while opening the MAXMINDDB_FILE.")
-        console.error(error);
-    });
-
 function extractSessionInfo(socket) {
     if (socket.handshake.query.sessionInfo !== undefined) {
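
Because app.js now mounts wsRouter at /assist/${S3_KEY}, moving the key out of these route paths leaves the external URLs unchanged; the key check simply lives in one place. A minimal sketch of listing live sessions against a local instance (host and port assumed from app.js):

    const http = require('http');
    const base = `http://127.0.0.1:9001/assist/${process.env.S3_KEY}`;
    http.get(`${base}/sockets-live`, (res) => {
        let body = '';
        res.on('data', (chunk) => body += chunk);
        res.on('end', () => console.log(body)); // live sessions keyed by projectKey
    });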

utilities/utils/HeapSnapshot.js

@@ -31,6 +31,9 @@ async function downloadHeapSnapshot(req, res) {
         return res.end("should wait for done status");
     }
     res.download(location + fileName, function (err) {
         if (err) {
             return console.error("error while uploading HeapSnapshot file");
         }
+        try {
+            fs.unlinkSync(location + fileName)
+        } catch (err) {

@@ -57,7 +60,7 @@ function createNewHeapSnapshot(req, res) {
     res.end(JSON.stringify({path: location + fileName, 'done': creationStatus}));
 }
 
-router.get(`/${process.env.S3_KEY}/status`, getHeapSnapshotStatus);
-router.get(`/${process.env.S3_KEY}/new`, createNewHeapSnapshot);
-router.get(`/${process.env.S3_KEY}/download`, downloadHeapSnapshot);
+router.get(`/status`, getHeapSnapshotStatus);
+router.get(`/new`, createNewHeapSnapshot);
+router.get(`/download`, downloadHeapSnapshot);
 module.exports = {router}
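
Under the new app-level mount these resolve to /heapdump/${S3_KEY}/new, /status and /download. A sketch of the flow, assuming a local instance:

    const base = `http://127.0.0.1:9001/heapdump/${process.env.S3_KEY}`;
    // GET `${base}/new`      -> start writing a .heapsnapshot file
    // GET `${base}/status`   -> poll until creation reports done
    // GET `${base}/download` -> res.download() streams the file, then unlinks it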

utilities/utils/geoIP.js (new file)

@@ -0,0 +1,16 @@
+const geoip2Reader = require('@maxmind/geoip2-node').Reader;
+let geoip = null;
+if (process.env.MAXMINDDB_FILE !== undefined) {
+    geoip2Reader.open(process.env.MAXMINDDB_FILE, {})
+        .then(reader => {
+            geoip = reader;
+        })
+        .catch(error => {
+            console.log("Error while opening the MAXMINDDB_FILE.")
+            console.error(error);
+        });
+} else {
+    console.error("!!! please provide a valid value for MAXMINDDB_FILE env var.");
+}
+
+module.exports = {geoip}
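
One caveat worth flagging: module.exports = {geoip} copies the value of geoip (still null) before the async open() resolves, so const {geoip} = require('../utils/geoIP') in websocket.js will keep seeing null. A live binding needs an accessor; a minimal sketch of an alternative export, not what the commit does:

    // resolves the reader at access time instead of at require time
    module.exports = {
        get geoip() {
            return geoip;
        }
    };

(A destructured import would still snapshot the value, so callers would also need to read require('../utils/geoIP').geoip at the call site.)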

utilities/utils/helper.js (new file)

@@ -0,0 +1,30 @@
+let PROJECT_KEY_LENGTH = parseInt(process.env.PROJECT_KEY_LENGTH) || 20;
+let debug = process.env.debug === "1" || false;
+const extractPeerId = (peerId) => {
+    let splited = peerId.split("-");
+    if (splited.length !== 2) {
+        debug && console.error(`cannot split peerId: ${peerId}`);
+        return {};
+    }
+    if (PROJECT_KEY_LENGTH > 0 && splited[0].length !== PROJECT_KEY_LENGTH) {
+        debug && console.error(`wrong project key length for peerId: ${peerId}`);
+        return {};
+    }
+    return {projectKey: splited[0], sessionId: splited[1]};
+};
+const request_logger = (identity) => {
+    return (req, res, next) => {
+        debug && console.log(identity, new Date().toTimeString(), 'REQUEST', req.method, req.originalUrl);
+        res.on('finish', function () {
+            if (this.statusCode !== 200 || debug) {
+                console.log(new Date().toTimeString(), 'RESPONSE', req.method, req.originalUrl, this.statusCode);
+            }
+        })
+
+        next();
+    }
+};
+
+module.exports = {
+    extractPeerId, request_logger
+};
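
A quick look at what extractPeerId returns, with the default PROJECT_KEY_LENGTH of 20 (the ids below are invented; the key is exactly 20 characters, and the path assumes running from the utilities directory):

    const {extractPeerId} = require('./utils/helper');
    console.log(extractPeerId('abcdefghij1234567890-42'));
    // -> { projectKey: 'abcdefghij1234567890', sessionId: '42' }
    console.log(extractPeerId('not-a-valid-peer-id'));
    // -> {} (more than one '-', so the split is rejected)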