Merge remote-tracking branch 'origin/api-v1.8.0_patch1' into dev

Author: Taha Yassine Kraiem
Date:   2022-09-20 09:06:44 +01:00
Commit: f7dc863250
22 changed files with 116 additions and 86 deletions

View file

@@ -6,6 +6,8 @@ from chalicelib.core import projects
from starlette.exceptions import HTTPException
from os import access, R_OK
+ ASSIST_KEY = config("ASSIST_KEY")
+ ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
@@ -47,7 +49,7 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.post(config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}",
+ connected_peers = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
@@ -78,9 +80,8 @@ def __get_live_sessions_ws(project_id, data):
def get_live_session_by_id(project_id, session_id):
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.get(
- config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}",
- timeout=config("assistTimeout", cast=int, default=5))
+ connected_peers = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
+ timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@@ -108,9 +109,8 @@ def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.get(
- config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}",
- timeout=config("assistTimeout", cast=int, default=5))
+ connected_peers = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
+ timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@@ -138,7 +138,7 @@ def autocomplete(project_id, q: str, key: str = None):
params["key"] = key
try:
results = requests.get(
- config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/autocomplete",
+ ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print("!! issue with the peer-server")

View file

@@ -1,6 +1,5 @@
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import helper, pg_client
from chalicelib.utils import dev
from chalicelib.utils.TimeUTC import TimeUTC
def get_by_url(project_id, data):
@@ -22,8 +21,14 @@ def get_by_url(project_id, data):
GROUP BY selector;""",
args)
- cur.execute(
- query
- )
+ try:
+ cur.execute(query)
+ except Exception as err:
+ print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
+ print(query.decode('UTF-8'))
+ print("--------- PAYLOAD -----------")
+ print(data)
+ print("--------------------")
+ raise err
rows = cur.fetchall()
- return helper.dict_to_camel_case(rows)
+ return helper.dict_to_camel_case(rows)

View file

@@ -90,7 +90,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
r.pop("first_recorded_session_at")
r.pop("first_recorded")
- if recording_state:
+ if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
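The added len(rows) > 0 guard most likely protects the follow-up query: with no projects, ",".join(project_ids) is empty and a bare FROM (VALUES ) clause is invalid SQL. A small, hypothetical illustration of the guarded pattern:

```python
# Hypothetical illustration only: with an empty result set the VALUES list
# would be empty, so the recording-state lookup is skipped entirely.
rows = [{"project_id": 1}, {"project_id": 7}]     # hypothetical query result
if len(rows) > 0:
    project_ids = [f'({r["project_id"]})' for r in rows]
    values_clause = ",".join(project_ids)          # -> "(1),(7)"
    from_clause = f"FROM (VALUES {values_clause}) AS projects(project_id)"
```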

View file

@@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper
def reset(data: schemas.ForgetPasswordPayloadSchema):
- print("====================== reset password ===============")
- print(data)
+ print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}

View file

@@ -2,22 +2,25 @@ import requests
from decouple import config
+ SMR_URL = config("sourcemaps_reader")
+ if '%s' in SMR_URL:
+ if config("SMR_KEY", default=None) is not None:
+ SMR_URL = SMR_URL % config("SMR_KEY")
+ else:
+ SMR_URL = SMR_URL % "smr"
def get_original_trace(key, positions):
payload = {
"key": key,
"positions": positions,
"padding": 5,
"bucket": config('sourcemaps_bucket'),
"S3_KEY": config('S3_KEY', default=config('AWS_ACCESS_KEY_ID')),
"S3_SECRET": config('S3_SECRET', default=config('AWS_SECRET_ACCESS_KEY')),
"region": config('sessions_region', default=config('AWS_DEFAULT_REGION'))
"bucket": config('sourcemaps_bucket')
}
if len(config('S3_HOST', default="")) > 0:
payload["S3_HOST"] = config('S3_HOST')
try:
- r = requests.post(config("sourcemaps_reader"), json=payload,
- timeout=config("sourcemapTimeout", cast=int, default=5))
+ r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
if r.status_code != 200:
print(f"Issue getting sourcemap status_code:{r.status_code}")
return None
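With sourcemaps_reader now carrying a %s placeholder (see the .env changes below), the module resolves the reader key once at import time and falls back to "smr" when SMR_KEY is unset. A short sketch of that resolution with the bundled default value, assuming SMR_KEY is absent:

```python
# Sketch of the new SMR_URL resolution, assuming the default .env value and no
# SMR_KEY in the environment.
SMR_KEY = None                                     # hypothetical: not set
SMR_URL = "http://127.0.0.1:9000/sourcemaps/%s/sourcemaps"
if '%s' in SMR_URL:
    SMR_URL = SMR_URL % (SMR_KEY if SMR_KEY is not None else "smr")
print(SMR_URL)   # http://127.0.0.1:9000/sourcemaps/smr/sourcemaps
```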

View file

@@ -26,9 +26,9 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
jwt_secret="SET A RANDOM STRING HERE"
- ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
- assist=/assist/%s/sockets-live
- assistList=/assist/%s/sockets-list
+ ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
+ assist=/sockets-live
+ assistList=/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
@@ -45,7 +45,7 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
- sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
+ sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
stage=default-foss
version_number=1.4.0
FS_DIR=/mnt/efs

View file

@@ -35,7 +35,9 @@ async def or_middleware(request: Request, call_next):
now = int(time.time() * 1000)
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
- print(f"Execution time: {int(time.time() * 1000) - now} ms")
+ now = int(time.time() * 1000) - now
+ if now > 500:
+ print(f"Execution time: {now} ms")
except Exception as e:
pg_client.close()
raise e
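The middleware now reuses `now` to hold the elapsed time and only logs requests slower than 500 ms. A compact, self-contained version of that logic, with helper.TRACK_TIME replaced by a local flag purely for illustration:

```python
# Standalone sketch of the new timing behaviour; TRACK_TIME stands in for
# helper.TRACK_TIME and the 500 ms threshold matches the patched middleware.
import time

TRACK_TIME = True

def log_if_slow(started_ms: int) -> None:
    if TRACK_TIME:
        elapsed = int(time.time() * 1000) - started_ms
        if elapsed > 500:
            print(f"Execution time: {elapsed} ms")
```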

View file

@@ -103,7 +103,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
r.pop("first_recorded_session_at")
r.pop("first_recorded")
- if recording_state:
+ if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)

View file

@@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper
def reset(data: schemas.ForgetPasswordPayloadSchema):
- print("====================== reset password ===============")
- print(data)
+ print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}

View file

@@ -201,6 +201,6 @@ def get_available_actions(tenant_id):
cron_jobs = [
- {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60),
+ {"func": process_traces_queue, "trigger": "interval", "seconds": config("TRACE_PERIOD", cast=int, default=60),
"misfire_grace_time": 20}
]

View file

@@ -37,9 +37,9 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
jwt_secret="SET A RANDOM STRING HERE"
- ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
- assist=/assist/%s/sockets-live
- assistList=/assist/%s/sockets-list
+ ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
+ assist=/sockets-live
+ assistList=/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
@@ -56,7 +56,7 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
- sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
+ sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
stage=default-ee
version_number=1.0.0
FS_DIR=/mnt/efs
@@ -67,4 +67,5 @@ EXP_METRICS=true
EXP_7D_MV=false
EXP_ALERTS=false
EXP_FUNNELS=false
- EXP_RESOURCES=true
+ EXP_RESOURCES=true
+ TRACE_PERIOD=300
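The new TRACE_PERIOD value feeds the cron definition changed earlier in this commit (traces_period was renamed to TRACE_PERIOD). A hedged sketch of how the interval is read, assuming python-decouple semantics and a placeholder handler:

```python
# Sketch only: process_traces_queue is a placeholder; with the bundled ee .env
# the interval resolves to 300 seconds, otherwise to the default of 60.
from decouple import config

def process_traces_queue():
    pass  # stand-in for the real traces queue processor

cron_jobs = [
    {"func": process_traces_queue, "trigger": "interval",
     "seconds": config("TRACE_PERIOD", cast=int, default=60),
     "misfire_grace_time": 20}
]
```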

View file

@@ -1,6 +1,8 @@
const dumps = require('./utils/HeapSnapshot');
const {request_logger} = require('./utils/helper');
const express = require('express');
+ const assert = require('assert').strict;
let socket;
if (process.env.redis === "true") {
socket = require("./servers/websocket-cluster");
@@ -10,22 +12,25 @@ if (process.env.redis === "true") {
const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = process.env.LISTEN_PORT || 9001;
+ assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
+ const P_KEY = process.env.ASSIST_KEY;
- const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`
- let debug = process.env.debug === "1" || false;
+ const PREFIX = process.env.prefix || `/assist`
+ let debug = process.env.debug === "1";
+ const heapdump = process.env.heapdump === "1";
if (process.env.uws !== "true") {
let wsapp = express();
wsapp.use(express.json());
wsapp.use(express.urlencoded({extended: true}));
wsapp.use(request_logger("[wsapp]"));
- wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => {
+ wsapp.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
res.statusCode = 200;
res.end("ok!");
}
);
- wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router);
- wsapp.use(`${PREFIX}/${process.env.S3_KEY}`, socket.wsRouter);
+ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
+ wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter);
wsapp.enable('trust proxy');
const wsserver = wsapp.listen(PORT, HOST, () => {
console.log(`WS App listening on http://${HOST}:${PORT}`);
@@ -44,9 +49,11 @@ if (process.env.uws !== "true") {
const healthFn = (res, req) => {
res.writeStatus('200 OK').end('ok!');
}
+ uapp.get('/', healthFn);
uapp.get(PREFIX, healthFn);
uapp.get(`${PREFIX}/`, healthFn);
- uapp.get(`${PREFIX}/${process.env.S3_KEY}`, healthFn);
+ uapp.get(`${PREFIX}/${P_KEY}`, healthFn);
+ uapp.get(`${PREFIX}/${P_KEY}/`, healthFn);
/* Either onAborted or simply finished request */
@@ -73,19 +80,19 @@
return fn(req, res);
}
}
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
socket.start(uapp);
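For orientation, the socket endpoints exposed by the uWS branch after the S3_KEY to ASSIST_KEY swap, written out as URL templates; PREFIX and the key value below are assumptions mirroring the defaults above, not values from the repository:

```python
# Assumed values for illustration only; the real server reads them from the
# environment (PREFIX/prefix and ASSIST_KEY).
PREFIX = "/assist"
P_KEY = "my-assist-key"

routes = [
    f"{PREFIX}/{P_KEY}/sockets-list",
    f"{PREFIX}/{P_KEY}/sockets-list/<projectKey>",
    f"{PREFIX}/{P_KEY}/sockets-list/<projectKey>/autocomplete",
    f"{PREFIX}/{P_KEY}/sockets-list/<projectKey>/<sessionId>",
    f"{PREFIX}/{P_KEY}/sockets-live",
    f"{PREFIX}/{P_KEY}/sockets-live/<projectKey>",
    f"{PREFIX}/{P_KEY}/sockets-live/<projectKey>/autocomplete",
    f"{PREFIX}/{P_KEY}/sockets-live/<projectKey>/<sessionId>",
]
print("\n".join(routes))
```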

View file

@@ -28,7 +28,7 @@ const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
console.log(`Using Redis: ${REDIS_URL}`);
let io;
- const debug = process.env.debug === "1" || false;
+ const debug = process.env.debug === "1";
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {

View file

@@ -23,7 +23,7 @@ const {
const wsRouter = express.Router();
let io;
- const debug = process.env.debug === "1" || false;
+ const debug = process.env.debug === "1";
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {

View file

@@ -1,5 +1,5 @@
const helper = require('./helper');
- let debug = process.env.debug === "1" || false;
+ let debug = process.env.debug === "1";
const getBodyFromUWSResponse = async function (res) {
return new Promise(((resolve, reject) => {
let buffer;

View file

@@ -4,16 +4,19 @@ const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./serve
const express = require('express');
const {ExpressPeerServer} = require('peer');
- const debug = process.env.debug === "1" || false;
+ const debug = process.env.debug === "1";
+ const heapdump = process.env.heapdump === "1";
const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = process.env.LISTEN_PORT || 9000;
+ assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
+ const P_KEY = process.env.ASSIST_KEY;
const app = express();
app.use(request_logger("[app]"));
- app.use(`/${process.env.S3_KEY}/assist`, peerRouter);
- app.use(`/${process.env.S3_KEY}/heapdump`, dumps.router);
+ app.use(`/${P_KEY}/assist`, peerRouter);
+ heapdump && app.use(`/${P_KEY}/heapdump`, dumps.router);
const server = app.listen(PORT, HOST, () => {
console.log(`App listening on http://${HOST}:${PORT}`);

sourcemap-reader/run-dev.sh (new executable file, 3 lines added)
View file

@@ -0,0 +1,3 @@
+ #!/bin/zsh
+ MAPPING_WASM=./mappings.wasm npm start

View file

@@ -3,18 +3,21 @@ const sourcemapsReaderServer = require('./servers/sourcemaps-server');
const express = require('express');
const {request_logger} = require("./utils/helper");
- const HOST = process.env.SR_HOST || '127.0.0.1';
- const PORT = process.env.SR_PORT || 9000;
+ const HOST = process.env.SMR_HOST || '127.0.0.1';
+ const PORT = process.env.SMR_PORT || 9000;
+ const PREFIX = process.env.PREFIX || process.env.prefix || ''
+ const P_KEY = process.env.SMR_KEY || 'smr';
+ const heapdump = process.env.heapdump === "1";
const app = express();
app.use(request_logger("[SR]"));
- app.get(['/'], (req, res) => {
+ app.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
res.statusCode = 200;
res.end("ok!");
}
);
- app.use('/sourcemaps', sourcemapsReaderServer);
- app.use('/heapdump', dumps.router);
+ app.use(`${PREFIX}/${P_KEY}/sourcemaps`, sourcemapsReaderServer);
+ heapdump && app.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
const server = app.listen(PORT, HOST, () => {
console.log(`SR App listening on http://${HOST}:${PORT}`);

View file

@@ -12,16 +12,8 @@ console.log(`>sourceMap initialised using ${process.env.MAPPING_WASM || '/mappin
module.exports.sourcemapReader = async event => {
let s3;
- if (event.S3_HOST) {
- s3 = new AWS.S3({
- endpoint: event.S3_HOST,
- accessKeyId: event.S3_KEY,
- secretAccessKey: event.S3_SECRET,
- region: event.region,
- s3ForcePathStyle: true, // needed with minio?
- signatureVersion: 'v4'
- });
- } else if (process.env.S3_HOST) {
+ if (process.env.S3_HOST) {
s3 = new AWS.S3({
endpoint: process.env.S3_HOST,
accessKeyId: process.env.S3_KEY,

View file

@@ -2,18 +2,28 @@ const dumps = require('./utils/HeapSnapshot');
const express = require('express');
const socket = require("./servers/websocket");
const {request_logger} = require("./utils/helper");
+ const assert = require('assert').strict;
- const debug = process.env.debug === "1" || false;
+ const debug = process.env.debug === "1";
+ const heapdump = process.env.heapdump === "1";
const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = process.env.LISTEN_PORT || 9001;
+ assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
+ const P_KEY = process.env.ASSIST_KEY;
+ const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`
const wsapp = express();
wsapp.use(express.json());
wsapp.use(express.urlencoded({extended: true}));
wsapp.use(request_logger("[wsapp]"));
- wsapp.use(`/assist/${process.env.S3_KEY}`, socket.wsRouter);
- wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router);
+ wsapp.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
+ res.statusCode = 200;
+ res.end("ok!");
+ }
+ );
+ wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter);
+ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
const wsserver = wsapp.listen(PORT, HOST, () => {
console.log(`WS App listening on http://${HOST}:${PORT}`);

View file

@@ -21,7 +21,7 @@ const {
const wsRouter = express.Router();
let io;
- const debug = process.env.debug === "1" || false;
+ const debug = process.env.debug === "1";
const createSocketIOServer = function (server, prefix) {
io = _io(server, {

View file

@@ -3,6 +3,7 @@ const v8 = require('v8');
const express = require('express');
const router = express.Router();
+ const heapdump = process.env.heapdump === "1";
const location = '/tmp/';
let creationStatus = null;
let fileName = null;
@@ -60,9 +61,11 @@ function createNewHeapSnapshot(req, res) {
res.end(JSON.stringify({path: location + fileName, 'done': creationStatus}));
}
- router.get(`/status`, getHeapSnapshotStatus);
- router.get(`/new`, createNewHeapSnapshot);
- router.get(`/download`, downloadHeapSnapshot);
+ if (heapdump) {
+ router.get(`/status`, getHeapSnapshotStatus);
+ router.get(`/new`, createNewHeapSnapshot);
+ router.get(`/download`, downloadHeapSnapshot);
+ }
module.exports = {router}
- console.log(`HeapSnapshot enabled. Send a request to "/heapdump/new" to generate a heapdump.`);
+ heapdump && console.log(`HeapSnapshot enabled. Send a request to "/heapdump/new" to generate a heapdump.`);