Merge branch 'dev' of github.com:openreplay/openreplay into funnels

This commit is contained in:
Shekar Siri 2022-04-25 12:07:19 +02:00
commit 53ac4c3321
20 changed files with 2748 additions and 120 deletions

View file

@ -4,7 +4,7 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENV pg_minconn 2
ENV APP_NAME alerts

View file

@ -32,7 +32,7 @@ function make_submodule() {
cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/
# -- end of generated part
}
cp -R ./{Dockerfile.alerts,requirements.txt,.env.default,entrypoint.sh} ./alerts/
cp -R ./{Dockerfile.alerts,requirements.txt,.env.default,entrypoint_alerts.sh} ./alerts/
cp -R ./chalicelib/utils/html ./alerts/chalicelib/utils/html
}

View file

@ -25,7 +25,8 @@ def get_live_sessions_ws(project_id, user_id=None):
if user_id and len(user_id) > 0:
params["userId"] = user_id
try:
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params, timeout=6)
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params,
timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@ -63,7 +64,8 @@ def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}", timeout=6)
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}",
timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)

View file

@ -2,4 +2,4 @@
cd sourcemap-reader
nohup npm start &> /tmp/sourcemap-reader.log &
cd ..
uvicorn app:app --host 0.0.0.0 --reload
uvicorn app:app --host 0.0.0.0 --reload --proxy-headers

3
api/entrypoint_alerts.sh Executable file
View file

@ -0,0 +1,3 @@
#!/bin/bash
uvicorn app:app --host 0.0.0.0 --reload

View file

@ -1,14 +1,17 @@
package messages
import (
"io"
"bytes"
"io"
"github.com/pkg/errors"
)
func ReadBatch(b []byte, callback func(Message)) error {
reader := bytes.NewReader(b)
return ReadBatchReader(bytes.NewReader(b), callback)
}
func ReadBatchReader(reader io.Reader, callback func(Message)) error {
var index uint64
var timestamp int64
for {
@ -21,12 +24,12 @@ func ReadBatch(b []byte, callback func(Message)) error {
msg = transformDepricated(msg)
isBatchMeta := false
switch m := msg.(type){
case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it)
switch m := msg.(type) {
case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it)
if index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
return errors.New("Batch Meta found at the end of the batch")
}
index = m.PageNo << 32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
timestamp = m.Timestamp
isBatchMeta = true
// continue readLoop
@ -34,7 +37,7 @@ func ReadBatch(b []byte, callback func(Message)) error {
if index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
return errors.New("Batch Meta found at the end of the batch")
}
index = m.FirstIndex
index = m.FirstIndex
timestamp = int64(m.Timestamp)
isBatchMeta = true
// continue readLoop
@ -46,23 +49,23 @@ func ReadBatch(b []byte, callback func(Message)) error {
msg.Meta().Index = index
msg.Meta().Timestamp = timestamp
callback(msg)
if !isBatchMeta { // Without that indexes will be unique anyway, though shifted by 1 because BatchMeta is not counted in tracker
if !isBatchMeta { // Without that indexes will be unique anyway, though shifted by 1 because BatchMeta is not counted in tracker
index++
}
}
return errors.New("Error of the codeflow. (Should return on EOF)")
}
const AVG_MESSAGE_SIZE = 40 // TODO: calculate OR calculate dynamically
const AVG_MESSAGE_SIZE = 40 // TODO: calculate OR calculate dynamically
func WriteBatch(mList []Message) []byte {
batch := make([]byte, AVG_MESSAGE_SIZE * len(mList))
batch := make([]byte, AVG_MESSAGE_SIZE*len(mList))
p := 0
for _, msg := range mList {
msgBytes := msg.Encode()
if len(batch) < p + len(msgBytes) {
newBatch := make([]byte, 2*len(batch) + len(msgBytes))
copy(newBatch, batch)
batch = newBatch
if len(batch) < p+len(msgBytes) {
newBatch := make([]byte, 2*len(batch)+len(msgBytes))
copy(newBatch, batch)
batch = newBatch
}
copy(batch[p:], msgBytes)
p += len(msgBytes)
@ -70,12 +73,12 @@ func WriteBatch(mList []Message) []byte {
return batch[:p]
}
func RewriteBatch(b []byte, rewrite func(Message) Message) ([]byte, error) {
mList := make([]Message, 0, len(b)/AVG_MESSAGE_SIZE)
if err := ReadBatch(b, func(m Message) {
func RewriteBatch(reader io.Reader, rewrite func(Message) Message) ([]byte, error) {
mList := make([]Message, 0, 10) // 10?
if err := ReadBatchReader(reader, func(m Message) {
mList = append(mList, rewrite(m))
}); err != nil {
return nil, err
}
return WriteBatch(mList), nil
}
}

View file

@ -3,7 +3,6 @@ package main
import (
"encoding/json"
"errors"
"io/ioutil"
"log"
"math/rand"
"net/http"
@ -76,14 +75,14 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
sessionID, err := flaker.Compose(uint64(startTime.UnixMilli()))
sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6))
if err != nil {
responseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixMilli()}
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
producer.Produce(TOPIC_RAW_WEB, tokenData.ID, Encode(&SessionStart{
@ -108,8 +107,8 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
//delayDuration := time.Now().Sub(startTime)
responseWithJSON(w, &response{
//Timestamp: startTime.UnixMilli(),
//Delay: delayDuration.Milliseconds(),
//Timestamp: startTime.UnixNano() / 1e6,
//Delay: delayDuration.Nanoseconds() / 1e6,
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
@ -125,17 +124,8 @@ func pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) {
}
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
defer body.Close()
buf, err := ioutil.ReadAll(body)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
//log.Printf("Sending batch...")
//startTime := time.Now()
// analyticsMessages := make([]Message, 0, 200)
rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message {
rewritenBuf, err := RewriteBatch(body, func(msg Message) Message {
switch m := msg.(type) {
case *SetNodeAttributeURLBased:
if m.Name == "src" || m.Name == "href" {

View file

@ -5,7 +5,7 @@ RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENV pg_minconn 2
ENV APP_NAME alerts

View file

@ -1,4 +1,5 @@
import os
DATABASE = os.environ['DATABASE_NAME']
from db.api import DBConnection
@ -7,16 +8,17 @@ from db.tables import *
if DATABASE == 'redshift':
from db.loaders.redshift_loader import transit_insert_to_redshift
if DATABASE == 'clickhouse':
elif DATABASE == 'clickhouse':
from db.loaders.clickhouse_loader import insert_to_clickhouse
if DATABASE == 'pg':
elif DATABASE == 'pg':
from db.loaders.postgres_loader import insert_to_postgres
if DATABASE == 'bigquery':
elif DATABASE == 'bigquery':
from db.loaders.bigquery_loader import insert_to_bigquery
from bigquery_utils.create_table import create_tables_bigquery
if DATABASE == 'snowflake':
elif DATABASE == 'snowflake':
from db.loaders.snowflake_loader import insert_to_snowflake
else:
raise Exception(f"{DATABASE}-database not supported")
# create tables if don't exist
try:
@ -35,12 +37,11 @@ try:
db = None
except Exception as e:
print(repr(e))
print("Please create the tables with scripts provided in "
"'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'")
print("Please create the tables with scripts provided in " +
f"'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'")
def insert_batch(db: DBConnection, batch, table, level='normal'):
if len(batch) == 0:
return
df = get_df_from_batch(batch, level=level)
@ -60,4 +61,3 @@ def insert_batch(db: DBConnection, batch, table, level='normal'):
if db.config == 'snowflake':
insert_to_snowflake(db=db, df=df, table=table)

View file

@ -1,4 +1,3 @@
require('dotenv').config()
const dumps = require('./utils/HeapSnapshot');
const {request_logger} = require('./utils/helper');
const express = require('express');

View file

@ -396,7 +396,7 @@ module.exports = {
})
.catch((err) => {
console.log("> redis connection error");
console.error(err);
debug && console.error(err);
process.exit(2);
});
},

View file

@ -23,7 +23,7 @@ function onReject() {
}
function onError(e) {
toast.error(e);
toast.error(typeof e === 'string' ? e : e.message);
}

View file

@ -67,7 +67,7 @@ export default class FilterItem {
value: this.value,
operator: this.operator,
source: this.source,
filters: this.filters.map(i => i.toJson()),
filters: Array.isArray(this.filters) ? this.filters.map(i => i.toJson()) : [],
}
return json
}

View file

@ -79,6 +79,7 @@ import type {
ConnectionInformation,
SetViewportSize,
SetViewportScroll,
MouseClick,
} from './messages';
import type { Timed } from './messages/timed';
@ -91,7 +92,7 @@ export default class MessageDistributor extends StatedScreen {
private readonly connectionInfoManger: ListWalker<ConnectionInformation> = new ListWalker();
private readonly performanceTrackManager: PerformanceTrackManager = new PerformanceTrackManager();
private readonly windowNodeCounter: WindowNodeCounter = new WindowNodeCounter();
private readonly clickManager: ListWalker<Timed> = new ListWalker();
private readonly clickManager: ListWalker<MouseClick> = new ListWalker();
private readonly resizeManager: ListWalker<SetViewportSize> = new ListWalker([]);
private readonly pagesManager: PagesManager;
@ -136,9 +137,6 @@ export default class MessageDistributor extends StatedScreen {
if (e.type === EVENT_TYPES.LOCATION) { //TODO type system
this.locationEventManager.add(e);
}
if (e.type === EVENT_TYPES.CLICK) {
this.clickManager.add(e);
}
});
this.session.errors.forEach(e => {
this.lists.exceptions.add(e);
@ -302,9 +300,9 @@ export default class MessageDistributor extends StatedScreen {
// Moving mouse and setting :hover classes on ready view
this.mouseManager.move(t);
const lastClick = this.clickManager.moveToLast(t);
// if (!!lastClick) {
// this.cursor.click();
// }
if (!!lastClick && t - lastClick.time < 600) { // happened during the last 600ms
this.cursor.click();
}
// After all changes - redraw the marker
//this.marker.redraw();
})
@ -378,6 +376,9 @@ export default class MessageDistributor extends StatedScreen {
case "mouse_move":
this.mouseManager.add(msg);
break;
case "mouse_click":
this.clickManager.add(msg);
break;
case "set_viewport_scroll":
this.scrollManager.add(msg);
break;

View file

@ -3,41 +3,36 @@ import styles from './cursor.css';
export default class Cursor {
private readonly _cursor: HTMLDivElement;
private position: Point = { x: 0, y: 0 }
private readonly cursor: HTMLDivElement;
private readonly position: Point = { x: -1, y: -1 }
constructor(overlay: HTMLDivElement) {
this._cursor = document.createElement('div');
this._cursor.className = styles.cursor;
overlay.appendChild(this._cursor);
//this._click = document.createElement('div');
//this._click.className = styles.click;
//overlay.appendChild(this._click);
this.cursor = document.createElement('div');
this.cursor.className = styles.cursor;
overlay.appendChild(this.cursor);
}
toggle(flag: boolean) {
if (flag) {
this._cursor.style.display = 'block';
this.cursor.style.display = 'block';
} else {
this._cursor.style.display = 'none';
this.cursor.style.display = 'none';
}
}
move({ x, y }: Point) {
this.position.x = x;
this.position.y = y;
this._cursor.style.left = x + 'px';
this._cursor.style.top = y + 'px';
this.cursor.style.left = x + 'px';
this.cursor.style.top = y + 'px';
}
// click() {
// this._cursor.style.left = this._x + 'px';
// this._cursor.style.top = this._y + 'px';
// this._click.style.display = 'block';
// setTimeout(() => {
// this._click.style.display = "none";
// }, 2000);
// }
click() {
console.log("clickong ", styles.clicked)
this.cursor.classList.add(styles.clicked)
setTimeout(() => {
this.cursor.classList.remove(styles.clicked)
}, 600)
}
getPosition(): Point {
return { x: this.position.x, y: this.position.y };

View file

@ -8,7 +8,7 @@ import BaseScreen from './BaseScreen';
export { INITIAL_STATE, State } from './BaseScreen';
export default class Screen extends BaseScreen {
private cursor: Cursor;
public readonly cursor: Cursor;
private substitutor: BaseScreen | null = null;
private inspector: Inspector | null = null;
private marker: Marker | null = null;

View file

@ -3,26 +3,66 @@
position: absolute;
width: 20px;
height: 20px;
/*border-radius: 20px;*/
background-image: url('data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 320 512"><path d="M302.189 329.126H196.105l55.831 135.993c3.889 9.428-.555 19.999-9.444 23.999l-49.165 21.427c-9.165 4-19.443-.571-23.332-9.714l-53.053-129.136-86.664 89.138C18.729 472.71 0 463.554 0 447.977V18.299C0 1.899 19.921-6.096 30.277 5.443l284.412 292.542c11.472 11.179 3.007 31.141-12.5 31.141z"/></svg>');
background-repeat: no-repeat;
/*border: 4px solid rgba(255, 255, 255, .8);
margin-left: -10px;
margin-top: -10px;*/
pointer-events: none;
user-select: none;
}
/*@keyframes click {
}*/
/*.click {
display: block;
/* ====== *
Source: https://github.com/codrops/ClickEffects/blob/master/css/component.css
* ======= */
.cursor::after {
position: absolute;
width: 20px;
height: 20px;
background: yellow;
border-radius: 20px;
border: 4px solid rgba(255, 255, 255, .8);
margin-left: -10px;
margin-top: -10px;
}*/
margin: -35px 0 0 -35px;
width: 70px;
height: 70px;
border-radius: 50%;
content: '';
opacity: 0;
pointer-events: none;
user-select: none;
box-shadow: inset 0 0 0 35px rgba(111,148,182,0);
}
.cursor.clicked::after {
-webkit-animation: click-animation 0.5s ease-out forwards;
animation: click-animation 0.5s ease-out forwards;
}
@-webkit-keyframes click-animation {
0% {
opacity: 1;
-webkit-transform: scale3d(0.4, 0.4, 1);
transform: scale3d(0.4, 0.4, 1);
}
80% {
box-shadow: inset 0 0 0 2px rgba(111,148,182,0.8);
opacity: 0.1;
}
100% {
box-shadow: inset 0 0 0 2px rgba(111,148,182,0.8);
opacity: 0;
-webkit-transform: scale3d(1.2, 1.2, 1);
transform: scale3d(1.2, 1.2, 1);
}
}
@keyframes click-animation {
0% {
opacity: 1;
-webkit-transform: scale3d(0.4, 0.4, 1);
transform: scale3d(0.4, 0.4, 1);
}
80% {
box-shadow: inset 0 0 0 2px rgba(111,148,182,0.8);
opacity: 0.1;
}
100% {
box-shadow: inset 0 0 0 2px rgba(111,148,182,0.8);
opacity: 0;
-webkit-transform: scale3d(1.2, 1.2, 1);
transform: scale3d(1.2, 1.2, 1);
}
}

View file

@ -34,7 +34,6 @@ export default class MouseManager extends ListWalker<MouseMove> {
move(t: number) {
const lastMouseMove = this.moveToLast(t);
if (!!lastMouseMove){
// @ts-ignore TODO
this.screen.cursor.move(lastMouseMove);
//window.getComputedStyle(this.screen.getCursorTarget()).cursor === 'pointer' // might influence performance though
this.updateHover();

View file

@ -21,7 +21,7 @@ const oss = {
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
ICE_SERVERS: process.env.ICE_SERVERS,
TRACKER_VERSION: '3.5.5' // trackerInfo.version,
TRACKER_VERSION: '3.5.9' // trackerInfo.version,
}
module.exports = {

File diff suppressed because it is too large Load diff