diff --git a/README.md b/README.md
index 71e419646..6a644c0bc 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ OpenReplay is a session replay suite you can host yourself, that lets you see wh
- **Omni-search:** Search and filter by almost any user action/criteria, session attribute or technical event, so you can answer any question. No instrumentation required.
- **Funnels:** For surfacing the most impactful issues causing conversion and revenue loss.
- **Fine-grained privacy controls:** Choose what to capture, what to obscure or what to ignore so user data doesn't even reach your servers.
-- **Plugins oriented:** Get to the root cause even faster by tracking application state (Redux, VueX, MobX, NgRx) and logging GraphQL queries (Apollo, Relay) and Fetch requests.
+- **Plugins oriented:** Get to the root cause even faster by tracking application state (Redux, Vuex, MobX, NgRx, Pinia and Zustand) and logging GraphQL queries (Apollo, Relay) and Fetch/Axios requests.
- **Integrations:** Sync your backend logs with your session replays and see what happened front-to-back. OpenReplay supports Sentry, Datadog, CloudWatch, Stackdriver, Elastic and more.
## Deployment Options
diff --git a/api/.dockerignore b/api/.dockerignore
index b6aaccd33..04ff72f11 100644
--- a/api/.dockerignore
+++ b/api/.dockerignore
@@ -4,3 +4,4 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
+Dockerfile*
\ No newline at end of file
diff --git a/api/Dockerfile b/api/Dockerfile
index 8f8691159..d02dda8ba 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -8,6 +8,7 @@ ARG envarg
ENV SOURCE_MAP_VERSION=0.7.4 \
APP_NAME=chalice \
LISTEN_PORT=8000 \
+ MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
ENTERPRISE_BUILD=${envarg}
ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
@@ -20,7 +21,8 @@ RUN cd /work_tmp && npm install
WORKDIR /work
COPY . .
-RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm
+RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
+ && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}
RUN adduser -u 1001 openreplay -D
USER 1001
diff --git a/api/Dockerfile.alerts.dockerignore b/api/Dockerfile.alerts.dockerignore
index 3539023b4..bb4179739 100644
--- a/api/Dockerfile.alerts.dockerignore
+++ b/api/Dockerfile.alerts.dockerignore
@@ -4,6 +4,7 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
+Dockerfile*
app.py
entrypoint_alerts.sh
diff --git a/api/app.py b/api/app.py
index cf00c747b..974d7d8d9 100644
--- a/api/app.py
+++ b/api/app.py
@@ -17,6 +17,7 @@ from routers.subs import dashboard, insights, metrics, v1_api
app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
app.add_middleware(GZipMiddleware, minimum_size=1000)
+
@app.middleware('http')
async def or_middleware(request: Request, call_next):
global OR_SESSION_TOKEN
@@ -28,7 +29,9 @@ async def or_middleware(request: Request, call_next):
now = int(time.time() * 1000)
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
- print(f"Execution time: {int(time.time() * 1000) - now} ms")
+ now = int(time.time() * 1000) - now
+ if now > 500:
+ print(f"Execution time: {now} ms")
except Exception as e:
pg_client.close()
raise e
diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py
index 0f2c515cf..8c4053a9b 100644
--- a/api/chalicelib/core/assist.py
+++ b/api/chalicelib/core/assist.py
@@ -6,6 +6,8 @@ from chalicelib.core import projects
from starlette.exceptions import HTTPException
from os import access, R_OK
+ASSIST_KEY = config("ASSIST_KEY")
+ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
@@ -47,7 +49,7 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.post(config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}",
+ connected_peers = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
@@ -78,9 +80,8 @@ def __get_live_sessions_ws(project_id, data):
def get_live_session_by_id(project_id, session_id):
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.get(
- config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}",
- timeout=config("assistTimeout", cast=int, default=5))
+ connected_peers = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
+ timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@@ -108,9 +109,8 @@ def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
- connected_peers = requests.get(
- config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}",
- timeout=config("assistTimeout", cast=int, default=5))
+ connected_peers = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
+ timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@@ -138,7 +138,7 @@ def autocomplete(project_id, q: str, key: str = None):
params["key"] = key
try:
results = requests.get(
- config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/autocomplete",
+ ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print("!! issue with the peer-server")
diff --git a/api/chalicelib/core/autocomplete.py b/api/chalicelib/core/autocomplete.py
index cd908a1b5..3ad845a14 100644
--- a/api/chalicelib/core/autocomplete.py
+++ b/api/chalicelib/core/autocomplete.py
@@ -52,7 +52,6 @@ def __get_autocomplete_table(value, project_id):
"c_list": tuple(c_list)
})
try:
- print(query)
cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py
index 5aacb1375..fea3fb407 100644
--- a/api/chalicelib/core/heatmaps.py
+++ b/api/chalicelib/core/heatmaps.py
@@ -1,6 +1,5 @@
-from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import helper, pg_client
-from chalicelib.utils import dev
+from chalicelib.utils.TimeUTC import TimeUTC
def get_by_url(project_id, data):
@@ -22,8 +21,14 @@ def get_by_url(project_id, data):
GROUP BY selector;""",
args)
- cur.execute(
- query
- )
+ try:
+ cur.execute(query)
+ except Exception as err:
+ print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
+ print(query.decode('UTF-8'))
+ print("--------- PAYLOAD -----------")
+ print(data)
+ print("--------------------")
+ raise err
rows = cur.fetchall()
- return helper.dict_to_camel_case(rows)
+ return helper.dict_to_camel_case(rows)
diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py
index 5d987e485..bf388c093 100644
--- a/api/chalicelib/core/metrics.py
+++ b/api/chalicelib/core/metrics.py
@@ -1632,7 +1632,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="requests", time_column="timestamp", project=False,
duration=False)
- pg_sub_query_subset.append("requests.status/100 = %(status_code)s")
+ pg_sub_query_subset.append("requests.status_code/100 = %(status_code)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH requests AS (SELECT host, timestamp
@@ -1810,7 +1810,7 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("requests.type = 'fetch'")
pg_sub_query.append("requests.method IS NOT NULL")
- pg_sub_query.append(f"requests.status/100 = {status}")
+ pg_sub_query.append(f"requests.status_code/100 = {status}")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT requests.method,
diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py
index 100fe6765..ba334e101 100644
--- a/api/chalicelib/core/projects.py
+++ b/api/chalicelib/core/projects.py
@@ -90,7 +90,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
r.pop("first_recorded_session_at")
r.pop("first_recorded")
- if recording_state:
+ if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
diff --git a/api/chalicelib/core/reset_password.py b/api/chalicelib/core/reset_password.py
index c15a4639b..2026cd829 100644
--- a/api/chalicelib/core/reset_password.py
+++ b/api/chalicelib/core/reset_password.py
@@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper
def reset(data: schemas.ForgetPasswordPayloadSchema):
- print("====================== reset password ===============")
- print(data)
+ print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}
diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index d6f46da70..d31b8aea0 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -495,7 +495,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
all_issues_with_context,
first_stage, last_stage)
- print("len(transitions) =", len(transitions))
+ # print("len(transitions) =", len(transitions))
if any(all_errors):
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py
index 73341cb4d..8714b9ee2 100644
--- a/api/chalicelib/core/sourcemaps.py
+++ b/api/chalicelib/core/sourcemaps.py
@@ -80,12 +80,7 @@ def get_traces_group(project_id, payload):
payloads = {}
all_exists = True
for i, u in enumerate(frames):
- print("===============================")
- print(u["absPath"])
- print("converted to:")
key = __get_key(project_id, u["absPath"]) # use filename instead?
- print(key)
- print("===============================")
if key not in payloads:
file_exists = s3.exists(config('sourcemaps_bucket'), key)
all_exists = all_exists and file_exists
@@ -104,6 +99,9 @@ def get_traces_group(project_id, payload):
if payloads[key] is None:
continue
key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]])
+ if key_results is None:
+ all_exists = False
+ continue
for i, r in enumerate(key_results):
res_index = payloads[key][i]["resultIndex"]
# function name search by frontend lib is better than sourcemaps' one in most cases
diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py
index 83116aed7..c8918cace 100644
--- a/api/chalicelib/core/sourcemaps_parser.py
+++ b/api/chalicelib/core/sourcemaps_parser.py
@@ -2,20 +2,33 @@ import requests
from decouple import config
+SMR_URL = config("sourcemaps_reader")
+
+if '%s' in SMR_URL:
+ if config("SMR_KEY", default=None) is not None:
+ SMR_URL = SMR_URL % config("SMR_KEY")
+ else:
+ SMR_URL = SMR_URL % "smr"
+
def get_original_trace(key, positions):
payload = {
"key": key,
"positions": positions,
"padding": 5,
- "bucket": config('sourcemaps_bucket'),
- "S3_HOST": config('S3_HOST'),
- "S3_KEY": config('S3_KEY'),
- "S3_SECRET": config('S3_SECRET'),
- "region": config('sessions_region')
+ "bucket": config('sourcemaps_bucket')
}
- r = requests.post(config("sourcemaps_reader"), json=payload)
- if r.status_code != 200:
- return {}
- return r.json()
+ try:
+ r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
+ if r.status_code != 200:
+ print(f"Issue getting sourcemap status_code:{r.status_code}")
+ return None
+ return r.json()
+ except requests.exceptions.Timeout:
+ print("Timeout getting sourcemap")
+ return None
+ except Exception as e:
+ print("Issue getting sourcemap")
+ print(e)
+ return None
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index c4149f49d..8b9001649 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -20,6 +20,8 @@ PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
+logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
+
class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
def __init__(self, minconn, maxconn, *args, **kwargs):
@@ -36,8 +38,15 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
raise e
def putconn(self, *args, **kwargs):
- super().putconn(*args, **kwargs)
- self._semaphore.release()
+ try:
+ super().putconn(*args, **kwargs)
+ self._semaphore.release()
+ except psycopg2.pool.PoolError as e:
+ if str(e) == "trying to put unkeyed connection":
+ print("!!! trying to put unkeyed connection")
+ print(f"env-PG_POOL:{config('PG_POOL', default=None)}")
+ return
+ raise e
postgreSQL_pool: ORThreadedConnectionPool = None
diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py
index efcb09226..f3c580e90 100644
--- a/api/chalicelib/utils/s3.py
+++ b/api/chalicelib/utils/s3.py
@@ -63,9 +63,6 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
def get_file(source_bucket, source_key):
- print("******************************")
- print(f"looking for: {source_key} in {source_bucket}")
- print("******************************")
try:
result = client.get_object(
Bucket=source_bucket,
@@ -73,7 +70,7 @@ def get_file(source_bucket, source_key):
)
except ClientError as ex:
if ex.response['Error']['Code'] == 'NoSuchKey':
- print(f'======> No object found - returning None for {source_bucket}/{source_key}')
+ print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}')
return None
else:
raise ex
diff --git a/api/entrypoint.sh b/api/entrypoint.sh
index 68f24cbd9..7342426c2 100755
--- a/api/entrypoint.sh
+++ b/api/entrypoint.sh
@@ -1,5 +1,5 @@
#!/bin/sh
cd sourcemap-reader
-nohup npm start &> /tmp/sourcemap-reader.log &
+nohup npm start &
cd ..
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
diff --git a/api/env.default b/api/env.default
index 563514d1c..c4388c7d5 100644
--- a/api/env.default
+++ b/api/env.default
@@ -26,9 +26,9 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
jwt_secret="SET A RANDOM STRING HERE"
-ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
-assist=/assist/%s/sockets-live
-assistList=/assist/%s/sockets-list
+ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
+assist=/sockets-live
+assistList=/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
@@ -45,7 +45,7 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
+sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
stage=default-foss
version_number=1.4.0
FS_DIR=/mnt/efs
\ No newline at end of file
diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go
index 629224da7..b81ff9b5a 100644
--- a/backend/cmd/assets/main.go
+++ b/backend/cmd/assets/main.go
@@ -37,11 +37,19 @@ func main() {
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
for iter.Next() {
if iter.Type() == messages.MsgAssetCache {
- msg := iter.Message().Decode().(*messages.AssetCache)
+ m := iter.Message().Decode()
+ if m == nil {
+ return
+ }
+ msg := m.(*messages.AssetCache)
cacher.CacheURL(sessionID, msg.URL)
totalAssets.Add(context.Background(), 1)
} else if iter.Type() == messages.MsgErrorEvent {
- msg := iter.Message().Decode().(*messages.ErrorEvent)
+ m := iter.Message().Decode()
+ if m == nil {
+ return
+ }
+ msg := m.(*messages.ErrorEvent)
if msg.Source != "js_exception" {
continue
}
@@ -55,6 +63,7 @@ func main() {
}
}
}
+ iter.Close()
},
true,
cfg.MessageSizeLimit,
diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go
index 2ea57b459..a807cc253 100644
--- a/backend/cmd/db/main.go
+++ b/backend/cmd/db/main.go
@@ -69,6 +69,9 @@ func main() {
continue
}
msg := iter.Message().Decode()
+ if msg == nil {
+ return
+ }
// Just save session data into db without additional checks
if err := saver.InsertMessage(sessionID, msg); err != nil {
@@ -109,6 +112,7 @@ func main() {
}
})
}
+ iter.Close()
}
// Init consumer
@@ -142,7 +146,7 @@ func main() {
pgDur := time.Now().Sub(start).Milliseconds()
start = time.Now()
- if err := saver.CommitStats(); err != nil {
+ if err := saver.CommitStats(consumer.HasFirstPartition()); err != nil {
log.Printf("Error on stats commit: %v", err)
}
chDur := time.Now().Sub(start).Milliseconds()
diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go
index 524af0894..a2dafa689 100644
--- a/backend/cmd/ender/main.go
+++ b/backend/cmd/ender/main.go
@@ -54,6 +54,7 @@ func main() {
statsLogger.Collect(sessionID, meta)
sessions.UpdateSession(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp)
}
+ iter.Close()
},
false,
cfg.MessageSizeLimit,
diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go
index 977cbda9d..be27a86bd 100644
--- a/backend/cmd/heuristics/main.go
+++ b/backend/cmd/heuristics/main.go
@@ -53,10 +53,18 @@ func main() {
cfg.TopicRawWeb,
},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
+ var lastMessageID uint64
for iter.Next() {
statsLogger.Collect(sessionID, meta)
- builderMap.HandleMessage(sessionID, iter.Message().Decode(), iter.Message().Meta().Index)
+ msg := iter.Message().Decode()
+ if msg == nil {
+ log.Printf("failed batch, sess: %d, lastIndex: %d", sessionID, lastMessageID)
+ continue
+ }
+ lastMessageID = msg.Meta().Index
+ builderMap.HandleMessage(sessionID, msg, iter.Message().Meta().Index)
}
+ iter.Close()
},
false,
cfg.MessageSizeLimit,
diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go
index d247d17b2..bd7fddf20 100644
--- a/backend/cmd/sink/main.go
+++ b/backend/cmd/sink/main.go
@@ -76,7 +76,11 @@ func main() {
iter.Type() == MsgCSSInsertRuleURLBased ||
iter.Type() == MsgAdoptedSSReplaceURLBased ||
iter.Type() == MsgAdoptedSSInsertRuleURLBased {
- msg = assetMessageHandler.ParseAssets(sessionID, msg.Decode()) // TODO: filter type only once (use iterator inide or bring ParseAssets out here).
+ m := msg.Decode()
+ if m == nil {
+ return
+ }
+ msg = assetMessageHandler.ParseAssets(sessionID, m) // TODO: filter type only once (use iterator inside or bring ParseAssets out here).
}
// Filter message
@@ -103,6 +107,7 @@ func main() {
messageSize.Record(context.Background(), float64(len(data)))
savedMessages.Add(context.Background(), 1)
}
+ iter.Close()
},
false,
cfg.MessageSizeLimit,
diff --git a/backend/go.mod b/backend/go.mod
index 5172d7ecb..e38e304a1 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -5,7 +5,7 @@ go 1.18
require (
cloud.google.com/go/logging v1.4.2
github.com/ClickHouse/clickhouse-go/v2 v2.2.0
- github.com/aws/aws-sdk-go v1.35.23
+ github.com/aws/aws-sdk-go v1.44.98
github.com/btcsuite/btcutil v1.0.2
github.com/elastic/go-elasticsearch/v7 v7.13.1
github.com/go-redis/redis v6.15.9+incompatible
@@ -13,9 +13,9 @@ require (
github.com/gorilla/mux v1.8.0
github.com/jackc/pgconn v1.6.0
github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451
+ github.com/jackc/pgtype v1.3.0
github.com/jackc/pgx/v4 v4.6.0
github.com/klauspost/pgzip v1.2.5
- github.com/lib/pq v1.2.0
github.com/oschwald/maxminddb-golang v1.7.0
github.com/pkg/errors v0.9.1
github.com/sethvargo/go-envconfig v0.7.0
@@ -49,7 +49,6 @@ require (
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.0.2 // indirect
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8 // indirect
- github.com/jackc/pgtype v1.3.0 // indirect
github.com/jackc/puddle v1.2.2-0.20220404125616-4e959849469a // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/klauspost/compress v1.15.7 // indirect
diff --git a/backend/go.sum b/backend/go.sum
index 6b76d1278..681b2b9d4 100644
--- a/backend/go.sum
+++ b/backend/go.sum
@@ -73,8 +73,8 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
-github.com/aws/aws-sdk-go v1.35.23 h1:SCP0d0XvyJTDmfnHEQPvBaYi3kea1VNUo7uQmkVgFts=
-github.com/aws/aws-sdk-go v1.35.23/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k=
+github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo=
+github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
diff --git a/backend/internal/db/datasaver/stats.go b/backend/internal/db/datasaver/stats.go
index 26efe51b5..17028ca5c 100644
--- a/backend/internal/db/datasaver/stats.go
+++ b/backend/internal/db/datasaver/stats.go
@@ -22,6 +22,6 @@ func (si *Saver) InsertStats(session *Session, msg Message) error {
return nil
}
-func (si *Saver) CommitStats() error {
+func (si *Saver) CommitStats(optimize bool) error {
return nil
}
diff --git a/backend/pkg/messages/batch.go b/backend/pkg/messages/batch.go
index 955d0cfc0..887e5ddb3 100644
--- a/backend/pkg/messages/batch.go
+++ b/backend/pkg/messages/batch.go
@@ -11,6 +11,7 @@ type Iterator interface {
Next() bool // Return true if we have next message
Type() int // Return type of the next message
Message() Message // Return raw or decoded message
+ Close()
}
type iteratorImpl struct {
@@ -90,10 +91,14 @@ func (i *iteratorImpl) Next() bool {
switch i.msgType {
case MsgBatchMetadata:
if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
- log.Printf("Batch Meta found at the end of the batch")
+ log.Printf("Batch Metadata found at the end of the batch")
return false
}
- m := i.msg.Decode().(*BatchMetadata)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*BatchMetadata)
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
i.timestamp = m.Timestamp
i.version = m.Version
@@ -108,7 +113,11 @@ func (i *iteratorImpl) Next() bool {
log.Printf("Batch Meta found at the end of the batch")
return false
}
- m := i.msg.Decode().(*BatchMeta)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*BatchMeta)
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
i.timestamp = m.Timestamp
isBatchMeta = true
@@ -118,24 +127,44 @@ func (i *iteratorImpl) Next() bool {
log.Printf("Batch Meta found at the end of the batch")
return false
}
- m := i.msg.Decode().(*IOSBatchMeta)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*IOSBatchMeta)
i.index = m.FirstIndex
i.timestamp = int64(m.Timestamp)
isBatchMeta = true
// continue readLoop
case MsgTimestamp:
- m := i.msg.Decode().(*Timestamp)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*Timestamp)
i.timestamp = int64(m.Timestamp)
// No skipping here for making it easy to encode back the same sequence of message
// continue readLoop
case MsgSessionStart:
- m := i.msg.Decode().(*SessionStart)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*SessionStart)
i.timestamp = int64(m.Timestamp)
case MsgSessionEnd:
- m := i.msg.Decode().(*SessionEnd)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*SessionEnd)
i.timestamp = int64(m.Timestamp)
case MsgSetPageLocation:
- m := i.msg.Decode().(*SetPageLocation)
+ msg := i.msg.Decode()
+ if msg == nil {
+ return false
+ }
+ m := msg.(*SetPageLocation)
i.url = m.URL
}
i.msg.Meta().Index = i.index
@@ -156,6 +185,13 @@ func (i *iteratorImpl) Message() Message {
return i.msg
}
+func (i *iteratorImpl) Close() {
+ _, err := i.data.Seek(0, io.SeekEnd)
+ if err != nil {
+ log.Printf("can't set seek pointer at the end: %s", err)
+ }
+}
+
func messageHasSize(msgType uint64) bool {
return !(msgType == 80 || msgType == 81 || msgType == 82)
}
diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go
index c28e07742..e79d1d987 100644
--- a/backend/pkg/messages/filters.go
+++ b/backend/pkg/messages/filters.go
@@ -2,7 +2,7 @@
package messages
func IsReplayerType(id int) bool {
- return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
+ return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 79 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
}
func IsIOSType(id int) bool {
diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go
index 27712cb1f..8cdb95722 100644
--- a/backend/pkg/messages/messages.go
+++ b/backend/pkg/messages/messages.go
@@ -156,6 +156,8 @@ const (
MsgAdoptedSSRemoveOwner = 77
+ MsgZustand = 79
+
MsgIOSBatchMeta = 107
MsgIOSSessionStart = 90
@@ -3038,6 +3040,40 @@ func (msg *AdoptedSSRemoveOwner) TypeID() int {
return 77
}
+type Zustand struct {
+ message
+ Mutation string
+ State string
+}
+
+func (msg *Zustand) Encode() []byte {
+ buf := make([]byte, 21+len(msg.Mutation)+len(msg.State))
+ buf[0] = 79
+ p := 1
+ p = WriteString(msg.Mutation, buf, p)
+ p = WriteString(msg.State, buf, p)
+ return buf[:p]
+}
+
+func (msg *Zustand) EncodeWithIndex() []byte {
+ encoded := msg.Encode()
+ if IsIOSType(msg.TypeID()) {
+ return encoded
+ }
+ data := make([]byte, len(encoded)+8)
+ copy(data[8:], encoded[:])
+ binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index)
+ return data
+}
+
+func (msg *Zustand) Decode() Message {
+ return msg
+}
+
+func (msg *Zustand) TypeID() int {
+ return 79
+}
+
type IOSBatchMeta struct {
message
Timestamp uint64
diff --git a/backend/pkg/messages/raw.go b/backend/pkg/messages/raw.go
index daa59accd..b9dba5de2 100644
--- a/backend/pkg/messages/raw.go
+++ b/backend/pkg/messages/raw.go
@@ -54,6 +54,7 @@ func (m *RawMessage) Decode() Message {
msg, err := ReadMessage(m.tp, bytes.NewReader(m.data[1:]))
if err != nil {
log.Printf("decode err: %s", err)
+ return nil
}
msg.Meta().SetMeta(m.meta)
return msg
diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go
index 2b12601d9..1b0f579af 100644
--- a/backend/pkg/messages/read-message.go
+++ b/backend/pkg/messages/read-message.go
@@ -1306,6 +1306,18 @@ func DecodeAdoptedSSRemoveOwner(reader io.Reader) (Message, error) {
return msg, err
}
+func DecodeZustand(reader io.Reader) (Message, error) {
+ var err error = nil
+ msg := &Zustand{}
+ if msg.Mutation, err = ReadString(reader); err != nil {
+ return nil, err
+ }
+ if msg.State, err = ReadString(reader); err != nil {
+ return nil, err
+ }
+ return msg, err
+}
+
func DecodeIOSBatchMeta(reader io.Reader) (Message, error) {
var err error = nil
msg := &IOSBatchMeta{}
@@ -1939,6 +1951,9 @@ func ReadMessage(t uint64, reader io.Reader) (Message, error) {
case 77:
return DecodeAdoptedSSRemoveOwner(reader)
+ case 79:
+ return DecodeZustand(reader)
+
case 107:
return DecodeIOSBatchMeta(reader)
diff --git a/backend/pkg/queue/types/types.go b/backend/pkg/queue/types/types.go
index aaf6f7afa..0f196c608 100644
--- a/backend/pkg/queue/types/types.go
+++ b/backend/pkg/queue/types/types.go
@@ -9,6 +9,7 @@ type Consumer interface {
Commit() error
CommitBack(gap int64) error
Close()
+ HasFirstPartition() bool
}
type Producer interface {
diff --git a/backend/pkg/redisstream/consumer.go b/backend/pkg/redisstream/consumer.go
index d32972981..bae70120d 100644
--- a/backend/pkg/redisstream/consumer.go
+++ b/backend/pkg/redisstream/consumer.go
@@ -161,3 +161,7 @@ func (c *Consumer) CommitBack(gap int64) error {
func (c *Consumer) Close() {
// noop
}
+
+func (c *Consumer) HasFirstPartition() bool {
+ return false
+}
diff --git a/ee/api/.dockerignore b/ee/api/.dockerignore
index b456d908b..7d122d6cb 100644
--- a/ee/api/.dockerignore
+++ b/ee/api/.dockerignore
@@ -4,6 +4,7 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
+Dockerfile*
app_crons.py
app_alerts.py
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index 1afe8462f..d25a4474d 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -180,16 +180,16 @@ Pipfile
.local/*
/chalicelib/core/alerts.py
-/chalicelib/core/alerts_processor.py
+#exp /chalicelib/core/alerts_processor.py
/chalicelib/core/announcements.py
/chalicelib/core/autocomplete.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/countries.py
-/chalicelib/core/errors.py
+#exp /chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
-/chalicelib/core/events.py
+#exp /chalicelib/core/events.py
/chalicelib/core/events_ios.py
-/chalicelib/core/funnels.py
+#exp /chalicelib/core/funnels.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py
@@ -214,7 +214,7 @@ Pipfile
/chalicelib/core/sessions_assignments.py
/chalicelib/core/sessions_metas.py
/chalicelib/core/sessions_mobs.py
-/chalicelib/core/significance.py
+#exp /chalicelib/core/significance.py
/chalicelib/core/slack.py
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
@@ -255,11 +255,11 @@ Pipfile
/chalicelib/core/heatmaps.py
/routers/subs/insights.py
/schemas.py
-/chalicelib/core/custom_metrics.py
+#exp /chalicelib/core/custom_metrics.py
/chalicelib/core/performance_event.py
/chalicelib/core/saved_search.py
/app_alerts.py
/build_alerts.sh
/routers/subs/metrics.py
/routers/subs/v1_api.py
-/chalicelib/core/dashboards.py
+#exp /chalicelib/core/dashboards.py
diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile
index dad5fa20d..1b23fc6d4 100644
--- a/ee/api/Dockerfile
+++ b/ee/api/Dockerfile
@@ -6,6 +6,7 @@ ARG envarg
ENV SOURCE_MAP_VERSION=0.7.4 \
APP_NAME=chalice \
LISTEN_PORT=8000 \
+ MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
ENTERPRISE_BUILD=${envarg}
ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
@@ -18,7 +19,8 @@ RUN cd /work_tmp && npm install
WORKDIR /work
COPY . .
-RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm
+RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
+ && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}
RUN adduser -u 1001 openreplay -D
USER 1001
diff --git a/ee/api/Dockerfile.alerts.dockerignore b/ee/api/Dockerfile.alerts.dockerignore
index 8e1cbef03..1b8f57638 100644
--- a/ee/api/Dockerfile.alerts.dockerignore
+++ b/ee/api/Dockerfile.alerts.dockerignore
@@ -4,6 +4,7 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
+Dockerfile*
app.py
app_crons.py
diff --git a/ee/api/Dockerfile.crons.dockerignore b/ee/api/Dockerfile.crons.dockerignore
index 38fed8fd0..fa6ca7a65 100644
--- a/ee/api/Dockerfile.crons.dockerignore
+++ b/ee/api/Dockerfile.crons.dockerignore
@@ -4,6 +4,7 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
+Dockerfile*
app.py
app_alerts.py
diff --git a/ee/api/app.py b/ee/api/app.py
index 9f2f9a306..ad9310f95 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -35,7 +35,9 @@ async def or_middleware(request: Request, call_next):
now = int(time.time() * 1000)
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
- print(f"Execution time: {int(time.time() * 1000) - now} ms")
+ now = int(time.time() * 1000) - now
+ if now > 500:
+ print(f"Execution time: {now} ms")
except Exception as e:
pg_client.close()
raise e
diff --git a/ee/api/app_crons.py b/ee/api/app_crons.py
index 97f4b5406..55bc91619 100644
--- a/ee/api/app_crons.py
+++ b/ee/api/app_crons.py
@@ -1,28 +1,33 @@
print("============= CRONS =============")
-import sys
import asyncio
+import sys
from routers.crons import core_dynamic_crons
+ACTIONS = {
+ "TELEMETRY": core_dynamic_crons.telemetry_cron,
+ "JOB": core_dynamic_crons.run_scheduled_jobs,
+ "REPORT": core_dynamic_crons.weekly_report
+}
+
def default_action(action):
async def _func():
print(f"{action} not found in crons-definitions")
+ print("possible actions:")
+ print(ACTIONS.keys())
return _func
async def process(action):
- await {
- "TELEMETRY": core_dynamic_crons.telemetry_cron,
- "JOB": core_dynamic_crons.run_scheduled_jobs,
- "REPORT": core_dynamic_crons.weekly_report2
- }.get(action.upper(), default_action(action))()
+ await ACTIONS.get(action.upper(), default_action(action))()
if __name__ == '__main__':
if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
- print("please provide actions as argument")
+ print("please provide actions as argument\npossible actions:")
+ print(ACTIONS.keys())
else:
print(f"action: {sys.argv[1]}")
asyncio.run(process(sys.argv[1]))
diff --git a/ee/api/chalicelib/core/__init__.py b/ee/api/chalicelib/core/__init__.py
index 4f6268b65..550327129 100644
--- a/ee/api/chalicelib/core/__init__.py
+++ b/ee/api/chalicelib/core/__init__.py
@@ -42,3 +42,6 @@ if config("EXP_FUNNELS", cast=bool, default=False):
from . import significance_exp as significance
else:
from . import significance as significance
+
+if config("EXP_RESOURCES", cast=bool, default=False):
+ print(">>> Using experimental resources for session-replay")
diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py
new file mode 100644
index 000000000..0a8df1c02
--- /dev/null
+++ b/ee/api/chalicelib/core/alerts_processor.py
@@ -0,0 +1,241 @@
+import decimal
+import logging
+
+from decouple import config
+
+import schemas
+from chalicelib.core import alerts_listener
+from chalicelib.core import alerts
+from chalicelib.utils import pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+ from chalicelib.core import sessions_legacy as sessions
+else:
+ from chalicelib.core import sessions
+
+logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+
+LeftToDb = {
+ schemas.AlertColumn.performance__dom_content_loaded__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
+ schemas.AlertColumn.performance__first_meaningful_paint__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
+ schemas.AlertColumn.performance__page_load_time__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
+ schemas.AlertColumn.performance__dom_build_time__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(dom_building_time,0))"},
+ schemas.AlertColumn.performance__speed_index__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
+ schemas.AlertColumn.performance__page_response_time__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(response_time,0))"},
+ schemas.AlertColumn.performance__ttfb__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(first_paint_time,0))"},
+ schemas.AlertColumn.performance__time_to_render__average: {
+ "table": "events.pages INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(visually_complete,0))"},
+ schemas.AlertColumn.performance__image_load_time__average: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
+ schemas.AlertColumn.performance__request_load_time__average: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
+ schemas.AlertColumn.resources__load_time__average: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "AVG(NULLIF(resources.duration,0))"},
+ schemas.AlertColumn.resources__missing__count: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
+ schemas.AlertColumn.errors__4xx_5xx__count: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
+ "condition": "status/100!=2"},
+ schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "COUNT(session_id)", "condition": "status/100=4"},
+ schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "COUNT(session_id)", "condition": "status/100=5"},
+ schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
+ "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+ "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
+ schemas.AlertColumn.performance__crashes__count: {
+ "table": "public.sessions",
+ "formula": "COUNT(DISTINCT session_id)",
+ "condition": "errors_count > 0 AND duration>0"},
+ schemas.AlertColumn.errors__javascript__count: {
+ "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
+ "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
+ schemas.AlertColumn.errors__backend__count: {
+ "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
+ "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
+}
+
+# This is the frequency of execution for each threshold
+TimeInterval = {
+ 15: 3,
+ 30: 5,
+ 60: 10,
+ 120: 20,
+ 240: 30,
+ 1440: 60,
+}
+
+
+def can_check(a) -> bool:
+ now = TimeUTC.now()
+
+ repetitionBase = a["options"]["currentPeriod"] \
+ if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
+ and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
+ else a["options"]["previousPeriod"]
+
+ if TimeInterval.get(repetitionBase) is None:
+ logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
+ return False
+
+ return (a["options"]["renotifyInterval"] <= 0 or
+ a["options"].get("lastNotification") is None or
+ a["options"]["lastNotification"] <= 0 or
+ ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
+ and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
+
+
+def Build(a):
+ now = TimeUTC.now()
+ params = {"project_id": a["projectId"], "now": now}
+ full_args = {}
+ j_s = True
+ if a["seriesId"] is not None:
+ a["filter"]["sort"] = "session_id"
+ a["filter"]["order"] = schemas.SortOrderType.desc
+ a["filter"]["startDate"] = -1
+ a["filter"]["endDate"] = TimeUTC.now()
+ full_args, query_part = sessions.search_query_parts(
+ data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
+ issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
+ subQ = f"""SELECT COUNT(session_id) AS value
+ {query_part}"""
+ else:
+ colDef = LeftToDb[a["query"]["left"]]
+ subQ = f"""SELECT {colDef["formula"]} AS value
+ FROM {colDef["table"]}
+ WHERE project_id = %(project_id)s
+ {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
+ j_s = colDef.get("joinSessions", True)
+
+ q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
+
+ if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
+ if a["seriesId"] is not None:
+ q += f""" FROM ({subQ}) AS stat"""
+ else:
+ q += f""" FROM ({subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
+ {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
+ {"AND sessions.start_ts <= %(now)s" if j_s else ""}) AS stat"""
+ params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
+ else:
+ if a["change"] == schemas.AlertDetectionType.change:
+ if a["seriesId"] is not None:
+ sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
+ sub1 = f"SELECT (({subQ})-({sub2})) AS value"
+ q += f" FROM ( {sub1} ) AS stat"
+ params = {**params, **full_args,
+ "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
+ "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
+ else:
+                    sub1 = f"""{subQ} AND timestamp>=%(startDate)s
+                                           AND timestamp<=%(now)s
+ {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
+ {"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
+ params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
+ sub2 = f"""{subQ} AND timestamp<%(startDate)s
+ AND timestamp>=%(timestamp_sub2)s
+ {"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
+ params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
+ sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
+ q += f" FROM ( {sub1} ) AS stat"
+
+ else:
+ if a["seriesId"] is not None:
+ sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
+ sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
+ q += f" FROM ({sub1}) AS stat"
+ params = {**params, **full_args,
+ "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
+ "timestamp_sub2": TimeUTC.now() \
+ - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
+ * 60 * 1000}
+ else:
+ sub1 = f"""{subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
+ {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
+ {"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
+ params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
+ sub2 = f"""{subQ} AND timestamp<%(startDate)s
+ AND timestamp>=%(timestamp_sub2)s
+ {"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
+ params["timestamp_sub2"] = TimeUTC.now() \
+ - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
+ sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
+ q += f" FROM ({sub1}) AS stat"
+
+ return q, params
+
+
+def process():
+ notifications = []
+ all_alerts = alerts_listener.get_all_alerts()
+ with pg_client.PostgresClient() as cur:
+ for alert in all_alerts:
+ if can_check(alert):
+ logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
+ query, params = Build(alert)
+ query = cur.mogrify(query, params)
+ logging.debug(alert)
+ logging.debug(query)
+ try:
+ cur.execute(query)
+ result = cur.fetchone()
+ if result["valid"]:
+ logging.info("Valid alert, notifying users")
+ notifications.append(generate_notification(alert, result))
+ except Exception as e:
+ logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
+ logging.error(str(e))
+ logging.error(query)
+ if len(notifications) > 0:
+ cur.execute(
+                cur.mogrify(f"""UPDATE public.alerts
+ SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
+ WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
+ if len(notifications) > 0:
+ alerts.process_notifications(notifications)
+
+
+def generate_notification(alert, result):
+ return {
+ "alertId": alert["alertId"],
+ "tenantId": alert["tenantId"],
+ "title": alert["name"],
+ "description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
+ "buttonText": "Check metrics for more details",
+ "buttonUrl": f"/{alert['projectId']}/metrics",
+ "imageUrl": None,
+ "options": {"source": "ALERT", "sourceId": alert["alertId"],
+ "sourceMeta": alert["detectionMethod"],
+ "message": alert["options"]["message"], "projectId": alert["projectId"],
+ "data": {"title": alert["name"],
+ "limitValue": alert["query"]["right"],
+ "actualValue": float(result["value"]) \
+ if isinstance(result["value"], decimal.Decimal) \
+ else result["value"],
+ "operator": alert["query"]["operator"],
+ "trigger": alert["query"]["left"],
+ "alertId": alert["alertId"],
+ "detectionMethod": alert["detectionMethod"],
+ "currentPeriod": alert["options"]["currentPeriod"],
+ "previousPeriod": alert["options"]["previousPeriod"],
+ "createdAt": TimeUTC.now()}},
+ }
diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py
new file mode 100644
index 000000000..b925429f6
--- /dev/null
+++ b/ee/api/chalicelib/core/custom_metrics.py
@@ -0,0 +1,551 @@
+import json
+from typing import Union
+
+import schemas
+from chalicelib.core import funnels, issues
+from chalicelib.utils import helper, pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
+from decouple import config
+
+if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
+ print(">>> Using experimental error search")
+ from . import errors_exp as errors
+else:
+ from . import errors as errors
+
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+ from chalicelib.core import sessions_legacy as sessions
+else:
+ from chalicelib.core import sessions
+
+PIE_CHART_GROUP = 5
+
+
+def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
+ results = []
+ for i, s in enumerate(data.series):
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
+ view_type=data.view_type, metric_type=data.metric_type,
+ metric_of=data.metric_of, metric_value=data.metric_value))
+ if data.view_type == schemas.MetricTimeseriesViewType.progress:
+ r = {"count": results[-1]}
+ diff = s.filter.endDate - s.filter.startDate
+ s.filter.endDate = s.filter.startDate
+ s.filter.startDate = s.filter.endDate - diff
+ r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
+ view_type=data.view_type, metric_type=data.metric_type,
+ metric_of=data.metric_of, metric_value=data.metric_value)
+ r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
+ # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \
+ # if r["previousCount"] > 0 else 0
+ r["seriesName"] = s.name if s.name else i + 1
+ r["seriesId"] = s.series_id if s.series_id else None
+ results[-1] = r
+ elif data.view_type == schemas.MetricTableViewType.pie_chart:
+ if len(results[i].get("values", [])) > PIE_CHART_GROUP:
+ results[i]["values"] = results[i]["values"][:PIE_CHART_GROUP] \
+ + [{
+ "name": "Others", "group": True,
+ "sessionCount": sum(r["sessionCount"] for r in results[i]["values"][PIE_CHART_GROUP:])
+ }]
+
+ return results
+
+
+def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
+ return data.metric_type == schemas.MetricType.funnel
+
+
+def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
+ if len(data.series) == 0:
+ return {
+ "stages": [],
+ "totalDropDueToIssues": 0
+ }
+ data.series[0].filter.startDate = data.startTimestamp
+ data.series[0].filter.endDate = data.endTimestamp
+ return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
+
+
+def __is_errors_list(data):
+ return data.metric_type == schemas.MetricType.table \
+ and data.metric_of == schemas.TableMetricOfType.errors
+
+
+def __get_errors_list(project_id, user_id, data):
+ if len(data.series) == 0:
+ return {
+ "total": 0,
+ "errors": []
+ }
+ data.series[0].filter.startDate = data.startTimestamp
+ data.series[0].filter.endDate = data.endTimestamp
+ data.series[0].filter.page = data.page
+ data.series[0].filter.limit = data.limit
+ return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
+
+
+def __is_sessions_list(data):
+ return data.metric_type == schemas.MetricType.table \
+ and data.metric_of == schemas.TableMetricOfType.sessions
+
+
+def __get_sessions_list(project_id, user_id, data):
+ if len(data.series) == 0:
+ print("empty series")
+ return {
+ "total": 0,
+ "sessions": []
+ }
+ data.series[0].filter.startDate = data.startTimestamp
+ data.series[0].filter.endDate = data.endTimestamp
+ data.series[0].filter.page = data.page
+ data.series[0].filter.limit = data.limit
+ return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
+
+
+def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
+ if __is_funnel_chart(data):
+ return __get_funnel_chart(project_id=project_id, data=data)
+ elif __is_errors_list(data):
+ return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
+ elif __is_sessions_list(data):
+ return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
+
+ series_charts = __try_live(project_id=project_id, data=data)
+ if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
+ return series_charts
+ results = [{}] * len(series_charts[0])
+ for i in range(len(results)):
+ for j, series_chart in enumerate(series_charts):
+ results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+ data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
+ return results
+
+
+def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
+ schemas.CustomMetricSessionsPayloadSchema]) \
+ -> Union[schemas.CreateCustomMetricsSchema, None]:
+ if data.series is not None and len(data.series) > 0:
+ metric["series"] = data.series
+ metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
+ if len(data.filters) > 0 or len(data.events) > 0:
+ for s in metric.series:
+ if len(data.filters) > 0:
+ s.filter.filters += data.filters
+ if len(data.events) > 0:
+ s.filter.events += data.events
+ return metric
+
+
+def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
+ if metric is None:
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
+
+ return merged_live(project_id=project_id, data=metric, user_id=user_id)
+ # if __is_funnel_chart(metric):
+ # return __get_funnel_chart(project_id=project_id, data=metric)
+ # elif __is_errors_list(metric):
+ # return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
+ #
+ # series_charts = __try_live(project_id=project_id, data=metric)
+ # if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
+ # return series_charts
+ # results = [{}] * len(series_charts[0])
+ # for i in range(len(results)):
+ # for j, series_chart in enumerate(series_charts):
+ # results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+ # metric.series[j].name: series_chart[i]["count"]}
+ # return results
+
+
+def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
+ if metric is None:
+ return None
+ results = []
+ for s in metric.series:
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ s.filter.limit = data.limit
+ s.filter.page = data.page
+ results.append({"seriesId": s.series_id, "seriesName": s.name,
+ **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
+
+ return results
+
+
+def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
+ if metric is None:
+ return None
+ for s in metric.series:
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ s.filter.limit = data.limit
+ s.filter.page = data.page
+ return {"seriesId": s.series_id, "seriesName": s.name,
+ **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
+
+
+def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
+ if metric is None:
+ return None
+ for s in metric.series:
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ s.filter.limit = data.limit
+ s.filter.page = data.page
+ return {"seriesId": s.series_id, "seriesName": s.name,
+ **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
+
+
+def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
+ results = []
+ if data.series is None:
+ return results
+ for s in data.series:
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ s.filter.limit = data.limit
+ s.filter.page = data.page
+ results.append({"seriesId": None, "seriesName": s.name,
+ **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
+
+ return results
+
+
+def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
+ with pg_client.PostgresClient() as cur:
+ _data = {}
+ for i, s in enumerate(data.series):
+ for k in s.dict().keys():
+ _data[f"{k}_{i}"] = s.__getattribute__(k)
+ _data[f"index_{i}"] = i
+ _data[f"filter_{i}"] = s.filter.json()
+ series_len = len(data.series)
+ data.series = None
+ params = {"user_id": user_id, "project_id": project_id,
+ "default_config": json.dumps(data.config.dict()),
+ **data.dict(), **_data}
+ query = cur.mogrify(f"""\
+ WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
+ view_type, metric_type, metric_of, metric_value,
+ metric_format, default_config)
+ VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
+ %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
+ %(metric_format)s, %(default_config)s)
+ RETURNING *)
+ INSERT
+ INTO metric_series(metric_id, index, name, filter)
+ VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
+ for i in range(series_len)])}
+ RETURNING metric_id;""", params)
+
+ cur.execute(
+ query
+ )
+ r = cur.fetchone()
+ if dashboard:
+ return r["metric_id"]
+ return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
+
+
+def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ series_ids = [r["seriesId"] for r in metric["series"]]
+ n_series = []
+ d_series_ids = []
+ u_series = []
+ u_series_ids = []
+ params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
+ "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
+ "metric_type": data.metric_type, "metric_of": data.metric_of,
+ "metric_value": data.metric_value, "metric_format": data.metric_format}
+ for i, s in enumerate(data.series):
+ prefix = "u_"
+ if s.index is None:
+ s.index = i
+ if s.series_id is None or s.series_id not in series_ids:
+ n_series.append({"i": i, "s": s})
+ prefix = "n_"
+ else:
+ u_series.append({"i": i, "s": s})
+ u_series_ids.append(s.series_id)
+ ns = s.dict()
+ for k in ns.keys():
+ if k == "filter":
+ ns[k] = json.dumps(ns[k])
+ params[f"{prefix}{k}_{i}"] = ns[k]
+ for i in series_ids:
+ if i not in u_series_ids:
+ d_series_ids.append(i)
+ params["d_series_ids"] = tuple(d_series_ids)
+
+ with pg_client.PostgresClient() as cur:
+ sub_queries = []
+ if len(n_series) > 0:
+ sub_queries.append(f"""\
+ n AS (INSERT INTO metric_series (metric_id, index, name, filter)
+ VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
+ for s in n_series])}
+ RETURNING 1)""")
+ if len(u_series) > 0:
+ sub_queries.append(f"""\
+ u AS (UPDATE metric_series
+ SET name=series.name,
+ filter=series.filter,
+ index=series.index
+ FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
+ for s in u_series])}) AS series(series_id, index, name, filter)
+ WHERE metric_series.metric_id =%(metric_id)s AND metric_series.series_id=series.series_id
+ RETURNING 1)""")
+ if len(d_series_ids) > 0:
+ sub_queries.append("""\
+ d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s
+ RETURNING 1)""")
+ query = cur.mogrify(f"""\
+ {"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
+ UPDATE metrics
+ SET name = %(name)s, is_public= %(is_public)s,
+ view_type= %(view_type)s, metric_type= %(metric_type)s,
+ metric_of= %(metric_of)s, metric_value= %(metric_value)s,
+ metric_format= %(metric_format)s,
+ edited_at = timezone('utc'::text, now())
+ WHERE metric_id = %(metric_id)s
+ AND project_id = %(project_id)s
+ AND (user_id = %(user_id)s OR is_public)
+ RETURNING metric_id;""", params)
+ cur.execute(query)
+ return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
+
+
+def get_all(project_id, user_id, include_series=False):
+ with pg_client.PostgresClient() as cur:
+ sub_join = ""
+ if include_series:
+ sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)"""
+ cur.execute(
+ cur.mogrify(
+ f"""SELECT *
+ FROM metrics
+ {sub_join}
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT DISTINCT dashboard_id, name, is_public
+ FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
+ WHERE deleted_at ISNULL
+ AND dashboard_widgets.metric_id = metrics.metric_id
+ AND project_id = %(project_id)s
+ AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)
+ LEFT JOIN LATERAL (SELECT email AS owner_email
+ FROM users
+ WHERE deleted_at ISNULL
+ AND users.user_id = metrics.user_id
+ ) AS owner ON (TRUE)
+ WHERE metrics.project_id = %(project_id)s
+ AND metrics.deleted_at ISNULL
+ AND (user_id = %(user_id)s OR metrics.is_public)
+ ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
+ {"project_id": project_id, "user_id": user_id}
+ )
+ )
+ rows = cur.fetchall()
+ if include_series:
+ for r in rows:
+ # r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+ for s in r["series"]:
+ s["filter"] = helper.old_search_payload_to_flat(s["filter"])
+ else:
+ for r in rows:
+ r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+ r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
+ rows = helper.list_to_camel_case(rows)
+ return rows
+
+
+def delete(project_id, metric_id, user_id):
+ with pg_client.PostgresClient() as cur:
+ cur.execute(
+ cur.mogrify("""\
+ UPDATE public.metrics
+ SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
+ WHERE project_id = %(project_id)s
+ AND metric_id = %(metric_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
+ {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
+ )
+
+ return {"state": "success"}
+
+
+def get(metric_id, project_id, user_id, flatten=True):
+ with pg_client.PostgresClient() as cur:
+ cur.execute(
+ cur.mogrify(
+ """SELECT *
+ FROM metrics
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT dashboard_id, name, is_public
+ FROM dashboards
+ WHERE deleted_at ISNULL
+ AND project_id = %(project_id)s
+ AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)
+ LEFT JOIN LATERAL (SELECT email AS owner_email
+ FROM users
+ WHERE deleted_at ISNULL
+ AND users.user_id = metrics.user_id
+ ) AS owner ON (TRUE)
+ WHERE metrics.project_id = %(project_id)s
+ AND metrics.deleted_at ISNULL
+ AND (metrics.user_id = %(user_id)s OR metrics.is_public)
+ AND metrics.metric_id = %(metric_id)s
+ ORDER BY created_at;""",
+ {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
+ )
+ )
+ row = cur.fetchone()
+ if row is None:
+ return None
+ row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+ row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
+ if flatten:
+ for s in row["series"]:
+ s["filter"] = helper.old_search_payload_to_flat(s["filter"])
+ return helper.dict_to_camel_case(row)
+
+
+def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
+ with pg_client.PostgresClient() as cur:
+ sub_query = ""
+ if include_dashboard:
+ sub_query = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT dashboard_id, name, is_public
+ FROM dashboards
+ WHERE deleted_at ISNULL
+ AND project_id = %(project_id)s
+ AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)"""
+ cur.execute(
+ cur.mogrify(
+ f"""SELECT *
+ FROM metrics
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ {sub_query}
+ WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL)
+ AND metrics.deleted_at ISNULL
+ AND (metrics.user_id = %(user_id)s OR metrics.is_public)
+ AND metrics.metric_id = %(metric_id)s
+ ORDER BY created_at;""",
+ {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
+ )
+ )
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
+def get_series_for_alert(project_id, user_id):
+ with pg_client.PostgresClient() as cur:
+ cur.execute(
+ cur.mogrify(
+ """SELECT series_id AS value,
+ metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
+ 'count' AS unit,
+ FALSE AS predefined,
+ metric_id,
+ series_id
+ FROM metric_series
+ INNER JOIN metrics USING (metric_id)
+ WHERE metrics.deleted_at ISNULL
+ AND metrics.project_id = %(project_id)s
+ AND metrics.metric_type = 'timeseries'
+ AND (user_id = %(user_id)s OR is_public)
+ ORDER BY name;""",
+ {"project_id": project_id, "user_id": user_id}
+ )
+ )
+ rows = cur.fetchall()
+ return helper.list_to_camel_case(rows)
+
+
+def change_state(project_id, metric_id, user_id, status):
+ with pg_client.PostgresClient() as cur:
+ cur.execute(
+ cur.mogrify("""\
+ UPDATE public.metrics
+ SET active = %(status)s
+ WHERE metric_id = %(metric_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
+ {"metric_id": metric_id, "status": status, "user_id": user_id})
+ )
+ return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
+
+
+def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
+ data: schemas.CustomMetricSessionsPayloadSchema
+ # , range_value=None, start_date=None, end_date=None
+ ):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
+ if metric is None:
+ return None
+ for s in metric.series:
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
+ s.filter.limit = data.limit
+ s.filter.page = data.page
+ issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
+ issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
+ issue = None
+ for i in issues_list:
+ if i.get("issueId", "") == issue_id:
+ issue = i
+ break
+ if issue is None:
+ issue = issues.get(project_id=project_id, issue_id=issue_id)
+ if issue is not None:
+ issue = {**issue,
+ "affectedSessions": 0,
+ "affectedUsers": 0,
+ "conversionImpact": 0,
+ "lostConversions": 0,
+ "unaffectedSessions": 0}
+ return {"seriesId": s.series_id, "seriesName": s.name,
+ "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
+ issue=issue, data=s.filter)
+ if issue is not None else {"total": 0, "sessions": []},
+ "issue": issue}
diff --git a/ee/api/chalicelib/core/dashboards.py b/ee/api/chalicelib/core/dashboards.py
new file mode 100644
index 000000000..d96356df1
--- /dev/null
+++ b/ee/api/chalicelib/core/dashboards.py
@@ -0,0 +1,333 @@
+import json
+
+import schemas
+from chalicelib.core import custom_metrics
+from chalicelib.utils import helper
+from chalicelib.utils import pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
+from decouple import config
+
+if config("EXP_METRICS", cast=bool, default=False):
+ from . import metrics_exp as metrics
+else:
+ from . import metrics as metrics
+
+# category name should be lower cased
+# Human-readable blurbs shown next to each template category in the UI;
+# looked up via `r["category"].lower()` in get_templates, missing keys -> "".
+CATEGORY_DESCRIPTION = {
+    'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
+    'custom': 'Previously created custom metrics by me and my team.',
+    'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
+    'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
+    'resources': 'Find out which resources are missing and those that may be slowing your web app.'
+}
+
+
+def get_templates(project_id, user_id):
+    """Return metric templates grouped by category, camelCased for the API.
+
+    Includes global templates (project_id IS NULL) plus the project's own
+    metrics visible to `user_id` (owned or public), each with its series
+    aggregated as JSON. Categories are ordered by a fixed priority list.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
+                                    FROM (SELECT * , default_config AS config
+                                          FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
+                                                    FROM metric_series
+                                                    WHERE metric_series.metric_id = metrics.metric_id
+                                                      AND metric_series.deleted_at ISNULL
+                                                    ) AS metric_series ON (TRUE)
+                                          WHERE deleted_at IS NULL
+                                            AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
+                                         ) AS metrics
+                                    GROUP BY category
+                                    ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
+                               {"project_id": project_id, "userId": user_id})
+        cur.execute(pg_query)
+        rows = cur.fetchall()
+        for r in rows:
+            r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
+            for w in r["widgets"]:
+                # DB timestamps -> epoch millis for the API payload
+                w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
+                w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+                for s in w["series"]:
+                    s["filter"] = helper.old_search_payload_to_flat(s["filter"])
+
+    return helper.list_to_camel_case(rows)
+
+
+def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
+    """Create a dashboard and, optionally, attach initial metric widgets.
+
+    When `data.metrics` is non-empty the INSERT is wrapped in a CTE so the
+    widgets can reference the new dashboard_id; each widget's config merges
+    the metric's default_config with its position in the submitted list.
+    Returns {"data": dashboard} or an error payload.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
+                        VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
+                        RETURNING *"""
+        params = {"userId": user_id, "projectId": project_id, **data.dict()}
+        if data.metrics is not None and len(data.metrics) > 0:
+            pg_query = f"""WITH dash AS ({pg_query})
+                           INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+                           VALUES {",".join([f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])}
+                           RETURNING (SELECT dashboard_id FROM dash)"""
+            for i, m in enumerate(data.metrics):
+                params[f"metric_id_{i}"] = m
+                # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
+                #     .get("properties", {}).get("config", {}).get("default", {})
+                # params[f"config_{i}"]["position"] = i
+                # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
+                params[f"config_{i}"] = json.dumps({"position": i})
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+        if row is None:
+            return {"errors": ["something went wrong while creating the dashboard"]}
+    return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
+
+
+def get_dashboards(project_id, user_id):
+    """List the project's non-deleted dashboards visible to `user_id`
+    (owned or public), camelCased, without their widgets."""
+    with pg_client.PostgresClient() as cur:
+        pg_query = f"""SELECT *
+                       FROM dashboards
+                       WHERE deleted_at ISNULL
+                         AND project_id = %(projectId)s
+                         AND (user_id = %(userId)s OR is_public);"""
+        params = {"userId": user_id, "projectId": project_id}
+        cur.execute(cur.mogrify(pg_query, params))
+        rows = cur.fetchall()
+    return helper.list_to_camel_case(rows)
+
+
+def get_dashboard(project_id, user_id, dashboard_id):
+    """Fetch one dashboard with its widgets (metrics + series) as JSON.
+
+    Widgets include global metrics (project_id IS NULL) as well as the
+    project's own; returns None when the dashboard is deleted, belongs to
+    another project, or is private to another user.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
+                        FROM dashboards
+                                 LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
+                                                    FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
+                                                          FROM metrics
+                                                                   INNER JOIN dashboard_widgets USING (metric_id)
+                                                                   LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
+                                                                                      FROM metric_series
+                                                                                      WHERE metric_series.metric_id = metrics.metric_id
+                                                                                        AND metric_series.deleted_at ISNULL
+                                                                       ) AS metric_series ON (TRUE)
+                                                          WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
+                                                            AND metrics.deleted_at ISNULL
+                                                            AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
+                            ) AS all_metric_widgets ON (TRUE)
+                        WHERE dashboards.deleted_at ISNULL
+                          AND dashboards.project_id = %(projectId)s
+                          AND dashboard_id = %(dashboard_id)s
+                          AND (dashboards.user_id = %(userId)s OR is_public);"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+        if row is not None:
+            # normalize DB timestamps to epoch millis at every nesting level
+            row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+            for w in row["widgets"]:
+                w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
+                w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+                for s in w["series"]:
+                    s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
+    return helper.dict_to_camel_case(row)
+
+
+def delete_dashboard(project_id, user_id, dashboard_id):
+    """Soft-delete a dashboard (sets deleted_at); always reports success.
+
+    NOTE(review): the predicate allows any user to delete a *public*
+    dashboard they do not own — confirm this is the intended policy.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """UPDATE dashboards
+                      SET deleted_at = timezone('utc'::text, now())
+                      WHERE dashboards.project_id = %(projectId)s
+                        AND dashboard_id = %(dashboard_id)s
+                        AND (dashboards.user_id = %(userId)s OR is_public);"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
+        cur.execute(cur.mogrify(pg_query, params))
+    return {"data": {"success": True}}
+
+
+def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
+    """Update a dashboard's name/description (and optionally visibility /
+    pinned state), appending any new metric widgets after the existing ones.
+
+    The widget count is read first so appended widgets get positions that
+    continue from the current layout. Returns the refreshed dashboard.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """SELECT COALESCE(COUNT(*),0) AS count
+                      FROM dashboard_widgets
+                      WHERE dashboard_id = %(dashboard_id)s;"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+        offset = row["count"]
+        # is_public / is_pinned are only touched when explicitly provided
+        pg_query = f"""UPDATE dashboards
+                       SET name = %(name)s,
+                            description= %(description)s
+                           {", is_public = %(is_public)s" if data.is_public is not None else ""}
+                           {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
+                       WHERE dashboards.project_id = %(projectId)s
+                         AND dashboard_id = %(dashboard_id)s
+                         AND (dashboards.user_id = %(userId)s OR is_public)"""
+        if data.metrics is not None and len(data.metrics) > 0:
+            pg_query = f"""WITH dash AS ({pg_query})
+                           INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+                           VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};"""
+            for i, m in enumerate(data.metrics):
+                params[f"metric_id_{i}"] = m
+                # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
+                #     .get("properties", {}).get("config", {}).get("default", {})
+                # params[f"config_{i}"]["position"] = i
+                # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
+                params[f"config_{i}"] = json.dumps({"position": i + offset})
+
+        cur.execute(cur.mogrify(pg_query, params))
+
+    return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
+
+
+def get_widget(project_id, user_id, dashboard_id, widget_id):
+    """Fetch a single widget's metric (with series) from a dashboard.
+
+    Both the dashboard and the metric must be visible to `user_id`
+    (owned or public) and not deleted; returns None otherwise.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """SELECT metrics.*, metric_series.series
+                      FROM dashboard_widgets
+                               INNER JOIN dashboards USING (dashboard_id)
+                               INNER JOIN metrics USING (metric_id)
+                               LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
+                                                  FROM metric_series
+                                                  WHERE metric_series.metric_id = metrics.metric_id
+                                                    AND metric_series.deleted_at ISNULL
+                                   ) AS metric_series ON (TRUE)
+                      WHERE dashboard_id = %(dashboard_id)s
+                        AND widget_id = %(widget_id)s
+                        AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
+                        AND dashboards.deleted_at IS NULL
+                        AND metrics.deleted_at ISNULL
+                        AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
+                        AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+    return helper.dict_to_camel_case(row)
+
+
+def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
+    """Attach an existing metric to a dashboard as a widget.
+
+    The INSERT...SELECT only fires when the dashboard exists, is not deleted
+    and is visible to `user_id`; the widget config is the metric's
+    default_config merged with the submitted config. Returns the new widget
+    row or None when the guard fails.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+                        SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
+                               %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
+                        WHERE EXISTS(SELECT 1 FROM dashboards
+                                     WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
+                                       AND dashboard_id = %(dashboard_id)s
+                                       AND (dashboards.user_id = %(userId)s OR is_public))
+                        RETURNING *;"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+        # config must go over the wire as a JSON string for the ::jsonb cast
+        params["config"] = json.dumps(data.config)
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+    return helper.dict_to_camel_case(row)
+
+
+def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
+    """Replace a widget's config JSON; returns the updated row or None.
+
+    NOTE(review): unlike add_widget, no ownership/visibility guard is applied
+    here beyond the (dashboard_id, widget_id) match — confirm upstream checks.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """UPDATE dashboard_widgets
+                      SET config= %(config)s
+                      WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
+                      RETURNING *;"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
+                  "widget_id": widget_id, **data.dict()}
+        params["config"] = json.dumps(data.config)
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+    return helper.dict_to_camel_case(row)
+
+
+def remove_widget(project_id, user_id, dashboard_id, widget_id):
+    """Hard-delete a widget from a dashboard; always reports success."""
+    with pg_client.PostgresClient() as cur:
+        pg_query = """DELETE FROM dashboard_widgets
+                      WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
+        cur.execute(cur.mogrify(pg_query, params))
+    return {"data": {"success": True}}
+
+
+def pin_dashboard(project_id, user_id, dashboard_id):
+    """Make `dashboard_id` the single pinned dashboard of the project.
+
+    Two statements in one round-trip: unpin everything in the project, then
+    pin the target (if not deleted) and return it.
+    """
+    with pg_client.PostgresClient() as cur:
+        pg_query = """UPDATE dashboards
+                      SET is_pinned = FALSE
+                      WHERE project_id=%(project_id)s;
+                      UPDATE dashboards
+                      SET is_pinned = True
+                      WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
+                      RETURNING *;"""
+        params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
+        cur.execute(cur.mogrify(pg_query, params))
+        row = cur.fetchone()
+    return helper.dict_to_camel_case(row)
+
+
+def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
+    """Create a custom metric and immediately attach it to the dashboard
+    as a widget (default config)."""
+    metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
+    return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
+                      data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
+
+
+# Dispatch table: template key -> metrics computation function.
+# Some keys intentionally share an implementation (e.g. avg_time_to_render /
+# time_to_render, avg_used_js_heap_size / memory_consumption, avg_cpu /
+# cpu_load, avg_fps / frame_rate).
+PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
+              schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
+              schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
+              schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
+              schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
+              schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
+              schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
+              schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
+              schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
+              schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
+              schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
+              schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
+              schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
+              schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
+              schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
+              schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
+              schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
+              schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
+              schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
+              schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
+              schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
+              schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
+              schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
+              schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
+              schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
+              schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
+              schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
+              schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
+              schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
+              schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
+              schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
+              schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
+              schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
+              schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
+              schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
+              schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
+              schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
+              schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
+              schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
+              schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
+              schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
+              schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
+              schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
+              schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
+              schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
+              }
+
+
+def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
+ return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
+
+
+def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
+    """Compute chart data for a metric by id.
+
+    Template metrics are served by their predefined computation; custom
+    metrics go through custom_metrics.make_chart. Returns None when the
+    metric is missing or is a template without a predefined key.
+    """
+    raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
+                                                  include_dashboard=False)
+    if raw_metric is None:
+        return None
+    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
+    if metric.is_template and metric.predefined_key is None:
+        return None
+    if metric.is_template:
+        return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
+    else:
+        return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
+                                         metric=raw_metric)
+
+
+def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
+ raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
+ if raw_metric is None:
+ return None
+ metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
+ if metric.is_template:
+ return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
+ else:
+ return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
+ data=data, metric=raw_metric)
diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
new file mode 100644
index 000000000..d50c6b54a
--- /dev/null
+++ b/ee/api/chalicelib/core/errors.py
@@ -0,0 +1,786 @@
+import json
+
+import schemas
+from chalicelib.core import sourcemaps
+from chalicelib.utils import pg_client, helper
+from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.utils.metrics_helper import __get_step_size
+
+from decouple import config
+
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+ from chalicelib.core import sessions_legacy as sessions
+else:
+ from chalicelib.core import sessions
+
+
+def get(error_id, family=False):
+    """Fetch one error (events + public join) by id, camelCased.
+
+    With family=True, delegates to get_batch to also pull parent/child
+    errors of the same family. Returns None when not found.
+    """
+    if family:
+        return get_batch([error_id])
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
+            {"error_id": error_id})
+        cur.execute(query=query)
+        result = cur.fetchone()
+        if result is not None:
+            result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
+        return helper.dict_to_camel_case(result)
+
+
+def get_batch(error_ids):
+    """Fetch the given errors together with their whole parent/child family.
+
+    A recursive CTE walks both directions of the parent_error_id link so the
+    result contains the full family of every requested id. Empty input
+    short-circuits to [].
+    """
+    if len(error_ids) == 0:
+        return []
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            """
+            WITH RECURSIVE error_family AS (
+                SELECT *
+                FROM public.errors
+                WHERE error_id IN %(error_ids)s
+                UNION
+                SELECT child_errors.*
+                FROM public.errors AS child_errors
+                         INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id
+            )
+            SELECT *
+            FROM error_family;""",
+            {"error_ids": tuple(error_ids)})
+        cur.execute(query=query)
+        errors = cur.fetchall()
+        for e in errors:
+            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
+        return helper.list_to_camel_case(errors)
+
+
+def __flatten_sort_key_count_version(data, merge_nested=False):
+    """Flatten partition rows for the error-details "tags" payload.
+
+    `data` is a list of {"name", "count", "partition": [{"version","count"}]}
+    rows produced by the details SQL. With merge_nested=True, each
+    (name, version) pair becomes one "name@version" entry sorted by count
+    descending; otherwise only the top-level name/count pairs are kept,
+    in their original order. None -> [].
+    """
+    if data is None:
+        return []
+    return sorted(
+        [
+            {
+                "name": f'{o["name"]}@{v["version"]}',
+                "count": v["count"]
+            } for o in data for v in o["partition"]
+        ],
+        key=lambda o: o["count"], reverse=True) if merge_nested else \
+        [
+            {
+                "name": o["name"],
+                "count": o["count"],
+            } for o in data
+        ]
+
+
+def __process_tags(row):
+    """Build the "tags" list (browser/OS/device/country breakdowns) for an
+    error-details row.
+
+    Ordering matters: each `row.get(...)` (plain entry) must precede the
+    paired `row.pop(...)` (merged "@version" entry), because pop removes the
+    partition data from the row.
+    """
+    return [
+        {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
+        {"name": "browser.ver",
+         "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
+        {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
+        {"name": "OS.ver",
+         "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
+        {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
+        {"name": "device",
+         "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
+        {"name": "country", "partitions": row.pop("country_partition")}
+    ]
+
+
+def get_details(project_id, error_id, user_id, **data):
+    """Full error-details payload: identity, affected users/sessions,
+    first/last occurrence, last session, browser/OS/device/country
+    partitions, plus 24h and 30d occurrence charts.
+
+    One large single-round-trip query joins all aggregates laterally;
+    a second query hydrates the latest session and first stack frame.
+    `data` may carry density24/density30 overrides.
+    """
+    pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
+    pg_sub_query24.append("error_id = %(error_id)s")
+    pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
+    pg_sub_query30.append("error_id = %(error_id)s")
+    pg_basic_query = __get_basic_constraints(time_constraint=False)
+    pg_basic_query.append("error_id = %(error_id)s")
+    with pg_client.PostgresClient() as cur:
+        # fixed windows: last 24 hours and last 30 days
+        data["startDate24"] = TimeUTC.now(-1)
+        data["endDate24"] = TimeUTC.now()
+        data["startDate30"] = TimeUTC.now(-30)
+        data["endDate30"] = TimeUTC.now()
+        density24 = int(data.get("density24", 24))
+        step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
+        density30 = int(data.get("density30", 30))
+        step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
+        params = {
+            "startDate24": data['startDate24'],
+            "endDate24": data['endDate24'],
+            "startDate30": data['startDate30'],
+            "endDate30": data['endDate30'],
+            "project_id": project_id,
+            "userId": user_id,
+            "step_size24": step_size24,
+            "step_size30": step_size30,
+            "error_id": error_id}
+
+        main_pg_query = f"""\
+        SELECT error_id,
+               name,
+               message,
+               users,
+               sessions,
+               last_occurrence,
+               first_occurrence,
+               last_session_id,
+               browsers_partition,
+               os_partition,
+               device_partition,
+               country_partition,
+               chart24,
+               chart30
+        FROM (SELECT error_id,
+                     name,
+                     message,
+                     COUNT(DISTINCT user_uuid)  AS users,
+                     COUNT(DISTINCT session_id) AS sessions
+              FROM public.errors
+                       INNER JOIN events.errors AS s_errors USING (error_id)
+                       INNER JOIN public.sessions USING (session_id)
+              WHERE error_id = %(error_id)s
+              GROUP BY error_id, name, message) AS details
+                 INNER JOIN (SELECT error_id,
+                                    MAX(timestamp) AS last_occurrence,
+                                    MIN(timestamp) AS first_occurrence
+                             FROM events.errors
+                             WHERE error_id = %(error_id)s
+                             GROUP BY error_id) AS time_details USING (error_id)
+                 INNER JOIN (SELECT error_id,
+                                    session_id AS last_session_id,
+                                    user_os,
+                                    user_os_version,
+                                    user_browser,
+                                    user_browser_version,
+                                    user_device,
+                                    user_device_type,
+                                    user_uuid
+                             FROM events.errors INNER JOIN public.sessions USING (session_id)
+                             WHERE error_id = %(error_id)s
+                             ORDER BY errors.timestamp DESC
+                             LIMIT 1) AS last_session_details USING (error_id)
+                 INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
+                             FROM (SELECT *
+                                   FROM (SELECT user_browser AS name,
+                                                COUNT(session_id) AS count
+                                         FROM events.errors
+                                                  INNER JOIN sessions USING (session_id)
+                                         WHERE {" AND ".join(pg_basic_query)}
+                                         GROUP BY user_browser
+                                         ORDER BY count DESC) AS count_per_browser_query
+                                            INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
+                                                                FROM (SELECT user_browser_version AS version,
+                                                                             COUNT(session_id)    AS count
+                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                                      WHERE {" AND ".join(pg_basic_query)}
+                                                                        AND sessions.user_browser = count_per_browser_query.name
+                                                                      GROUP BY user_browser_version
+                                                                      ORDER BY count DESC) AS version_details
+                                 ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
+                             FROM (SELECT *
+                                   FROM (SELECT user_os AS name,
+                                                COUNT(session_id) AS count
+                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                         WHERE {" AND ".join(pg_basic_query)}
+                                         GROUP BY user_os
+                                         ORDER BY count DESC) AS count_per_os_details
+                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
+                                                                FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
+                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                                      WHERE {" AND ".join(pg_basic_query)}
+                                                                        AND sessions.user_os = count_per_os_details.name
+                                                                      GROUP BY user_os_version
+                                                                      ORDER BY count DESC) AS count_per_version_details
+                                                                GROUP BY count_per_os_details.name ) AS os_version_details
+                                            ON (TRUE)) AS os_details) AS os_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
+                             FROM (SELECT *
+                                   FROM (SELECT user_device_type AS name,
+                                                COUNT(session_id) AS count
+                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                         WHERE {" AND ".join(pg_basic_query)}
+                                         GROUP BY user_device_type
+                                         ORDER BY count DESC) AS count_per_device_details
+                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
+                                                                FROM (SELECT CASE
+                                                                                 WHEN user_device = '' OR user_device ISNULL
+                                                                                     THEN 'unknown'
+                                                                                 ELSE user_device END AS version,
+                                                                             COUNT(session_id)        AS count
+                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                                      WHERE {" AND ".join(pg_basic_query)}
+                                                                        AND sessions.user_device_type = count_per_device_details.name
+                                                                      GROUP BY user_device
+                                                                      ORDER BY count DESC) AS count_per_device_v_details
+                                                                GROUP BY count_per_device_details.name ) AS device_version_details
+                                            ON (TRUE)) AS device_details) AS device_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
+                             FROM (SELECT user_country      AS name,
+                                          COUNT(session_id) AS count
+                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                   WHERE {" AND ".join(pg_basic_query)}
+                                   GROUP BY user_country
+                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
+                             FROM (SELECT generated_timestamp AS timestamp,
+                                          COUNT(session_id)   AS count
+                                   FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
+                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
+                                                               FROM events.errors
+                                                                        INNER JOIN public.sessions USING (session_id)
+                                                               WHERE {" AND ".join(pg_sub_query24)}
+                                 ) AS chart_details ON (TRUE)
+                                   GROUP BY generated_timestamp
+                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
+                             FROM (SELECT generated_timestamp AS timestamp,
+                                          COUNT(session_id)   AS count
+                                   FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
+                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
+                                                               FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                               WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
+                                                              ON (TRUE)
+                                   GROUP BY timestamp
+                                   ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
+        """
+
+        # print("--------------------")
+        # print(cur.mogrify(main_pg_query, params))
+        # print("--------------------")
+        cur.execute(cur.mogrify(main_pg_query, params))
+        row = cur.fetchone()
+        if row is None:
+            return {"errors": ["error not found"]}
+        row["tags"] = __process_tags(row)
+
+        # hydrate the most recent session carrying this error (for the stack
+        # frame and the "last hydrated session" block of the payload)
+        query = cur.mogrify(
+            f"""SELECT error_id, status, session_id, start_ts,
+                       parent_error_id,session_id, user_anonymous_id,
+                       user_id, user_uuid, user_browser, user_browser_version,
+                       user_os, user_os_version, user_device, payload,
+                       FALSE AS favorite,
+                       True AS viewed
+                FROM public.errors AS pe
+                         INNER JOIN events.errors AS ee USING (error_id)
+                         INNER JOIN public.sessions USING (session_id)
+                WHERE pe.project_id = %(project_id)s
+                  AND error_id = %(error_id)s
+                ORDER BY start_ts DESC
+                LIMIT 1;""",
+            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
+        cur.execute(query=query)
+        status = cur.fetchone()
+
+    if status is not None:
+        row["stack"] = format_first_stack_frame(status).pop("stack")
+        row["status"] = status.pop("status")
+        row["parent_error_id"] = status.pop("parent_error_id")
+        row["favorite"] = status.pop("favorite")
+        row["viewed"] = status.pop("viewed")
+        row["last_hydrated_session"] = status
+    else:
+        # error exists in events but has no session in this project
+        row["stack"] = []
+        row["last_hydrated_session"] = None
+        row["status"] = "untracked"
+        row["parent_error_id"] = None
+        row["favorite"] = False
+        row["viewed"] = False
+    return {"data": helper.dict_to_camel_case(row)}
+
+
+def get_details_chart(project_id, error_id, user_id, **data):
+    """Per-error breakdown (browser/OS/device/country partitions) plus an
+    occurrence chart over a caller-supplied window.
+
+    `data` may carry startDate/endDate (epoch millis; defaults: last 7 days)
+    and a chart density. Returns {"errors": [...]} when the id is unknown.
+    """
+    pg_sub_query = __get_basic_constraints()
+    pg_sub_query.append("error_id = %(error_id)s")
+    pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
+    pg_sub_query_chart.append("error_id = %(error_id)s")
+    with pg_client.PostgresClient() as cur:
+        if data.get("startDate") is None:
+            data["startDate"] = TimeUTC.now(-7)
+        else:
+            data["startDate"] = int(data["startDate"])
+        if data.get("endDate") is None:
+            data["endDate"] = TimeUTC.now()
+        else:
+            data["endDate"] = int(data["endDate"])
+        density = int(data.get("density", 7))
+        step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
+        params = {
+            "startDate": data['startDate'],
+            "endDate": data['endDate'],
+            "project_id": project_id,
+            "userId": user_id,
+            "step_size": step_size,
+            "error_id": error_id}
+
+        main_pg_query = f"""\
+        SELECT %(error_id)s AS error_id,
+               browsers_partition,
+               os_partition,
+               device_partition,
+               country_partition,
+               chart
+        FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
+              FROM (SELECT *
+                    FROM (SELECT user_browser AS name,
+                                 COUNT(session_id) AS count
+                          FROM events.errors INNER JOIN public.sessions USING (session_id)
+                          WHERE {" AND ".join(pg_sub_query)}
+                          GROUP BY user_browser
+                          ORDER BY count DESC) AS count_per_browser_query
+                             INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
+                                                 FROM (SELECT user_browser_version AS version,
+                                                              COUNT(session_id)    AS count
+                                                       FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                       WHERE {" AND ".join(pg_sub_query)}
+                                                         AND user_browser = count_per_browser_query.name
+                                                       GROUP BY user_browser_version
+                                                       ORDER BY count DESC) AS count_per_version_details) AS browesr_version_details
+                                                ON (TRUE)) AS browser_details) AS browser_details
+                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
+                             FROM (SELECT *
+                                   FROM (SELECT user_os AS name,
+                                                COUNT(session_id) AS count
+                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                         WHERE {" AND ".join(pg_sub_query)}
+                                         GROUP BY user_os
+                                         ORDER BY count DESC) AS count_per_os_details
+                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
+                                                                FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
+                                                                             COUNT(session_id)                    AS count
+                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                                      WHERE {" AND ".join(pg_sub_query)}
+                                                                        AND user_os = count_per_os_details.name
+                                                                      GROUP BY user_os_version
+                                                                      ORDER BY count DESC) AS count_per_version_query
+                                 ) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
+                             FROM (SELECT *
+                                   FROM (SELECT user_device_type  AS name,
+                                                COUNT(session_id) AS count
+                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                         WHERE {" AND ".join(pg_sub_query)}
+                                         GROUP BY user_device_type
+                                         ORDER BY count DESC) AS count_per_device_details
+                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
+                                                                FROM (SELECT CASE
+                                                                                 WHEN user_device = '' OR user_device ISNULL
+                                                                                     THEN 'unknown'
+                                                                                 ELSE user_device END AS version,
+                                                                             COUNT(session_id)        AS count
+                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                                                      WHERE {" AND ".join(pg_sub_query)}
+                                                                        AND user_device_type = count_per_device_details.name
+                                                                      GROUP BY user_device_type, user_device
+                                                                      ORDER BY count DESC) AS count_per_device_details
+                                 ) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
+                             FROM (SELECT user_country      AS name,
+                                          COUNT(session_id) AS count
+                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
+                                   WHERE {" AND ".join(pg_sub_query)}
+                                   GROUP BY user_country
+                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
+                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
+                             FROM (SELECT generated_timestamp AS timestamp,
+                                          COUNT(session_id)   AS count
+                                   FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
+                                                               FROM events.errors
+                                                                        INNER JOIN public.sessions USING (session_id)
+                                                               WHERE {" AND ".join(pg_sub_query_chart)}
+                                 ) AS chart_details ON (TRUE)
+                                   GROUP BY generated_timestamp
+                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
+
+        cur.execute(cur.mogrify(main_pg_query, params))
+        row = cur.fetchone()
+    if row is None:
+        return {"errors": ["error not found"]}
+    row["tags"] = __process_tags(row)
+    return {"data": helper.dict_to_camel_case(row)}
+
+
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                            project_key="project_id"):
    """Build the shared list of SQL WHERE fragments used by error queries.

    Fragments reference %(...)s placeholders that the caller binds later via
    cur.mogrify. Pass project_key=None to omit the project filter; chart=True
    adds the generate_series bucket bounds.
    """
    constraints = [] if project_key is None else [f"{project_key} =%(project_id)s"]
    if time_constraint:
        constraints.append(f"timestamp >= %({startTime_arg_name})s")
        constraints.append(f"timestamp < %({endTime_arg_name})s")
    if chart:
        # Bucket bound: event falls inside [generated_timestamp, generated_timestamp + step).
        constraints.append(f"timestamp >= generated_timestamp")
        constraints.append(f"timestamp < generated_timestamp + %({step_size_name})s")
    if platform == schemas.PlatformType.mobile:
        constraints.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.desktop:
        constraints.append("user_device_type = 'desktop'")
    return constraints
+
+
def __get_sort_key(key):
    """Translate an ErrorSort value into its SQL sort column.

    Unknown/None keys fall back to 'max_datetime' (most recent occurrence).
    """
    mapping = {
        schemas.ErrorSort.occurrence: "max_datetime",
        schemas.ErrorSort.users_count: "users",
        schemas.ErrorSort.sessions_count: "sessions",
    }
    return mapping.get(key, 'max_datetime')
+
+
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
    """Search grouped JS-exception errors for a project.

    When events/filters are present, first delegates to the session search to
    resolve the matching error ids, then aggregates per-error user/session
    counts, first/last occurrence and a time-bucketed chart, and finally
    annotates rows with the caller's "viewed" flag.

    Returns {"total": int, "errors": [camelCased rows]}.
    """
    empty_response = {
        'total': 0,
        'errors': []
    }

    # Platform comes from the generic filter list; first non-empty value wins.
    platform = None
    for f in data.filters:
        if f.type == schemas.FilterType.platform and len(f.value) > 0:
            platform = f.value[0]
    pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
    pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                     "pe.project_id=%(project_id)s"]
    # To ignore Script error
    pg_sub_query.append("pe.message!='Script error.'")
    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
    # pg_sub_query_chart.append("source ='js_exception'")
    pg_sub_query_chart.append("errors.error_id =details.error_id")
    statuses = []
    error_ids = None
    # Default window: last 30 days; endDate is pushed slightly past "now".
    if data.startDate is None:
        data.startDate = TimeUTC.now(-30)
    if data.endDate is None:
        data.endDate = TimeUTC.now(1)
    if len(data.events) > 0 or len(data.filters) > 0:
        print("-- searching for sessions before errors")
        # Pre-filter: resolve which errors match the session-level criteria.
        statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                            error_status=data.status)
        if len(statuses) == 0:
            return empty_response
        error_ids = [e["errorId"] for e in statuses]
    with pg_client.PostgresClient() as cur:
        step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
        # 'datetime' is not a recognized sort key, so this resolves to the
        # default column (max_datetime).
        sort = __get_sort_key('datetime')
        if data.sort is not None:
            sort = __get_sort_key(data.sort)
        order = schemas.SortOrderType.desc
        if data.order is not None:
            order = data.order
        extra_join = ""

        params = {
            "startDate": data.startDate,
            "endDate": data.endDate,
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size}
        if data.status != schemas.ErrorStatus.all:
            pg_sub_query.append("status = %(error_status)s")
            params["error_status"] = data.status
        # Pagination defaults to the first 200 errors when page/limit are absent.
        if data.limit is not None and data.page is not None:
            params["errors_offset"] = (data.page - 1) * data.limit
            params["errors_limit"] = data.limit
        else:
            params["errors_offset"] = 0
            params["errors_limit"] = 200

        if error_ids is not None:
            params["error_ids"] = tuple(error_ids)
            pg_sub_query.append("error_id IN %(error_ids)s")
        # if data.bookmarked:
        #     pg_sub_query.append("ufe.user_id = %(userId)s")
        #     extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
        if data.query is not None and len(data.query) > 0:
            pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
            params["error_query"] = helper.values_for_operator(value=data.query,
                                                               op=schemas.SearchEventOperator._contains)

        # Inner query: paginated aggregation per error; two LATERAL joins add
        # all-time first/last occurrence and the per-bucket session chart.
        main_pg_query = f"""SELECT full_count,
                                   error_id,
                                   name,
                                   message,
                                   users,
                                   sessions,
                                   last_occurrence,
                                   first_occurrence,
                                   chart
                            FROM (SELECT COUNT(details) OVER () AS full_count, details.*
                                  FROM (SELECT error_id,
                                               name,
                                               message,
                                               COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users,
                                               COUNT(DISTINCT session_id) AS sessions,
                                               MAX(timestamp) AS max_datetime,
                                               MIN(timestamp) AS min_datetime
                                        FROM events.errors
                                                 INNER JOIN public.errors AS pe USING (error_id)
                                                 INNER JOIN public.sessions USING (session_id)
                                                 {extra_join}
                                        WHERE {" AND ".join(pg_sub_query)}
                                        GROUP BY error_id, name, message
                                        ORDER BY {sort} {order}) AS details
                                  LIMIT %(errors_limit)s OFFSET %(errors_offset)s
                                  ) AS details
                                     INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
                                                                MIN(timestamp) AS first_occurrence
                                                         FROM events.errors
                                                         WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
                                     INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
                                                         FROM (SELECT generated_timestamp AS timestamp,
                                                                      COUNT(session_id) AS count
                                                               FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                                                        LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                                           FROM events.errors
                                                                                           WHERE {" AND ".join(pg_sub_query_chart)}
                                                                                           ) AS sessions ON (TRUE)
                                                               GROUP BY timestamp
                                                               ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")

        cur.execute(cur.mogrify(main_pg_query, params))
        rows = cur.fetchall()
        # full_count is a window function, identical on every row.
        total = 0 if len(rows) == 0 else rows[0]["full_count"]

        if total == 0:
            rows = []
        else:
            # Viewed-state lookup: reuse statuses from the session pre-search
            # when available, otherwise fetch them for the returned page.
            if len(statuses) == 0:
                query = cur.mogrify(
                    """SELECT error_id,
                              COALESCE((SELECT TRUE
                                        FROM public.user_viewed_errors AS ve
                                        WHERE errors.error_id = ve.error_id
                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
                       FROM public.errors
                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                     "user_id": user_id})
                cur.execute(query=query)
                statuses = helper.list_to_camel_case(cur.fetchall())
            statuses = {
                s["errorId"]: s for s in statuses
            }

            for r in rows:
                r.pop("full_count")
                if r["error_id"] in statuses:
                    r["viewed"] = statuses[r["error_id"]]["viewed"]
                else:
                    r["viewed"] = False

        return {
            'total': total,
            'errors': helper.list_to_camel_case(rows)
        }
+
+
def __save_stacktrace(error_id, data):
    """Persist a parsed stacktrace for an error and stamp the parse time (UTC)."""
    payload = json.dumps(data)
    with pg_client.PostgresClient() as cur:
        cur.execute(query=cur.mogrify(
            """UPDATE public.errors
               SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
               WHERE error_id = %(error_id)s;""",
            {"error_id": error_id, "data": payload}))
+
+
def get_trace(project_id, error_id):
    """Return the (possibly cached) parsed stacktrace of a JS exception.

    Serves the stored trace when one exists; otherwise parses the raw payload
    via sourcemaps and caches the result only when every referenced sourcemap
    was found, so incomplete traces can be retried later.
    """
    error = get(error_id=error_id, family=False)
    if error is None:
        return {"errors": ["error not found"]}
    if error.get("source", "") != "js_exception":
        return {"errors": ["this source of errors doesn't have a sourcemap"]}
    if error.get("payload") is None:
        return {"errors": ["null payload"]}
    cached = error.get("stacktrace")
    if cached is not None:
        # Already parsed once; no sourcemap work needed.
        return {"sourcemapUploaded": True,
                "trace": cached,
                "preparsed": True}
    trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
    if all_exists:
        __save_stacktrace(error_id=error_id, data=trace)
    return {"sourcemapUploaded": all_exists,
            "trace": trace,
            "preparsed": False}
+
+
def get_sessions(start_date, end_date, project_id, user_id, error_id):
    """List sessions in which a given error occurred, newest first.

    Returns {"total": <full match count>, "sessions": [...]} — note that
    'total' is the full rowcount while the returned list is capped at 100.
    Each session carries the caller's favorite/viewed flags.
    """
    extra_constraints = ["s.project_id = %(project_id)s",
                         "s.start_ts >= %(startDate)s",
                         "s.start_ts <= %(endDate)s",
                         "e.error_id = %(error_id)s"]
    # Default window: last 7 days.
    if start_date is None:
        start_date = TimeUTC.now(-7)
    if end_date is None:
        end_date = TimeUTC.now()

    params = {
        "startDate": start_date,
        "endDate": end_date,
        "project_id": project_id,
        "userId": user_id,
        "error_id": error_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT s.project_id,
                       s.session_id::text AS session_id,
                       s.user_uuid,
                       s.user_id,
                       s.user_agent,
                       s.user_os,
                       s.user_browser,
                       s.user_device,
                       s.user_country,
                       s.start_ts,
                       s.duration,
                       s.events_count,
                       s.pages_count,
                       s.errors_count,
                       s.issue_types,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
                       COALESCE((SELECT TRUE
                                 FROM public.user_viewed_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
                FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
                WHERE {" AND ".join(extra_constraints)}
                ORDER BY s.start_ts DESC;""",
            params)
        cur.execute(query=query)
        sessions_list = []
        # rowcount reflects ALL matches; the loop below returns at most 100.
        total = cur.rowcount
        row = cur.fetchone()
        while row is not None and len(sessions_list) < 100:
            sessions_list.append(row)
            row = cur.fetchone()

    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions_list)
    }
+
+
# Maps the public state-change action names (see change_state) to the status
# values stored in public.errors.status.
ACTION_STATE = {
    "unsolve": 'unresolved',
    "solve": 'resolved',
    "ignore": 'ignored'
}
+
+
def change_state(project_id, user_id, error_id, action):
    """Apply a state-change action ('solve'/'unsolve'/'ignore') to an error
    and its whole family, returning the updated error list.

    Fixes vs. previous version: the debug `print(len(errors))` ran before the
    None check (TypeError when the error was missing) and an unknown action
    produced status=None, which would have been written to the DB as NULL.
    """
    status = ACTION_STATE.get(action)
    if status is None:
        # Reject unknown actions instead of writing a NULL status.
        return {"errors": [f"unsupported action: {action}"]}
    errors = get(error_id, family=True)
    if errors is None or len(errors) == 0:
        return {"errors": ["error not found"]}
    if errors[0]["status"] == status:
        return {"errors": [f"error is already {status}"]}

    # resolved -> ignored is not an allowed transition.
    if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
        return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}

    params = {
        "userId": user_id,
        "error_ids": tuple([e["errorId"] for e in errors]),
        "status": status}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET status = %(status)s
               WHERE error_id IN %(error_ids)s
               RETURNING status""",
            params)
        cur.execute(query=query)
        row = cur.fetchone()
    if row is not None:
        # Mirror the persisted status back onto the in-memory family.
        for e in errors:
            e["status"] = row["status"]
    return {"data": errors}
+
+
# Highest status rank: 'resolved' outranks 'ignored', which outranks 'unresolved'.
MAX_RANK = 2


def __status_rank(status):
    """Return the precedence of an error status (higher wins when merging);
    None for unknown statuses."""
    ranks = {'unresolved': MAX_RANK - 2,
             'ignored': MAX_RANK - 1,
             'resolved': MAX_RANK}
    return ranks.get(status)
+
+
def merge(error_ids):
    """Merge several errors into one family under the first found error.

    Requires at least two distinct ids, all of which must exist; the merged
    family adopts the highest-ranked status among its members.
    """
    unique_ids = list(set(error_ids))
    errors = get_batch(unique_ids)
    if len(unique_ids) <= 1 or len(unique_ids) > len(errors):
        return {"errors": ["invalid list of ids"]}
    merged_ids = [e["errorId"] for e in errors]
    # Pick the dominant status; stop early once the top rank is reached.
    status = "unresolved"
    for e in errors:
        if __status_rank(status) < __status_rank(e["status"]):
            status = e["status"]
        if __status_rank(status) == MAX_RANK:
            break
    params = {
        "error_ids": tuple(merged_ids),
        "parent_error_id": merged_ids[0],
        "status": status
    }
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(
            """UPDATE public.errors
               SET parent_error_id = %(parent_error_id)s, status = %(status)s
               WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""",
            params))

    return {"data": "success"}
+
+
def format_first_stack_frame(error):
    """Replace the raw payload with a formatted, first-frame-only stack.

    Mutates and returns *error*: context strings are capped at 1000 chars and
    byte filenames are decoded to UTF-8.
    """
    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
    for frame in error["stack"]:
        for context in frame.get("context", []):
            # Cap oversized context strings to keep the response small.
            for idx, line in enumerate(context):
                if isinstance(line, str) and len(line) > 1000:
                    context[idx] = line[:1000]
        # convert bytes to string:
        if isinstance(frame["filename"], bytes):
            frame["filename"] = frame["filename"].decode("utf-8")
    return error
+
+
def stats(project_id, user_id, startTimestamp=None, endTimestamp=None):
    """Report whether the user has any unresolved & unviewed JS error in the window.

    The inner query LIMITs to a single match, so `unresolvedAndUnviewed` is
    effectively a 0/1 flag rather than a full count.

    :param startTimestamp: window start (ms); defaults to 7 days ago.
    :param endTimestamp: window end (ms); defaults to now.

    Fix: the defaults were previously `TimeUTC.now(...)` default arguments,
    evaluated once at import time — the "last 7 days" window froze at process
    start. They are now resolved per call via None sentinels (backward
    compatible for callers passing explicit values).
    """
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-7)
    if endTimestamp is None:
        endTimestamp = TimeUTC.now()
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
               SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
               FROM (SELECT root_error.error_id
                     FROM events.errors
                              INNER JOIN public.errors AS root_error USING (error_id)
                              LEFT JOIN user_viewed USING (error_id)
                     WHERE project_id = %(project_id)s
                       AND timestamp >= %(startTimestamp)s
                       AND timestamp <= %(endTimestamp)s
                       AND source = 'js_exception'
                       AND root_error.status = 'unresolved'
                       AND user_viewed.error_id ISNULL
                     LIMIT 1
                     ) AS timed_errors;""",
            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
             "endTimestamp": endTimestamp})
        cur.execute(query=query)
        row = cur.fetchone()

    return {
        "data": helper.dict_to_camel_case(row)
    }
diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py
new file mode 100644
index 000000000..7ad9e830d
--- /dev/null
+++ b/ee/api/chalicelib/core/events.py
@@ -0,0 +1,399 @@
+import schemas
+from chalicelib.core import issues
+from chalicelib.core import metadata
+from chalicelib.core import sessions_metas
+
+from chalicelib.utils import pg_client, helper
+from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.utils.event_filter_definition import SupportedFilter, Event
+
+from decouple import config
+
+if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
+ from . import autocomplete_exp as autocomplete
+else:
+ from . import autocomplete as autocomplete
+
+
def get_customs_by_sessionId2_pg(session_id, project_id):
    """Return a session's custom events tagged 'CUSTOM', ordered by timestamp.

    NOTE(review): project_id is bound into the params but the statement
    filters on session_id only — confirm intent.
    """
    params = {"project_id": project_id, "session_id": session_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
        SELECT
            c.*,
            'CUSTOM' AS type
        FROM events_common.customs AS c
        WHERE
            c.session_id = %(session_id)s
        ORDER BY c.timestamp;""", params)
        cur.execute(query)
        rows = cur.fetchall()
    return helper.dict_to_camel_case(rows)
+
+
def __merge_cells(rows, start, count, replacement):
    """Collapse rows[start:start+count] into a single replacement row.

    NOTE(review): mutates rows[start] in place before slicing, so the caller's
    list is partially modified too; callers are expected to use the returned
    list.
    """
    rows[start] = replacement
    return rows[:start + 1] + rows[start + count:]
+
+
def __get_grouped_clickrage(rows, session_id, project_id):
    """Collapse runs of clicks belonging to a click-rage issue into one
    CLICKRAGE row.

    For each click_rage issue of the session, the first event row matching the
    issue's timestamp and the following (count - 1) rows are merged into a
    single row of type 'CLICKRAGE'.
    """
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
    if len(click_rage_issues) == 0:
        return rows

    for c in click_rage_issues:
        # Number of clicks to merge comes from the issue payload; default 3.
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            # Anchor the merge on the row whose timestamp matches the issue.
            if rows[i]["timestamp"] == c["timestamp"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows
+
+
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
    """Return a session's click, input and page events merged into one
    timeline sorted by (timestamp, messageId).

    When group_clickrage is True, click runs covered by click-rage issues are
    collapsed into single CLICKRAGE rows before the other event types are
    appended.
    """
    with pg_client.PostgresClient() as cur:
        # Clicks first (they may be grouped into CLICKRAGE below).
        cur.execute(cur.mogrify("""\
        SELECT
            c.*,
            'CLICK' AS type
        FROM events.clicks AS c
        WHERE
            c.session_id = %(session_id)s
        ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows = cur.fetchall()
        if group_clickrage:
            rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)

        cur.execute(cur.mogrify("""
        SELECT
            i.*,
            'INPUT' AS type
        FROM events.inputs AS i
        WHERE
            i.session_id = %(session_id)s
        ORDER BY i.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows += cur.fetchall()
        # Pages expose their path under both 'value' and 'url' aliases.
        cur.execute(cur.mogrify("""\
        SELECT
            l.*,
            l.path AS value,
            l.path AS url,
            'LOCATION' AS type
        FROM events.pages AS l
        WHERE
            l.session_id = %(session_id)s
        ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        # Interleave the three event types into a single chronological stream.
        rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
        return rows
+
+
def __pg_errors_query(source=None, value_length=None):
    """Build the autocomplete SQL for web errors.

    Both lg.message and lg.name are matched with ILIKE. For inputs longer than
    2 chars the query unions prefix matches (%(svalue)s) with contains matches
    (%(value)s); for short inputs only prefix matches are used to keep the
    scan cheap. A source filter is inlined only when `source` is given; the
    %(...)s placeholders are bound later via cur.mogrify.
    """
    if value_length is None or value_length > 2:
        return f"""((SELECT DISTINCT ON(lg.message)
                        lg.message AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.message ILIKE %(svalue)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.message)
                        lg.message AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.message ILIKE %(value)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.name ILIKE %(value)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5));"""
    # Short inputs (<= 2 chars): prefix-only variant.
    return f"""((SELECT DISTINCT ON(lg.message)
                    lg.message AS value,
                    source,
                    '{event_type.ERROR.ui_type}' AS type
                FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                WHERE
                    s.project_id = %(project_id)s
                    AND lg.message ILIKE %(svalue)s
                    AND lg.project_id = %(project_id)s
                    {"AND source = %(source)s" if source is not None else ""}
                LIMIT 5)
                UNION DISTINCT
                (SELECT DISTINCT ON(lg.name)
                    lg.name AS value,
                    source,
                    '{event_type.ERROR.ui_type}' AS type
                FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                WHERE
                    s.project_id = %(project_id)s
                    AND lg.name ILIKE %(svalue)s
                    AND lg.project_id = %(project_id)s
                    {"AND source = %(source)s" if source is not None else ""}
                LIMIT 5));"""
+
+
def __search_pg_errors(project_id, value, key=None, source=None):
    """Autocomplete web-error names/messages via ILIKE; prints elapsed time."""
    started = TimeUTC.now()
    params = {"project_id": project_id,
              "value": helper.string_to_sql_like(value),
              "svalue": helper.string_to_sql_like("^" + value),
              "source": source}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(__pg_errors_query(source, value_length=len(value)), params))
        results = helper.list_to_camel_case(cur.fetchall())
    print(f"{TimeUTC.now() - started} : errors")
    return results
+
+
def __search_pg_errors_ios(project_id, value, key=None, source=None):
    """Autocomplete iOS crash reasons/names via ILIKE; prints elapsed time.

    Mirrors __search_pg_errors: inputs longer than 2 chars match both prefix
    (%(svalue)s) and contains (%(value)s) patterns; short inputs prefix-only.
    `key` and `source` are accepted for dispatcher signature compatibility but
    unused here.
    """
    now = TimeUTC.now()
    if len(value) > 2:
        query = f"""(SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(value)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(value)s
                    LIMIT 5);"""
    else:
        # Short inputs (<= 2 chars): prefix-only variant.
        query = f"""(SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                    LIMIT 5);"""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
                                        "svalue": helper.string_to_sql_like("^" + value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    print(f"{TimeUTC.now() - now} : errors")
    return results
+
+
def __search_pg_metadata(project_id, value, key=None, source=None):
    """Autocomplete metadata values stored on public.sessions.

    Builds one subquery per metadata key (or just `key` when given), each
    matching its metadata_N column with ILIKE, then unions them and returns at
    most 5 rows of {key, value, type='METADATA'}. `source` is accepted for
    dispatcher signature compatibility but unused.
    """
    meta_keys = metadata.get(project_id=project_id)
    meta_keys = {m["key"]: m["index"] for m in meta_keys}
    # Nothing to search when the project has no metadata, or the requested
    # key is not one of the project's metadata keys.
    if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
        return []
    sub_from = []
    if key is not None:
        meta_keys = {key: meta_keys[key]}

    for k in meta_keys.keys():
        colname = metadata.index_to_colname(meta_keys[k])
        if len(value) > 2:
            # Long inputs: union prefix and contains matches per column.
            sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                 FROM public.sessions
                                 WHERE project_id = %(project_id)s
                                   AND {colname} ILIKE %(svalue)s LIMIT 5)
                                 UNION
                                 (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                  FROM public.sessions
                                  WHERE project_id = %(project_id)s
                                    AND {colname} ILIKE %(value)s LIMIT 5))
                            """)
        else:
            sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                 FROM public.sessions
                                 WHERE project_id = %(project_id)s
                                   AND {colname} ILIKE %(svalue)s LIMIT 5)""")
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
        SELECT key, value, 'METADATA' AS TYPE
        FROM({" UNION ALL ".join(sub_from)}) AS all_metas
        LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
                      "svalue": helper.string_to_sql_like("^" + value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    return results
+
+
class event_type:
    """Registry of searchable event kinds.

    Each entry maps a UI type name to its backing DB table and the column used
    for generic autocomplete; column=None marks types with custom search
    handlers (errors, metadata).
    """
    CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
    INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
    LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
    CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
    REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
    GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
    STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
    ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
                  column=None)  # column=None because errors are searched by name or message
    METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
    # IOS
    CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
    INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
    VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name")
    CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
    REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url")
    ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
                      column=None)  # column=None because errors are searched by name or message
+
+
# Dispatch table for event autocomplete: ui_type -> SupportedFilter whose
# `get` produces autocomplete suggestions and `query` builds the per-session
# search predicate. Error and metadata types use custom getters and have no
# generic query builder (query=None).
SUPPORTED_TYPES = {
    event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
                                              query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
    event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
                                              query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
    event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
                                                 query=autocomplete.__generic_query(
                                                     typename=event_type.LOCATION.ui_type)),
    event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
                                               query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
    event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
                                                query=autocomplete.__generic_query(
                                                    typename=event_type.REQUEST.ui_type)),
    event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
                                                query=autocomplete.__generic_query(
                                                    typename=event_type.GRAPHQL.ui_type)),
    event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
                                                    query=autocomplete.__generic_query(
                                                        typename=event_type.STATEACTION.ui_type)),
    event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
                                              query=None),
    event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
                                                 query=None),
    # IOS
    event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
                                                  query=autocomplete.__generic_query(
                                                      typename=event_type.CLICK_IOS.ui_type)),
    event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
                                                  query=autocomplete.__generic_query(
                                                      typename=event_type.INPUT_IOS.ui_type)),
    event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
                                                 query=autocomplete.__generic_query(
                                                     typename=event_type.VIEW_IOS.ui_type)),
    event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
                                                   query=autocomplete.__generic_query(
                                                       typename=event_type.CUSTOM_IOS.ui_type)),
    event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
                                                    query=autocomplete.__generic_query(
                                                        typename=event_type.REQUEST_IOS.ui_type)),
    event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
                                                  query=None),
}
+
+
def get_errors_by_session_id(session_id, project_id):
    """List a session's errors joined with their metadata; `time` is each
    error's offset from session start.

    NOTE(review): stacktrace_parsed_at may be NULL in the DB — presumably
    TimeUTC.datetime_to_timestamp tolerates None; confirm.
    """
    params = {"session_id": session_id, "project_id": project_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
            SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
            FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
            WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
            ORDER BY timestamp;""", params)
        cur.execute(query)
        rows = cur.fetchall()
        # Convert DB datetimes to epoch-ms for the API payload.
        for e in rows:
            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
        return helper.list_to_camel_case(rows)
+
+
def search(text, event_type, project_id, source, key):
    """Autocomplete dispatcher: route `text` to the right per-type search.

    Falls back, in order: generic autocomplete table when no type is given ->
    event types in SUPPORTED_TYPES -> the iOS variant of the type -> session
    meta filters -> unsupported.

    NOTE(review): the `event_type` parameter shadows the module-level
    `event_type` class inside this function.
    """
    if not event_type:
        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}

    if event_type in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
        # for IOS events autocomplete
        # if event_type + "_IOS" in SUPPORTED_TYPES.keys():
        #     rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
        #                                                      source=source)
    elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
                                                        source=source)
    elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.endswith("_IOS") \
            and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    else:
        return {"errors": ["unsupported event"]}

    return {"data": rows}
diff --git a/ee/api/chalicelib/core/funnels.py b/ee/api/chalicelib/core/funnels.py
new file mode 100644
index 000000000..0afce0219
--- /dev/null
+++ b/ee/api/chalicelib/core/funnels.py
@@ -0,0 +1,380 @@
+import json
+from typing import List
+
+import chalicelib.utils.helper
+import schemas
+from chalicelib.core import significance
+from chalicelib.utils import dev
+from chalicelib.utils import helper, pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
+from decouple import config
+
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+ from chalicelib.core import sessions_legacy as sessions
+else:
+ from chalicelib.core import sessions
+
+REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
+
+ALLOW_UPDATE_FOR = ["name", "filter"]
+
+
+def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
+    """Keep only the stage events funnels support (click/input/location/custom,
+    web and iOS variants) that carry a non-None value."""
+    ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
+                   schemas.EventType.location, schemas.EventType.custom,
+                   schemas.EventType.click_ios, schemas.EventType.input_ios,
+                   schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
+    return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
+
+
+def __parse_events(f_events: List[dict]):
+    """Deserialize raw event dicts into _SessionSearchEventSchema objects."""
+    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
+
+
+def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
+    """Serialize event schema objects back into plain dicts."""
+    return [e.dict() for e in f_events]
+
+
+def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
+    """Normalize stage events: default a missing operator to `is`, wrap scalar
+    values in a list, and drop events whose value list is empty (unless the
+    operator is an "any" operator). Returns None when given None."""
+    if f_events is None:
+        return
+    events = []
+    for e in f_events:
+        if e.operator is None:
+            e.operator = schemas.SearchEventOperator._is
+
+        if not isinstance(e.value, list):
+            e.value = [e.value]
+        is_any = sessions._isAny_opreator(e.operator)
+        if not is_any and isinstance(e.value, list) and len(e.value) == 0:
+            continue
+        events.append(e)
+    return events
+
+
+def __transform_old_funnels(events):
+    """Migrate legacy stored funnels in place: scalar stage values become
+    1-element lists. Returns the same list."""
+    for e in events:
+        if not isinstance(e.get("value"), list):
+            e["value"] = [e["value"]]
+    return events
+
+
+def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
+    """Persist a new funnel; the sanitized filter is stored as jsonb and the
+    created row is returned with startDate/endDate derived from rangeValue.
+    NOTE: the parameter `filter` shadows the builtin."""
+    # Drop volatile keys so only the relative range (rangeValue) is stored.
+    helper.delete_keys_from_dict(filter, REMOVE_KEYS)
+    filter.events = filter_stages(stages=filter.events)
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify("""\
+            INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
+            VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
+            RETURNING *;""",
+                            {"user_id": user_id, "project_id": project_id, "name": name,
+                             "filter": json.dumps(filter.dict()),
+                             "is_public": is_public})
+
+        cur.execute(
+            query
+        )
+        r = cur.fetchone()
+        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+        r = helper.dict_to_camel_case(r)
+        r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
+    return {"data": r}
+
+
+def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
+    """Partially update a funnel (name / filter / visibility).
+
+    Only the owner — or anyone, when the funnel is public — may update it.
+    Returns the updated row, or an error payload when there is nothing to
+    update or the funnel was not found. NOTE: `filter` shadows the builtin.
+    """
+    s_query = []
+    if filter is not None:
+        helper.delete_keys_from_dict(filter, REMOVE_KEYS)
+        s_query.append("filter = %(filter)s::jsonb")
+    if name is not None and len(name) > 0:
+        s_query.append("name = %(name)s")
+    if is_public is not None:
+        s_query.append("is_public = %(is_public)s")
+    if len(s_query) == 0:
+        return {"errors": ["Nothing to update"]}
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""\
+            UPDATE public.funnels
+            SET {" , ".join(s_query)}
+            WHERE funnel_id=%(funnel_id)s
+                AND project_id = %(project_id)s
+                AND (user_id = %(user_id)s OR is_public)
+            RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
+                              "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
+                              "project_id": project_id})
+        cur.execute(
+            query
+        )
+        r = cur.fetchone()
+        if r is None:
+            return {"errors": ["funnel not found"]}
+        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+        r = helper.dict_to_camel_case(r)
+        r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
+        r["filter"] = helper.old_search_payload_to_flat(r["filter"])
+    return {"data": r}
+
+
+def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
+    """List funnels visible to a user (their own plus public ones).
+
+    With details=True each funnel is enriched with session/user counts, the
+    per-stage overview from `significance`, issue counts and the number of
+    missed conversions for the resolved time range.
+    """
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify(
+                f"""\
+                SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
+                    {",filter" if details else ""}
+                FROM public.funnels
+                WHERE project_id = %(project_id)s
+                    AND funnels.deleted_at IS NULL
+                    AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
+                {"project_id": project_id, "user_id": user_id}
+            )
+        )
+
+        rows = cur.fetchall()
+        rows = helper.list_to_camel_case(rows)
+        for row in rows:
+            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+            if details:
+                # NOTE(review): row["filter"]["events"] is accessed here before the
+                # None-guard on the next line — presumably `filter` is always
+                # present when details=True (it is selected above); confirm.
+                row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
+                if row.get("filter") is not None and row["filter"].get("events") is not None:
+                    row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
+
+                get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
+                                   end_date=end_date)
+                counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
+                                                  project_id=project_id, user_id=None, count_only=True)
+                row["sessionsCount"] = counts["countSessions"]
+                row["usersCount"] = counts["countUsers"]
+                # Keep a copy: the overview call may mutate the filter, and the
+                # flat version of the clone is re-attached after the pop below.
+                filter_clone = dict(row["filter"])
+                overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
+                row["stages"] = overview["stages"]
+                row.pop("filter")
+                row["stagesCount"] = len(row["stages"])
+                # TODO: ask david to count it alone
+                row["criticalIssuesCount"] = overview["criticalIssuesCount"]
+                row["missedConversions"] = 0 if len(row["stages"]) < 2 \
+                    else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
+                row["filter"] = helper.old_search_payload_to_flat(filter_clone)
+    return rows
+
+
+def get_possible_issue_types(project_id):
+    """Static list of issue types a funnel can surface, with display titles."""
+    return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
+            ['click_rage', 'dead_click', 'excessive_scrolling',
+             'bad_request', 'missing_resource', 'memory', 'cpu',
+             'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
+             'js_error']]
+
+
+def get_start_end_time(filter_d, range_value, start_date, end_date):
+    """Resolve startDate/endDate on filter_d in priority order:
+    explicit dates > provided range_value > the filter's own rangeValue."""
+    if start_date is not None and end_date is not None:
+        filter_d["startDate"], filter_d["endDate"] = start_date, end_date
+    elif range_value is not None and len(range_value) > 0:
+        filter_d["rangeValue"] = range_value
+        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
+    else:
+        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
+
+
+def delete(project_id, funnel_id, user_id):
+    """Soft-delete a funnel (sets deleted_at); allowed for the owner or, when
+    the funnel is public, for any user of the project."""
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify("""\
+                UPDATE public.funnels
+                SET deleted_at = timezone('utc'::text, now())
+                WHERE project_id = %(project_id)s
+                    AND funnel_id = %(funnel_id)s
+                    AND (user_id = %(user_id)s OR is_public);""",
+                        {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
+        )
+
+    return {"data": {"state": "success"}}
+
+
+def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
+    """Load a stored funnel and return the sessions matching its filter over
+    the resolved time range; error payload when the funnel is missing."""
+    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+    if f is None:
+        return {"errors": ["funnel not found"]}
+    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
+    return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
+                                    project_id=project_id,
+                                    user_id=user_id)
+
+
+def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
+    """Search sessions for an ad-hoc funnel payload; falls back to the stored
+    funnel's filter when the payload has no usable stage events.
+
+    NOTE(review): this fallback reads data.range_value while the sibling
+    functions use data.rangeValue — verify both attribute spellings exist on
+    the pydantic schema (alias), otherwise this path raises AttributeError.
+    """
+    data.events = filter_stages(data.events)
+    data.events = __fix_stages(data.events)
+    if len(data.events) == 0:
+        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+        if f is None:
+            return {"errors": ["funnel not found"]}
+        get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
+                           start_date=data.startDate, end_date=data.endDate)
+        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
+    return sessions.search_sessions(data=data, project_id=project_id,
+                                    user_id=user_id)
+
+
+def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
+    """Per-stage significance insights (and total drop due to issues) for a
+    stored funnel over the resolved time range."""
+    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+    if f is None:
+        return {"errors": ["funnel not found"]}
+    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
+    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
+    insights = helper.list_to_camel_case(insights)
+    if len(insights) > 0:
+        # fix: cap the drop count — it can never exceed the sessions that
+        # entered the first stage.
+        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
+            total_drop_due_to_issues = insights[0]["sessionsCount"]
+        # end fix
+        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
+    return {"data": {"stages": insights,
+                     "totalDropDueToIssues": total_drop_due_to_issues}}
+
+
+def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
+    """Like get_top_insights, but for an ad-hoc payload; falls back to the
+    stored funnel filter when the payload has no usable stage events."""
+    data.events = filter_stages(__parse_events(data.events))
+    if len(data.events) == 0:
+        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+        if f is None:
+            return {"errors": ["funnel not found"]}
+        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+                           start_date=data.startDate,
+                           end_date=data.endDate)
+        data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
+    data.events = __fix_stages(data.events)
+    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
+    insights = helper.list_to_camel_case(insights)
+    if len(insights) > 0:
+        # fix: cap the drop count — it can never exceed the sessions that
+        # entered the first stage.
+        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
+            total_drop_due_to_issues = insights[0]["sessionsCount"]
+        # end fix
+        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
+    return {"data": {"stages": insights,
+                     "totalDropDueToIssues": total_drop_due_to_issues}}
+
+
+# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
+def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
+ data.events = filter_stages(__parse_events(data.events))
+ data.events = __fix_stages(data.events)
+ if len(data.events) == 0:
+ return {"stages": [], "totalDropDueToIssues": 0}
+ insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
+ insights = helper.list_to_camel_case(insights)
+ if len(insights) > 0:
+ # TODO: check if this correct
+ if total_drop_due_to_issues > insights[0]["sessionsCount"]:
+ if len(insights) == 0:
+ total_drop_due_to_issues = 0
+ else:
+ total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
+ insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
+ return {"stages": insights,
+ "totalDropDueToIssues": total_drop_due_to_issues}
+
+
+def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
+    """Significance issues list for a stored funnel over the resolved range."""
+    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+    if f is None:
+        return {"errors": ["funnel not found"]}
+    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
+    return {"data": {
+        "issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
+    }}
+
+
+def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
+    """Issues list for an ad-hoc funnel payload; falls back to the stored
+    funnel filter when the payload has no usable stage events. At least two
+    stages are required, otherwise an empty issues list is returned."""
+    data.events = filter_stages(data.events)
+    data.events = __fix_stages(data.events)
+    if len(data.events) == 0:
+        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+        if f is None:
+            return {"errors": ["funnel not found"]}
+        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+                           start_date=data.startDate,
+                           end_date=data.endDate)
+        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
+    if len(data.events) < 2:
+        return {"issues": []}
+    return {
+        "issues": helper.dict_to_camel_case(
+            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
+                                         last_stage=len(data.events)))}
+
+
+# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
+def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
+ data.events = filter_stages(data.events)
+ data.events = __fix_stages(data.events)
+ if len(data.events) < 0:
+ return {"issues": []}
+
+ return {
+ "issues": helper.dict_to_camel_case(
+ significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
+ last_stage=len(data.events)))}
+
+
+def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
+    """Fetch one funnel (own or public, not soft-deleted); None when missing.
+
+    The stored filter is migrated (__transform_old_funnels), parsed into
+    schema objects, stage-filtered, optionally normalized (fix_stages) and
+    optionally flattened (flatten) before being returned.
+    NOTE: shadows no builtin, but the name `get` is intentionally generic."""
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify(
+                """\
+                SELECT
+                    *
+                FROM public.funnels
+                WHERE project_id = %(project_id)s
+                    AND deleted_at IS NULL
+                    AND funnel_id = %(funnel_id)s
+                    AND (user_id = %(user_id)s OR is_public);""",
+                {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
+            )
+        )
+
+        f = helper.dict_to_camel_case(cur.fetchone())
+        if f is None:
+            return None
+        if f.get("filter") is not None and f["filter"].get("events") is not None:
+            f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
+        f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
+        f["filter"]["events"] = __parse_events(f["filter"]["events"])
+        f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
+        if fix_stages:
+            f["filter"]["events"] = __fix_stages(f["filter"]["events"])
+        f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
+        if flatten:
+            f["filter"] = helper.old_search_payload_to_flat(f["filter"])
+    return f
+
+
+def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
+ start_date=None, end_date=None):
+ if len(data.events) == 0:
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
+ if f is None:
+ return {"errors": ["funnel not found"]}
+ data.startDate = data.startDate if data.startDate is not None else start_date
+ data.endDate = data.endDate if data.endDate is not None else end_date
+ get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
+ end_date=data.endDate)
+ data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
+
+ issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
+ .get("issues", {})
+ issues = issues.get("significant", []) + issues.get("insignificant", [])
+ issue = None
+ for i in issues:
+ if i.get("issueId", "") == issue_id:
+ issue = i
+ break
+ return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
+ data=data) if issue is not None else {"total": 0, "sessions": []},
+ # "stages": helper.list_to_camel_case(insights),
+ # "totalDropDueToIssues": total_drop_due_to_issues,
+ "issue": issue}
diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py
index 62a1fbb27..3861f48aa 100644
--- a/ee/api/chalicelib/core/metrics.py
+++ b/ee/api/chalicelib/core/metrics.py
@@ -793,7 +793,7 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
GROUP BY url_hostpath
ORDER BY doc_count DESC
LIMIT 10;"""
- params = {"project_id": project_id, "startTimestamp": startTimestamp,
+ params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py
index 9e5600865..c5ab7c800 100644
--- a/ee/api/chalicelib/core/projects.py
+++ b/ee/api/chalicelib/core/projects.py
@@ -103,7 +103,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
r.pop("first_recorded_session_at")
r.pop("first_recorded")
- if recording_state:
+ if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py
index bb83cfa3a..6f1af14b6 100644
--- a/ee/api/chalicelib/core/reset_password.py
+++ b/ee/api/chalicelib/core/reset_password.py
@@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper
def reset(data: schemas.ForgetPasswordPayloadSchema):
- print("====================== reset password ===============")
- print(data)
+ print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}
diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py
index 71e493a4d..e5d7ee126 100644
--- a/ee/api/chalicelib/core/resources.py
+++ b/ee/api/chalicelib/core/resources.py
@@ -1,4 +1,4 @@
-from chalicelib.utils import helper
+from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils import ch_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
@@ -9,14 +9,26 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
duration = 0
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
- ch_query = """\
- SELECT
- datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
- FROM resources
- WHERE session_id = toUInt64(%(session_id)s)
- AND project_id=%(project_id)s
- AND datetime >= toDateTime(%(res_start_ts)s / 1000)
- AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
+ if config("EXP_RESOURCES", cast=bool, default=False):
+ ch_query = f"""SELECT
+ datetime,url,type,duration,ttfb,header_size,
+ encoded_body_size,decoded_body_size,success,
+ if(success, 200, 400) AS status
+ FROM {exp_ch_helper.get_main_resources_table(start_ts)}
+ WHERE session_id = toUInt64(%(session_id)s)
+ AND project_id = toUInt16(%(project_id)s)
+ AND datetime >= toDateTime(%(res_start_ts)s / 1000)
+ AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
+ else:
+ ch_query = """SELECT
+ datetime,url,type,duration,ttfb,header_size,
+ encoded_body_size,decoded_body_size,success,
+ coalesce(status,if(success, 200, status)) AS status
+ FROM resources
+ WHERE session_id = toUInt64(%(session_id)s)
+ AND project_id = toUInt64(%(project_id)s)
+ AND datetime >= toDateTime(%(res_start_ts)s / 1000)
+ AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
rows = ch.execute(query=ch_query, params=params)
diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py
new file mode 100644
index 000000000..3aa701f97
--- /dev/null
+++ b/ee/api/chalicelib/core/significance.py
@@ -0,0 +1,644 @@
+__author__ = "AZNAUROV David"
+__maintainer__ = "KRAIEM Taha Yassine"
+
+import schemas
+from chalicelib.core import events, metadata
+from chalicelib.utils import dev
+
+from decouple import config
+
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+ from chalicelib.core import sessions_legacy as sessions
+else:
+ from chalicelib.core import sessions
+
+"""
+todo: remove LIMIT from the query
+"""
+
+from typing import List
+import math
+import warnings
+from collections import defaultdict
+
+from psycopg2.extras import RealDictRow
+from chalicelib.utils import pg_client, helper
+
+SIGNIFICANCE_THRSH = 0.4
+
+T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
+ 11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
+ 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
+
+
+def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
+ """
+ Add minimal timestamp
+ :param filter_d: dict contains events&filters&...
+ :return:
+ """
+ stages: [dict] = filter_d.get("events", [])
+ filters: [dict] = filter_d.get("filters", [])
+ filter_issues = filter_d.get("issueTypes")
+ if filter_issues is None or len(filter_issues) == 0:
+ filter_issues = []
+ stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
+ first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
+ "s.start_ts <= %(endTimestamp)s"]
+ filter_extra_from = []
+ n_stages_query = []
+ values = {}
+ if len(filters) > 0:
+ meta_keys = None
+ for i, f in enumerate(filters):
+ if not isinstance(f["value"], list):
+ f.value = [f["value"]]
+ if len(f["value"]) == 0 or f["value"] is None:
+ continue
+ f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
+ # filter_args = _multiple_values(f["value"])
+ op = sessions.__get_sql_operator(f["operator"])
+
+ filter_type = f["type"]
+ # values[f_k] = sessions.__get_sql_value_multiple(f["value"])
+ f_k = f"f_value{i}"
+ values = {**values,
+ **sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
+ value_key=f_k)}
+ if filter_type == schemas.FilterType.user_browser:
+ # op = sessions.__get_sql_operator_multiple(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
+
+ elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
+ # op = sessions.__get_sql_operator_multiple(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
+
+ elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
+ # op = sessions.__get_sql_operator_multiple(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
+
+ elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
+ # op = sessions.__get_sql_operator_multiple(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
+ elif filter_type == schemas.FilterType.duration:
+ if len(f["value"]) > 0 and f["value"][0] is not None:
+ first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
+ values["minDuration"] = f["value"][0]
+ if len(f["value"]) > 1 and f["value"][1] is not None and int(f["value"][1]) > 0:
+ first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
+ values["maxDuration"] = f["value"][1]
+ elif filter_type == schemas.FilterType.referrer:
+ # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
+ filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
+ # op = sessions.__get_sql_operator_multiple(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
+ elif filter_type == events.event_type.METADATA.ui_type:
+ if meta_keys is None:
+ meta_keys = metadata.get(project_id=project_id)
+ meta_keys = {m["key"]: m["index"] for m in meta_keys}
+ # op = sessions.__get_sql_operator(f["operator"])
+ if f.get("key") in meta_keys.keys():
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(
+ f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
+ value_key=f_k))
+ # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
+ elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
+ # op = sessions.__get_sql_operator(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
+ # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
+ elif filter_type in [schemas.FilterType.user_anonymous_id,
+ schemas.FilterType.user_anonymous_id_ios]:
+ # op = sessions.__get_sql_operator(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
+ # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
+ elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
+ # op = sessions.__get_sql_operator(f["operator"])
+ first_stage_extra_constraints.append(
+ sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
+ # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
+ i = -1
+ for s in stages:
+
+ if s.get("operator") is None:
+ s["operator"] = "is"
+
+ if not isinstance(s["value"], list):
+ s["value"] = [s["value"]]
+ is_any = sessions._isAny_opreator(s["operator"])
+ if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
+ continue
+ i += 1
+ if i == 0:
+ extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
+ else:
+ extra_from = []
+ op = sessions.__get_sql_operator(s["operator"])
+ event_type = s["type"].upper()
+ if event_type == events.event_type.CLICK.ui_type:
+ next_table = events.event_type.CLICK.table
+ next_col_name = events.event_type.CLICK.column
+ elif event_type == events.event_type.INPUT.ui_type:
+ next_table = events.event_type.INPUT.table
+ next_col_name = events.event_type.INPUT.column
+ elif event_type == events.event_type.LOCATION.ui_type:
+ next_table = events.event_type.LOCATION.table
+ next_col_name = events.event_type.LOCATION.column
+ elif event_type == events.event_type.CUSTOM.ui_type:
+ next_table = events.event_type.CUSTOM.table
+ next_col_name = events.event_type.CUSTOM.column
+ # IOS --------------
+ elif event_type == events.event_type.CLICK_IOS.ui_type:
+ next_table = events.event_type.CLICK_IOS.table
+ next_col_name = events.event_type.CLICK_IOS.column
+ elif event_type == events.event_type.INPUT_IOS.ui_type:
+ next_table = events.event_type.INPUT_IOS.table
+ next_col_name = events.event_type.INPUT_IOS.column
+ elif event_type == events.event_type.VIEW_IOS.ui_type:
+ next_table = events.event_type.VIEW_IOS.table
+ next_col_name = events.event_type.VIEW_IOS.column
+ elif event_type == events.event_type.CUSTOM_IOS.ui_type:
+ next_table = events.event_type.CUSTOM_IOS.table
+ next_col_name = events.event_type.CUSTOM_IOS.column
+ else:
+ print("=================UNDEFINED")
+ continue
+
+ values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
+ value_key=f"value{i + 1}")}
+ if sessions.__is_negation_operator(op) and i > 0:
+ op = sessions.__reverse_sql_operator(op)
+ main_condition = "left_not.session_id ISNULL"
+ extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
+ FROM {next_table} AS s_main
+ WHERE s_main.{next_col_name} {op} %(value{i + 1})s
+ AND s_main.timestamp >= T{i}.stage{i}_timestamp
+ AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
+ else:
+ if is_any:
+ main_condition = "TRUE"
+ else:
+ main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
+ values=s["value"], value_key=f"value{i + 1}")
+ n_stages_query.append(f"""
+ (SELECT main.session_id,
+ {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
+ '{event_type}' AS type,
+ '{s["operator"]}' AS operator
+ FROM {next_table} AS main {" ".join(extra_from)}
+ WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
+ {f"AND main.session_id=T1.session_id" if i > 0 else ""}
+ AND {main_condition}
+ {(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
+ {(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
+ GROUP BY main.session_id)
+ AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
+ """)
+ if len(n_stages_query) == 0:
+ return []
+ n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
+ n_stages_query += ") AS stages_t"
+
+ n_stages_query = f"""
+ SELECT stages_and_issues_t.*,sessions.session_id, sessions.user_uuid FROM (
+ SELECT * FROM (
+ SELECT * FROM
+ {n_stages_query}
+ LEFT JOIN LATERAL
+ (
+ SELECT * FROM
+ (SELECT ISE.session_id,
+ ISS.type as issue_type,
+ ISE.timestamp AS issue_timestamp,
+ ISS.context_string as issue_context,
+ ISS.issue_id as issue_id
+ FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
+ WHERE ISE.timestamp >= stages_t.stage1_timestamp
+ AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
+ AND ISS.project_id=%(project_id)s
+ {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
+ ) AS issues_t
+ USING (session_id)) AS stages_and_issues_t
+ inner join sessions USING(session_id);
+ """
+
+ # LIMIT 10000
+ params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
+ "issueTypes": tuple(filter_issues), **values}
+ with pg_client.PostgresClient() as cur:
+ # print("---------------------------------------------------")
+ # print(cur.mogrify(n_stages_query, params))
+ # print("---------------------------------------------------")
+ cur.execute(cur.mogrify(n_stages_query, params))
+ rows = cur.fetchall()
+ return rows
+
+
+def pearson_corr(x: list, y: list):
+ n = len(x)
+ if n != len(y):
+ raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')
+
+ if n < 2:
+ warnings.warn(f'x and y must have length at least 2. Got {n} instead')
+ return None, None, False
+
+ # If an input is constant, the correlation coefficient is not defined.
+ if all(t == x[0] for t in x) or all(t == y[0] for t in y):
+ warnings.warn("An input array is constant; the correlation coefficent is not defined.")
+ return None, None, False
+
+ if n == 2:
+ return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0
+
+ xmean = sum(x) / len(x)
+ ymean = sum(y) / len(y)
+
+ xm = [el - xmean for el in x]
+ ym = [el - ymean for el in y]
+
+ normxm = math.sqrt((sum([xm[i] * xm[i] for i in range(len(xm))])))
+ normym = math.sqrt((sum([ym[i] * ym[i] for i in range(len(ym))])))
+
+ threshold = 1e-8
+ if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
+ # If all the values in x (likewise y) are very close to the mean,
+ # the loss of precision that occurs in the subtraction xm = x - xmean
+ # might result in large errors in r.
+ warnings.warn("An input array is constant; the correlation coefficent is not defined.")
+
+ r = sum(
+ i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))
+
+ # Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
+ # However, if r < 0, we don't care, as our problem is to find only positive correlations
+ r = max(min(r, 1.0), 0.0)
+
+ # approximated confidence
+ if n < 31:
+ t_c = T_VALUES[n]
+ elif n < 50:
+ t_c = 2.02
+ else:
+ t_c = 2
+ if r >= 0.999:
+ confidence = 1
+ else:
+ confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)
+
+ if confidence > SIGNIFICANCE_THRSH:
+ return r, confidence, True
+ else:
+ return r, confidence, False
+
+
+def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
+    """
+    Returns two lists with binary values 0/1:
+
+    transitions ::: if transited from the first stage to the last - 1
+                    else - 0
+    errors ::: a dictionary where the keys are all unique issues (currently context-wise)
+               the values are lists
+               if an issue happened between the first stage to the last - 1
+               else - 0
+
+    For a small task of calculating a total drop due to issues,
+    we need to disregard the issue type when creating the `errors`-like array.
+    The `all_errors` array can be obtained by logical OR statement applied to all errors by issue
+    The `transitions` array stays the same
+    """
+    transitions = []
+    n_sess_affected = 0
+    errors = {}
+    # Issue keys are "<type>__^__<context>"; split them back into their parts.
+    for issue in all_issues_with_context:
+        split = issue.split('__^__')
+        errors[issue] = {
+            "errors": [],
+            "issue_type": split[0],
+            "context": split[1]}
+
+    for row in rows:
+        t = 0
+        first_ts = row[f'stage{first_stage}_timestamp']
+        last_ts = row[f'stage{last_stage}_timestamp']
+        # Sessions that never reached the first stage are ignored entirely.
+        if first_ts is None:
+            continue
+        elif first_ts is not None and last_ts is not None:
+            t = 1
+        transitions.append(t)
+
+        ic_present = False
+        for issue_type_with_context in errors:
+            ic = 0
+            issue_type = errors[issue_type_with_context]["issue_type"]
+            context = errors[issue_type_with_context]["context"]
+            if row['issue_type'] is not None:
+                # Count the issue only when it happened inside the funnel window
+                # (or anywhere after stage 1 when the session dropped out).
+                if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
+                    context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
+                    if issue_type == row['issue_type'] and context == context_in_row:
+                        ic = 1
+                        ic_present = True
+            errors[issue_type_with_context]["errors"].append(ic)
+
+        if ic_present and t:
+            n_sess_affected += 1
+
+    def tuple_or(t: tuple):
+        # Logical OR across one position of all per-issue arrays.
+        for el in t:
+            if el > 0:
+                return 1
+        return 0
+
+    errors = {key: errors[key]["errors"] for key in errors}
+    all_errors = [tuple_or(t) for t in zip(*errors.values())]
+
+    return transitions, errors, all_errors, n_sess_affected
+
+
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
    """
    Collect, per issue-with-context, how many times it occurred and how many
    users/sessions it affected within the [first_stage, last_stage] sub-funnel.

    :param rows: funnel rows (dict-like) with stage timestamps, issue and user columns
    :param first_stage: 1-based index of the sub-funnel's first stage
    :param last_stage: 1-based index of the sub-funnel's last stage
    :return: (all_issues_with_context, n_issues_dict, n_affected_users_dict,
              n_affected_sessions_dict, contexts) — the three count dicts
              default to None/0 for unknown keys, matching caller expectations
    """
    affected_users = defaultdict(set)
    affected_sessions = defaultdict(set)
    contexts = defaultdict(lambda: None)
    n_affected_users_dict = defaultdict(lambda: None)
    n_affected_sessions_dict = defaultdict(lambda: None)
    all_issues_with_context = set()
    n_issues_dict = defaultdict(lambda: 0)
    issues_by_session = defaultdict(lambda: 0)

    for row in rows:
        # check that the session has reached the first stage of subfunnel:
        if row[f'stage{first_stage}_timestamp'] is None:
            continue

        iss = row['issue_type']
        iss_ts = row['issue_timestamp']

        # check that the issue exists and belongs to subfunnel (an abandoned
        # funnel, i.e. a missing last-stage timestamp, also counts):
        if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
                                (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
            context_string = row['issue_context'] if row['issue_context'] is not None else ''
            issue_with_context = iss + '__^__' + context_string
            contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
            all_issues_with_context.add(issue_with_context)
            n_issues_dict[issue_with_context] += 1
            if row['user_uuid'] is not None:
                affected_users[issue_with_context].add(row['user_uuid'])

            affected_sessions[issue_with_context].add(row['session_id'])
            issues_by_session[row['session_id']] += 1

    # update() on an empty mapping is a no-op, so no emptiness guards needed.
    n_affected_users_dict.update({iss: len(users) for iss, users in affected_users.items()})
    n_affected_sessions_dict.update({iss: len(sess) for iss, sess in affected_sessions.items()})
    return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
+
+
def count_sessions(rows, n_stages):
    """Return {stage_index: number of distinct sessions that reached that stage}.

    Stage indices are 1-based; a session counts for stage i when the row's
    stage{i}_timestamp is not None.
    """
    session_counts = {i: set() for i in range(1, n_stages + 1)}
    for row in rows:
        for i in range(1, n_stages + 1):
            if row[f"stage{i}_timestamp"] is not None:
                session_counts[i].add(row["session_id"])
    return {i: len(ids) for i, ids in session_counts.items()}
+
+
def count_users(rows, n_stages):
    """Return {stage_index: number of distinct users that reached that stage}.

    Stage indices are 1-based; a user counts for stage i when the row's
    stage{i}_timestamp is not None. NOTE(review): rows whose user_uuid is None
    still contribute a single None "user" — preserved from the original;
    confirm whether anonymous sessions should be excluded.
    """
    users_in_stages = defaultdict(set)
    for row in rows:
        for i in range(1, n_stages + 1):
            if row[f"stage{i}_timestamp"] is not None:
                users_in_stages[i].add(row["user_uuid"])
    return {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
+
+
def get_stages(stages, rows):
    """Assemble the per-stage part of the funnel output.

    For every stage: its definition fields, the number of distinct sessions
    and users that reached it, and the percentage drop from the previous
    stage (None for the first stage, 0 when the previous stage had no
    sessions). ``dropDueToIssues`` is initialized to 0 and filled elsewhere.
    """
    n_stages = len(stages)
    session_counts = count_sessions(rows, n_stages)
    users_counts = count_users(rows, n_stages)

    stages_list = []
    for idx, stage in enumerate(stages):
        stage_no = idx + 1  # count dicts are 1-based

        drop = None
        if idx != 0:
            prev_count = session_counts[idx]
            if prev_count == 0:
                drop = 0
            elif prev_count > 0:
                drop = int(100 * (prev_count - session_counts[stage_no]) / prev_count)

        stages_list.append({
            "value": stage["value"],
            "type": stage["type"],
            "operator": stage["operator"],
            "sessionsCount": session_counts[stage_no],
            "drop_pct": drop,
            "usersCount": users_counts[stage_no],
            "dropDueToIssues": 0,
        })
    return stages_list
+
+
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
    """
    Correlate issue occurrences with the funnel drop between two stages.

    :param stages: funnel stage definitions (only their count is used here)
    :param rows: funnel rows from the multi-stage query
    :param first_stage: If it's a part of the initial funnel, provide a number of the first stage (starting from 1)
    :param last_stage: If it's a part of the initial funnel, provide a number of the last stage (starting from 1)
    :param drop_only: when True, return only total_drop_due_to_issues
    :return: total_drop_due_to_issues when drop_only is True, otherwise
             (n_critical_issues, issues_dict, total_drop_due_to_issues)
             where issues_dict has "significant"/"insignificant" issue lists
    """
    n_stages = len(stages)

    if first_stage is None:
        first_stage = 1
    if last_stage is None:
        last_stage = n_stages
    if last_stage > n_stages:
        print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
        last_stage = n_stages

    n_critical_issues = 0
    issues_dict = {"significant": [],
                   "insignificant": []}
    session_counts = count_sessions(rows, n_stages)
    drop = session_counts[first_stage] - session_counts[last_stage]

    all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
        rows, first_stage, last_stage)
    transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
                                                                                               all_issues_with_context,
                                                                                               first_stage, last_stage)

    # Total drop attributable to issues: scale the overall issue/transition
    # correlation by the number of affected sessions.
    if any(all_errors):
        total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
        if total_drop_corr is not None and drop is not None:
            total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
        else:
            total_drop_due_to_issues = 0
    else:
        total_drop_due_to_issues = 0

    if drop_only:
        return total_drop_due_to_issues

    for issue in all_issues_with_context:
        # Issues that never fired inside the sub-funnel carry no signal.
        if not any(errors[issue]):
            continue
        r, confidence, is_sign = pearson_corr(transitions, errors[issue])

        if r is not None and drop is not None and is_sign:
            lost_conversions = int(r * affected_sessions[issue])
        else:
            lost_conversions = None
        if r is None:
            r = 0
        split = issue.split('__^__')
        issues_dict['significant' if is_sign else 'insignificant'].append({
            "type": split[0],
            "title": helper.get_issue_title(split[0]),
            "affected_sessions": affected_sessions[issue],
            "unaffected_sessions": session_counts[1] - affected_sessions[issue],
            "lost_conversions": lost_conversions,
            "affected_users": affected_users_dict[issue],
            "conversion_impact": round(r * 100),
            "context_string": contexts[issue]["context"],
            "issue_id": contexts[issue]["id"]
        })

        if is_sign:
            n_critical_issues += n_issues_dict[issue]

    return n_critical_issues, issues_dict, total_drop_due_to_issues
+
+
def get_top_insights(filter_d, project_id):
    """Return (stages_list, total_drop_due_to_issues) for the funnel insights view.

    Degenerate funnels (zero or one stage) are handled separately; the
    multi-stage case delegates to get_stages / get_issues.
    """
    stages = filter_d.get("events", [])
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        print("no stages found")
        return [], 0
    if len(stages) == 1:
        # TODO: count sessions, and users for single stage
        single = {
            "type": stages[0]["type"],
            "value": stages[0]["value"],
            "dropPercentage": None,
            "operator": stages[0]["operator"],
            "sessionsCount": 0,
            "dropPct": 0,
            "usersCount": 0,
            "dropDueToIssues": 0

        }
        counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
                                          project_id=project_id,
                                          user_id=None, count_only=True)
        single["sessionsCount"] = counts["countSessions"]
        single["usersCount"] = counts["countUsers"]
        return [single], 0
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        return get_stages(stages, []), 0
    # First part of the output: per-stage counts and drops
    stages_list = get_stages(stages, rows)
    # Second part: the overall drop attributable to issues
    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
                                          last_stage=filter_d.get("lastStage"), drop_only=True)
    return stages_list, total_drop_due_to_issues
+
+
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
    """Return the issues report for a funnel: the total drop due to issues
    plus the significant/insignificant issue lists.

    :param filter_d: funnel filter payload; its "events" entry holds the stages
    :param project_id: project to query
    :param first_stage: optional 1-based first stage of the sub-funnel
    :param last_stage: optional 1-based last stage of the sub-funnel
    """
    output = {"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}
    stages = filter_d.get("events", [])
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        return output
    # Obtain the second part of the output
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
                                                                          last_stage=last_stage)
    output['total_drop_due_to_issues'] = total_drop_due_to_issues
    # critical_issues_count is deliberately left at its default; the original
    # assignment below was disabled:
    # output['critical_issues_count'] = n_critical_issues
    output = {**output, **issues_dict}
    return output
+
+
def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
    """Return the funnel overview: the per-stage list plus the count of
    critical issues.

    Degenerate funnels (zero or one stage) and empty query results yield
    placeholder structures instead of computed values.
    """
    stages = filter_d["events"]
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        return {"stages": [],
                "criticalIssuesCount": 0}

    def _placeholder_stage():
        # Stage entry used when the funnel cannot be computed.
        return {
            "type": stages[0]["type"],
            "value": stages[0]["value"],
            "sessionsCount": None,
            "dropPercentage": None,
            "usersCount": None
        }

    output = dict()
    if len(stages) == 1:
        # TODO: count sessions, and users for single stage
        output["stages"] = [_placeholder_stage()]
        return output
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        # PS: not sure what to return if rows are empty
        output["stages"] = [_placeholder_stage()]
        output['criticalIssuesCount'] = 0
        return output
    # First part: per-stage counts; second part: issue significance analysis.
    stages_list = get_stages(stages, rows)
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
                                                                          last_stage=last_stage)
    output['stages'] = stages_list
    output['criticalIssuesCount'] = n_critical_issues
    return output
diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py
index 35339a133..14f26e0b6 100644
--- a/ee/api/chalicelib/core/traces.py
+++ b/ee/api/chalicelib/core/traces.py
@@ -201,6 +201,6 @@ def get_available_actions(tenant_id):
cron_jobs = [
- {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60),
+ {"func": process_traces_queue, "trigger": "interval", "seconds": config("TRACE_PERIOD", cast=int, default=60),
"misfire_grace_time": 20}
]
diff --git a/ee/api/clean.sh b/ee/api/clean.sh
index b05ce1ee4..9aa916080 100755
--- a/ee/api/clean.sh
+++ b/ee/api/clean.sh
@@ -1,17 +1,17 @@
#!/bin/bash
rm -rf ./chalicelib/core/alerts.py
-rm -rf ./chalicelib/core/alerts_processor.py
+#exp rm -rf ./chalicelib/core/alerts_processor.py
rm -rf ./chalicelib/core/announcements.py
rm -rf ./chalicelib/core/autocomplete.py
rm -rf ./chalicelib/core/collaboration_slack.py
rm -rf ./chalicelib/core/countries.py
-rm -rf ./chalicelib/core/errors.py
+#exp rm -rf ./chalicelib/core/errors.py
rm -rf ./chalicelib/core/errors_favorite.py
-rm -rf ./chalicelib/core/events.py
+#exp rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
-rm -rf ./chalicelib/core/dashboards.py
-rm -rf ./chalicelib/core/funnels.py
+#exp rm -rf ./chalicelib/core/dashboards.py
+#exp rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/integration_base.py
rm -rf ./chalicelib/core/integration_base_issue.py
rm -rf ./chalicelib/core/integration_github.py
@@ -36,7 +36,7 @@ rm -rf ./chalicelib/core/sessions.py
rm -rf ./chalicelib/core/sessions_assignments.py
rm -rf ./chalicelib/core/sessions_metas.py
rm -rf ./chalicelib/core/sessions_mobs.py
-rm -rf ./chalicelib/core/significance.py
+#exp rm -rf ./chalicelib/core/significance.py
rm -rf ./chalicelib/core/slack.py
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
@@ -78,7 +78,7 @@ rm -rf ./routers/subs/insights.py
rm -rf ./schemas.py
rm -rf ./routers/subs/v1_api.py
rm -rf ./routers/subs/metrics.py
-rm -rf ./chalicelib/core/custom_metrics.py
+#exp rm -rf ./chalicelib/core/custom_metrics.py
rm -rf ./chalicelib/core/performance_event.py
rm -rf ./chalicelib/core/saved_search.py
rm -rf ./app_alerts.py
diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh
index fcb58b528..b5997ee3b 100755
--- a/ee/api/entrypoint.sh
+++ b/ee/api/entrypoint.sh
@@ -2,6 +2,6 @@
sh env_vars.sh
source /tmp/.env.override
cd sourcemap-reader
-nohup npm start &> /tmp/sourcemap-reader.log &
+nohup npm start &
cd ..
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
diff --git a/ee/api/env.default b/ee/api/env.default
index 2d4a4c1e6..9d6fe66d7 100644
--- a/ee/api/env.default
+++ b/ee/api/env.default
@@ -37,9 +37,9 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
jwt_secret="SET A RANDOM STRING HERE"
-ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
-assist=/assist/%s/sockets-live
-assistList=/assist/%s/sockets-list
+ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
+assist=/sockets-live
+assistList=/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
@@ -56,14 +56,16 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
+sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
stage=default-ee
version_number=1.0.0
FS_DIR=/mnt/efs
EXP_SESSIONS_SEARCH=false
EXP_AUTOCOMPLETE=false
EXP_ERRORS_SEARCH=false
-EXP_METRICS=false
+EXP_METRICS=true
EXP_7D_MV=false
EXP_ALERTS=false
-EXP_FUNNELS=false
\ No newline at end of file
+EXP_FUNNELS=false
+EXP_RESOURCES=true
+TRACE_PERIOD=300
\ No newline at end of file
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index e6675c4f3..a414aed05 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -6,9 +6,8 @@ from starlette.responses import RedirectResponse
import schemas
import schemas_ee
-from chalicelib.core import integrations_manager
from chalicelib.core import sessions
-from chalicelib.core import tenants, users, metadata, projects, license
+from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import SAML2_helper
diff --git a/ee/api/routers/crons/core_dynamic_crons.py b/ee/api/routers/crons/core_dynamic_crons.py
index 504300759..1d8320eb7 100644
--- a/ee/api/routers/crons/core_dynamic_crons.py
+++ b/ee/api/routers/crons/core_dynamic_crons.py
@@ -1,5 +1,6 @@
from chalicelib.core import telemetry, unlock
-from chalicelib.core import weekly_report, jobs
+from chalicelib.core import jobs
+from chalicelib.core import weekly_report as weekly_report_script
from decouple import config
@@ -7,15 +8,14 @@ async def run_scheduled_jobs() -> None:
jobs.execute_jobs()
-async def weekly_report2() -> None:
- weekly_report.cron()
+async def weekly_report() -> None:
+ weekly_report_script.cron()
async def telemetry_cron() -> None:
telemetry.compute()
-# @app.schedule(Cron('0/60', '*', '*', '*', '?', '*'))
def unlock_cron() -> None:
print("validating license")
unlock.check()
@@ -28,7 +28,7 @@ cron_jobs = [
SINGLE_CRONS = [{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
- {"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5,
+ {"func": weekly_report, "trigger": "cron", "day_of_week": "mon", "hour": 5,
"misfire_grace_time": 60 * 60}]
if config("LOCAL_CRONS", default=False, cast=bool):
diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py
index 9a79551b7..dc08ac569 100644
--- a/ee/api/routers/ee.py
+++ b/ee/api/routers/ee.py
@@ -1,7 +1,6 @@
from chalicelib.core import roles, traces
from chalicelib.core import unlock
from chalicelib.utils import assist_helper
-from chalicelib.utils.TimeUTC import TimeUTC
unlock.check()
diff --git a/ee/backend/internal/db/datasaver/stats.go b/ee/backend/internal/db/datasaver/stats.go
index ecf418090..e018a2575 100644
--- a/ee/backend/internal/db/datasaver/stats.go
+++ b/ee/backend/internal/db/datasaver/stats.go
@@ -2,23 +2,18 @@ package datasaver
import (
"log"
- "time"
-
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/messages"
)
-var finalizeTicker <-chan time.Time
-
func (si *Saver) InitStats() {
si.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING"))
if err := si.ch.Prepare(); err != nil {
log.Fatalf("Clickhouse prepare error: %v\n", err)
}
si.pg.Conn.SetClickHouse(si.ch)
- finalizeTicker = time.Tick(20 * time.Minute)
}
func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error {
@@ -43,13 +38,6 @@ func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error
return nil
}
-func (si *Saver) CommitStats() error {
- select {
- case <-finalizeTicker:
- if err := si.ch.FinaliseSessionsTable(); err != nil {
- log.Printf("Stats: FinaliseSessionsTable returned an error. %v", err)
- }
- default:
- }
+func (si *Saver) CommitStats(optimize bool) error {
return si.ch.Commit()
}
diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go
index ae7a8177b..e15ca13b2 100644
--- a/ee/backend/pkg/db/clickhouse/connector.go
+++ b/ee/backend/pkg/db/clickhouse/connector.go
@@ -69,7 +69,6 @@ var CONTAINER_TYPE_MAP = map[uint64]string{0: "window", 1: "iframe", 2: "embed",
type Connector interface {
Prepare() error
Commit() error
- FinaliseSessionsTable() error
InsertWebSession(session *types.Session) error
InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error
InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error
@@ -157,13 +156,6 @@ func (c *connectorImpl) Commit() error {
return nil
}
-func (c *connectorImpl) FinaliseSessionsTable() error {
- if err := c.conn.Exec(context.Background(), "OPTIMIZE TABLE sessions FINAL"); err != nil {
- return fmt.Errorf("can't finalise sessions table: %s", err)
- }
- return nil
-}
-
func (c *connectorImpl) checkError(name string, err error) {
if err != clickhouse.ErrBatchAlreadySent {
log.Printf("can't create %s batch after failed append operation: %s", name, err)
diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go
index eb9047831..ca37917f1 100644
--- a/ee/backend/pkg/kafka/consumer.go
+++ b/ee/backend/pkg/kafka/consumer.go
@@ -194,3 +194,16 @@ func (consumer *Consumer) Close() {
log.Printf("Kafka consumer close error: %v", err)
}
}
+
+func (consumer *Consumer) HasFirstPartition() bool {
+ assigned, err := consumer.c.Assignment()
+ if err != nil {
+ return false
+ }
+ for _, p := range assigned {
+ if p.Partition == 1 {
+ return true
+ }
+ }
+ return false
+}
diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py
index f645e2995..e1fe393a4 100644
--- a/ee/connectors/msgcodec/messages.py
+++ b/ee/connectors/msgcodec/messages.py
@@ -63,13 +63,6 @@ class SessionStart(Message):
self.user_id = user_id
-class SessionDisconnect(Message):
- __id__ = 2
-
- def __init__(self, timestamp):
- self.timestamp = timestamp
-
-
class SessionEnd(Message):
__id__ = 3
@@ -106,7 +99,6 @@ class CreateDocument(Message):
__id__ = 7
def __init__(self, ):
- pass
@@ -752,6 +744,14 @@ class AdoptedSSRemoveOwner(Message):
self.id = id
+class Zustand(Message):
+ __id__ = 79
+
+ def __init__(self, mutation, state):
+ self.mutation = mutation
+ self.state = state
+
+
class IOSBatchMeta(Message):
__id__ = 107
diff --git a/ee/connectors/msgcodec/msgcodec.py b/ee/connectors/msgcodec/msgcodec.py
index 76468682a..d53c3e75d 100644
--- a/ee/connectors/msgcodec/msgcodec.py
+++ b/ee/connectors/msgcodec/msgcodec.py
@@ -2,6 +2,7 @@
from msgcodec.codec import Codec
from msgcodec.messages import *
+from typing import List
import io
class MessageCodec(Codec):
@@ -42,7 +43,7 @@ class MessageCodec(Codec):
raise UnicodeDecodeError(f"Error while decoding message key (SessionID) from {b}\n{e}")
return decoded
- def decode_detailed(self, b: bytes):
+ def decode_detailed(self, b: bytes) -> List[Message]:
reader = io.BytesIO(b)
messages_list = list()
messages_list.append(self.handler(reader, 0))
@@ -61,7 +62,7 @@ class MessageCodec(Codec):
break
return messages_list
- def handler(self, reader: io.BytesIO, mode=0):
+ def handler(self, reader: io.BytesIO, mode=0) -> Message:
message_id = self.read_message_id(reader)
if mode == 1:
# We skip the three bytes representing the length of message. It can be used to skip unwanted messages
@@ -71,9 +72,10 @@ class MessageCodec(Codec):
# Old format with no bytes for message length
return self.read_head_message(reader, message_id)
else:
- raise IOError()
+ raise IOError()
+
+ def read_head_message(self, reader: io.BytesIO, message_id) -> Message:
- def read_head_message(self, reader: io.BytesIO, message_id: int):
if message_id == 80:
return BatchMeta(
page_no=self.read_uint(reader),
@@ -121,11 +123,6 @@ class MessageCodec(Codec):
user_id=self.read_string(reader)
)
- if message_id == 2:
- return SessionDisconnect(
- timestamp=self.read_uint(reader)
- )
-
if message_id == 3:
return SessionEnd(
timestamp=self.read_uint(reader)
@@ -665,6 +662,12 @@ class MessageCodec(Codec):
id=self.read_uint(reader)
)
+ if message_id == 79:
+ return Zustand(
+ mutation=self.read_string(reader),
+ state=self.read_string(reader)
+ )
+
if message_id == 107:
return IOSBatchMeta(
timestamp=self.read_uint(reader),
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql
new file mode 100644
index 000000000..f02f9e0a8
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql
@@ -0,0 +1,38 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.8.1-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+
+INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
+ view_type)
+VALUES ('Fetch Calls with Errors', 'errors', '{
+ "col": 4,
+ "row": 2,
+ "position": 0
+}', true, true, true, 'calls_errors', 'predefined', 'table')
+ON CONFLICT (predefined_key) DO UPDATE
+ SET name=excluded.name,
+ category=excluded.category,
+ default_config=excluded.default_config,
+ is_predefined=excluded.is_predefined,
+ is_template=excluded.is_template,
+ is_public=excluded.is_public,
+ metric_type=excluded.metric_type,
+ view_type=excluded.view_type;
+
+ALTER TABLE IF EXISTS oauth_authentication
+ DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
+
+DROP INDEX IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
+
+ALTER TABLE IF EXISTS oauth_authentication
+ DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_key;
+
+DROP INDEX IF EXISTS oauth_authentication_user_id_provider_key;
+
+CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
+
+COMMIT;
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 91cb307eb..723b8eb09 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
-SELECT 'v1.8.0-ee'
+SELECT 'v1.8.1-ee'
$$ LANGUAGE sql IMMUTABLE;
@@ -221,10 +221,9 @@ $$
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
provider oauth_provider NOT NULL,
provider_user_id text NOT NULL,
- token text NOT NULL,
- UNIQUE (user_id, provider)
+ token text NOT NULL
);
-
+ CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
CREATE TABLE IF NOT EXISTS projects
(
@@ -1344,7 +1343,7 @@ VALUES ('Captured sessions', 'web vitals', '{
"position": 0
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{
- "col": 2,
+ "col": 4,
"row": 2,
"position": 0
}', true, true, true, 'calls_errors', 'predefined', 'table'),
diff --git a/ee/utilities/server.js b/ee/utilities/server.js
index 13a89be79..48799e279 100644
--- a/ee/utilities/server.js
+++ b/ee/utilities/server.js
@@ -1,6 +1,8 @@
const dumps = require('./utils/HeapSnapshot');
const {request_logger} = require('./utils/helper');
const express = require('express');
+const assert = require('assert').strict;
+
let socket;
if (process.env.redis === "true") {
socket = require("./servers/websocket-cluster");
@@ -8,24 +10,27 @@ if (process.env.redis === "true") {
socket = require("./servers/websocket");
}
-const HOST = '0.0.0.0';
+const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = process.env.LISTEN_PORT || 9001;
+assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
+const P_KEY = process.env.ASSIST_KEY;
+const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`
-let debug = process.env.debug === "1" || false;
-const PREFIX = process.env.prefix || `/assist`
+let debug = process.env.debug === "1";
+const heapdump = process.env.heapdump === "1";
if (process.env.uws !== "true") {
let wsapp = express();
wsapp.use(express.json());
wsapp.use(express.urlencoded({extended: true}));
wsapp.use(request_logger("[wsapp]"));
- wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => {
+ wsapp.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
res.statusCode = 200;
res.end("ok!");
}
);
- wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router);
- wsapp.use(`${PREFIX}/${process.env.S3_KEY}`, socket.wsRouter);
+ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
+ wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter);
wsapp.enable('trust proxy');
const wsserver = wsapp.listen(PORT, HOST, () => {
console.log(`WS App listening on http://${HOST}:${PORT}`);
@@ -44,9 +49,11 @@ if (process.env.uws !== "true") {
const healthFn = (res, req) => {
res.writeStatus('200 OK').end('ok!');
}
+ uapp.get('/', healthFn);
uapp.get(PREFIX, healthFn);
uapp.get(`${PREFIX}/`, healthFn);
- uapp.get(`${PREFIX}/${process.env.S3_KEY}`, healthFn);
+ uapp.get(`${PREFIX}/${P_KEY}`, healthFn);
+ uapp.get(`${PREFIX}/${P_KEY}/`, healthFn);
/* Either onAborted or simply finished request */
@@ -73,19 +80,19 @@ if (process.env.uws !== "true") {
return fn(req, res);
}
}
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
- uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
- uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
+ uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
socket.start(uapp);
diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 4f310468a..ab3d3ea59 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -28,7 +28,7 @@ const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
console.log(`Using Redis: ${REDIS_URL}`);
let io;
-const debug = process.env.debug === "1" || false;
+const debug = process.env.debug === "1";
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
@@ -283,6 +283,7 @@ module.exports = {
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
+ socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket._connectedAt = new Date();
socket.peerId = socket.handshake.query.peerId;
@@ -351,7 +352,6 @@ module.exports = {
socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));
- socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
socket.onAny(async (eventName, ...args) => {
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js
index 224f44490..259838fb1 100644
--- a/ee/utilities/servers/websocket.js
+++ b/ee/utilities/servers/websocket.js
@@ -23,7 +23,7 @@ const {
const wsRouter = express.Router();
let io;
-const debug = process.env.debug === "1" || false;
+const debug = process.env.debug === "1";
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
@@ -261,6 +261,7 @@ module.exports = {
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
+ socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket._connectedAt = new Date();
socket.peerId = socket.handshake.query.peerId;
@@ -327,7 +328,6 @@ module.exports = {
socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));
- socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
socket.onAny(async (eventName, ...args) => {
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js
index 86997f0c4..e189135f0 100644
--- a/ee/utilities/utils/helper-ee.js
+++ b/ee/utilities/utils/helper-ee.js
@@ -1,5 +1,5 @@
const helper = require('./helper');
-let debug = process.env.debug === "1" || false;
+let debug = process.env.debug === "1";
const getBodyFromUWSResponse = async function (res) {
return new Promise(((resolve, reject) => {
let buffer;
diff --git a/frontend/.env.sample b/frontend/.env.sample
index 4972c59e4..3c8da9433 100644
--- a/frontend/.env.sample
+++ b/frontend/.env.sample
@@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
MINIO_SECRET_KEY = ''
# APP and TRACKER VERSIONS
-VERSION = '1.8.0'
-TRACKER_VERSION = '3.6.0'
+VERSION = '1.8.1'
+TRACKER_VERSION = '4.1.0'
diff --git a/frontend/app/Router.js b/frontend/app/Router.js
index 8bd3de882..4d6c9f941 100644
--- a/frontend/app/Router.js
+++ b/frontend/app/Router.js
@@ -126,7 +126,7 @@ class Router extends React.Component {
}
fetchInitialData = async () => {
- await this.props.fetchUserInfo(),
+ await this.props.fetchUserInfo()
await this.props.fetchSiteList()
const { mstore } = this.props;
mstore.initClient();
diff --git a/frontend/app/api_middleware.js b/frontend/app/api_middleware.js
index 8f9965ec5..783ebe8c3 100644
--- a/frontend/app/api_middleware.js
+++ b/frontend/app/api_middleware.js
@@ -12,10 +12,14 @@ export default store => next => (action) => {
const client = new APIClient();
return call(client)
- .then(response => {
+ .then(async response => {
if (response.status === 403) {
next({ type: DELETE });
}
+ if (!response.ok) {
+ const text = await response.text()
+ return Promise.reject(text);
+ }
return response.json()
})
.then(json => json || {}) // TEMP TODO on server: no empty responces
@@ -31,7 +35,7 @@ export default store => next => (action) => {
})
.catch((e) => {
logger.error("Error during API request. ", e)
- return next({ type: FAILURE, errors: [ "Connection error", String(e) ] });
+ return next({ type: FAILURE, errors: JSON.parse(e).errors || [] });
});
};
diff --git a/frontend/app/assets/integrations/pinia.svg b/frontend/app/assets/integrations/pinia.svg
new file mode 100644
index 000000000..3a20cba15
--- /dev/null
+++ b/frontend/app/assets/integrations/pinia.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
index 08961af36..c070c3b50 100644
--- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
+++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
@@ -26,7 +26,7 @@ function ChatWindow({ userId, incomeStream, localStream, endCall, isPrestart }:
>
- Talking to {userId ? userId : 'Anonymous User'}
+ Call with {userId ? userId : 'Anonymous User'}
{incomeStream && incomeStream.length > 2 ? ' (+ other agents in the call)' : ''}
diff --git a/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx b/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx
new file mode 100644
index 000000000..6d702acdb
--- /dev/null
+++ b/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx
@@ -0,0 +1,53 @@
+import React from 'react';
+import { INDEXES } from 'App/constants/zindex';
+import { connect } from 'react-redux';
+import { Button, Loader, Icon } from 'UI';
+import { initiateCallEnd, releaseRemoteControl } from 'Player';
+
+interface Props {
+ userDisplayName: string;
+ type: WindowType;
+}
+
+export enum WindowType {
+ Call,
+ Control,
+}
+
+const WIN_VARIANTS = {
+ [WindowType.Call]: {
+ text: 'to accept the call',
+ icon: 'call' as const,
+ action: initiateCallEnd,
+ },
+ [WindowType.Control]: {
+ text: 'to accept remote control request',
+ icon: 'remote-control' as const,
+ action: releaseRemoteControl,
+ },
+};
+
+function RequestingWindow({ userDisplayName, type }: Props) {
+ return (
+
+
+
+
+ Waiting for {userDisplayName}
+
+
{WIN_VARIANTS[type].text}
+
+
+ Cancel
+
+
+
+ );
+}
+
+export default connect((state) => ({
+ userDisplayName: state.getIn(['sessions', 'current', 'userDisplayName']),
+}))(RequestingWindow);
diff --git a/frontend/app/components/Assist/RequestingWindow/index.ts b/frontend/app/components/Assist/RequestingWindow/index.ts
new file mode 100644
index 000000000..1a50403c4
--- /dev/null
+++ b/frontend/app/components/Assist/RequestingWindow/index.ts
@@ -0,0 +1 @@
+export { default, WindowType } from './RequestingWindow'
diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
index 948b86967..85c2f6ad3 100644
--- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
+++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
@@ -5,200 +5,252 @@ import cn from 'classnames';
import { toggleChatWindow } from 'Duck/sessions';
import { connectPlayer } from 'Player/store';
import ChatWindow from '../../ChatWindow';
-import { callPeer, setCallArgs, requestReleaseRemoteControl, toggleAnnotation } from 'Player';
-import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager';
+import {
+ callPeer,
+ setCallArgs,
+ requestReleaseRemoteControl,
+ toggleAnnotation,
+ toggleUserName,
+} from 'Player';
+import {
+ CallingState,
+ ConnectionStatus,
+ RemoteControlStatus,
+} from 'Player/MessageDistributor/managers/AssistManager';
import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
-
+import { Tooltip } from 'react-tippy';
import { toast } from 'react-toastify';
import { confirm } from 'UI';
import stl from './AassistActions.module.css';
function onReject() {
- toast.info(`Call was rejected.`);
+ toast.info(`Call was rejected.`);
}
function onError(e: any) {
- console.log(e)
- toast.error(typeof e === 'string' ? e : e.message);
+ console.log(e);
+ toast.error(typeof e === 'string' ? e : e.message);
}
interface Props {
- userId: string;
- calling: CallingState;
- annotating: boolean;
- peerConnectionStatus: ConnectionStatus;
- remoteControlStatus: RemoteControlStatus;
- hasPermission: boolean;
- isEnterprise: boolean;
- isCallActive: boolean;
- agentIds: string[];
- livePlay: boolean;
+ userId: string;
+ calling: CallingState;
+ annotating: boolean;
+ peerConnectionStatus: ConnectionStatus;
+ remoteControlStatus: RemoteControlStatus;
+ hasPermission: boolean;
+ isEnterprise: boolean;
+ isCallActive: boolean;
+ agentIds: string[];
+ livePlay: boolean;
+ userDisplayName: string;
}
function AssistActions({
- userId,
- calling,
- annotating,
- peerConnectionStatus,
- remoteControlStatus,
- hasPermission,
- isEnterprise,
- isCallActive,
- agentIds,
- livePlay
+ userId,
+ calling,
+ annotating,
+ peerConnectionStatus,
+ remoteControlStatus,
+ hasPermission,
+ isEnterprise,
+ isCallActive,
+ agentIds,
+ livePlay,
+ userDisplayName,
}: Props) {
- const [isPrestart, setPrestart] = useState(false);
- const [incomeStream, setIncomeStream] = useState
([]);
- const [localStream, setLocalStream] = useState(null);
- const [callObject, setCallObject] = useState<{ end: () => void } | null>(null);
+ const [isPrestart, setPrestart] = useState(false);
+ const [incomeStream, setIncomeStream] = useState([]);
+ const [localStream, setLocalStream] = useState(null);
+ const [callObject, setCallObject] = useState<{ end: () => void } | null>(null);
- const onCall = calling === CallingState.OnCall || calling === CallingState.Reconnecting;
- const cannotCall = peerConnectionStatus !== ConnectionStatus.Connected || (isEnterprise && !hasPermission);
- const remoteActive = remoteControlStatus === RemoteControlStatus.Enabled;
+ const onCall = calling === CallingState.OnCall || calling === CallingState.Reconnecting;
+ const callRequesting = calling === CallingState.Connecting;
+ const cannotCall =
+ peerConnectionStatus !== ConnectionStatus.Connected || (isEnterprise && !hasPermission);
- useEffect(() => {
- return callObject?.end()
- }, [])
+ const remoteRequesting = remoteControlStatus === RemoteControlStatus.Requesting;
+ const remoteActive = remoteControlStatus === RemoteControlStatus.Enabled;
- useEffect(() => {
- if (peerConnectionStatus == ConnectionStatus.Disconnected) {
- toast.info(`Live session was closed.`);
- }
- }, [peerConnectionStatus]);
-
- const addIncomeStream = (stream: MediaStream) => {
- setIncomeStream(oldState => {
- if (!oldState.find(existingStream => existingStream.id === stream.id)) {
- return [...oldState, stream]
- }
- return oldState
- });
+ useEffect(() => {
+ if (!onCall && isCallActive && agentIds) {
+ setPrestart(true);
+ // call(agentIds); do not autocall on prestart, can change later
}
+ }, [agentIds, isCallActive]);
- function call(additionalAgentIds?: string[]) {
- RequestLocalStream().then(lStream => {
- setLocalStream(lStream);
- setCallArgs(
- lStream,
- addIncomeStream,
- lStream.stop.bind(lStream),
- onReject,
- onError
- )
- setCallObject(callPeer());
- if (additionalAgentIds) {
- callPeer(additionalAgentIds)
- }
- }).catch(onError)
+ useEffect(() => {
+ if (!livePlay) {
+ if (annotating) {
+ toggleAnnotation(false);
+ }
+ if (remoteActive) {
+ requestReleaseRemoteControl();
+ }
}
+ }, [livePlay]);
- React.useEffect(() => {
- if (!onCall && isCallActive && agentIds) {
- setPrestart(true);
- // call(agentIds); do not autocall on prestart, can change later
+ useEffect(() => {
+ if (remoteActive) {
+ toggleUserName(userDisplayName);
+ } else {
+ toggleUserName();
+ }
+ }, [remoteActive]);
+
+ useEffect(() => {
+ return callObject?.end();
+ }, []);
+
+ useEffect(() => {
+ if (peerConnectionStatus == ConnectionStatus.Disconnected) {
+ toast.info(`Live session was closed.`);
+ }
+ }, [peerConnectionStatus]);
+
+ const addIncomeStream = (stream: MediaStream) => {
+ setIncomeStream((oldState) => {
+ if (!oldState.find((existingStream) => existingStream.id === stream.id)) {
+ return [...oldState, stream];
+ }
+ return oldState;
+ });
+ };
+
+ function call(additionalAgentIds?: string[]) {
+ RequestLocalStream()
+ .then((lStream) => {
+ setLocalStream(lStream);
+ setCallArgs(lStream, addIncomeStream, lStream.stop.bind(lStream), onReject, onError);
+ setCallObject(callPeer());
+ if (additionalAgentIds) {
+ callPeer(additionalAgentIds);
}
- }, [agentIds, isCallActive])
+ })
+ .catch(onError);
+ }
- const confirmCall = async () => {
- if (
- await confirm({
- header: 'Start Call',
- confirmButton: 'Call',
- confirmation: `Are you sure you want to call ${userId ? userId : 'User'}?`,
- })
- ) {
- call(agentIds);
- }
- };
+ const confirmCall = async () => {
+ if (callRequesting || remoteRequesting) return;
- React.useEffect(() => {
- if (!livePlay) {
- if (annotating) {
- toggleAnnotation(false);
- }
- if (remoteActive) {
- requestReleaseRemoteControl()
- }
- }
- }, [livePlay])
+ if (
+ await confirm({
+ header: 'Start Call',
+ confirmButton: 'Call',
+ confirmation: `Are you sure you want to call ${userId ? userId : 'User'}?`,
+ })
+ ) {
+ call(agentIds);
+ }
+ };
- return (
-
- {(onCall || remoteActive) && (
- <>
-
toggleAnnotation(!annotating)}
- role="button"
- >
-
- Annotate
-
- {/* */}
-
-
- >
- )}
-
{
+ if (callRequesting || remoteRequesting) return;
+ requestReleaseRemoteControl();
+ };
+
+ return (
+
+ {(onCall || remoteActive) && (
+ <>
+
toggleAnnotation(!annotating)}
+ role="button"
+ >
+
-
- Remote Control
-
- {/* */}
-
-
+ Annotate
+
+
+
+ >
+ )}
-
-
-
- {onCall ? 'End' : isPrestart ? 'Join Call' : 'Call'}
-
- {/* */}
-
-
-
-
- {onCall && callObject && (
-
- )}
-
+ {/* @ts-ignore */}
+
+
+
+ Remote Control
+
- );
+
+
+
+
+
+
+ {onCall ? 'End' : isPrestart ? 'Join Call' : 'Call'}
+
+
+
+
+
+ {onCall && callObject && (
+
+ )}
+
+
+ );
}
const con = connect(
- (state) => {
- const permissions = state.getIn(['user', 'account', 'permissions']) || [];
- return {
- hasPermission: permissions.includes('ASSIST_CALL'),
- isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
- };
- },
- { toggleChatWindow }
+ (state) => {
+ const permissions = state.getIn(['user', 'account', 'permissions']) || [];
+ return {
+ hasPermission: permissions.includes('ASSIST_CALL'),
+ isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
+ userDisplayName: state.getIn(['sessions', 'current', 'userDisplayName']),
+ };
+ },
+ { toggleChatWindow }
);
export default con(
- connectPlayer((state) => ({
- calling: state.calling,
- annotating: state.annotating,
- remoteControlStatus: state.remoteControl,
- peerConnectionStatus: state.peerConnectionStatus,
- livePlay: state.livePlay,
- }))(AssistActions)
+ connectPlayer((state) => ({
+ calling: state.calling,
+ annotating: state.annotating,
+ remoteControlStatus: state.remoteControl,
+ peerConnectionStatus: state.peerConnectionStatus,
+ livePlay: state.livePlay,
+ }))(AssistActions)
);
diff --git a/frontend/app/components/Client/Integrations/IntegrationForm.js b/frontend/app/components/Client/Integrations/IntegrationForm.js
index ad6689f3b..c1115f405 100644
--- a/frontend/app/components/Client/Integrations/IntegrationForm.js
+++ b/frontend/app/components/Client/Integrations/IntegrationForm.js
@@ -2,7 +2,7 @@ import React from 'react';
import { connect } from 'react-redux';
import { Input, Form, Button, Checkbox, Loader } from 'UI';
import SiteDropdown from 'Shared/SiteDropdown';
-import { save, init, edit, remove, fetchList } from 'Duck/integrations/actions';
+import { save, init, edit, remove } from 'Duck/integrations/actions';
import { fetchIntegrationList } from 'Duck/integrations/integrations';
@connect(
@@ -21,16 +21,22 @@ import { fetchIntegrationList } from 'Duck/integrations/integrations';
init,
edit,
remove,
- fetchList,
+ // fetchList,
fetchIntegrationList,
}
)
export default class IntegrationForm extends React.PureComponent {
constructor(props) {
super(props);
- // const currentSiteId = this.props.initialSiteId;
- // this.state = { currentSiteId };
- // this.init(currentSiteId);
+ }
+
+ fetchList = () => {
+ const { siteId, initialSiteId } = this.props;
+ if (!siteId) {
+ this.props.fetchIntegrationList(initialSiteId);
+ } else {
+ this.props.fetchIntegrationList(siteId);
+ }
}
write = ({ target: { value, name: key, type, checked } }) => {
@@ -57,6 +63,7 @@ export default class IntegrationForm extends React.PureComponent {
// const { currentSiteId } = this.state;
this.props.save(customPath || name, !ignoreProject ? this.props.siteId : null, config).then(() => {
// this.props.fetchList(name);
+ this.fetchList();
this.props.onClose();
if (isExists) return;
});
@@ -67,7 +74,7 @@ export default class IntegrationForm extends React.PureComponent {
this.props.remove(name, !ignoreProject ? config.projectId : null).then(
function () {
this.props.onClose();
- this.props.fetchList(name);
+ this.fetchList();
}.bind(this)
);
};
diff --git a/frontend/app/components/Client/Integrations/IntegrationItem.tsx b/frontend/app/components/Client/Integrations/IntegrationItem.tsx
index f1b69c029..ec730e7c1 100644
--- a/frontend/app/components/Client/Integrations/IntegrationItem.tsx
+++ b/frontend/app/components/Client/Integrations/IntegrationItem.tsx
@@ -22,7 +22,9 @@ const IntegrationItem = (props: Props) => {
)}
-
+ {integration.icon.length ? : (
+ {integration.header}
+ )}
{integration.title}
{/*
{integration.subtitle && integration.subtitle}
*/}
diff --git a/frontend/app/components/Client/Integrations/Integrations.tsx b/frontend/app/components/Client/Integrations/Integrations.tsx
index 8e301ac8a..8080213b1 100644
--- a/frontend/app/components/Client/Integrations/Integrations.tsx
+++ b/frontend/app/components/Client/Integrations/Integrations.tsx
@@ -29,6 +29,8 @@ import AssistDoc from './AssistDoc';
import { PageTitle, Loader } from 'UI';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import withPageTitle from 'HOCs/withPageTitle';
+import PiniaDoc from './PiniaDoc'
+import ZustandDoc from './ZustandDoc'
interface Props {
fetch: (name: string, siteId: string) => void;
@@ -162,6 +164,7 @@ const integrations = [
integrations: [
{ title: 'Redux', slug: '', icon: 'integrations/redux', component:
},
{ title: 'VueX', slug: '', icon: 'integrations/vuejs', component:
},
+ { title: 'Pinia', slug: '', icon: 'integrations/pinia', component:
},
{ title: 'GraphQL', slug: '', icon: 'integrations/graphql', component:
},
{ title: 'NgRx', slug: '', icon: 'integrations/ngrx', component:
},
{ title: 'MobX', slug: '', icon: 'integrations/mobx', component:
},
@@ -169,6 +172,7 @@ const integrations = [
{ title: 'Profiler', slug: '', icon: 'integrations/openreplay', component:
},
{ title: 'Axios', slug: '', icon: 'integrations/openreplay', component:
},
{ title: 'Assist', slug: '', icon: 'integrations/openreplay', component:
},
+ { title: 'Zustand', slug: '', icon: '', header: '🐻', component:
}
],
},
];
diff --git a/frontend/app/components/Client/Integrations/PiniaDoc/PiniaDoc.tsx b/frontend/app/components/Client/Integrations/PiniaDoc/PiniaDoc.tsx
new file mode 100644
index 000000000..8a2033a2d
--- /dev/null
+++ b/frontend/app/components/Client/Integrations/PiniaDoc/PiniaDoc.tsx
@@ -0,0 +1,102 @@
+import React from 'react';
+import Highlight from 'react-highlight';
+import ToggleContent from '../../../shared/ToggleContent';
+import DocLink from 'Shared/DocLink/DocLink';
+import { connect } from 'react-redux';
+
+const PiniaDoc = (props) => {
+ const { projectKey } = props;
+ return (
+
+
VueX
+
+
+ This plugin allows you to capture Pinia mutations + state and inspect them later on while
+ replaying session recordings. This is very useful for understanding and fixing issues.
+
+
+
Installation
+
{`npm i @openreplay/tracker-vuex --save`}
+
+
Usage
+
+ Initialize the @openreplay/tracker package as usual and load the plugin into it. Then put
+ the generated plugin into your plugins field of your store.
+
+
+
+
+ {`import Vuex from 'vuex'
+import OpenReplay from '@openreplay/tracker';
+import trackerVuex from '@openreplay/tracker-vuex';
+//...
+const tracker = new OpenReplay({
+ projectKey: '${projectKey}'
+});
+tracker.start();
+//...
+const examplePiniaStore = useExamplePiniaStore()
+// check list of available options below
+const vuexPlugin = tracker.use(trackerVuex())
+// add a name to your store, optional
+//(will be randomly generated otherwise)
+const piniaStorePlugin = vuexPlugin('STORE NAME')
+
+// start tracking state updates
+piniaStorePlugin(examplePiniaStore)
+// now you can use examplePiniaStore as
+// usual pinia store
+// (destructure values or return it as a whole etc)
+`}
+
+ }
+ second={
+
+ {`import Vuex from 'vuex'
+import OpenReplay from '@openreplay/tracker/cjs';
+import trackerVuex from '@openreplay/tracker-vuex/cjs';
+//...
+const tracker = new OpenReplay({
+ projectKey: '${projectKey}'
+});
+//...
+
+// start tracker when the app is mounted
+tracker.start();
+
+//...
+const examplePiniaStore = useExamplePiniaStore()
+// check list of available options below
+const vuexPlugin = tracker.use(trackerVuex())
+// add a name to your store, optional
+// (will be randomly generated otherwise)
+const piniaStorePlugin = vuexPlugin('STORE NAME')
+
+// start tracking state updates
+piniaStorePlugin(examplePiniaStore)
+// now you can use examplePiniaStore as
+// usual pinia store
+// (destructure values or return it as a whole etc)
+}`}
+
+ }
+ />
+
+
+
+
+ );
+};
+
+PiniaDoc.displayName = 'PiniaDoc';
+
+export default connect((state) => ({
+ projectKey: state.getIn(['site', 'instance', 'projectKey']),
+}))(PiniaDoc);
diff --git a/frontend/app/components/Client/Integrations/PiniaDoc/index.js b/frontend/app/components/Client/Integrations/PiniaDoc/index.js
new file mode 100644
index 000000000..730c76beb
--- /dev/null
+++ b/frontend/app/components/Client/Integrations/PiniaDoc/index.js
@@ -0,0 +1 @@
+export { default } from './PiniaDoc'
diff --git a/frontend/app/components/Client/Integrations/ZustandDoc/ZustandDoc.js b/frontend/app/components/Client/Integrations/ZustandDoc/ZustandDoc.js
new file mode 100644
index 000000000..35ff5ecdf
--- /dev/null
+++ b/frontend/app/components/Client/Integrations/ZustandDoc/ZustandDoc.js
@@ -0,0 +1,92 @@
+import React from 'react';
+import Highlight from 'react-highlight';
+import ToggleContent from '../../../shared/ToggleContent';
+import DocLink from 'Shared/DocLink/DocLink';
+import { connect } from 'react-redux';
+
+const ZustandDoc = (props) => {
+ const { projectKey } = props;
+ return (
+
+
Zustand
+
+
+ This plugin allows you to capture Zustand mutations/state and inspect them later on while replaying session recordings. This is very
+ useful for understanding and fixing issues.
+
+
+
Installation
+
{`npm i @openreplay/tracker-zustand --save`}
+
+
Usage
+
+ Initialize the @openreplay/tracker package as usual and load the plugin into it. Then put the generated plugin into your plugins
+ field of your store.
+
+
+
+
+ {`import create from "zustand";
+import Tracker from '@openreplay/tracker';
+import trackerZustand from '@openreplay/tracker-zustand';
+
+
+const tracker = new Tracker({
+ projectKey: ${projectKey},
+});
+
+const zustandPlugin = tracker.use(trackerZustand())
+// store name, optional
+// randomly generated if undefined
+const bearStoreLogger = zustandPlugin('bear_store')
+
+
+const useBearStore = create(
+ bearStoreLogger((set: any) => ({
+ bears: 0,
+ increasePopulation: () => set((state: any) => ({ bears: state.bears + 1 })),
+ removeAllBears: () => set({ bears: 0 }),
+ }))
+)`}
+
+ }
+ second={
+
+ {`import create from "zustand";
+import Tracker from '@openreplay/tracker/cjs';
+import trackerZustand from '@openreplay/tracker-zustand/cjs';
+
+
+const tracker = new Tracker({
+ projectKey: ${projectKey},
+});
+
+const zustandPlugin = tracker.use(trackerZustand())
+// store name, optional
+// randomly generated if undefined
+const bearStoreLogger = zustandPlugin('bear_store')
+
+
+const useBearStore = create(
+ bearStoreLogger((set: any) => ({
+ bears: 0,
+ increasePopulation: () => set((state: any) => ({ bears: state.bears + 1 })),
+ removeAllBears: () => set({ bears: 0 }),
+ }))
+)`}
+
+ }
+ />
+
+
+
+
+ );
+};
+
+ZustandDoc.displayName = 'ZustandDoc';
+
+export default connect((state) => ({ projectKey: state.getIn(['site', 'instance', 'projectKey'])}) )(ZustandDoc)
diff --git a/frontend/app/components/Client/Integrations/ZustandDoc/index.js b/frontend/app/components/Client/Integrations/ZustandDoc/index.js
new file mode 100644
index 000000000..f7061eb7a
--- /dev/null
+++ b/frontend/app/components/Client/Integrations/ZustandDoc/index.js
@@ -0,0 +1 @@
+export { default } from './ZustandDoc'
diff --git a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
index 8314e521a..498631ad1 100644
--- a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
+++ b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
@@ -7,112 +7,126 @@ import { CLIENT_TABS, client as clientRoute } from 'App/routes';
import { withRouter } from 'react-router-dom';
function PreferencesMenu({ account, activeTab, history, isEnterprise }) {
- const isAdmin = account.admin || account.superAdmin;
- const setTab = (tab) => {
- history.push(clientRoute(tab));
- };
+ const isAdmin = account.admin || account.superAdmin;
+ const setTab = (tab) => {
+ history.push(clientRoute(tab));
+ };
- return (
-
-
-
-
- setTab(CLIENT_TABS.PROFILE)}
- />
-
-
-
- setTab(CLIENT_TABS.INTEGRATIONS)}
- />
-
-
-
- setTab(CLIENT_TABS.CUSTOM_FIELDS)}
- title="Metadata"
- />
-
-
- {
-
- setTab(CLIENT_TABS.WEBHOOKS)}
- />
-
- }
-
-
- setTab(CLIENT_TABS.SITES)}
- />
-
-
- {isEnterprise && isAdmin && (
-
- setTab(CLIENT_TABS.MANAGE_ROLES)}
- />
-
- )}
-
- {isEnterprise && isAdmin && (
-
- setTab(CLIENT_TABS.AUDIT)}
- />
-
- )}
-
- {isAdmin && (
-
- setTab(CLIENT_TABS.MANAGE_USERS)}
- />
-
- )}
-
-
- setTab(CLIENT_TABS.NOTIFICATIONS)}
- />
-
+ return (
+
+
+
+
+ setTab(CLIENT_TABS.PROFILE)}
+ />
+
+
+
+ setTab(CLIENT_TABS.INTEGRATIONS)}
+ />
+
+
+
+ setTab(CLIENT_TABS.CUSTOM_FIELDS)}
+ title="Metadata"
+ />
+
+
+ {
+
+ setTab(CLIENT_TABS.WEBHOOKS)}
+ />
+
+ }
+
+
+ setTab(CLIENT_TABS.SITES)}
+ />
+
+
+ {isEnterprise && isAdmin && (
+
+
setTab(CLIENT_TABS.MANAGE_ROLES)}
+ leading={ }
+ />
+
+ )}
+
+ {isEnterprise && isAdmin && (
+
+
setTab(CLIENT_TABS.AUDIT)}
+ leading={ }
+ />
+
+ )}
+
+ {isAdmin && (
+
+
setTab(CLIENT_TABS.MANAGE_USERS)}
+ leading={ }
+ />
+
+ )}
+
+
+ setTab(CLIENT_TABS.NOTIFICATIONS)}
+ />
+
+
+ );
}
export default connect((state) => ({
- isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
- account: state.getIn(['user', 'account']),
+ isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
+ account: state.getIn(['user', 'account']),
}))(withRouter(PreferencesMenu));
+
+function AdminOnlyBadge() {
+ return (
+
+ Admin Only
+
+ );
+}
diff --git a/frontend/app/components/Client/PreferencesMenu/preferencesMenu.module.css b/frontend/app/components/Client/PreferencesMenu/preferencesMenu.module.css
index 818008fb8..c129c5cb8 100644
--- a/frontend/app/components/Client/PreferencesMenu/preferencesMenu.module.css
+++ b/frontend/app/components/Client/PreferencesMenu/preferencesMenu.module.css
@@ -1,7 +1,7 @@
.wrapper {
position: fixed;
top: 81px;
- width: 200px;
+ width: 210px;
}
.header {
diff --git a/frontend/app/components/Client/Roles/Roles.tsx b/frontend/app/components/Client/Roles/Roles.tsx
index 1c2939928..75469d3cf 100644
--- a/frontend/app/components/Client/Roles/Roles.tsx
+++ b/frontend/app/components/Client/Roles/Roles.tsx
@@ -38,7 +38,7 @@ function Roles(props: Props) {
useEffect(() => {
if (removeErrors && removeErrors.size > 0) {
- removeErrors.forEach((e) => {
+ removeErrors.forEach((e: any) => {
toast.error(e);
});
}
@@ -47,21 +47,20 @@ function Roles(props: Props) {
};
}, [removeErrors]);
- const closeModal = (showToastMessage) => {
- if (showToastMessage) {
- toast.success(showToastMessage);
- props.fetchList();
- }
- setShowmModal(false);
- setTimeout(() => {
- init();
- }, 100);
- };
+ // const closeModal = (showToastMessage: boolean) => {
+ // if (showToastMessage) {
+ // toast.success(showToastMessage);
+ // props.fetchList();
+ // }
+ // // setShowmModal(false);
+ // setTimeout(() => {
+ // init();
+ // }, 100);
+ // };
const editHandler = (role: any) => {
init(role);
showModal(
, { right: true });
- // setShowmModal(true);
};
const deleteHandler = async (role: any) => {
@@ -71,7 +70,7 @@ function Roles(props: Props) {
confirmation: `Are you sure you want to remove this role?`,
})
) {
- deleteRole(role.roleId);
+ deleteRole(role.roleId).then(hideModal);
}
};
@@ -83,7 +82,7 @@ function Roles(props: Props) {
Roles and Access
- setShowmModal(true)}>Add
+ editHandler({})}>Add
@@ -123,7 +122,7 @@ function Roles(props: Props) {
export default connect(
(state: any) => {
const permissions = state.getIn(['roles', 'permissions']);
- const permissionsMap = {};
+ const permissionsMap: any = {};
permissions.forEach((p: any) => {
permissionsMap[p.value] = p.text;
});
diff --git a/frontend/app/components/Client/Sites/NewSiteForm.js b/frontend/app/components/Client/Sites/NewSiteForm.js
index 0a9dc81c7..75776e1f1 100644
--- a/frontend/app/components/Client/Sites/NewSiteForm.js
+++ b/frontend/app/components/Client/Sites/NewSiteForm.js
@@ -7,6 +7,9 @@ import { setSiteId } from 'Duck/site';
import { withRouter } from 'react-router-dom';
import styles from './siteForm.module.css';
import { confirm } from 'UI';
+import { clearSearch } from 'Duck/search';
+import { clearSearch as clearSearchLive } from 'Duck/liveSearch';
+import { withStore } from 'App/mstore';
@connect(
(state) => ({
@@ -23,13 +26,17 @@ import { confirm } from 'UI';
pushNewSite,
fetchList,
setSiteId,
+ clearSearch,
+ clearSearchLive,
}
)
@withRouter
+@withStore
export default class NewSiteForm extends React.PureComponent {
state = {
existsError: false,
};
+
componentDidMount() {
const {
@@ -60,16 +67,10 @@ export default class NewSiteForm extends React.PureComponent {
});
} else {
this.props.save(this.props.site).then(() => {
- this.props.fetchList().then(() => {
- const { sites } = this.props;
- const site = sites.last();
- if (!pathname.includes('/client')) {
- this.props.setSiteId(site.get('id'));
- }
- this.props.onClose(null, site);
- });
-
- // this.props.pushNewSite(site)
+ this.props.onClose(null);
+ this.props.clearSearch();
+ this.props.clearSearchLive();
+ this.props.mstore.initClient();
});
}
};
diff --git a/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx b/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx
index 1611b6cf2..f5cb0a744 100644
--- a/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx
+++ b/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx
@@ -27,11 +27,15 @@ function UserListItem(props: Props) {
{user.name}
- {isEnterprise &&
}
+ {/* {isEnterprise &&
} */}
{!isEnterprise &&
}
- {isEnterprise &&
{user.roleName} }
+ {isEnterprise && (
+ <>
+
{user.roleName}
+ { user.isSuperAdmin || user.isAdmin && <>
> }
+ >)}
{!isOnboarding && (
diff --git a/frontend/app/components/Client/client.module.css b/frontend/app/components/Client/client.module.css
index 43d311b31..815b7e8d6 100644
--- a/frontend/app/components/Client/client.module.css
+++ b/frontend/app/components/Client/client.module.css
@@ -10,7 +10,7 @@
/* min-height: calc(100vh - 81px); */
& .tabMenu {
- width: 240px;
+ width: 250px;
margin: 0;
background-color: $gray-lightest;
}
diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx
index 47c88c0aa..9fc69d018 100644
--- a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx
+++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx
@@ -7,6 +7,7 @@ import MethodType from './MethodType';
import cn from 'classnames';
import stl from './callWithErrors.module.css';
import { NO_METRIC_DATA } from 'App/constants/messages'
+import { List } from 'immutable';
const cols = [
{
@@ -51,10 +52,10 @@ interface Props {
function CallWithErrors(props: Props) {
const { data, metric } = props;
const [search, setSearch] = React.useState('')
- const test = (value = '', serach) => getRE(serach, 'i').test(value);
- const _data = search ? metric.data.chart.filter(i => test(i.urlHostpath, search)) : metric.data.chart.images;
+ const test = (value = '', serach: any) => getRE(serach, 'i').test(value);
+ const _data = search ? metric.data.chart.filter((i: any) => test(i.urlHostpath, search)) : metric.data.chart;
- const write = ({ target: { name, value } }) => {
+ const write = ({ target: { name, value } }: any) => {
setSearch(value)
};
@@ -71,8 +72,9 @@ function CallWithErrors(props: Props) {
diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx
index aef9bbec0..6a6807004 100644
--- a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx
+++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx
@@ -47,6 +47,8 @@ function MissingResources(props: Props) {
if (!isTemplate) {
cols.push(copyPathCol);
}
+
+ console.log('metric.data.chart', metric.data.chart);
return (
-
+
{ data.endedAt && data.startedAt && `${ diffFromNowString(data.endedAt) } ago - ${ diffFromNowString(data.startedAt) } old` }
diff --git a/frontend/app/components/Dashboard/Widgets/common/table.module.css b/frontend/app/components/Dashboard/Widgets/common/table.module.css
index d49c52040..9c69c1ce5 100644
--- a/frontend/app/components/Dashboard/Widgets/common/table.module.css
+++ b/frontend/app/components/Dashboard/Widgets/common/table.module.css
@@ -23,7 +23,7 @@
.row {
display: flex;
align-items: center;
- min-height: 54px;
+ min-height: 50px;
font-size: 13px;
& .cell {
diff --git a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx
index 470a43cb0..b870435b7 100644
--- a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx
+++ b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx
@@ -18,6 +18,7 @@ import SelectDateRange from 'Shared/SelectDateRange';
import { Tooltip } from 'react-tippy';
import Breadcrumb from 'Shared/Breadcrumb';
import AddMetricContainer from '../DashboardWidgetGrid/AddMetricContainer';
+import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
interface IProps {
siteId: string;
@@ -32,6 +33,7 @@ function DashboardView(props: Props) {
const { dashboardStore } = useStore();
const { showModal } = useModal();
+ const [showTooltip, setShowTooltip] = React.useState(false);
const [focusTitle, setFocusedInput] = React.useState(true);
const [showEditModal, setShowEditModal] = React.useState(false);
@@ -125,18 +127,24 @@ function DashboardView(props: Props) {
className="mr-3 select-none border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
actionButton={
/* @ts-ignore */
- }
+ html={
+
+
setShowTooltip(false)}>
+ setShowTooltip(false)} isPopup siteId={siteId} />
+
+
+ }
>
-
+ setShowTooltip(true)}>
Add Metric
diff --git a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/AddMetricContainer.tsx b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/AddMetricContainer.tsx
index b33cccf76..b1d84f54e 100644
--- a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/AddMetricContainer.tsx
+++ b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/AddMetricContainer.tsx
@@ -8,7 +8,7 @@ import AddPredefinedMetric from './AddPredefinedMetric';
import cn from 'classnames';
interface AddMetricButtonProps {
- iconName: string;
+ iconName: "bar-pencil" | "grid-check";
title: string;
description: string;
isPremade?: boolean;
@@ -47,11 +47,18 @@ function AddMetricButton({ iconName, title, description, onClick, isPremade, isP
);
}
-function AddMetricContainer({ siteId, isPopup }: any) {
+interface Props {
+ siteId: string
+ isPopup?: boolean
+ onAction?: () => void
+}
+
+function AddMetricContainer({ siteId, isPopup, onAction }: Props) {
const { showModal } = useModal();
const { dashboardStore } = useStore();
const onAddCustomMetrics = () => {
+ onAction?.()
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
{
+ onAction?.()
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
-
{title}
+
{title}
{description}
-
- + Create Custom Metric
-
+
+
+ + Create Custom Metric
+
+
Past 7 Days
+
-
-
-
- {activeCategory &&
- categories.map((category) => (
-
-
-
- ))}
+
+
+
+
+ {activeCategory &&
+ categories.map((category) => (
+
+
+
+ ))}
+
-
-
+
))}
-
-
+
+
diff --git a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx
index 5807e0c3d..42f7c6fd7 100644
--- a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx
+++ b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx
@@ -1,9 +1,11 @@
import React from 'react';
+import { toJS } from 'mobx'
import { useStore } from 'App/mstore';
import WidgetWrapper from '../WidgetWrapper';
-import { NoContent, Loader } from 'UI';
+import { NoContent, Loader, Icon } from 'UI';
import { useObserver } from 'mobx-react-lite';
import AddMetricContainer from './AddMetricContainer'
+import Widget from 'App/mstore/types/widget';
interface Props {
siteId: string,
@@ -15,8 +17,20 @@ function DashboardWidgetGrid(props: Props) {
const { dashboardId, siteId } = props;
const { dashboardStore } = useStore();
const loading = useObserver(() => dashboardStore.isLoading);
- const dashboard: any = dashboardStore.selectedDashboard;
- const list: any = useObserver(() => dashboard?.widgets);
+ const dashboard = dashboardStore.selectedDashboard;
+ const list = useObserver(() => dashboard?.widgets);
+ const smallWidgets: Widget[] = []
+ const regularWidgets: Widget[] = []
+
+ list.forEach(item => {
+ if (item.config.col === 1) {
+ smallWidgets.push(item)
+ } else {
+ regularWidgets.push(item)
+ }
+ })
+
+ const smallWidgetsLen = smallWidgets.length
return useObserver(() => (
// @ts-ignore
@@ -29,17 +43,49 @@ function DashboardWidgetGrid(props: Props) {
}
>
+ {smallWidgets.length > 0 ? (
+ <>
+
+
+ Web Vitals
+
+
+ {smallWidgets && smallWidgets.map((item: any, index: any) => (
+
+ dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
+ dashboardId={dashboardId}
+ siteId={siteId}
+ isWidget={true}
+ grid="vitals"
+ />
+
+ ))}
+
+ >
+ ) : null}
+
+ {smallWidgets.length > 0 && regularWidgets.length > 0 ? (
+
+
+ All Metrics
+
+ ) : null}
- {list && list.map((item: any, index: any) => (
-
dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
- dashboardId={dashboardId}
- siteId={siteId}
- isWidget={true}
- />
+ {regularWidgets && regularWidgets.map((item: any, index: any) => (
+
+ dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
+ dashboardId={dashboardId}
+ siteId={siteId}
+ isWidget={true}
+ grid="other"
+ />
+
))}
diff --git a/frontend/app/components/Dashboard/components/Errors/ErrorDetailsModal/ErrorDetailsModal.tsx b/frontend/app/components/Dashboard/components/Errors/ErrorDetailsModal/ErrorDetailsModal.tsx
index 214dfd168..38f86af66 100644
--- a/frontend/app/components/Dashboard/components/Errors/ErrorDetailsModal/ErrorDetailsModal.tsx
+++ b/frontend/app/components/Dashboard/components/Errors/ErrorDetailsModal/ErrorDetailsModal.tsx
@@ -10,7 +10,7 @@ function ErrorDetailsModal(props: Props) {
style={{ width: '85vw', maxWidth: '1200px' }}
className="bg-white h-screen p-4 overflow-y-auto"
>
-
+
);
}
diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx
index 6354af350..1de86785c 100644
--- a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx
+++ b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx
@@ -26,10 +26,11 @@ interface Props {
onClick?: () => void;
isWidget?: boolean;
hideName?: boolean;
+ grid?: string;
}
function WidgetWrapper(props: Props & RouteComponentProps) {
const { dashboardStore } = useStore();
- const { isWidget = false, active = false, index = 0, moveListItem = null, isPreview = false, isTemplate = false, dashboardId, siteId } = props;
+ const { isWidget = false, active = false, index = 0, moveListItem = null, isPreview = false, isTemplate = false, siteId, grid = "" } = props;
const widget: any = props.widget;
const isTimeSeries = widget.metricType === 'timeseries';
const isPredefined = widget.metricType === 'predefined';
@@ -37,7 +38,7 @@ function WidgetWrapper(props: Props & RouteComponentProps) {
const [{ isDragging }, dragRef] = useDrag({
type: 'item',
- item: { index },
+ item: { index, grid },
collect: (monitor) => ({
isDragging: monitor.isDragging(),
opacity: monitor.isDragging() ? 0.5 : 1,
@@ -47,9 +48,12 @@ function WidgetWrapper(props: Props & RouteComponentProps) {
const [{ isOver, canDrop }, dropRef] = useDrop({
accept: 'item',
drop: (item: any) => {
- if (item.index === index) return;
+ if (item.index === index || (item.grid !== grid)) return;
moveListItem(item.index, index);
},
+ canDrop(item) {
+ return item.grid === grid
+ },
collect: (monitor: any) => ({
isOver: monitor.isOver(),
canDrop: monitor.canDrop(),
diff --git a/frontend/app/components/Errors/Error/ErrorInfo.js b/frontend/app/components/Errors/Error/ErrorInfo.js
index 8407826de..4b941d2de 100644
--- a/frontend/app/components/Errors/Error/ErrorInfo.js
+++ b/frontend/app/components/Errors/Error/ErrorInfo.js
@@ -11,6 +11,7 @@ import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
@connect(
(state) => ({
errorIdInStore: state.getIn(['errors', 'instance']).errorId,
+ list: state.getIn(['errors', 'instanceTrace']),
loading: state.getIn(['errors', 'fetch', 'loading']) || state.getIn(['errors', 'fetchTrace', 'loading']),
errorOnFetch: state.getIn(['errors', 'fetch', 'errors']) || state.getIn(['errors', 'fetchTrace', 'errors']),
}),
diff --git a/frontend/app/components/Errors/Error/MainSection.js b/frontend/app/components/Errors/Error/MainSection.js
index 534f417f8..35b1fef7e 100644
--- a/frontend/app/components/Errors/Error/MainSection.js
+++ b/frontend/app/components/Errors/Error/MainSection.js
@@ -63,6 +63,7 @@ export default class MainSection extends React.PureComponent {
render() {
const { error, trace, sourcemapUploaded, ignoreLoading, resolveToggleLoading, toggleFavoriteLoading, className, traceLoading } = this.props;
+ const isPlayer = window.location.pathname.includes('/session/')
return (
@@ -143,15 +144,17 @@ export default class MainSection extends React.PureComponent {
/>
*/}
-
-
Last session with this error
- {resentOrDate(error.lastOccurrence)}
-
-
- Find all sessions with this error
-
-
-
+ {!isPlayer && (
+
+
Last session with this error
+ {resentOrDate(error.lastOccurrence)}
+
+
+ Find all sessions with this error
+
+
+
+ )}
diff --git a/frontend/app/components/Session/Layout/Player/Timeline.js b/frontend/app/components/Session/Layout/Player/Timeline.js
index 6332e4662..6aecca736 100644
--- a/frontend/app/components/Session/Layout/Player/Timeline.js
+++ b/frontend/app/components/Session/Layout/Player/Timeline.js
@@ -8,52 +8,48 @@ import PlayerTime from './PlayerTime';
import cls from './timeline.module.css';
export default function Timeline({ player }) {
-
- const seekProgress = useCallback((e) => {
- if (player.controlsDisabled) {
- return;
- }
+ const seekProgress = useCallback((e) => {
+ if (player.controlsDisabled) {
+ return;
+ }
const p = e.nativeEvent.offsetX / e.target.offsetWidth;
const time = Math.max(Math.round(p * player.state.endTime), 0);
player.jump(time);
});
const scale = 100 / player.state.endTime;
- return (
-
-
-
-
-
- { player.lists[EVENTS].list.map(e => (
-
- ))}
- { player.lists[CRASHES].list.map(e => (
+
+ return (
+
+
+
+
+
+ {player.lists[EVENTS].list.map((e) => (
+
+ ))}
+ {player.lists[CRASHES].list.map((e) => (
- { `Crash ${e.name}:` }
-
- { e.reason }
-
+ content={
+
+ {`Crash ${e.name}:`}
+
+ {e.reason}
+
}
>
- ))}
-
-
+ ))}
+
+
- );
-}
\ No newline at end of file
+ );
+}
diff --git a/frontend/app/components/Session_/Autoscroll.tsx b/frontend/app/components/Session_/Autoscroll.tsx
index 305b12dad..051f2024f 100644
--- a/frontend/app/components/Session_/Autoscroll.tsx
+++ b/frontend/app/components/Session_/Autoscroll.tsx
@@ -113,7 +113,7 @@ export default class Autoscroll extends React.PureComponent
-
this.setState({ autoScroll: !this.state.autoScroll })} /> Autoscroll
+ {/*
this.setState({ autoScroll: !this.state.autoScroll })} /> Autoscroll */}
{navigation && (
<>
diff --git a/frontend/app/components/Session_/Exceptions/Exceptions.js b/frontend/app/components/Session_/Exceptions/Exceptions.js
index 16371d110..b6c65b5ba 100644
--- a/frontend/app/components/Session_/Exceptions/Exceptions.js
+++ b/frontend/app/components/Session_/Exceptions/Exceptions.js
@@ -140,7 +140,7 @@ export default class Exceptions extends React.PureComponent {
error={e}
key={e.key}
selected={lastIndex === index}
- inactive={index > lastIndex}
+ // inactive={index > lastIndex}
onErrorClick={(jsEvent) => {
jsEvent.stopPropagation();
jsEvent.preventDefault();
diff --git a/frontend/app/components/Session_/Issues/IssueForm.js b/frontend/app/components/Session_/Issues/IssueForm.js
index bedbb2860..b263739f3 100644
--- a/frontend/app/components/Session_/Issues/IssueForm.js
+++ b/frontend/app/components/Session_/Issues/IssueForm.js
@@ -3,17 +3,17 @@ import { connect } from 'react-redux';
import { Form, Input, Button, CircularLoader } from 'UI';
//import { } from 'Duck/issues';
import { addActivity, init, edit, fetchAssignments, fetchMeta } from 'Duck/assignments';
-import Select from 'Shared/Select'
+import Select from 'Shared/Select';
const SelectedValue = ({ icon, text }) => {
- return(
+ return (
{/*
*/}
- { icon }
-
{ text }
+ {icon}
+
{text}
- )
-}
+ );
+};
class IssueForm extends React.PureComponent {
componentDidMount() {
@@ -21,7 +21,7 @@ class IssueForm extends React.PureComponent {
this.props.init({
projectId: projects[0] ? projects[0].id : '',
- issueType: issueTypes[0] ? issueTypes[0].id : ''
+ issueType: issueTypes[0] ? issueTypes[0].id : '',
});
}
@@ -41,43 +41,46 @@ class IssueForm extends React.PureComponent {
addActivity(sessionId, instance.toJS()).then(() => {
const { errors } = this.props;
if (!errors || errors.length === 0) {
- this.props.init({projectId: instance.projectId});
+ this.props.init({ projectId: instance.projectId });
this.props.fetchAssignments(sessionId);
this.props.closeHandler();
}
});
- }
+ };
write = (e) => {
- const { target: { name, value } } = e;
- this.props.edit({ [ name ]: value })
+ const {
+ target: { name, value },
+ } = e;
+ this.props.edit({ [name]: value });
};
- writeOption = ({ name, value }) => this.props.edit({ [ name ]: value });
+ writeOption = ({ name, value }) => this.props.edit({ [name]: value.value });
render() {
- const { creating, projects, users, issueTypes, instance, closeHandler, metaLoading } = this.props;
- const projectOptions = projects.map(({name, id}) => ({label: name, value: id })).toArray();
- const userOptions = users.map(({name, id}) => ({label: name, value: id })).toArray();
+ const { creating, projects, users, issueTypes, instance, closeHandler, metaLoading } =
+ this.props;
+ const projectOptions = projects.map(({ name, id }) => ({ label: name, value: id })).toArray();
+ const userOptions = users.map(({ name, id }) => ({ label: name, value: id })).toArray();
- const issueTypeOptions = issueTypes.map(({name, id, iconUrl, color }) => {
- return { label: name, value: id, iconUrl, color }
+ const issueTypeOptions = issueTypes.map(({ name, id, iconUrl, color }) => {
+ return { label: name, value: id, iconUrl, color };
});
- const selectedIssueType = issueTypes.filter(issue => issue.id == instance.issueType)[0];
+ const selectedIssueType = issueTypes.filter((issue) => issue.id == instance.issueType)[0];
return (
-
Project
-
+
@@ -87,12 +90,18 @@ class IssueForm extends React.PureComponent {
selection
name="issueType"
labeled
- options={ issueTypeOptions }
- value={ instance.issueType }
+ options={issueTypeOptions}
+ // value={ instance.issueType }
fluid
- onChange={ this.writeOption }
+ onChange={this.writeOption}
placeholder="Select issue type"
- text={ selectedIssueType ?
: '' }
+ text={
+ selectedIssueType ? (
+
+ ) : (
+ ''
+ )
+ }
/>
@@ -101,10 +110,10 @@ class IssueForm extends React.PureComponent {
@@ -113,9 +122,9 @@ class IssueForm extends React.PureComponent {
Summary
@@ -127,25 +136,22 @@ class IssueForm extends React.PureComponent {
{'Create'}
-
+
{'Cancel'}
@@ -153,12 +159,15 @@ class IssueForm extends React.PureComponent {
}
}
-export default connect(state => ({
- creating: state.getIn(['assignments', 'addActivity', 'loading']),
- projects: state.getIn(['assignments', 'projects']),
- users: state.getIn(['assignments', 'users']),
- instance: state.getIn(['assignments', 'instance']),
- metaLoading: state.getIn(['assignments', 'fetchMeta', 'loading']),
- issueTypes: state.getIn(['assignments', 'issueTypes']),
- errors: state.getIn([ 'assignments', 'addActivity', 'errors' ])
-}), { addActivity, init, edit, fetchAssignments, fetchMeta })(IssueForm)
+export default connect(
+ (state) => ({
+ creating: state.getIn(['assignments', 'addActivity', 'loading']),
+ projects: state.getIn(['assignments', 'projects']),
+ users: state.getIn(['assignments', 'users']),
+ instance: state.getIn(['assignments', 'instance']),
+ metaLoading: state.getIn(['assignments', 'fetchMeta', 'loading']),
+ issueTypes: state.getIn(['assignments', 'issueTypes']),
+ errors: state.getIn(['assignments', 'addActivity', 'errors']),
+ }),
+ { addActivity, init, edit, fetchAssignments, fetchMeta }
+)(IssueForm);
diff --git a/frontend/app/components/Session_/Issues/Issues.js b/frontend/app/components/Session_/Issues/Issues.js
index e47ea8fc9..200b5b278 100644
--- a/frontend/app/components/Session_/Issues/Issues.js
+++ b/frontend/app/components/Session_/Issues/Issues.js
@@ -5,6 +5,7 @@ import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
import IssuesModal from './IssuesModal';
import { fetchProjects, fetchMeta } from 'Duck/assignments';
import stl from './issues.module.css';
+import { Tooltip } from 'react-tippy'
@connect(state => ({
issues: state.getIn(['assignments', 'list']),
@@ -59,17 +60,17 @@ class Issues extends React.Component {
return (
+
);
diff --git a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx
index 74253774b..2f1f75f2c 100644
--- a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx
+++ b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx
@@ -141,7 +141,6 @@ export default connect(
stackEventList: state.stackList,
performanceChartData: state.performanceChartData,
endTime: state.endTime,
- // endTime: 30000000,
}))(OverviewPanel)
);
diff --git a/frontend/app/components/Session_/Performance/Performance.tsx b/frontend/app/components/Session_/Performance/Performance.tsx
index 8a9706be2..13c135f7b 100644
--- a/frontend/app/components/Session_/Performance/Performance.tsx
+++ b/frontend/app/components/Session_/Performance/Performance.tsx
@@ -147,7 +147,7 @@ const VERY_LOW_FPS = 20;
const LOW_FPS_MARKER_VALUE = 5;
const HIDDEN_SCREEN_MARKER_VALUE = 20;
function addFpsMetadata(data) {
- return data.map((point, i) => {
+ return [...data].map((point, i) => {
let fpsVeryLowMarker = null;
let fpsLowMarker = null;
let hiddenScreenMarker = 0;
diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js
index 0601e092f..5109bf96b 100644
--- a/frontend/app/components/Session_/Player/Controls/Controls.js
+++ b/frontend/app/components/Session_/Player/Controls/Controls.js
@@ -10,7 +10,7 @@ import {
import LiveTag from 'Shared/LiveTag';
import { toggleTimetravel, jumpToLive } from 'Player';
-import { Icon, Button } from 'UI';
+import { Icon } from 'UI';
import { toggleInspectorMode } from 'Player';
import {
fullscreenOn,
@@ -48,6 +48,8 @@ function getStorageIconName(type) {
return 'vendors/vuex';
case STORAGE_TYPES.NGRX:
return 'vendors/ngrx';
+ case STORAGE_TYPES.ZUSTAND:
+ return 'vendors/zustand';
case STORAGE_TYPES.NONE:
return 'store';
}
@@ -73,6 +75,8 @@ function getStorageName(type) {
return 'VUEX';
case STORAGE_TYPES.NGRX:
return 'NGRX';
+ case STORAGE_TYPES.ZUSTAND:
+ return 'ZUSTAND';
case STORAGE_TYPES.NONE:
return 'STATE';
}
@@ -322,15 +326,13 @@ export default class Controls extends React.Component {
return (
- {!live || liveTimeTravel ? (
-
- ) : null}
+
{!fullscreen && (
@@ -366,15 +368,6 @@ export default class Controls extends React.Component {
-
- {!liveTimeTravel && (
-
- See Past Activity
-
- )}
)}
@@ -489,7 +482,6 @@ export default class Controls extends React.Component {
containerClassName="mx-2"
/>
)}
- {/* {!live &&
} */}
{!live && (
{this.controlIcon(
diff --git a/frontend/app/components/Session_/Player/Controls/TimeTracker.js b/frontend/app/components/Session_/Player/Controls/TimeTracker.js
index 55e1b1c43..b5f9e2de3 100644
--- a/frontend/app/components/Session_/Player/Controls/TimeTracker.js
+++ b/frontend/app/components/Session_/Player/Controls/TimeTracker.js
@@ -1,12 +1,12 @@
import React from 'react';
import { connectPlayer } from 'Player';
import styles from './timeTracker.module.css';
+import cn from 'classnames'
-
-const TimeTracker = ({ time, scale }) => (
+const TimeTracker = ({ time, scale, live, left }) => (
99 ? styles.liveTime : null) }
style={ { width: `${ time * scale }%` } }
/>
@@ -16,4 +16,4 @@ TimeTracker.displayName = 'TimeTracker';
export default connectPlayer(state => ({
time: state.time,
-}))(TimeTracker);
\ No newline at end of file
+}))(TimeTracker);
diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.js b/frontend/app/components/Session_/Player/Controls/Timeline.js
index c53018362..e53f567f0 100644
--- a/frontend/app/components/Session_/Player/Controls/Timeline.js
+++ b/frontend/app/components/Session_/Player/Controls/Timeline.js
@@ -1,404 +1,235 @@
import React from 'react';
import { connect } from 'react-redux';
-import cn from 'classnames';
-import { connectPlayer, Controls } from 'Player';
-import { TimelinePointer, Icon } from 'UI';
+import { connectPlayer, Controls, toggleTimetravel } from 'Player';
import TimeTracker from './TimeTracker';
import stl from './timeline.module.css';
-import { TYPES } from 'Types/session/event';
import { setTimelinePointer, setTimelineHoverTime } from 'Duck/sessions';
import DraggableCircle from './DraggableCircle';
import CustomDragLayer from './CustomDragLayer';
import { debounce } from 'App/utils';
-import { Tooltip } from 'react-tippy';
import TooltipContainer from './components/TooltipContainer';
const BOUNDRY = 0;
function getTimelinePosition(value, scale) {
- const pos = value * scale;
+ const pos = value * scale;
- return pos > 100 ? 99 : pos;
+ return pos > 100 ? 99 : pos;
}
-const getPointerIcon = (type) => {
- // exception,
- switch (type) {
- case 'fetch':
- return 'funnel/file-earmark-minus-fill';
- case 'exception':
- return 'funnel/exclamation-circle-fill';
- case 'log':
- return 'funnel/exclamation-circle-fill';
- case 'stack':
- return 'funnel/patch-exclamation-fill';
- case 'resource':
- return 'funnel/file-earmark-minus-fill';
-
- case 'dead_click':
- return 'funnel/dizzy';
- case 'click_rage':
- return 'funnel/dizzy';
- case 'excessive_scrolling':
- return 'funnel/mouse';
- case 'bad_request':
- return 'funnel/file-medical-alt';
- case 'missing_resource':
- return 'funnel/file-earmark-minus-fill';
- case 'memory':
- return 'funnel/sd-card';
- case 'cpu':
- return 'funnel/microchip';
- case 'slow_resource':
- return 'funnel/hourglass-top';
- case 'slow_page_load':
- return 'funnel/hourglass-top';
- case 'crash':
- return 'funnel/file-exclamation';
- case 'js_exception':
- return 'funnel/exclamation-circle-fill';
- }
-
- return 'info';
-};
-
let deboucneJump = () => null;
let debounceTooltipChange = () => null;
@connectPlayer((state) => ({
- playing: state.playing,
- time: state.time,
- skipIntervals: state.skipIntervals,
- events: state.eventList,
- skip: state.skip,
- // not updating properly rn
- // skipToIssue: state.skipToIssue,
- disabled: state.cssLoading || state.messagesLoading || state.markedTargets,
- endTime: state.endTime,
- live: state.live,
- logList: state.logList,
- exceptionsList: state.exceptionsList,
- resourceList: state.resourceList,
- stackList: state.stackList,
- fetchList: state.fetchList,
+ playing: state.playing,
+ time: state.time,
+ skipIntervals: state.skipIntervals,
+ events: state.eventList,
+ skip: state.skip,
+ // not updating properly rn
+ // skipToIssue: state.skipToIssue,
+ disabled: state.cssLoading || state.messagesLoading || state.markedTargets,
+ endTime: state.endTime,
+ live: state.live,
+ logList: state.logList,
+ exceptionsList: state.exceptionsList,
+ resourceList: state.resourceList,
+ stackList: state.stackList,
+ fetchList: state.fetchList,
}))
@connect(
- (state) => ({
- issues: state.getIn(['sessions', 'current', 'issues']),
- clickRageTime: state.getIn(['sessions', 'current', 'clickRage']) && state.getIn(['sessions', 'current', 'clickRageTime']),
- returningLocationTime:
- state.getIn(['sessions', 'current', 'returningLocation']) && state.getIn(['sessions', 'current', 'returningLocationTime']),
- tooltipVisible: state.getIn(['sessions', 'timeLineTooltip', 'isVisible']),
- }),
- { setTimelinePointer, setTimelineHoverTime }
+ (state) => ({
+ issues: state.getIn(['sessions', 'current', 'issues']),
+ startedAt: state.getIn(['sessions', 'current', 'startedAt']),
+ clickRageTime:
+ state.getIn(['sessions', 'current', 'clickRage']) &&
+ state.getIn(['sessions', 'current', 'clickRageTime']),
+ returningLocationTime:
+ state.getIn(['sessions', 'current', 'returningLocation']) &&
+ state.getIn(['sessions', 'current', 'returningLocationTime']),
+ tooltipVisible: state.getIn(['sessions', 'timeLineTooltip', 'isVisible']),
+ }),
+ { setTimelinePointer, setTimelineHoverTime }
)
export default class Timeline extends React.PureComponent {
- progressRef = React.createRef();
- timelineRef = React.createRef();
- wasPlaying = false;
+ progressRef = React.createRef();
+ timelineRef = React.createRef();
+ wasPlaying = false;
- seekProgress = (e) => {
- const time = this.getTime(e);
- this.props.jump(time);
- this.hideTimeTooltip();
- };
+ seekProgress = (e) => {
+ const time = this.getTime(e);
+ this.props.jump(time);
+ this.hideTimeTooltip();
+ };
- getTime = (e) => {
- const { endTime } = this.props;
- const p = e.nativeEvent.offsetX / e.target.offsetWidth;
- const time = Math.max(Math.round(p * endTime), 0);
+ loadAndSeek = async (e) => {
+ e.persist();
+ await toggleTimetravel();
- return time;
- };
+ setTimeout(() => {
+ this.seekProgress(e);
+ });
+ };
- createEventClickHandler = (pointer) => (e) => {
- e.stopPropagation();
- this.props.jump(pointer.time);
- this.props.setTimelinePointer(pointer);
- };
+ jumpToTime = (e) => {
+ if (this.props.live && !this.props.liveTimeTravel) {
+ this.loadAndSeek(e);
+ } else {
+ this.seekProgress(e);
+ }
+ };
- componentDidMount() {
- const { issues } = this.props;
- const skipToIssue = Controls.updateSkipToIssue();
- const firstIssue = issues.get(0);
- deboucneJump = debounce(this.props.jump, 500);
- debounceTooltipChange = debounce(this.props.setTimelineHoverTime, 50);
+ getTime = (e, customEndTime) => {
+ const { endTime } = this.props;
+ const p = e.nativeEvent.offsetX / e.target.offsetWidth;
+ const targetTime = customEndTime || endTime;
+ const time = Math.max(Math.round(p * targetTime), 0);
- if (firstIssue && skipToIssue) {
- this.props.jump(firstIssue.time);
- }
+ return time;
+ };
+
+ createEventClickHandler = (pointer) => (e) => {
+ e.stopPropagation();
+ this.props.jump(pointer.time);
+ this.props.setTimelinePointer(pointer);
+ };
+
+ componentDidMount() {
+ const { issues } = this.props;
+ const skipToIssue = Controls.updateSkipToIssue();
+ const firstIssue = issues.get(0);
+ deboucneJump = debounce(this.props.jump, 500);
+ debounceTooltipChange = debounce(this.props.setTimelineHoverTime, 50);
+
+ if (firstIssue && skipToIssue) {
+ this.props.jump(firstIssue.time);
+ }
+ }
+
+ onDragEnd = () => {
+ const { live, liveTimeTravel } = this.props;
+ if (live && !liveTimeTravel) return;
+
+ if (this.wasPlaying) {
+ this.props.togglePlay();
+ }
+ };
+
+ onDrag = (offset) => {
+ const { endTime, live, liveTimeTravel } = this.props;
+ if (live && !liveTimeTravel) return;
+
+ const p = (offset.x - BOUNDRY) / this.progressRef.current.offsetWidth;
+ const time = Math.max(Math.round(p * endTime), 0);
+ deboucneJump(time);
+ this.hideTimeTooltip();
+ if (this.props.playing) {
+ this.wasPlaying = true;
+ this.props.pause();
+ }
+ };
+
+ getLiveTime = (e) => {
+ const { startedAt } = this.props;
+ const duration = new Date().getTime() - startedAt;
+ const p = e.nativeEvent.offsetX / e.target.offsetWidth;
+ const time = Math.max(Math.round(p * duration), 0);
+
+ return [time, duration];
+ };
+
+ showTimeTooltip = (e) => {
+ if (e.target !== this.progressRef.current && e.target !== this.timelineRef.current) {
+ return this.props.tooltipVisible && this.hideTimeTooltip();
}
- onDragEnd = () => {
- if (this.wasPlaying) {
- this.props.togglePlay();
- }
- };
+ const { live } = this.props;
+ let timeLineTooltip;
- onDrag = (offset) => {
- const { endTime } = this.props;
-
- const p = (offset.x - BOUNDRY) / this.progressRef.current.offsetWidth;
- const time = Math.max(Math.round(p * endTime), 0);
- deboucneJump(time);
- this.hideTimeTooltip();
- if (this.props.playing) {
- this.wasPlaying = true;
- this.props.pause();
- }
- };
-
- showTimeTooltip = (e) => {
- if (e.target !== this.progressRef.current && e.target !== this.timelineRef.current) {
- return this.props.tooltipVisible && this.hideTimeTooltip();
- }
- const time = this.getTime(e);
- const { endTime, liveTimeTravel } = this.props;
-
- const timeLineTooltip = {
- time: liveTimeTravel ? endTime - time : time,
- offset: e.nativeEvent.offsetX,
- isVisible: true,
- };
- debounceTooltipChange(timeLineTooltip);
- };
-
- hideTimeTooltip = () => {
- const timeLineTooltip = { isVisible: false };
- debounceTooltipChange(timeLineTooltip);
- };
-
- render() {
- const {
- events,
- skip,
- skipIntervals,
- disabled,
- endTime,
- exceptionsList,
- resourceList,
- clickRageTime,
- stackList,
- fetchList,
- issues,
- liveTimeTravel,
- } = this.props;
-
- const scale = 100 / endTime;
-
- return (
-
-
-
- {/* custo color is live */}
-
-
-
-
- {skip &&
- skipIntervals.map((interval) => (
-
- ))}
-
-
- {events.map((e) => (
-
- ))}
- {/* {issues.map((iss) => (
-
-
- {iss.name}
-
- }
- >
-
-
-
- ))}
- {events
- .filter((e) => e.type === TYPES.CLICKRAGE)
- .map((e) => (
-
-
- {'Click Rage'}
-
- }
- >
-
-
-
- ))}
- {typeof clickRageTime === 'number' && (
-
-
- {'Click Rage'}
-
- }
- >
-
-
-
- )}
- {exceptionsList.map((e) => (
-
-
- {'Exception'}
-
- {e.message}
-
- }
- >
-
-
-
- ))}
- {resourceList
- .filter((r) => r.isRed() || r.isYellow())
- .map((r) => (
-
-
- {r.success ? 'Slow resource: ' : 'Missing resource:'}
-
- {r.name}
-
- }
- >
-
-
-
- ))}
- {fetchList
- .filter((e) => e.isRed())
- .map((e) => (
-
-
- Failed Fetch
-
- {e.name}
-
- }
- >
-
-
-
- ))}
- {stackList
- .filter((e) => e.isRed())
- .map((e) => (
-
-
- Stack Event
-
- {e.name}
-
- }
- >
-
-
-
- ))} */}
-
-
- );
+ if (live) {
+ const [time, duration] = this.getLiveTime(e);
+ timeLineTooltip = {
+ time: duration - time,
+ offset: e.nativeEvent.offsetX,
+ isVisible: true,
+ };
+ } else {
+ const time = this.getTime(e);
+ timeLineTooltip = {
+ time: time,
+ offset: e.nativeEvent.offsetX,
+ isVisible: true,
+ };
}
+
+ debounceTooltipChange(timeLineTooltip);
+ };
+
+ hideTimeTooltip = () => {
+ const timeLineTooltip = { isVisible: false };
+ debounceTooltipChange(timeLineTooltip);
+ };
+
+ render() {
+ const { events, skip, skipIntervals, disabled, endTime, live } = this.props;
+
+ const scale = 100 / endTime;
+
+ return (
+
+
+
+ {/* custo color is live */}
+
+
+
+
+ {!live && skip ?
+ skipIntervals.map((interval) => (
+
+ )) : null}
+
+
+ {events.map((e) => (
+
+ ))}
+
+
+ );
+ }
}
diff --git a/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx b/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx
index 2c90fcc1d..19396fbc6 100644
--- a/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx
+++ b/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx
@@ -3,11 +3,11 @@ import TimeTooltip from '../TimeTooltip';
import store from 'App/store';
import { Provider } from 'react-redux';
-function TooltipContainer({ liveTimeTravel }: { liveTimeTravel: boolean }) {
+function TooltipContainer({ live }: { live: boolean }) {
return (
-
+
)
}
diff --git a/frontend/app/components/Session_/Player/Controls/timeTracker.module.css b/frontend/app/components/Session_/Player/Controls/timeTracker.module.css
index 86dd5bd16..8fdd7c6f6 100644
--- a/frontend/app/components/Session_/Player/Controls/timeTracker.module.css
+++ b/frontend/app/components/Session_/Player/Controls/timeTracker.module.css
@@ -21,3 +21,8 @@
height: 10px;
z-index: 1;
}
+
+
+.liveTime {
+ background-color: rgba(66, 174, 94, 0.3)!important;
+}
diff --git a/frontend/app/components/Session_/Player/Overlay.tsx b/frontend/app/components/Session_/Player/Overlay.tsx
index 812eb5d88..14a2f2a17 100644
--- a/frontend/app/components/Session_/Player/Overlay.tsx
+++ b/frontend/app/components/Session_/Player/Overlay.tsx
@@ -1,14 +1,15 @@
-import React, {useEffect} from 'react';
-import { connectPlayer, markTargets } from 'Player';
+import React from 'react';
+import { connectPlayer } from 'Player';
import { getStatusText } from 'Player/MessageDistributor/managers/AssistManager';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
-import { ConnectionStatus } from 'Player/MessageDistributor/managers/AssistManager';
+import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager';
import AutoplayTimer from './Overlay/AutoplayTimer';
import PlayIconLayer from './Overlay/PlayIconLayer';
import LiveStatusText from './Overlay/LiveStatusText';
import Loader from './Overlay/Loader';
import ElementsMarker from './Overlay/ElementsMarker';
+import RequestingWindow, { WindowType } from 'App/components/Assist/RequestingWindow';
interface Props {
playing: boolean,
@@ -21,6 +22,8 @@ interface Props {
autoplay: boolean,
markedTargets: MarkedTarget[] | null,
activeTargetIndex: number,
+ calling: CallingState,
+ remoteControl: RemoteControlStatus
nextId: string,
togglePlay: () => void,
@@ -43,13 +46,19 @@ function Overlay({
togglePlay,
closedLive,
livePlay,
+ calling,
+ remoteControl,
}: Props) {
const showAutoplayTimer = !live && completed && autoplay && nextId
const showPlayIconLayer = !live && !markedTargets && !inspectorMode && !loading && !showAutoplayTimer;
const showLiveStatusText = live && livePlay && liveStatusText && !loading;
+ const showRequestWindow = live && (calling === CallingState.Connecting || remoteControl === RemoteControlStatus.Requesting)
+ const requestWindowType = calling === CallingState.Connecting ? WindowType.Call : remoteControl === RemoteControlStatus.Requesting ? WindowType.Control : null
+
return (
<>
+ {showRequestWindow ? : null}
{ showAutoplayTimer && }
{ showLiveStatusText &&
@@ -77,4 +86,6 @@ export default connectPlayer(state => ({
markedTargets: state.markedTargets,
activeTargetIndex: state.activeTargetIndex,
livePlay: state.livePlay,
+ calling: state.calling,
+ remoteControl: state.remoteControl,
}))(Overlay);
diff --git a/frontend/app/components/Session_/StackEvents/UserEvent/JsonViewer.js b/frontend/app/components/Session_/StackEvents/UserEvent/JsonViewer.js
index bb22ebb37..1a46046b1 100644
--- a/frontend/app/components/Session_/StackEvents/UserEvent/JsonViewer.js
+++ b/frontend/app/components/Session_/StackEvents/UserEvent/JsonViewer.js
@@ -5,15 +5,24 @@ export default class JsonViewer extends React.PureComponent {
render() {
const { data, title, icon } = this.props;
const isObjectData = typeof data === 'object' && !Array.isArray(data) && data !== null;
+ // TODO this has to be fixed in the data @Mehdi
+ if (Array.isArray(data) && data.length === 1) {
+ data[0] = '';
+ }
return (
{title}
- {isObjectData ? (
-
- ) : (
+ {isObjectData &&
}
+ {!isObjectData && Array.isArray(data) && (
+
+ )}
+ {typeof data === 'string' && (
<>
Payload:
{data}
diff --git a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js
index c0cda02a3..5890c5e1a 100644
--- a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js
+++ b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js
@@ -41,7 +41,7 @@ export default class UserEvent extends React.PureComponent {
// onClick={ this.props.switchOpen } //
onClick={this.props.onJump} //
className={cn('group flex py-2 px-4 ', stl.userEvent, this.getLevelClassname(), {
- [stl.inactive]: inactive,
+ // [stl.inactive]: inactive,
[stl.selected]: selected,
})}
>
diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js
index b1cf53dfc..9bd6c0174 100644
--- a/frontend/app/components/Session_/Storage/Storage.js
+++ b/frontend/app/components/Session_/Storage/Storage.js
@@ -121,6 +121,9 @@ export default class Storage extends React.PureComponent {
const { type, listNow, list } = this.props;
let src;
let name;
+
+ // ZUSTAND TODO
+ console.log(item, type)
switch(type) {
case STORAGE_TYPES.REDUX:
case STORAGE_TYPES.NGRX:
@@ -135,16 +138,23 @@ export default class Storage extends React.PureComponent {
src = item.payload;
name = `@${item.type} ${src && src.type}`;
break;
+ case STORAGE_TYPES.ZUSTAND:
+ src = null;
+ name = item.mutation.join('')
}
return (
-
+ {src === null ? (
+
{name}
+ ) : (
+
+ )}
{ i + 1 < listNow.length &&
Redux{', '}
VueX {', '}
+ {/* ZUSTAND TODO */}
MobX {' and '}
NgRx .
diff --git a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.js b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.js
index 47d7d487d..6f3ad549e 100644
--- a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.js
+++ b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.js
@@ -137,19 +137,26 @@ export default class FetchDetailsModal extends React.PureComponent {
} = this.props;
const { activeTab, tabs } = this.state;
+ const _duration = parseInt(duration)
+ console.log('_duration', _duration);
+
return (
{'URL'}
{url}
-
-
-
-
Duration
-
{parseInt(duration)} ms
-
+
+ {method && (
+
+ )}
+ {!!_duration && (
+
+
Duration
+
{_duration } ms
+
+ )}
diff --git a/frontend/app/components/shared/SessionItem/Counter.tsx b/frontend/app/components/shared/SessionItem/Counter.tsx
index fd923e6c0..ffde966cb 100644
--- a/frontend/app/components/shared/SessionItem/Counter.tsx
+++ b/frontend/app/components/shared/SessionItem/Counter.tsx
@@ -10,6 +10,7 @@ interface Props {
function Counter({ startTime, className }: Props) {
let intervalId: NodeJS.Timer;
const [duration, setDuration] = useState(convertTimestampToUtcTimestamp(new Date().getTime()) - convertTimestampToUtcTimestamp(startTime));
+
const formattedDuration = durationFormatted(Duration.fromMillis(duration));
useEffect(() => {
diff --git a/frontend/app/components/shared/SessionSettings/components/CaptureRate.tsx b/frontend/app/components/shared/SessionSettings/components/CaptureRate.tsx
index ef9568a4e..b4b847950 100644
--- a/frontend/app/components/shared/SessionSettings/components/CaptureRate.tsx
+++ b/frontend/app/components/shared/SessionSettings/components/CaptureRate.tsx
@@ -41,7 +41,7 @@ function CaptureRate({ isAdmin = false }) {
return (
- Recordings
+ Capture Rate
The percentage of session you want to capture
diff --git a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js
index 5a7a1848f..496d7aec8 100644
--- a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js
+++ b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js
@@ -156,7 +156,7 @@ const ProjectCodeSnippet = props => {
export default connect(state => ({
// siteId: state.getIn([ 'site', 'siteId' ]),
- site: state.getIn([ 'site', 'instance' ]),
+ // site: state.getIn([ 'site', 'instance' ]),
gdpr: state.getIn([ 'site', 'instance', 'gdpr' ]),
saving: state.getIn([ 'site', 'saveGDPR', 'loading' ])
}), { editGDPR, saveGDPR })(ProjectCodeSnippet)
diff --git a/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js b/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
index 586cc8742..9534d875f 100644
--- a/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
+++ b/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
@@ -25,7 +25,7 @@ class TrackingCodeModal extends React.PureComponent {
const { site } = this.props;
switch (this.state.activeTab) {
case PROJECT:
- return
;
+ return
;
case DOCUMENTATION:
return
;
}
@@ -46,32 +46,14 @@ class TrackingCodeModal extends React.PureComponent {
{this.renderActiveTab()}
- // displayed &&
- //
- //
- // { title } { subTitle && {subTitle} }
- //
- //
- //
- //
- //
- //
- //
- // { this.renderActiveTab() }
- //
- //
- //
);
}
}
export default connect(
(state) => ({
- site: state.getIn(['site', 'instance']),
- gdpr: state.getIn(['site', 'instance', 'gdpr']),
+ // site: state.getIn(['site', 'instance']),
+ // gdpr: state.getIn(['site', 'instance', 'gdpr']),
saving: state.getIn(['site', 'saveGDPR', 'loading']),
}),
{
diff --git a/frontend/app/components/ui/ErrorDetails/ErrorDetails.tsx b/frontend/app/components/ui/ErrorDetails/ErrorDetails.tsx
index 8f8ba65b4..4acd355c2 100644
--- a/frontend/app/components/ui/ErrorDetails/ErrorDetails.tsx
+++ b/frontend/app/components/ui/ErrorDetails/ErrorDetails.tsx
@@ -18,6 +18,7 @@ function ErrorDetails(props: Props) {
const { error, sessionId, message = '', errorStack = [], sourcemapUploaded = false } = props;
const [showRaw, setShowRaw] = useState(false);
const firstFunc = errorStack.first() && errorStack.first().function;
+
const openDocs = () => {
window.open(docLink, '_blank');
@@ -77,7 +78,8 @@ function ErrorDetails(props: Props) {
ErrorDetails.displayName = 'ErrorDetails';
export default connect(
(state: any) => ({
- errorStack: state.getIn(['sessions', 'errorStack']),
+ // errorStack: state.getIn(['sessions', 'errorStack']),
+ errorStack: state.getIn(['errors', 'instanceTrace']),
sessionId: state.getIn(['sessions', 'current', 'sessionId']),
}),
{ fetchErrorStackList }
diff --git a/frontend/app/components/ui/ErrorItem/ErrorItem.js b/frontend/app/components/ui/ErrorItem/ErrorItem.js
index c74dfbc36..db6a7e6e8 100644
--- a/frontend/app/components/ui/ErrorItem/ErrorItem.js
+++ b/frontend/app/components/ui/ErrorItem/ErrorItem.js
@@ -3,8 +3,15 @@ import cn from 'classnames';
import { IconButton } from 'UI';
import stl from './errorItem.module.css';
import { Duration } from 'luxon';
+import { useModal } from 'App/components/Modal';
+import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal';
-function ErrorItem({ error = {}, onErrorClick, onJump, inactive, selected }) {
+function ErrorItem({ error = {}, onJump, inactive, selected }) {
+ const { showModal } = useModal();
+
+ const onErrorClick = () => {
+ showModal(
, { right: true });
+ }
return (
{
case 'calendar-check': return
;
case 'calendar-day': return
;
case 'calendar': return
;
+ case 'call': return
;
case 'camera-alt': return
;
case 'camera-video-off': return
;
case 'camera-video': return
;
@@ -229,6 +230,7 @@ const SVG = (props: Props) => {
case 'graph-up': return
;
case 'grid-3x3': return
;
case 'grid-check': return
;
+ case 'grid-horizontal': return
;
case 'grip-horizontal': return
;
case 'hash': return
;
case 'hdd-stack': return
;
@@ -331,7 +333,7 @@ const SVG = (props: Props) => {
case 'quote-right': return
;
case 'redo-back': return
;
case 'redo': return
;
- case 'remote-control': return
;
+ case 'remote-control': return
;
case 'replay-10': return
;
case 'resources-icon': return
;
case 'safe-fill': return
;
diff --git a/frontend/app/components/ui/SideMenuitem/SideMenuitem.js b/frontend/app/components/ui/SideMenuitem/SideMenuitem.js
index c9dd32b46..7c8e8272a 100644
--- a/frontend/app/components/ui/SideMenuitem/SideMenuitem.js
+++ b/frontend/app/components/ui/SideMenuitem/SideMenuitem.js
@@ -29,7 +29,7 @@ function SideMenuitem({
className={ cn(
className,
stl.menuItem,
- "flex items-center py-2 justify-between",
+ "flex items-center py-2 justify-between shrink-0",
{ [stl.active] : active }
)}
onClick={disabled ? null : onClick}
diff --git a/frontend/app/constants/zindex.ts b/frontend/app/constants/zindex.ts
index 114871b83..807223caf 100644
--- a/frontend/app/constants/zindex.ts
+++ b/frontend/app/constants/zindex.ts
@@ -1,6 +1,7 @@
export const INDEXES = {
POPUP_GUIDE_BG: 99998,
POPUP_GUIDE_BTN: 99999,
+ PLAYER_REQUEST_WINDOW: 10,
}
export const getHighest = () => {
diff --git a/frontend/app/duck/integrations/reducer.js b/frontend/app/duck/integrations/reducer.js
index 56c531610..425b1cc50 100644
--- a/frontend/app/duck/integrations/reducer.js
+++ b/frontend/app/duck/integrations/reducer.js
@@ -27,7 +27,7 @@ export const createIntegrationReducer = (name, Config) => {
.set('list', Array.isArray(action.data) ? List(action.data).map(Config) : List([new Config(action.data)]))
.set(action.name + 'Fetched', true);
case FETCH.success:
- return state.set('instance', Config(action.data));
+ return state.set('instance', Config(action.data || {}));
case SAVE.success:
const config = Config(action.data);
return state.update('list', itemInListUpdater(config)).set('instance', config);
diff --git a/frontend/app/duck/site.js b/frontend/app/duck/site.js
index f8be1a89f..08d04b70e 100644
--- a/frontend/app/duck/site.js
+++ b/frontend/app/duck/site.js
@@ -4,7 +4,8 @@ import {
mergeReducers,
createItemInListUpdater,
success,
- array
+ array,
+ createListUpdater,
} from './funcTools/tools';
import {
createCRUDReducer,
@@ -15,6 +16,7 @@ import {
createRemove,
createUpdate,
createSave,
+ saveType,
} from './funcTools/crud';
import { createRequestReducer } from './funcTools/request';
import { Map, List, fromJS } from "immutable";
@@ -25,6 +27,7 @@ const storedSiteId = localStorage.getItem(SITE_ID_STORAGE_KEY);
const name = 'project';
const idKey = 'id';
const itemInListUpdater = createItemInListUpdater(idKey)
+const updateItemInList = createListUpdater(idKey);
const EDIT_GDPR = 'sites/EDIT_GDPR';
const SAVE_GDPR = 'sites/SAVE_GDPR';
@@ -34,6 +37,7 @@ const SET_SITE_ID = 'sites/SET_SITE_ID';
const FETCH_GDPR_SUCCESS = success(FETCH_GDPR);
const SAVE_GDPR_SUCCESS = success(SAVE_GDPR);
const FETCH_LIST_SUCCESS = success(FETCH_LIST);
+const SAVE = saveType('sites/SAVE');
const initialState = Map({
list: List(),
@@ -49,6 +53,12 @@ const reducer = (state = initialState, action = {}) => {
return state.mergeIn([ 'instance', 'gdpr' ], action.gdpr);
case FETCH_GDPR_SUCCESS:
return state.mergeIn([ 'instance', 'gdpr' ], action.data);
+ case success(SAVE):
+ console.log(action)
+ const newSite = Site(action.data);
+ return updateItemInList(state, newSite)
+ .set('siteId', newSite.get('id'))
+ .set('active', newSite);
case SAVE_GDPR_SUCCESS:
const gdpr = GDPR(action.data);
return state.setIn([ 'instance', 'gdpr' ], gdpr);
@@ -99,10 +109,17 @@ export function fetchList() {
};
}
+export function save(site) {
+ return {
+ types: array(SAVE),
+ call: client => client.post(`/projects`, site.toData()),
+ }
+}
+
// export const fetchList = createFetchList(name);
export const init = createInit(name);
export const edit = createEdit(name);
-export const save = createSave(name);
+// export const save = createSave(name);
export const update = createUpdate(name);
export const remove = createRemove(name);
diff --git a/frontend/app/mstore/dashboardStore.ts b/frontend/app/mstore/dashboardStore.ts
index 6eebcb04e..2fc6f696c 100644
--- a/frontend/app/mstore/dashboardStore.ts
+++ b/frontend/app/mstore/dashboardStore.ts
@@ -185,7 +185,7 @@ export default class DashboardStore {
new Dashboard().fromJson(_dashboard)
);
} else {
- toast.success("Dashboard updated successfully");
+ toast.success("Dashboard successfully updated ");
this.updateDashboard(
new Dashboard().fromJson(_dashboard)
);
diff --git a/frontend/app/mstore/types/dashboard.ts b/frontend/app/mstore/types/dashboard.ts
index cb631ad9c..eed78a02b 100644
--- a/frontend/app/mstore/types/dashboard.ts
+++ b/frontend/app/mstore/types/dashboard.ts
@@ -1,5 +1,5 @@
import { makeAutoObservable, observable, action, runInAction } from "mobx"
-import Widget, { IWidget } from "./widget"
+import Widget from "./widget"
import { dashboardService } from "App/services"
import { toast } from 'react-toastify';
import { DateTime } from 'luxon';
@@ -10,10 +10,10 @@ export default class Dashboard {
name: string = "Untitled Dashboard"
description: string = ""
isPublic: boolean = true
- widgets: IWidget[] = []
+ widgets: Widget[] = []
metrics: any[] = []
isValid: boolean = false
- currentWidget: IWidget = new Widget()
+ currentWidget: Widget = new Widget()
config: any = {}
createdAt: Date = new Date()
@@ -48,7 +48,19 @@ export default class Dashboard {
this.description = json.description
this.isPublic = json.isPublic
this.createdAt = DateTime.fromMillis(new Date(json.createdAt).getTime())
- this.widgets = json.widgets ? json.widgets.map((w: Widget) => new Widget().fromJson(w)).sort((a: Widget, b: Widget) => a.position - b.position) : []
+ if (json.widgets) {
+ const smallWidgets: any[] = json.widgets.filter(wi => wi.config.col === 1)
+ const otherWidgets: any[] = json.widgets.filter(wi => wi.config.col !== 1)
+ const widgets = [...smallWidgets.sort((a,b) => a.config.position - b.config.position), ...otherWidgets.sort((a,b) => a.config.position - b.config.position)]
+
+ widgets.forEach((widget, index) => {
+ widget.config.position = index
+ })
+
+ this.widgets = widgets.map((w: Widget) => new Widget().fromJson(w))
+ } else {
+ this.widgets = []
+ }
})
return this
}
@@ -57,7 +69,7 @@ export default class Dashboard {
return this.isValid = this.name.length > 0
}
- addWidget(widget: IWidget) {
+ addWidget(widget: Widget) {
this.widgets.push(widget)
}
@@ -65,7 +77,7 @@ export default class Dashboard {
this.widgets = this.widgets.filter(w => w.widgetId !== widgetId)
}
- updateWidget(widget: IWidget) {
+ updateWidget(widget: Widget) {
const index = this.widgets.findIndex(w => w.widgetId === widget.widgetId)
if (index >= 0) {
this.widgets[index] = widget
@@ -106,7 +118,7 @@ export default class Dashboard {
dashboardService.saveWidget(this.dashboardId, widgetA),
dashboardService.saveWidget(this.dashboardId, widgetB)
]).then(() => {
- toast.success("Dashboard updated successfully")
+ toast.success("Dashboard successfully updated")
resolve()
}).catch(() => {
toast.error("Error updating widget position")
diff --git a/frontend/app/mstore/types/sessionSettings.ts b/frontend/app/mstore/types/sessionSettings.ts
index bcb57a9f2..95005b85d 100644
--- a/frontend/app/mstore/types/sessionSettings.ts
+++ b/frontend/app/mstore/types/sessionSettings.ts
@@ -7,6 +7,12 @@ export type Timezone = {
value: string;
};
+const defaultDurationFilter = {
+ operator: '<',
+ count: '0',
+ countType: 'sec'
+}
+
export const generateGMTZones = (): Timezone[] => {
const timezones: Timezone[] = [];
@@ -35,7 +41,7 @@ export default class SessionSettings {
defaultTimezones = [...generateGMTZones()]
skipToIssue: boolean = localStorage.getItem(SKIP_TO_ISSUE) === 'true';
timezone: Timezone;
- durationFilter: any = JSON.parse(localStorage.getItem(DURATION_FILTER) || '{}');
+ durationFilter: any = JSON.parse(localStorage.getItem(DURATION_FILTER) || JSON.stringify(defaultDurationFilter));
captureRate: string = '0';
captureAll: boolean = false;
diff --git a/frontend/app/mstore/types/widget.ts b/frontend/app/mstore/types/widget.ts
index d0a50800f..01afcdc30 100644
--- a/frontend/app/mstore/types/widget.ts
+++ b/frontend/app/mstore/types/widget.ts
@@ -12,6 +12,7 @@ export default class Widget {
public static get ID_KEY():string { return "metricId" }
metricId: any = undefined
widgetId: any = undefined
+ category?: string = undefined
name: string = "Untitled Metric"
// metricType: string = "timeseries"
metricType: string = "timeseries"
@@ -30,7 +31,7 @@ export default class Widget {
page: number = 1
limit: number = 5
params: any = { density: 70 }
-
+
period: Record
= Period({ rangeName: LAST_24_HOURS }) // temp value in detail view
hasChanged: boolean = false
@@ -50,7 +51,7 @@ export default class Widget {
dashboardId: any = undefined
colSpan: number = 2
predefinedKey: string = ''
-
+
constructor() {
makeAutoObservable(this)
@@ -90,6 +91,7 @@ export default class Widget {
this.config = json.config
this.position = json.config.position
this.predefinedKey = json.predefinedKey
+ this.category = json.category
if (period) {
this.period = period
diff --git a/frontend/app/player/MessageDistributor/Lists.ts b/frontend/app/player/MessageDistributor/Lists.ts
index 0173b2667..a65eff52f 100644
--- a/frontend/app/player/MessageDistributor/Lists.ts
+++ b/frontend/app/player/MessageDistributor/Lists.ts
@@ -1,7 +1,7 @@
import type { Message } from './messages'
import ListWalker from './managers/ListWalker';
-export const LIST_NAMES = ["redux", "mobx", "vuex", "ngrx", "graphql", "exceptions", "profiles", "longtasks"] as const;
+export const LIST_NAMES = ["redux", "mobx", "vuex", "zustand", "ngrx", "graphql", "exceptions", "profiles", "longtasks"] as const;
export const INITIAL_STATE = {}
LIST_NAMES.forEach(name => {
diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts
index 2d73081fb..7d4ed9f33 100644
--- a/frontend/app/player/MessageDistributor/MessageDistributor.ts
+++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts
@@ -6,7 +6,7 @@ import Resource, { TYPES } from 'Types/session/resource'; // MBTODO: player type
import { TYPES as EVENT_TYPES } from 'Types/session/event';
import Log from 'Types/session/log';
-import { update, getState } from '../store';
+import { update } from '../store';
import { toast } from 'react-toastify';
import {
@@ -123,7 +123,7 @@ export default class MessageDistributor extends StatedScreen {
// TODO: fix types for events, remove immutable js
eventList.forEach((e: Record) => {
if (e.type === EVENT_TYPES.LOCATION) { //TODO type system
- this.locationEventManager.append(e);
+ this.locationEventManager.append(e);
}
});
this.session.errors.forEach((e: Record) => {
@@ -233,7 +233,7 @@ export default class MessageDistributor extends StatedScreen {
this.waitingForFiles = false
this.setMessagesLoading(false)
})
-
+
})
}
@@ -248,7 +248,6 @@ export default class MessageDistributor extends StatedScreen {
const onData = (byteArray: Uint8Array) => {
const onReadCallback = () => this.setLastRecordedMessageTime(this.lastMessageTime)
const msgs = this.readAndDistributeMessages(byteArray, onReadCallback)
- this.sessionStart = msgs[0].time
this.processStateUpdates(msgs)
}
@@ -275,6 +274,8 @@ export default class MessageDistributor extends StatedScreen {
this.waitingForFiles = false
this.setMessagesLoading(false)
}
+
+
}
private reloadMessageManagers() {
@@ -479,6 +480,12 @@ export default class MessageDistributor extends StatedScreen {
this.lists.vuex.append(decoded);
}
break;
+ case "zustand":
+ decoded = this.decodeMessage(msg, ["state", "mutation"])
+ logger.log(decoded)
+ if (decoded != null) {
+ this.lists.zustand.append(decoded)
+ }
case "mob_x":
decoded = this.decodeMessage(msg, ["payload"]);
logger.log(decoded)
@@ -519,6 +526,7 @@ export default class MessageDistributor extends StatedScreen {
this.performanceTrackManager.setCurrentNodesCount(this.windowNodeCounter.count);
break;
}
+ this.performanceTrackManager.addNodeCountPointIfNeed(msg.time)
this.pagesManager.appendMessage(msg);
break;
}
diff --git a/frontend/app/player/MessageDistributor/StatedScreen/Screen/Cursor.ts b/frontend/app/player/MessageDistributor/StatedScreen/Screen/Cursor.ts
index cd583c05b..54ea414fd 100644
--- a/frontend/app/player/MessageDistributor/StatedScreen/Screen/Cursor.ts
+++ b/frontend/app/player/MessageDistributor/StatedScreen/Screen/Cursor.ts
@@ -4,6 +4,7 @@ import styles from './cursor.module.css';
export default class Cursor {
private readonly cursor: HTMLDivElement;
+ private nameElement: HTMLDivElement;
private readonly position: Point = { x: -1, y: -1 }
constructor(overlay: HTMLDivElement) {
this.cursor = document.createElement('div');
@@ -19,6 +20,32 @@ export default class Cursor {
}
}
+ toggleUserName(name?: string) {
+ if (!this.nameElement) {
+ this.nameElement = document.createElement('div')
+ Object.assign(this.nameElement.style, {
+ position: 'absolute',
+ padding: '4px 6px',
+ borderRadius: '8px',
+ backgroundColor: '#3EAAAF',
+ color: 'white',
+ bottom: '-25px',
+ left: '80%',
+ fontSize: '12px',
+ whiteSpace: 'nowrap',
+ })
+ this.cursor.appendChild(this.nameElement)
+ }
+
+ if (!name) {
+ this.nameElement.style.display = 'none'
+ } else {
+ this.nameElement.style.display = 'block'
+ const nameStr = name ? name.length > 10 ? name.slice(0, 9) + '...' : name : 'User'
+ this.nameElement.innerHTML = `${nameStr} `
+ }
+ }
+
move({ x, y }: Point) {
this.position.x = x;
this.position.y = y;
@@ -41,4 +68,4 @@ export default class Cursor {
return { x: this.position.x, y: this.position.y };
}
-}
\ No newline at end of file
+}
diff --git a/frontend/app/player/MessageDistributor/StatedScreen/Screen/cursor.module.css b/frontend/app/player/MessageDistributor/StatedScreen/Screen/cursor.module.css
index f6ffc1852..7a94c99b8 100644
--- a/frontend/app/player/MessageDistributor/StatedScreen/Screen/cursor.module.css
+++ b/frontend/app/player/MessageDistributor/StatedScreen/Screen/cursor.module.css
@@ -1,7 +1,7 @@
.cursor {
display: block;
position: absolute;
- width: 20px;
+ width: 13px;
height: 20px;
background-image: url('data:image/svg+xml;utf8, ');
background-repeat: no-repeat;
@@ -12,7 +12,7 @@
}
/* ====== *
- Source: https://github.com/codrops/ClickEffects/blob/master/css/component.css
+ Source: https://github.com/codrops/ClickEffects/blob/master/css/component.css
* ======= */
.cursor::after {
position: absolute;
diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
index f08d70775..a72c9c0c8 100644
--- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
@@ -323,6 +323,12 @@ export default class AssistManager {
}
}
+ releaseRemoteControl = () => {
+ if (!this.socket) { return }
+ this.socket.emit("release_control")
+ this.toggleRemoteControl(false)
+ }
+
/* ==== PeerJS Call ==== */
@@ -406,9 +412,14 @@ export default class AssistManager {
this.toggleAnnotation(false)
}
- private initiateCallEnd = async () => {
+ public initiateCallEnd = async () => {
this.socket?.emit("call_end", store.getState().getIn([ 'user', 'account', 'name']))
this.handleCallEnd()
+ const remoteControl = getState().remoteControl
+ if (remoteControl === RemoteControlStatus.Enabled) {
+ this.socket.emit("release_control")
+ this.toggleRemoteControl(false)
+ }
}
private onRemoteCallEnd = () => {
diff --git a/frontend/app/player/MessageDistributor/managers/DOM/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOM/DOMManager.ts
index 6ce7762e2..39c1dab87 100644
--- a/frontend/app/player/MessageDistributor/managers/DOM/DOMManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/DOM/DOMManager.ts
@@ -5,7 +5,15 @@ import type { Message, SetNodeScroll, CreateElementNode } from '../../messages';
import ListWalker from '../ListWalker';
import StylesManager, { rewriteNodeStyleSheet } from './StylesManager';
-import { VElement, VText, VShadowRoot, VDocument, VNode, VStyleElement } from './VirtualDOM';
+import {
+ VElement,
+ VText,
+ VShadowRoot,
+ VDocument,
+ VNode,
+ VStyleElement,
+ PostponedStyleSheet,
+} from './VirtualDOM';
import type { StyleElement } from './VirtualDOM';
@@ -24,20 +32,21 @@ const ATTR_NAME_REGEXP = /([^\t\n\f \/>"'=]+)/; // regexp costs ~
// .replace(/\-webkit\-/g, "")
// }
-function insertRule(sheet: CSSStyleSheet, msg: { rule: string, index: number }) {
+function insertRule(sheet: CSSStyleSheet | PostponedStyleSheet, msg: { rule: string, index: number }) {
try {
sheet.insertRule(msg.rule, msg.index)
} catch (e) {
logger.warn(e, msg)
try {
- sheet.insertRule(msg.rule)
+ sheet.insertRule(msg.rule, 0)
+ logger.warn("Inserting rule into 0-index", e, msg)
} catch (e) {
logger.warn("Cannot insert rule.", e, msg)
}
}
}
-function deleteRule(sheet: CSSStyleSheet, msg: { index: number }) {
+function deleteRule(sheet: CSSStyleSheet | PostponedStyleSheet, msg: { index: number }) {
try {
sheet.deleteRule(msg.index)
} catch (e) {
@@ -49,8 +58,10 @@ export default class DOMManager extends ListWalker {
private vTexts: Map = new Map() // map vs object here?
private vElements: Map = new Map()
private vRoots: Map = new Map()
+ private activeIframeRoots: Map = new Map()
private styleSheets: Map = new Map()
-
+ private ppStyleSheets: Map = new Map()
+
private upperBodyId: number = -1;
private nodeScrollManagers: Map> = new Map()
@@ -81,7 +92,7 @@ export default class DOMManager extends ListWalker {
if(m.tag === "BODY" && this.upperBodyId === -1) {
this.upperBodyId = m.id
}
- } else if (m.tp === "set_node_attribute" &&
+ } else if (m.tp === "set_node_attribute" &&
(IGNORED_ATTRS.includes(m.name) || !ATTR_NAME_REGEXP.test(m.name))) {
logger.log("Ignorring message: ", m)
return; // Ignoring
@@ -95,7 +106,7 @@ export default class DOMManager extends ListWalker {
}
}
- // May be make it as a message on message add?
+ // May be make it as a message on message add?
private removeAutocomplete(node: Element): boolean {
const tag = node.tagName
if ([ "FORM", "TEXTAREA", "SELECT" ].includes(tag)) {
@@ -123,7 +134,7 @@ export default class DOMManager extends ListWalker {
const pNode = parent.node
if ((pNode instanceof HTMLStyleElement) && // TODO: correct ordering OR filter in tracker
- pNode.sheet &&
+ pNode.sheet &&
pNode.sheet.cssRules &&
pNode.sheet.cssRules.length > 0 &&
pNode.innerText &&
@@ -140,12 +151,12 @@ export default class DOMManager extends ListWalker {
let node: Node | undefined
let vn: VNode | undefined
let doc: Document | null
- let styleSheet: CSSStyleSheet | undefined
+ let styleSheet: CSSStyleSheet | PostponedStyleSheet | undefined
switch (msg.tp) {
case "create_document":
doc = this.screen.document;
if (!doc) {
- logger.error("No iframe document found", msg)
+ logger.error("No root iframe document found", msg)
return;
}
doc.open();
@@ -160,8 +171,9 @@ export default class DOMManager extends ListWalker {
vDoc.insertChildAt(vn, 0)
this.vRoots = new Map([[0, vDoc]]) // watchout: id==0 for both Document and documentElement
// this is done for the AdoptedCSS logic
- // todo: start from 0 (sync logic with tracker)
+ // todo: start from 0 (sync logic with tracker)
this.stylesManager.reset()
+ this.activeIframeRoots.clear()
return
case "create_text_node":
vn = new VText()
@@ -265,6 +277,8 @@ export default class DOMManager extends ListWalker {
vn.applyChanges()
}
return
+
+ // @depricated since 4.0.2 in favor of adopted_ss_insert/delete_rule + add_owner as being common case for StyleSheets
case "css_insert_rule":
vn = this.vElements.get(msg.id)
if (!vn) { logger.error("Node not found", msg); return }
@@ -283,20 +297,26 @@ export default class DOMManager extends ListWalker {
}
vn.onStyleSheet(sheet => deleteRule(sheet, msg))
return
+ // end @depricated
+
case "create_i_frame_document":
vn = this.vElements.get(msg.frameID)
if (!vn) { logger.error("Node not found", msg); return }
vn.enforceInsertion()
const host = vn.node
if (host instanceof HTMLIFrameElement) {
- const vDoc = new VDocument()
- this.vRoots.set(msg.id, vDoc)
const doc = host.contentDocument
if (!doc) {
- logger.warn("No iframe doc onload", msg, host)
+ logger.warn("No default iframe doc", msg, host)
return
}
- vDoc.setDocument(doc)
+ // remove old root of the same iframe if present
+ const oldRootId = this.activeIframeRoots.get(msg.frameID)
+ oldRootId != null && this.vRoots.delete(oldRootId)
+
+ const vDoc = new VDocument(doc)
+ this.activeIframeRoots.set(msg.frameID, msg.id)
+ this.vRoots.set(msg.id, vDoc)
return;
} else if (host instanceof Element) { // shadow DOM
try {
@@ -311,7 +331,7 @@ export default class DOMManager extends ListWalker {
}
return
case "adopted_ss_insert_rule":
- styleSheet = this.styleSheets.get(msg.sheetID)
+ styleSheet = this.styleSheets.get(msg.sheetID) || this.ppStyleSheets.get(msg.sheetID)
if (!styleSheet) {
logger.warn("No stylesheet was created for ", msg)
return
@@ -319,13 +339,14 @@ export default class DOMManager extends ListWalker {
insertRule(styleSheet, msg)
return
case "adopted_ss_delete_rule":
- styleSheet = this.styleSheets.get(msg.sheetID)
+ styleSheet = this.styleSheets.get(msg.sheetID) || this.ppStyleSheets.get(msg.sheetID)
if (!styleSheet) {
logger.warn("No stylesheet was created for ", msg)
return
}
deleteRule(styleSheet, msg)
return
+
case "adopted_ss_replace":
styleSheet = this.styleSheets.get(msg.sheetID)
if (!styleSheet) {
@@ -337,7 +358,14 @@ export default class DOMManager extends ListWalker {
return
case "adopted_ss_add_owner":
vn = this.vRoots.get(msg.id)
- if (!vn) { logger.error("Node not found", msg); return }
+ if (!vn) {
+ // non-constructed case
+ vn = this.vElements.get(msg.id)
+ if (!vn) { logger.error("Node not found", msg); return }
+ if (!(vn instanceof VStyleElement)) { logger.error("Non-style owner", msg); return }
+ this.ppStyleSheets.set(msg.sheetID, new PostponedStyleSheet(vn.node))
+ return
+ }
styleSheet = this.styleSheets.get(msg.sheetID)
if (!styleSheet) {
let context: typeof globalThis
@@ -364,11 +392,11 @@ export default class DOMManager extends ListWalker {
//@ts-ignore
vn.node.adoptedStyleSheets = [...vn.node.adoptedStyleSheets].filter(s => s !== styleSheet)
return
- }
+ }
}
moveReady(t: number): Promise {
- // MBTODO (back jump optimisation):
+ // MBTODO (back jump optimisation):
// - store intemediate virtual dom state
// - cancel previous moveReady tasks (is it possible?) if new timestamp is less
this.moveApply(t, this.applyMessage) // This function autoresets pointer if necessary (better name?)
@@ -382,13 +410,15 @@ export default class DOMManager extends ListWalker {
this.nodeScrollManagers.forEach(manager => {
const msg = manager.moveGetLast(t)
if (msg) {
- const vElm = this.vElements.get(msg.id)
- if (vElm) {
- vElm.node.scrollLeft = msg.x
- vElm.node.scrollTop = msg.y
+ let vNode: VNode
+ if (vNode = this.vElements.get(msg.id)) {
+ vNode.node.scrollLeft = msg.x
+ vNode.node.scrollTop = msg.y
+ } else if ((vNode = this.vRoots.get(msg.id)) && vNode instanceof VDocument){
+ vNode.node.defaultView?.scrollTo(msg.x, msg.y)
}
}
})
})
}
-}
\ No newline at end of file
+}
diff --git a/frontend/app/player/MessageDistributor/managers/DOM/StylesManager.ts b/frontend/app/player/MessageDistributor/managers/DOM/StylesManager.ts
index c6b674c4e..f26d8e5ea 100644
--- a/frontend/app/player/MessageDistributor/managers/DOM/StylesManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/DOM/StylesManager.ts
@@ -1,9 +1,8 @@
import type StatedScreen from '../../StatedScreen';
-import type { CssInsertRule, CssDeleteRule } from '../../messages';
+import type { CssInsertRule, CssDeleteRule, ReplaceVcss } from '../../messages';
-type CSSRuleMessage = CssInsertRule | CssDeleteRule;
+type CSSRuleMessage = CssInsertRule | CssDeleteRule | ReplaceVcss;
-import logger from 'App/logger';
import ListWalker from '../ListWalker';
@@ -72,7 +71,7 @@ export default class StylesManager extends ListWalker {
private manageRule = (msg: CSSRuleMessage):void => {
// if (msg.tp === "css_insert_rule") {
// let styleSheet = this.#screen.document.styleSheets[ msg.stylesheetID ];
- // if (!styleSheet) {
+ // if (!styleSheet) {
// logger.log("No stylesheet with corresponding ID found: ", msg)
// styleSheet = this.#screen.document.styleSheets[0];
// if (!styleSheet) {
@@ -91,9 +90,9 @@ export default class StylesManager extends ListWalker {
// if (msg.tp === "css_delete_rule") {
// // console.warn('Warning: STYLESHEET_DELETE_RULE msg')
// const styleSheet = this.#screen.document.styleSheets[msg.stylesheetID];
- // if (!styleSheet) {
+ // if (!styleSheet) {
// logger.log("No stylesheet with corresponding ID found: ", msg)
- // return;
+ // return;
// }
// styleSheet.deleteRule(msg.index);
// }
@@ -103,4 +102,4 @@ export default class StylesManager extends ListWalker {
return Promise.all(this.linkLoadPromises)
.then(() => this.moveApply(t, this.manageRule));
}
-}
\ No newline at end of file
+}
diff --git a/frontend/app/player/MessageDistributor/managers/DOM/VirtualDOM.ts b/frontend/app/player/MessageDistributor/managers/DOM/VirtualDOM.ts
index d1c31b13a..5efa2ddf1 100644
--- a/frontend/app/player/MessageDistributor/managers/DOM/VirtualDOM.ts
+++ b/frontend/app/player/MessageDistributor/managers/DOM/VirtualDOM.ts
@@ -54,10 +54,7 @@ abstract class VParent {
}
export class VDocument extends VParent {
- constructor(public node: Document | null = null) { super() }
- setDocument(doc: Document) {
- this.node = doc
- }
+ constructor(public readonly node: Document) { super() }
applyChanges() {
if (this.children.length > 1) {
// log err
@@ -122,6 +119,8 @@ export class VElement extends VParent {
type StyleSheetCallback = (s: CSSStyleSheet) => void
export type StyleElement = HTMLStyleElement | SVGStyleElement
+
+// @Deprecated TODO: remove in favor of PostponedStyleSheet
export class VStyleElement extends VElement {
private loaded = false
private stylesheetCallbacks: StyleSheetCallback[] = []
@@ -152,6 +151,45 @@ export class VStyleElement extends VElement {
}
}
+
+export class PostponedStyleSheet {
+ private loaded = false
+ private stylesheetCallbacks: StyleSheetCallback[] = []
+
+ constructor(private readonly node: StyleElement) {
+ node.onload = () => {
+ const sheet = node.sheet
+ if (sheet) {
+ this.stylesheetCallbacks.forEach(cb => cb(sheet))
+ this.stylesheetCallbacks = []
+ } else {
+ console.warn("Style node onload: sheet is null")
+ }
+ this.loaded = true
+ }
+ }
+
+ private applyCallback(cb: StyleSheetCallback) {
+ if (this.loaded) {
+ if (!this.node.sheet) {
+ console.warn("Style tag is loaded, but sheet is null")
+ return
+ }
+ cb(this.node.sheet)
+ } else {
+ this.stylesheetCallbacks.push(cb)
+ }
+ }
+
+ insertRule(rule: string, index: number) {
+ this.applyCallback(s => s.insertRule(rule, index))
+ }
+
+ deleteRule(index: number) {
+ this.applyCallback(s => s.deleteRule(index))
+ }
+}
+
export class VText {
parentNode: VParent | null = null
constructor(public readonly node: Text = new Text()) {}
diff --git a/frontend/app/player/MessageDistributor/managers/PerformanceTrackManager.ts b/frontend/app/player/MessageDistributor/managers/PerformanceTrackManager.ts
index 9c036c0d2..c4a4a8e63 100644
--- a/frontend/app/player/MessageDistributor/managers/PerformanceTrackManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/PerformanceTrackManager.ts
@@ -65,11 +65,34 @@ export default class PerformanceTrackManager extends ListWalker 0) {
- this.chart[ this.chart.length - 1 ].nodesCount = count;
- }
+ // if (this.chart.length > 0) {
+ // this.chart[ this.chart.length - 1 ].nodesCount = count;
+ // }
}
handleVisibility(msg: SetPageVisibility):void {
diff --git a/frontend/app/player/MessageDistributor/messages/JSONRawMessageReader.ts b/frontend/app/player/MessageDistributor/messages/JSONRawMessageReader.ts
index 0a5677824..33bf9d2c5 100644
--- a/frontend/app/player/MessageDistributor/messages/JSONRawMessageReader.ts
+++ b/frontend/app/player/MessageDistributor/messages/JSONRawMessageReader.ts
@@ -64,7 +64,7 @@ const resolvers = {
...msg,
text: resolveCSS(msg.baseURL, msg.text),
tp: "adopted_ss_replace"
- })
+ }),
} as const
type ResolvableType = keyof typeof resolvers
@@ -84,7 +84,7 @@ export default class JSONRawMessageReader {
readMessage(): RawMessage | null {
let msg = this.messages.shift()
if (!msg) { return null }
- const rawMsg = Array.isArray(msg)
+ const rawMsg = Array.isArray(msg)
? translate(msg)
: legacyTranslate(msg)
if (!rawMsg) {
@@ -97,4 +97,4 @@ export default class JSONRawMessageReader {
return rawMsg
}
-}
\ No newline at end of file
+}
diff --git a/frontend/app/player/MessageDistributor/messages/RawMessageReader.ts b/frontend/app/player/MessageDistributor/messages/RawMessageReader.ts
index 4536a7c0e..f459c9369 100644
--- a/frontend/app/player/MessageDistributor/messages/RawMessageReader.ts
+++ b/frontend/app/player/MessageDistributor/messages/RawMessageReader.ts
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
import PrimitiveReader from './PrimitiveReader'
import type { RawMessage } from './raw'
@@ -18,50 +19,50 @@ export default class RawMessageReader extends PrimitiveReader {
switch (tp) {
case 0: {
- const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
+ const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
return {
- tp: "timestamp",
- timestamp,
+ tp: "timestamp",
+ timestamp,
};
}
case 4: {
const url = this.readString(); if (url === null) { return resetPointer() }
const referrer = this.readString(); if (referrer === null) { return resetPointer() }
- const navigationStart = this.readUint(); if (navigationStart === null) { return resetPointer() }
+ const navigationStart = this.readUint(); if (navigationStart === null) { return resetPointer() }
return {
- tp: "set_page_location",
+ tp: "set_page_location",
url,
referrer,
- navigationStart,
+ navigationStart,
};
}
case 5: {
const width = this.readUint(); if (width === null) { return resetPointer() }
- const height = this.readUint(); if (height === null) { return resetPointer() }
+ const height = this.readUint(); if (height === null) { return resetPointer() }
return {
- tp: "set_viewport_size",
+ tp: "set_viewport_size",
width,
- height,
+ height,
};
}
case 6: {
const x = this.readInt(); if (x === null) { return resetPointer() }
- const y = this.readInt(); if (y === null) { return resetPointer() }
+ const y = this.readInt(); if (y === null) { return resetPointer() }
return {
- tp: "set_viewport_scroll",
+ tp: "set_viewport_scroll",
x,
- y,
+ y,
};
}
case 7: {
-
+
return {
- tp: "create_document",
-
+ tp: "create_document",
+
};
}
@@ -70,164 +71,164 @@ export default class RawMessageReader extends PrimitiveReader {
const parentID = this.readUint(); if (parentID === null) { return resetPointer() }
const index = this.readUint(); if (index === null) { return resetPointer() }
const tag = this.readString(); if (tag === null) { return resetPointer() }
- const svg = this.readBoolean(); if (svg === null) { return resetPointer() }
+ const svg = this.readBoolean(); if (svg === null) { return resetPointer() }
return {
- tp: "create_element_node",
+ tp: "create_element_node",
id,
parentID,
index,
tag,
- svg,
+ svg,
};
}
case 9: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const parentID = this.readUint(); if (parentID === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "create_text_node",
+ tp: "create_text_node",
id,
parentID,
- index,
+ index,
};
}
case 10: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const parentID = this.readUint(); if (parentID === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "move_node",
+ tp: "move_node",
id,
parentID,
- index,
+ index,
};
}
case 11: {
- const id = this.readUint(); if (id === null) { return resetPointer() }
+ const id = this.readUint(); if (id === null) { return resetPointer() }
return {
- tp: "remove_node",
- id,
+ tp: "remove_node",
+ id,
};
}
case 12: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const name = this.readString(); if (name === null) { return resetPointer() }
- const value = this.readString(); if (value === null) { return resetPointer() }
+ const value = this.readString(); if (value === null) { return resetPointer() }
return {
- tp: "set_node_attribute",
+ tp: "set_node_attribute",
id,
name,
- value,
+ value,
};
}
case 13: {
const id = this.readUint(); if (id === null) { return resetPointer() }
- const name = this.readString(); if (name === null) { return resetPointer() }
+ const name = this.readString(); if (name === null) { return resetPointer() }
return {
- tp: "remove_node_attribute",
+ tp: "remove_node_attribute",
id,
- name,
+ name,
};
}
case 14: {
const id = this.readUint(); if (id === null) { return resetPointer() }
- const data = this.readString(); if (data === null) { return resetPointer() }
+ const data = this.readString(); if (data === null) { return resetPointer() }
return {
- tp: "set_node_data",
+ tp: "set_node_data",
id,
- data,
+ data,
};
}
case 15: {
const id = this.readUint(); if (id === null) { return resetPointer() }
- const data = this.readString(); if (data === null) { return resetPointer() }
+ const data = this.readString(); if (data === null) { return resetPointer() }
return {
- tp: "set_css_data",
+ tp: "set_css_data",
id,
- data,
+ data,
};
}
case 16: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const x = this.readInt(); if (x === null) { return resetPointer() }
- const y = this.readInt(); if (y === null) { return resetPointer() }
+ const y = this.readInt(); if (y === null) { return resetPointer() }
return {
- tp: "set_node_scroll",
+ tp: "set_node_scroll",
id,
x,
- y,
+ y,
};
}
case 18: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const value = this.readString(); if (value === null) { return resetPointer() }
- const mask = this.readInt(); if (mask === null) { return resetPointer() }
+ const mask = this.readInt(); if (mask === null) { return resetPointer() }
return {
- tp: "set_input_value",
+ tp: "set_input_value",
id,
value,
- mask,
+ mask,
};
}
case 19: {
const id = this.readUint(); if (id === null) { return resetPointer() }
- const checked = this.readBoolean(); if (checked === null) { return resetPointer() }
+ const checked = this.readBoolean(); if (checked === null) { return resetPointer() }
return {
- tp: "set_input_checked",
+ tp: "set_input_checked",
id,
- checked,
+ checked,
};
}
case 20: {
const x = this.readUint(); if (x === null) { return resetPointer() }
- const y = this.readUint(); if (y === null) { return resetPointer() }
+ const y = this.readUint(); if (y === null) { return resetPointer() }
return {
- tp: "mouse_move",
+ tp: "mouse_move",
x,
- y,
+ y,
};
}
case 22: {
const level = this.readString(); if (level === null) { return resetPointer() }
- const value = this.readString(); if (value === null) { return resetPointer() }
+ const value = this.readString(); if (value === null) { return resetPointer() }
return {
- tp: "console_log",
+ tp: "console_log",
level,
- value,
+ value,
};
}
case 37: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const rule = this.readString(); if (rule === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "css_insert_rule",
+ tp: "css_insert_rule",
id,
rule,
- index,
+ index,
};
}
case 38: {
const id = this.readUint(); if (id === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "css_delete_rule",
+ tp: "css_delete_rule",
id,
- index,
+ index,
};
}
@@ -238,16 +239,16 @@ export default class RawMessageReader extends PrimitiveReader {
const response = this.readString(); if (response === null) { return resetPointer() }
const status = this.readUint(); if (status === null) { return resetPointer() }
const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
- const duration = this.readUint(); if (duration === null) { return resetPointer() }
+ const duration = this.readUint(); if (duration === null) { return resetPointer() }
return {
- tp: "fetch",
+ tp: "fetch",
method,
url,
request,
response,
status,
timestamp,
- duration,
+ duration,
};
}
@@ -255,67 +256,67 @@ export default class RawMessageReader extends PrimitiveReader {
const name = this.readString(); if (name === null) { return resetPointer() }
const duration = this.readUint(); if (duration === null) { return resetPointer() }
const args = this.readString(); if (args === null) { return resetPointer() }
- const result = this.readString(); if (result === null) { return resetPointer() }
+ const result = this.readString(); if (result === null) { return resetPointer() }
return {
- tp: "profiler",
+ tp: "profiler",
name,
duration,
args,
- result,
+ result,
};
}
case 41: {
const key = this.readString(); if (key === null) { return resetPointer() }
- const value = this.readString(); if (value === null) { return resetPointer() }
+ const value = this.readString(); if (value === null) { return resetPointer() }
return {
- tp: "o_table",
+ tp: "o_table",
key,
- value,
+ value,
};
}
case 44: {
const action = this.readString(); if (action === null) { return resetPointer() }
const state = this.readString(); if (state === null) { return resetPointer() }
- const duration = this.readUint(); if (duration === null) { return resetPointer() }
+ const duration = this.readUint(); if (duration === null) { return resetPointer() }
return {
- tp: "redux",
+ tp: "redux",
action,
state,
- duration,
+ duration,
};
}
case 45: {
const mutation = this.readString(); if (mutation === null) { return resetPointer() }
- const state = this.readString(); if (state === null) { return resetPointer() }
+ const state = this.readString(); if (state === null) { return resetPointer() }
return {
- tp: "vuex",
+ tp: "vuex",
mutation,
- state,
+ state,
};
}
case 46: {
const type = this.readString(); if (type === null) { return resetPointer() }
- const payload = this.readString(); if (payload === null) { return resetPointer() }
+ const payload = this.readString(); if (payload === null) { return resetPointer() }
return {
- tp: "mob_x",
+ tp: "mob_x",
type,
- payload,
+ payload,
};
}
case 47: {
const action = this.readString(); if (action === null) { return resetPointer() }
const state = this.readString(); if (state === null) { return resetPointer() }
- const duration = this.readUint(); if (duration === null) { return resetPointer() }
+ const duration = this.readUint(); if (duration === null) { return resetPointer() }
return {
- tp: "ng_rx",
+ tp: "ng_rx",
action,
state,
- duration,
+ duration,
};
}
@@ -323,13 +324,13 @@ export default class RawMessageReader extends PrimitiveReader {
const operationKind = this.readString(); if (operationKind === null) { return resetPointer() }
const operationName = this.readString(); if (operationName === null) { return resetPointer() }
const variables = this.readString(); if (variables === null) { return resetPointer() }
- const response = this.readString(); if (response === null) { return resetPointer() }
+ const response = this.readString(); if (response === null) { return resetPointer() }
return {
- tp: "graph_ql",
+ tp: "graph_ql",
operationKind,
operationName,
variables,
- response,
+ response,
};
}
@@ -337,31 +338,31 @@ export default class RawMessageReader extends PrimitiveReader {
const frames = this.readInt(); if (frames === null) { return resetPointer() }
const ticks = this.readInt(); if (ticks === null) { return resetPointer() }
const totalJSHeapSize = this.readUint(); if (totalJSHeapSize === null) { return resetPointer() }
- const usedJSHeapSize = this.readUint(); if (usedJSHeapSize === null) { return resetPointer() }
+ const usedJSHeapSize = this.readUint(); if (usedJSHeapSize === null) { return resetPointer() }
return {
- tp: "performance_track",
+ tp: "performance_track",
frames,
ticks,
totalJSHeapSize,
- usedJSHeapSize,
+ usedJSHeapSize,
};
}
case 54: {
const downlink = this.readUint(); if (downlink === null) { return resetPointer() }
- const type = this.readString(); if (type === null) { return resetPointer() }
+ const type = this.readString(); if (type === null) { return resetPointer() }
return {
- tp: "connection_information",
+ tp: "connection_information",
downlink,
- type,
+ type,
};
}
case 55: {
- const hidden = this.readBoolean(); if (hidden === null) { return resetPointer() }
+ const hidden = this.readBoolean(); if (hidden === null) { return resetPointer() }
return {
- tp: "set_page_visibility",
- hidden,
+ tp: "set_page_visibility",
+ hidden,
};
}
@@ -372,16 +373,16 @@ export default class RawMessageReader extends PrimitiveReader {
const containerType = this.readUint(); if (containerType === null) { return resetPointer() }
const containerSrc = this.readString(); if (containerSrc === null) { return resetPointer() }
const containerId = this.readString(); if (containerId === null) { return resetPointer() }
- const containerName = this.readString(); if (containerName === null) { return resetPointer() }
+ const containerName = this.readString(); if (containerName === null) { return resetPointer() }
return {
- tp: "long_task",
+ tp: "long_task",
timestamp,
duration,
context,
containerType,
containerSrc,
containerId,
- containerName,
+ containerName,
};
}
@@ -389,25 +390,25 @@ export default class RawMessageReader extends PrimitiveReader {
const id = this.readUint(); if (id === null) { return resetPointer() }
const name = this.readString(); if (name === null) { return resetPointer() }
const value = this.readString(); if (value === null) { return resetPointer() }
- const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
+ const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
return {
- tp: "set_node_attribute_url_based",
+ tp: "set_node_attribute_url_based",
id,
name,
value,
- baseURL,
+ baseURL,
};
}
case 61: {
const id = this.readUint(); if (id === null) { return resetPointer() }
const data = this.readString(); if (data === null) { return resetPointer() }
- const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
+ const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
return {
- tp: "set_css_data_url_based",
+ tp: "set_css_data_url_based",
id,
data,
- baseURL,
+ baseURL,
};
}
@@ -415,13 +416,13 @@ export default class RawMessageReader extends PrimitiveReader {
const id = this.readUint(); if (id === null) { return resetPointer() }
const rule = this.readString(); if (rule === null) { return resetPointer() }
const index = this.readUint(); if (index === null) { return resetPointer() }
- const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
+ const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
return {
- tp: "css_insert_rule_url_based",
+ tp: "css_insert_rule_url_based",
id,
rule,
index,
- baseURL,
+ baseURL,
};
}
@@ -429,45 +430,45 @@ export default class RawMessageReader extends PrimitiveReader {
const id = this.readUint(); if (id === null) { return resetPointer() }
const hesitationTime = this.readUint(); if (hesitationTime === null) { return resetPointer() }
const label = this.readString(); if (label === null) { return resetPointer() }
- const selector = this.readString(); if (selector === null) { return resetPointer() }
+ const selector = this.readString(); if (selector === null) { return resetPointer() }
return {
- tp: "mouse_click",
+ tp: "mouse_click",
id,
hesitationTime,
label,
- selector,
+ selector,
};
}
case 70: {
const frameID = this.readUint(); if (frameID === null) { return resetPointer() }
- const id = this.readUint(); if (id === null) { return resetPointer() }
+ const id = this.readUint(); if (id === null) { return resetPointer() }
return {
- tp: "create_i_frame_document",
+ tp: "create_i_frame_document",
frameID,
- id,
+ id,
};
}
case 71: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
const text = this.readString(); if (text === null) { return resetPointer() }
- const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
+ const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
return {
- tp: "adopted_ss_replace_url_based",
+ tp: "adopted_ss_replace_url_based",
sheetID,
text,
- baseURL,
+ baseURL,
};
}
case 72: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
- const text = this.readString(); if (text === null) { return resetPointer() }
+ const text = this.readString(); if (text === null) { return resetPointer() }
return {
- tp: "adopted_ss_replace",
+ tp: "adopted_ss_replace",
sheetID,
- text,
+ text,
};
}
@@ -475,55 +476,65 @@ export default class RawMessageReader extends PrimitiveReader {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
const rule = this.readString(); if (rule === null) { return resetPointer() }
const index = this.readUint(); if (index === null) { return resetPointer() }
- const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
+ const baseURL = this.readString(); if (baseURL === null) { return resetPointer() }
return {
- tp: "adopted_ss_insert_rule_url_based",
+ tp: "adopted_ss_insert_rule_url_based",
sheetID,
rule,
index,
- baseURL,
+ baseURL,
};
}
case 74: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
const rule = this.readString(); if (rule === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "adopted_ss_insert_rule",
+ tp: "adopted_ss_insert_rule",
sheetID,
rule,
- index,
+ index,
};
}
case 75: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
- const index = this.readUint(); if (index === null) { return resetPointer() }
+ const index = this.readUint(); if (index === null) { return resetPointer() }
return {
- tp: "adopted_ss_delete_rule",
+ tp: "adopted_ss_delete_rule",
sheetID,
- index,
+ index,
};
}
case 76: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
- const id = this.readUint(); if (id === null) { return resetPointer() }
+ const id = this.readUint(); if (id === null) { return resetPointer() }
return {
- tp: "adopted_ss_add_owner",
+ tp: "adopted_ss_add_owner",
sheetID,
- id,
+ id,
};
}
case 77: {
const sheetID = this.readUint(); if (sheetID === null) { return resetPointer() }
- const id = this.readUint(); if (id === null) { return resetPointer() }
+ const id = this.readUint(); if (id === null) { return resetPointer() }
return {
- tp: "adopted_ss_remove_owner",
+ tp: "adopted_ss_remove_owner",
sheetID,
- id,
+ id,
+ };
+ }
+
+ case 79: {
+ const mutation = this.readString(); if (mutation === null) { return resetPointer() }
+ const state = this.readString(); if (state === null) { return resetPointer() }
+ return {
+ tp: "zustand",
+ mutation,
+ state,
};
}
@@ -537,9 +548,9 @@ export default class RawMessageReader extends PrimitiveReader {
const userOSVersion = this.readString(); if (userOSVersion === null) { return resetPointer() }
const userDevice = this.readString(); if (userDevice === null) { return resetPointer() }
const userDeviceType = this.readString(); if (userDeviceType === null) { return resetPointer() }
- const userCountry = this.readString(); if (userCountry === null) { return resetPointer() }
+ const userCountry = this.readString(); if (userCountry === null) { return resetPointer() }
return {
- tp: "ios_session_start",
+ tp: "ios_session_start",
timestamp,
projectID,
trackerVersion,
@@ -549,7 +560,7 @@ export default class RawMessageReader extends PrimitiveReader {
userOSVersion,
userDevice,
userDeviceType,
- userCountry,
+ userCountry,
};
}
@@ -557,13 +568,13 @@ export default class RawMessageReader extends PrimitiveReader {
const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
const length = this.readUint(); if (length === null) { return resetPointer() }
const name = this.readString(); if (name === null) { return resetPointer() }
- const payload = this.readString(); if (payload === null) { return resetPointer() }
+ const payload = this.readString(); if (payload === null) { return resetPointer() }
return {
- tp: "ios_custom_event",
+ tp: "ios_custom_event",
timestamp,
length,
name,
- payload,
+ payload,
};
}
@@ -573,15 +584,15 @@ export default class RawMessageReader extends PrimitiveReader {
const x = this.readUint(); if (x === null) { return resetPointer() }
const y = this.readUint(); if (y === null) { return resetPointer() }
const width = this.readUint(); if (width === null) { return resetPointer() }
- const height = this.readUint(); if (height === null) { return resetPointer() }
+ const height = this.readUint(); if (height === null) { return resetPointer() }
return {
- tp: "ios_screen_changes",
+ tp: "ios_screen_changes",
timestamp,
length,
x,
y,
width,
- height,
+ height,
};
}
@@ -590,14 +601,14 @@ export default class RawMessageReader extends PrimitiveReader {
const length = this.readUint(); if (length === null) { return resetPointer() }
const label = this.readString(); if (label === null) { return resetPointer() }
const x = this.readUint(); if (x === null) { return resetPointer() }
- const y = this.readUint(); if (y === null) { return resetPointer() }
+ const y = this.readUint(); if (y === null) { return resetPointer() }
return {
- tp: "ios_click_event",
+ tp: "ios_click_event",
timestamp,
length,
label,
x,
- y,
+ y,
};
}
@@ -605,13 +616,13 @@ export default class RawMessageReader extends PrimitiveReader {
const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
const length = this.readUint(); if (length === null) { return resetPointer() }
const name = this.readString(); if (name === null) { return resetPointer() }
- const value = this.readUint(); if (value === null) { return resetPointer() }
+ const value = this.readUint(); if (value === null) { return resetPointer() }
return {
- tp: "ios_performance_event",
+ tp: "ios_performance_event",
timestamp,
length,
name,
- value,
+ value,
};
}
@@ -619,13 +630,13 @@ export default class RawMessageReader extends PrimitiveReader {
const timestamp = this.readUint(); if (timestamp === null) { return resetPointer() }
const length = this.readUint(); if (length === null) { return resetPointer() }
const severity = this.readString(); if (severity === null) { return resetPointer() }
- const content = this.readString(); if (content === null) { return resetPointer() }
+ const content = this.readString(); if (content === null) { return resetPointer() }
return {
- tp: "ios_log",
+ tp: "ios_log",
timestamp,
length,
severity,
- content,
+ content,
};
}
@@ -638,9 +649,9 @@ export default class RawMessageReader extends PrimitiveReader {
const url = this.readString(); if (url === null) { return resetPointer() }
const success = this.readBoolean(); if (success === null) { return resetPointer() }
const method = this.readString(); if (method === null) { return resetPointer() }
- const status = this.readUint(); if (status === null) { return resetPointer() }
+ const status = this.readUint(); if (status === null) { return resetPointer() }
return {
- tp: "ios_network_call",
+ tp: "ios_network_call",
timestamp,
length,
duration,
@@ -649,7 +660,7 @@ export default class RawMessageReader extends PrimitiveReader {
url,
success,
method,
- status,
+ status,
};
}
diff --git a/frontend/app/player/MessageDistributor/messages/message.ts b/frontend/app/player/MessageDistributor/messages/message.ts
index 490f817ea..4b82c27e7 100644
--- a/frontend/app/player/MessageDistributor/messages/message.ts
+++ b/frontend/app/player/MessageDistributor/messages/message.ts
@@ -1,8 +1,9 @@
// Auto-generated, do not edit
+/* eslint-disable */
import type { Timed } from './timed'
import type { RawMessage } from './raw'
-import type {
+import type {
RawTimestamp,
RawSetPageLocation,
RawSetViewportSize,
@@ -47,6 +48,7 @@ import type {
RawAdoptedSsDeleteRule,
RawAdoptedSsAddOwner,
RawAdoptedSsRemoveOwner,
+ RawZustand,
RawIosSessionStart,
RawIosCustomEvent,
RawIosScreenChanges,
@@ -147,6 +149,8 @@ export type AdoptedSsAddOwner = RawAdoptedSsAddOwner & Timed
export type AdoptedSsRemoveOwner = RawAdoptedSsRemoveOwner & Timed
+export type Zustand = RawZustand & Timed
+
export type IosSessionStart = RawIosSessionStart & Timed
export type IosCustomEvent = RawIosCustomEvent & Timed
@@ -160,3 +164,4 @@ export type IosPerformanceEvent = RawIosPerformanceEvent & Timed
export type IosLog = RawIosLog & Timed
export type IosNetworkCall = RawIosNetworkCall & Timed
+
diff --git a/frontend/app/player/MessageDistributor/messages/raw.ts b/frontend/app/player/MessageDistributor/messages/raw.ts
index a546ca799..69ef5877f 100644
--- a/frontend/app/player/MessageDistributor/messages/raw.ts
+++ b/frontend/app/player/MessageDistributor/messages/raw.ts
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
export interface RawTimestamp {
@@ -300,6 +301,12 @@ export interface RawAdoptedSsRemoveOwner {
id: number,
}
+export interface RawZustand {
+ tp: "zustand",
+ mutation: string,
+ state: string,
+}
+
export interface RawIosSessionStart {
tp: "ios_session_start",
timestamp: number,
@@ -371,4 +378,4 @@ export interface RawIosNetworkCall {
}
-export type RawMessage = RawTimestamp | RawSetPageLocation | RawSetViewportSize | RawSetViewportScroll | RawCreateDocument | RawCreateElementNode | RawCreateTextNode | RawMoveNode | RawRemoveNode | RawSetNodeAttribute | RawRemoveNodeAttribute | RawSetNodeData | RawSetCssData | RawSetNodeScroll | RawSetInputValue | RawSetInputChecked | RawMouseMove | RawConsoleLog | RawCssInsertRule | RawCssDeleteRule | RawFetch | RawProfiler | RawOTable | RawRedux | RawVuex | RawMobX | RawNgRx | RawGraphQl | RawPerformanceTrack | RawConnectionInformation | RawSetPageVisibility | RawLongTask | RawSetNodeAttributeURLBased | RawSetCssDataURLBased | RawCssInsertRuleURLBased | RawMouseClick | RawCreateIFrameDocument | RawAdoptedSsReplaceURLBased | RawAdoptedSsReplace | RawAdoptedSsInsertRuleURLBased | RawAdoptedSsInsertRule | RawAdoptedSsDeleteRule | RawAdoptedSsAddOwner | RawAdoptedSsRemoveOwner | RawIosSessionStart | RawIosCustomEvent | RawIosScreenChanges | RawIosClickEvent | RawIosPerformanceEvent | RawIosLog | RawIosNetworkCall;
+export type RawMessage = RawTimestamp | RawSetPageLocation | RawSetViewportSize | RawSetViewportScroll | RawCreateDocument | RawCreateElementNode | RawCreateTextNode | RawMoveNode | RawRemoveNode | RawSetNodeAttribute | RawRemoveNodeAttribute | RawSetNodeData | RawSetCssData | RawSetNodeScroll | RawSetInputValue | RawSetInputChecked | RawMouseMove | RawConsoleLog | RawCssInsertRule | RawCssDeleteRule | RawFetch | RawProfiler | RawOTable | RawRedux | RawVuex | RawMobX | RawNgRx | RawGraphQl | RawPerformanceTrack | RawConnectionInformation | RawSetPageVisibility | RawLongTask | RawSetNodeAttributeURLBased | RawSetCssDataURLBased | RawCssInsertRuleURLBased | RawMouseClick | RawCreateIFrameDocument | RawAdoptedSsReplaceURLBased | RawAdoptedSsReplace | RawAdoptedSsInsertRuleURLBased | RawAdoptedSsInsertRule | RawAdoptedSsDeleteRule | RawAdoptedSsAddOwner | RawAdoptedSsRemoveOwner | RawZustand | RawIosSessionStart | RawIosCustomEvent | RawIosScreenChanges | RawIosClickEvent | RawIosPerformanceEvent | RawIosLog | RawIosNetworkCall;
diff --git a/frontend/app/player/MessageDistributor/messages/tracker-legacy.ts b/frontend/app/player/MessageDistributor/messages/tracker-legacy.ts
index c89f8a47c..1cc6af93d 100644
--- a/frontend/app/player/MessageDistributor/messages/tracker-legacy.ts
+++ b/frontend/app/player/MessageDistributor/messages/tracker-legacy.ts
@@ -1,4 +1,5 @@
// @ts-nocheck
+/* eslint-disable */
// Auto-generated, do not edit
export const TP_MAP = {
@@ -60,6 +61,7 @@ export const TP_MAP = {
75: "adopted_ss_delete_rule",
76: "adopted_ss_add_owner",
77: "adopted_ss_remove_owner",
+ 79: "zustand",
90: "ios_session_start",
93: "ios_custom_event",
96: "ios_screen_changes",
@@ -68,5 +70,3 @@ export const TP_MAP = {
103: "ios_log",
105: "ios_network_call",
} as const
-
-
diff --git a/frontend/app/player/MessageDistributor/messages/tracker.ts b/frontend/app/player/MessageDistributor/messages/tracker.ts
index 34493f32c..7381670c7 100644
--- a/frontend/app/player/MessageDistributor/messages/tracker.ts
+++ b/frontend/app/player/MessageDistributor/messages/tracker.ts
@@ -1,6 +1,7 @@
// Auto-generated, do not edit
+/* eslint-disable */
-import type { RawMessage } from './raw'
+import type { RawMessage } from './raw'
type TrBatchMetadata = [
@@ -382,22 +383,28 @@ type TrAdoptedSSRemoveOwner = [
id: number,
]
+type TrZustand = [
+ type: 79,
+ mutation: string,
+ state: string,
+]
-export type TrackerMessage = TrBatchMetadata | TrPartitionedMessage | TrTimestamp | TrSetPageLocation | TrSetViewportSize | TrSetViewportScroll | TrCreateDocument | TrCreateElementNode | TrCreateTextNode | TrMoveNode | TrRemoveNode | TrSetNodeAttribute | TrRemoveNodeAttribute | TrSetNodeData | TrSetNodeScroll | TrSetInputTarget | TrSetInputValue | TrSetInputChecked | TrMouseMove | TrConsoleLog | TrPageLoadTiming | TrPageRenderTiming | TrJSException | TrRawCustomEvent | TrUserID | TrUserAnonymousID | TrMetadata | TrCSSInsertRule | TrCSSDeleteRule | TrFetch | TrProfiler | TrOTable | TrStateAction | TrRedux | TrVuex | TrMobX | TrNgRx | TrGraphQL | TrPerformanceTrack | TrResourceTiming | TrConnectionInformation | TrSetPageVisibility | TrLongTask | TrSetNodeAttributeURLBased | TrSetCSSDataURLBased | TrTechnicalInfo | TrCustomIssue | TrCSSInsertRuleURLBased | TrMouseClick | TrCreateIFrameDocument | TrAdoptedSSReplaceURLBased | TrAdoptedSSInsertRuleURLBased | TrAdoptedSSDeleteRule | TrAdoptedSSAddOwner | TrAdoptedSSRemoveOwner
+
+export type TrackerMessage = TrBatchMetadata | TrPartitionedMessage | TrTimestamp | TrSetPageLocation | TrSetViewportSize | TrSetViewportScroll | TrCreateDocument | TrCreateElementNode | TrCreateTextNode | TrMoveNode | TrRemoveNode | TrSetNodeAttribute | TrRemoveNodeAttribute | TrSetNodeData | TrSetNodeScroll | TrSetInputTarget | TrSetInputValue | TrSetInputChecked | TrMouseMove | TrConsoleLog | TrPageLoadTiming | TrPageRenderTiming | TrJSException | TrRawCustomEvent | TrUserID | TrUserAnonymousID | TrMetadata | TrCSSInsertRule | TrCSSDeleteRule | TrFetch | TrProfiler | TrOTable | TrStateAction | TrRedux | TrVuex | TrMobX | TrNgRx | TrGraphQL | TrPerformanceTrack | TrResourceTiming | TrConnectionInformation | TrSetPageVisibility | TrLongTask | TrSetNodeAttributeURLBased | TrSetCSSDataURLBased | TrTechnicalInfo | TrCustomIssue | TrCSSInsertRuleURLBased | TrMouseClick | TrCreateIFrameDocument | TrAdoptedSSReplaceURLBased | TrAdoptedSSInsertRuleURLBased | TrAdoptedSSDeleteRule | TrAdoptedSSAddOwner | TrAdoptedSSRemoveOwner | TrZustand
export default function translate(tMsg: TrackerMessage): RawMessage | null {
switch(tMsg[0]) {
case 0: {
return {
- tp: "timestamp",
+ tp: "timestamp",
timestamp: tMsg[1],
}
}
case 4: {
return {
- tp: "set_page_location",
+ tp: "set_page_location",
url: tMsg[1],
referrer: tMsg[2],
navigationStart: tMsg[3],
@@ -406,7 +413,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 5: {
return {
- tp: "set_viewport_size",
+ tp: "set_viewport_size",
width: tMsg[1],
height: tMsg[2],
}
@@ -414,7 +421,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 6: {
return {
- tp: "set_viewport_scroll",
+ tp: "set_viewport_scroll",
x: tMsg[1],
y: tMsg[2],
}
@@ -422,14 +429,14 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 7: {
return {
- tp: "create_document",
+ tp: "create_document",
}
}
case 8: {
return {
- tp: "create_element_node",
+ tp: "create_element_node",
id: tMsg[1],
parentID: tMsg[2],
index: tMsg[3],
@@ -440,7 +447,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 9: {
return {
- tp: "create_text_node",
+ tp: "create_text_node",
id: tMsg[1],
parentID: tMsg[2],
index: tMsg[3],
@@ -449,7 +456,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 10: {
return {
- tp: "move_node",
+ tp: "move_node",
id: tMsg[1],
parentID: tMsg[2],
index: tMsg[3],
@@ -458,14 +465,14 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 11: {
return {
- tp: "remove_node",
+ tp: "remove_node",
id: tMsg[1],
}
}
case 12: {
return {
- tp: "set_node_attribute",
+ tp: "set_node_attribute",
id: tMsg[1],
name: tMsg[2],
value: tMsg[3],
@@ -474,7 +481,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 13: {
return {
- tp: "remove_node_attribute",
+ tp: "remove_node_attribute",
id: tMsg[1],
name: tMsg[2],
}
@@ -482,7 +489,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 14: {
return {
- tp: "set_node_data",
+ tp: "set_node_data",
id: tMsg[1],
data: tMsg[2],
}
@@ -490,7 +497,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 16: {
return {
- tp: "set_node_scroll",
+ tp: "set_node_scroll",
id: tMsg[1],
x: tMsg[2],
y: tMsg[3],
@@ -499,7 +506,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 18: {
return {
- tp: "set_input_value",
+ tp: "set_input_value",
id: tMsg[1],
value: tMsg[2],
mask: tMsg[3],
@@ -508,7 +515,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 19: {
return {
- tp: "set_input_checked",
+ tp: "set_input_checked",
id: tMsg[1],
checked: tMsg[2],
}
@@ -516,7 +523,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 20: {
return {
- tp: "mouse_move",
+ tp: "mouse_move",
x: tMsg[1],
y: tMsg[2],
}
@@ -524,7 +531,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 22: {
return {
- tp: "console_log",
+ tp: "console_log",
level: tMsg[1],
value: tMsg[2],
}
@@ -532,7 +539,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 37: {
return {
- tp: "css_insert_rule",
+ tp: "css_insert_rule",
id: tMsg[1],
rule: tMsg[2],
index: tMsg[3],
@@ -541,7 +548,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 38: {
return {
- tp: "css_delete_rule",
+ tp: "css_delete_rule",
id: tMsg[1],
index: tMsg[2],
}
@@ -549,7 +556,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 39: {
return {
- tp: "fetch",
+ tp: "fetch",
method: tMsg[1],
url: tMsg[2],
request: tMsg[3],
@@ -562,7 +569,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 40: {
return {
- tp: "profiler",
+ tp: "profiler",
name: tMsg[1],
duration: tMsg[2],
args: tMsg[3],
@@ -572,7 +579,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 41: {
return {
- tp: "o_table",
+ tp: "o_table",
key: tMsg[1],
value: tMsg[2],
}
@@ -580,7 +587,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 44: {
return {
- tp: "redux",
+ tp: "redux",
action: tMsg[1],
state: tMsg[2],
duration: tMsg[3],
@@ -589,7 +596,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 45: {
return {
- tp: "vuex",
+ tp: "vuex",
mutation: tMsg[1],
state: tMsg[2],
}
@@ -597,7 +604,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 46: {
return {
- tp: "mob_x",
+ tp: "mob_x",
type: tMsg[1],
payload: tMsg[2],
}
@@ -605,7 +612,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 47: {
return {
- tp: "ng_rx",
+ tp: "ng_rx",
action: tMsg[1],
state: tMsg[2],
duration: tMsg[3],
@@ -614,7 +621,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 48: {
return {
- tp: "graph_ql",
+ tp: "graph_ql",
operationKind: tMsg[1],
operationName: tMsg[2],
variables: tMsg[3],
@@ -624,7 +631,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 49: {
return {
- tp: "performance_track",
+ tp: "performance_track",
frames: tMsg[1],
ticks: tMsg[2],
totalJSHeapSize: tMsg[3],
@@ -634,7 +641,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 54: {
return {
- tp: "connection_information",
+ tp: "connection_information",
downlink: tMsg[1],
type: tMsg[2],
}
@@ -642,14 +649,14 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 55: {
return {
- tp: "set_page_visibility",
+ tp: "set_page_visibility",
hidden: tMsg[1],
}
}
case 59: {
return {
- tp: "long_task",
+ tp: "long_task",
timestamp: tMsg[1],
duration: tMsg[2],
context: tMsg[3],
@@ -662,7 +669,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 60: {
return {
- tp: "set_node_attribute_url_based",
+ tp: "set_node_attribute_url_based",
id: tMsg[1],
name: tMsg[2],
value: tMsg[3],
@@ -672,7 +679,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 61: {
return {
- tp: "set_css_data_url_based",
+ tp: "set_css_data_url_based",
id: tMsg[1],
data: tMsg[2],
baseURL: tMsg[3],
@@ -681,7 +688,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 67: {
return {
- tp: "css_insert_rule_url_based",
+ tp: "css_insert_rule_url_based",
id: tMsg[1],
rule: tMsg[2],
index: tMsg[3],
@@ -691,7 +698,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 69: {
return {
- tp: "mouse_click",
+ tp: "mouse_click",
id: tMsg[1],
hesitationTime: tMsg[2],
label: tMsg[3],
@@ -701,7 +708,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 70: {
return {
- tp: "create_i_frame_document",
+ tp: "create_i_frame_document",
frameID: tMsg[1],
id: tMsg[2],
}
@@ -709,7 +716,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 71: {
return {
- tp: "adopted_ss_replace_url_based",
+ tp: "adopted_ss_replace_url_based",
sheetID: tMsg[1],
text: tMsg[2],
baseURL: tMsg[3],
@@ -718,7 +725,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 73: {
return {
- tp: "adopted_ss_insert_rule_url_based",
+ tp: "adopted_ss_insert_rule_url_based",
sheetID: tMsg[1],
rule: tMsg[2],
index: tMsg[3],
@@ -728,7 +735,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 75: {
return {
- tp: "adopted_ss_delete_rule",
+ tp: "adopted_ss_delete_rule",
sheetID: tMsg[1],
index: tMsg[2],
}
@@ -736,7 +743,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 76: {
return {
- tp: "adopted_ss_add_owner",
+ tp: "adopted_ss_add_owner",
sheetID: tMsg[1],
id: tMsg[2],
}
@@ -744,14 +751,22 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
case 77: {
return {
- tp: "adopted_ss_remove_owner",
+ tp: "adopted_ss_remove_owner",
sheetID: tMsg[1],
id: tMsg[2],
}
}
+ case 79: {
+ return {
+ tp: "zustand",
+ mutation: tMsg[1],
+ state: tMsg[2],
+ }
+ }
+
default:
return null
}
-}
\ No newline at end of file
+}
diff --git a/frontend/app/player/Player.ts b/frontend/app/player/Player.ts
index 4d4f40ed4..870c5046e 100644
--- a/frontend/app/player/Player.ts
+++ b/frontend/app/player/Player.ts
@@ -72,15 +72,15 @@ export default class Player extends MessageDistributor {
private _startAnimation() {
let prevTime = getState().time;
let animationPrevTime = performance.now();
-
+
const nextFrame = (animationCurrentTime: number) => {
- const {
- speed,
+ const {
+ speed,
skip,
- autoplay,
- skipIntervals,
- endTime,
- live,
+ autoplay,
+ skipIntervals,
+ endTime,
+ live,
livePlay,
disconnected,
messagesLoading,
@@ -93,7 +93,7 @@ export default class Player extends MessageDistributor {
let time = prevTime + diffTime;
- const skipInterval = skip && skipIntervals.find((si: Node) => si.contains(time)); // TODO: good skip by messages
+ const skipInterval = !live && skip && skipIntervals.find((si: Node) => si.contains(time)); // TODO: good skip by messages
if (skipInterval) time = skipInterval.end;
const fmt = super.getFirstMessageTime();
@@ -159,7 +159,7 @@ export default class Player extends MessageDistributor {
jump(time = getState().time, index: number) {
const { live, liveTimeTravel, endTime } = getState();
if (live && !liveTimeTravel) return;
-
+
if (getState().playing) {
cancelAnimationFrame(this._animationFrameRequestId);
// this._animationFrameRequestId = requestAnimationFrame(() => {
@@ -188,7 +188,7 @@ export default class Player extends MessageDistributor {
const { inspectorMode } = getState();
flag = !inspectorMode;
}
-
+
if (flag) {
this.pause();
update({ inspectorMode: true });
@@ -199,7 +199,7 @@ export default class Player extends MessageDistributor {
}
}
- markTargets(targets: { selector: string, count: number }[] | null) {
+ markTargets(targets: { selector: string, count: number }[] | null) {
this.pause();
this.setMarkedTargets(targets);
}
@@ -207,7 +207,7 @@ export default class Player extends MessageDistributor {
activeTarget(index: number) {
this.setActiveTarget(index);
}
-
+
toggleSkipToIssue() {
const skipToIssue = !getState().skipToIssue;
localStorage.setItem(SKIP_TO_ISSUE_STORAGE_KEY, `${skipToIssue}`);
@@ -219,13 +219,13 @@ export default class Player extends MessageDistributor {
update({ skipToIssue });
return skipToIssue;
}
-
+
toggleAutoplay() {
const autoplay = !getState().autoplay;
localStorage.setItem(AUTOPLAY_STORAGE_KEY, `${autoplay}`);
update({ autoplay });
}
-
+
toggleEvents(shouldShow?: boolean) {
const showEvents = shouldShow || !getState().showEvents;
localStorage.setItem(SHOW_EVENTS_STORAGE_KEY, `${showEvents}`);
@@ -252,19 +252,22 @@ export default class Player extends MessageDistributor {
this._updateSpeed(Math.max(1, speed/2));
}
- toggleTimetravel() {
+ async toggleTimetravel() {
if (!getState().liveTimeTravel) {
- this.reloadWithUnprocessedFile()
- this.play()
+ return await this.reloadWithUnprocessedFile()
}
}
-
+
jumpToLive() {
cancelAnimationFrame(this._animationFrameRequestId);
this._setTime(getState().endTime);
this._startAnimation();
update({ livePlay: true });
-}
+ }
+
+ toggleUserName(name?: string) {
+ this.cursor.toggleUserName(name)
+ }
clean() {
this.pause();
diff --git a/frontend/app/player/singletone.js b/frontend/app/player/singletone.js
index 81d6a6138..8bf470658 100644
--- a/frontend/app/player/singletone.js
+++ b/frontend/app/player/singletone.js
@@ -40,7 +40,7 @@ export function init(session, config, live = false) {
endTime, // : 0, //TODO: through initialState
session,
});
-
+
if (!document.hidden) {
instance.play();
}
@@ -73,12 +73,17 @@ export const toggleInspectorMode = initCheck((...args) => instance.toggleInspect
export const callPeer = initCheck((...args) => instance.assistManager.call(...args))
/** @type {Player.assistManager.setCallArgs} */
export const setCallArgs = initCheck((...args) => instance.assistManager.setCallArgs(...args))
+/** @type {Player.assistManager.initiateCallEnd} */
+export const initiateCallEnd = initCheck((...args) => instance.assistManager.initiateCallEnd(...args))
export const requestReleaseRemoteControl = initCheck((...args) => instance.assistManager.requestReleaseRemoteControl(...args))
+export const releaseRemoteControl = initCheck((...args) => instance.assistManager.releaseRemoteControl(...args))
export const markTargets = initCheck((...args) => instance.markTargets(...args))
export const activeTarget = initCheck((...args) => instance.activeTarget(...args))
export const toggleAnnotation = initCheck((...args) => instance.assistManager.toggleAnnotation(...args))
+/** @type {Player.toggleTimetravel} */
export const toggleTimetravel = initCheck((...args) => instance.toggleTimetravel(...args))
export const jumpToLive = initCheck((...args) => instance.jumpToLive(...args))
+export const toggleUserName = initCheck((...args) => instance.toggleUserName(...args))
export const Controls = {
jump,
diff --git a/frontend/app/player/store/selectors.js b/frontend/app/player/store/selectors.js
index 1a6f0dc2d..36750b56d 100644
--- a/frontend/app/player/store/selectors.js
+++ b/frontend/app/player/store/selectors.js
@@ -2,6 +2,7 @@ const REDUX = "redux";
const MOBX = "mobx";
const VUEX = "vuex";
const NGRX = "ngrx";
+const ZUSTAND = 'zustand';
const NONE = 0;
@@ -10,6 +11,7 @@ export const STORAGE_TYPES = {
MOBX,
VUEX,
NGRX,
+ ZUSTAND,
NONE,
};
@@ -24,6 +26,8 @@ export function selectStorageType(state) {
return MOBX;
} else if (state.ngrxList.length > 0) {
return NGRX;
+ } else if (state.zustandList.length > 0) {
+ return ZUSTAND;
}
return NONE;
}
@@ -41,4 +45,4 @@ export function selectStorageListNow(state) {
return state[`${key}ListNow`] || [];
}
return [];
-}
\ No newline at end of file
+}
diff --git a/frontend/app/styles/general.css b/frontend/app/styles/general.css
index e5b7731b1..7e072fcd4 100644
--- a/frontend/app/styles/general.css
+++ b/frontend/app/styles/general.css
@@ -36,7 +36,7 @@
scrollbar-width: thin;
&::-webkit-scrollbar {
width: 4px;
- }
+ }
}
.grecaptcha-badge{
@@ -190,7 +190,7 @@
/* font-family: 'FontAwesome'; */
top: 10px;
left: 0;
-
+
content: "\201C";
font-size: 140px;
color: rgba(0,0,0,0.1);
@@ -201,7 +201,7 @@
/* font-family: 'FontAwesome'; */
bottom: 10px;
right: 0;
-
+
content: "\201E";
font-size: 140px;
color: rgba(0,0,0,0.1);
@@ -226,7 +226,7 @@
.blink-border {
/* border: 1px #ff0000 solid; */
border-color: #CC0000;
-
+
animation: blink 1s;
animation-iteration-count: 3;
}
@@ -282,8 +282,14 @@ p {
padding: 0!important;
transition: none!important;
}
-.tippy-notransition {
+
+.tippy-tooltip[data-theme~='nopadding'] > .enter {
+ background-color: transparent!important;
+}
+
+.tippy-notransition, .tippy-notransition > * {
transition: none!important;
+ will-change: unset!important;
}
@media print {
diff --git a/frontend/app/svg/icons/call.svg b/frontend/app/svg/icons/call.svg
new file mode 100644
index 000000000..aff332ee5
--- /dev/null
+++ b/frontend/app/svg/icons/call.svg
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/frontend/app/svg/icons/grid-horizontal.svg b/frontend/app/svg/icons/grid-horizontal.svg
new file mode 100644
index 000000000..7049871a7
--- /dev/null
+++ b/frontend/app/svg/icons/grid-horizontal.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/frontend/app/svg/icons/remote-control.svg b/frontend/app/svg/icons/remote-control.svg
index 64087850c..9a0fb6acc 100644
--- a/frontend/app/svg/icons/remote-control.svg
+++ b/frontend/app/svg/icons/remote-control.svg
@@ -1 +1,5 @@
-
\ No newline at end of file
+
+
+
diff --git a/frontend/app/svg/remote-control.svg b/frontend/app/svg/remote-control.svg
new file mode 100644
index 000000000..9e2925be5
--- /dev/null
+++ b/frontend/app/svg/remote-control.svg
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/mobs/messages.rb b/mobs/messages.rb
index 61f141121..6d21c2e05 100644
--- a/mobs/messages.rb
+++ b/mobs/messages.rb
@@ -1,7 +1,7 @@
-# Special one for Batch Metadata. Message id could define the version
+# Special one for Batch Metadata. Message id could define the version
# Depricated since tracker 3.6.0 in favor of BatchMetadata
-message 80, 'BatchMeta', :replayer => false, :tracker => false do
+message 80, 'BatchMeta', :replayer => false, :tracker => false do
uint 'PageNo'
uint 'FirstIndex'
int 'Timestamp'
@@ -44,7 +44,7 @@ message 1, 'SessionStart', :tracker => false, :replayer => false do
string 'UserCountry'
string 'UserID'
end
-# message 2, 'CreateDocument', do
+## message 2, 'CreateDocument', do
# end
message 3, 'SessionEnd', :tracker => false, :replayer => false do
uint 'Timestamp'
@@ -62,7 +62,7 @@ message 6, 'SetViewportScroll' do
int 'X'
int 'Y'
end
-# Depricated sinse tracker 3.6.0 in favor of CreateDocument(id=2)
+# (should be) Depricated sinse tracker ?.?.? in favor of CreateDocument(id=2)
# in order to use Document as a default root node instead of the documentElement
message 7, 'CreateDocument' do
end
@@ -238,13 +238,13 @@ message 36, 'CustomEvent', :tracker => false, :replayer => false do
string 'Name'
string 'Payload'
end
-
-
+# depricated since 4.0.2 in favor of AdoptedSSInsertRule + AdoptedSSAddOwner
message 37, 'CSSInsertRule' do
uint 'ID'
string 'Rule'
uint 'Index'
end
+# depricated since 4.0.2
message 38, 'CSSDeleteRule' do
uint 'ID'
uint 'Index'
@@ -265,7 +265,6 @@ message 40, 'Profiler' do
string 'Args'
string 'Result'
end
-
message 41, 'OTable' do
string 'Key'
string 'Value'
@@ -278,7 +277,6 @@ message 43, 'StateActionEvent', :tracker => false, :replayer => false do
uint 'Timestamp'
string 'Type'
end
-
message 44, 'Redux' do
string 'Action'
string 'State'
@@ -363,6 +361,7 @@ message 56, 'PerformanceTrackAggr', :tracker => false, :replayer => false do
uint 'AvgUsedJSHeapSize'
uint 'MaxUsedJSHeapSize'
end
+## 57 58
message 59, 'LongTask' do
uint 'Timestamp'
uint 'Duration'
@@ -400,6 +399,7 @@ message 64, 'CustomIssue', :replayer => false do
string 'Name'
string 'Payload'
end
+## 65
message 66, 'AssetCache', :replayer => false, :tracker => false do
string 'URL'
end
@@ -409,6 +409,7 @@ message 67, 'CSSInsertRuleURLBased' do
uint 'Index'
string 'BaseURL'
end
+## 68
message 69, 'MouseClick' do
uint 'ID'
uint 'HesitationTime'
@@ -416,13 +417,14 @@ message 69, 'MouseClick' do
string 'Selector'
end
-# Since 3.4.0
+# Since 3.4.0 //also used for ShadowDom. TODO:remane to CreateRoot
message 70, 'CreateIFrameDocument' do
uint 'FrameID'
uint 'ID'
end
-
-#Since 3.6.0 AdoptedStyleSheets
+
+#Since 4.0.0 AdoptedStyleSheets etc
+# TODO: rename to StyleSheets...
message 71, 'AdoptedSSReplaceURLBased' do
uint 'SheetID'
string 'Text'
@@ -432,26 +434,39 @@ message 72, 'AdoptedSSReplace', :tracker => false do
uint 'SheetID'
string 'Text'
end
-message 73, 'AdoptedSSInsertRuleURLBased' do
+message 73, 'AdoptedSSInsertRuleURLBased' do
uint 'SheetID'
string 'Rule'
uint 'Index'
string 'BaseURL'
end
-message 74, 'AdoptedSSInsertRule', :tracker => false do
+message 74, 'AdoptedSSInsertRule', :tracker => false do
uint 'SheetID'
string 'Rule'
uint 'Index'
end
-message 75, 'AdoptedSSDeleteRule' do
+message 75, 'AdoptedSSDeleteRule' do
uint 'SheetID'
uint 'Index'
end
-message 76, 'AdoptedSSAddOwner' do
+message 76, 'AdoptedSSAddOwner' do
uint 'SheetID'
uint 'ID'
end
-message 77, 'AdoptedSSRemoveOwner' do
+message 77, 'AdoptedSSRemoveOwner' do
uint 'SheetID'
uint 'ID'
end
+#Since 4.0.1
+# message 78, 'ReplaceVCSSURLBased' do
+# uint 'SheetID'
+# uint 'Index'
+# string 'Styles'
+# string 'BaseURL'
+# end
+message 79, 'Zustand' do
+ string 'Mutation'
+ string 'State'
+end
+
+# 80 -- 90 reserved
\ No newline at end of file
diff --git a/mobs/templates/frontend~app~player~MessageDistributor~messages~RawMessageReader.ts.erb b/mobs/templates/frontend~app~player~MessageDistributor~messages~RawMessageReader.ts.erb
index d1e533550..11bd38e1a 100644
--- a/mobs/templates/frontend~app~player~MessageDistributor~messages~RawMessageReader.ts.erb
+++ b/mobs/templates/frontend~app~player~MessageDistributor~messages~RawMessageReader.ts.erb
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
import PrimitiveReader from './PrimitiveReader'
import type { RawMessage } from './raw'
@@ -18,12 +19,12 @@ export default class RawMessageReader extends PrimitiveReader {
switch (tp) {
<% $messages.select { |msg| msg.replayer }.each do |msg| %>
case <%= msg.id %>: {
-<%= msg.attributes.map { |attr|
-" const #{attr.name.camel_case} = this.read#{attr.type.to_s.pascal_case}(); if (#{attr.name.camel_case} === null) { return resetPointer() }" }.join "\n" %>
+<%= msg.attributes.map { |attr|
+" const #{attr.name.camel_case} = this.read#{attr.type.to_s.pascal_case}(); if (#{attr.name.camel_case} === null) { return resetPointer() }" }.join "\n" %>
return {
- tp: "<%= msg.name.snake_case %>",
-<%= msg.attributes.map { |attr|
-" #{attr.name.camel_case}," }.join "\n" %>
+ tp: "<%= msg.name.snake_case %>",
+<%= msg.attributes.map { |attr|
+" #{attr.name.camel_case}," }.join "\n" %>
};
}
<% end %>
diff --git a/mobs/templates/frontend~app~player~MessageDistributor~messages~message.ts.erb b/mobs/templates/frontend~app~player~MessageDistributor~messages~message.ts.erb
index 91c2cb9a5..178fa3e44 100644
--- a/mobs/templates/frontend~app~player~MessageDistributor~messages~message.ts.erb
+++ b/mobs/templates/frontend~app~player~MessageDistributor~messages~message.ts.erb
@@ -1,8 +1,9 @@
// Auto-generated, do not edit
+/* eslint-disable */
import type { Timed } from './timed'
import type { RawMessage } from './raw'
-import type {
+import type {
<%= $messages.select { |msg| msg.replayer }.map { |msg| " Raw#{msg.name.snake_case.pascal_case}," }.join "\n" %>
} from './raw'
@@ -10,4 +11,4 @@ export type Message = RawMessage & Timed
<% $messages.select { |msg| msg.replayer }.each do |msg| %>
export type <%= msg.name.snake_case.pascal_case %> = Raw<%= msg.name.snake_case.pascal_case %> & Timed
-<% end %>
\ No newline at end of file
+<% end %>
diff --git a/mobs/templates/frontend~app~player~MessageDistributor~messages~raw.ts.erb b/mobs/templates/frontend~app~player~MessageDistributor~messages~raw.ts.erb
index b94ced335..97dc34be6 100644
--- a/mobs/templates/frontend~app~player~MessageDistributor~messages~raw.ts.erb
+++ b/mobs/templates/frontend~app~player~MessageDistributor~messages~raw.ts.erb
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
<% $messages.select { |msg| msg.replayer }.each do |msg| %>
export interface Raw<%= msg.name.snake_case.pascal_case %> {
diff --git a/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker-legacy.ts.erb b/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker-legacy.ts.erb
index 586ee8cf3..e0ba92069 100644
--- a/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker-legacy.ts.erb
+++ b/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker-legacy.ts.erb
@@ -1,8 +1,7 @@
// @ts-nocheck
+/* eslint-disable */
// Auto-generated, do not edit
export const TP_MAP = {
<%= $messages.select { |msg| msg.tracker || msg.replayer }.map { |msg| " #{msg.id}: \"#{msg.name.snake_case}\"," }.join "\n" %>
} as const
-
-
diff --git a/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker.ts.erb b/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker.ts.erb
index 0b3452b3b..f6e739c86 100644
--- a/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker.ts.erb
+++ b/mobs/templates/frontend~app~player~MessageDistributor~messages~tracker.ts.erb
@@ -1,6 +1,7 @@
// Auto-generated, do not edit
+/* eslint-disable */
-import type { RawMessage } from './raw'
+import type { RawMessage } from './raw'
<% $messages.select { |msg| msg.tracker }.each do |msg| %>
type Tr<%= msg.name %> = [
@@ -16,7 +17,7 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
<% $messages.select { |msg| msg.replayer & msg.tracker }.each do |msg| %>
case <%= msg.id %>: {
return {
- tp: "<%= msg.name.snake_case %>",
+ tp: "<%= msg.name.snake_case %>",
<%= msg.attributes.map.with_index { |attr, i| "#{attr.name.camel_case}: tMsg[#{i+1}]," }.join "\n " %>
}
}
@@ -25,4 +26,4 @@ export default function translate(tMsg: TrackerMessage): RawMessage | null {
return null
}
-}
\ No newline at end of file
+}
diff --git a/mobs/templates/tracker~tracker~src~common~messages.gen.ts.erb b/mobs/templates/tracker~tracker~src~common~messages.gen.ts.erb
index 893e3878f..6985bcf65 100644
--- a/mobs/templates/tracker~tracker~src~common~messages.gen.ts.erb
+++ b/mobs/templates/tracker~tracker~src~common~messages.gen.ts.erb
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
export declare const enum Type {
<%= $messages.select { |msg| msg.tracker }.map { |msg| "#{ msg.name } = #{ msg.id }," }.join "\n " %>
diff --git a/mobs/templates/tracker~tracker~src~main~app~messages.gen.ts.erb b/mobs/templates/tracker~tracker~src~main~app~messages.gen.ts.erb
index 1143bc5f4..d4c132f8a 100644
--- a/mobs/templates/tracker~tracker~src~main~app~messages.gen.ts.erb
+++ b/mobs/templates/tracker~tracker~src~main~app~messages.gen.ts.erb
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
import * as Messages from '../../common/messages.gen.js'
export { default } from '../../common/messages.gen.js'
@@ -7,7 +8,7 @@ export { default } from '../../common/messages.gen.js'
export function <%= msg.name %>(
<%= msg.attributes.map { |attr| "#{attr.name.camel_case}: #{attr.type_js}," }.join "\n " %>
): Messages.<%= msg.name %> {
- return [
+ return [
Messages.Type.<%= msg.name %>,
<%= msg.attributes.map { |attr| "#{attr.name.camel_case}," }.join "\n " %>
]
diff --git a/mobs/templates/tracker~tracker~src~webworker~MessageEncoder.gen.ts.erb b/mobs/templates/tracker~tracker~src~webworker~MessageEncoder.gen.ts.erb
index e65bbe2f2..503865443 100644
--- a/mobs/templates/tracker~tracker~src~webworker~MessageEncoder.gen.ts.erb
+++ b/mobs/templates/tracker~tracker~src~webworker~MessageEncoder.gen.ts.erb
@@ -1,4 +1,5 @@
// Auto-generated, do not edit
+/* eslint-disable */
import * as Messages from '../common/messages.gen.js'
import Message from '../common/messages.gen.js'
@@ -15,6 +16,5 @@ export default class MessageEncoder extends PrimitiveEncoder {
<% end %>
}
}
-
-}
+}
diff --git a/peers/package-lock.json b/peers/package-lock.json
index 043a3ba6d..a903cfd08 100644
--- a/peers/package-lock.json
+++ b/peers/package-lock.json
@@ -47,9 +47,9 @@
}
},
"node_modules/@types/express-serve-static-core": {
- "version": "4.17.28",
- "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz",
- "integrity": "sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==",
+ "version": "4.17.30",
+ "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz",
+ "integrity": "sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ==",
"dependencies": {
"@types/node": "*",
"@types/qs": "*",
@@ -57,14 +57,14 @@
}
},
"node_modules/@types/mime": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",
- "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw=="
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz",
+ "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA=="
},
"node_modules/@types/node": {
- "version": "17.0.21",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz",
- "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ=="
+ "version": "18.7.16",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.16.tgz",
+ "integrity": "sha512-EQHhixfu+mkqHMZl1R2Ovuvn47PUw18azMJOTwSZr9/fhzHNGXAJ0ma0dayRVchprpCj0Kc1K1xKoWaATWF1qg=="
},
"node_modules/@types/qs": {
"version": "6.9.7",
@@ -77,11 +77,11 @@
"integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw=="
},
"node_modules/@types/serve-static": {
- "version": "1.13.10",
- "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz",
- "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==",
+ "version": "1.15.0",
+ "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz",
+ "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==",
"dependencies": {
- "@types/mime": "^1",
+ "@types/mime": "*",
"@types/node": "*"
}
},
@@ -130,7 +130,7 @@
"node_modules/array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
- "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
+ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
},
"node_modules/body-parser": {
"version": "1.20.0",
@@ -175,6 +175,14 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/camelcase": {
+ "version": "5.3.1",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+ "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
+ "engines": {
+ "node": ">=6"
+ }
+ },
"node_modules/cliui": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
@@ -231,7 +239,7 @@
"node_modules/cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
- "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
+ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
},
"node_modules/cors": {
"version": "2.8.5",
@@ -256,7 +264,7 @@
"node_modules/decamelize": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
- "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
+ "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
"engines": {
"node": ">=0.10.0"
}
@@ -504,7 +512,7 @@
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
- "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=",
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
"engines": {
"node": ">= 0.6"
}
@@ -512,12 +520,12 @@
"node_modules/merge-descriptors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
- "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E="
+ "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="
},
"node_modules/methods": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
- "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=",
+ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
"engines": {
"node": ">= 0.6"
}
@@ -534,19 +542,19 @@
}
},
"node_modules/mime-db": {
- "version": "1.51.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz",
- "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==",
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
- "version": "2.1.34",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz",
- "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==",
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
- "mime-db": "1.51.0"
+ "mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
@@ -555,7 +563,7 @@
"node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
},
"node_modules/negotiator": {
"version": "0.6.3",
@@ -568,7 +576,7 @@
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
"engines": {
"node": ">=0.10.0"
}
@@ -644,7 +652,7 @@
"node_modules/path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
- "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
+ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"node_modules/peer": {
"version": "0.6.1",
@@ -668,15 +676,6 @@
"node": ">=10"
}
},
- "node_modules/peer/node_modules/uuid": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
- "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
- "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
- "bin": {
- "uuid": "bin/uuid"
- }
- },
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@@ -728,7 +727,7 @@
"node_modules/require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"engines": {
"node": ">=0.10.0"
}
@@ -807,7 +806,7 @@
"node_modules/set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
- "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
+ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
},
"node_modules/setprototypeof": {
"version": "1.2.0",
@@ -890,15 +889,24 @@
"node_modules/utils-merge": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
- "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=",
+ "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
"engines": {
"node": ">= 0.4.0"
}
},
+ "node_modules/uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
+ "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
+ "bin": {
+ "uuid": "bin/uuid"
+ }
+ },
"node_modules/vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
- "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=",
+ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
"engines": {
"node": ">= 0.8"
}
@@ -906,7 +914,7 @@
"node_modules/which-module": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
- "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho="
+ "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q=="
},
"node_modules/wrap-ansi": {
"version": "6.2.0",
@@ -922,9 +930,9 @@
}
},
"node_modules/ws": {
- "version": "7.5.7",
- "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz",
- "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==",
+ "version": "7.5.9",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz",
+ "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==",
"engines": {
"node": ">=8.3.0"
},
@@ -978,14 +986,6 @@
"engines": {
"node": ">=6"
}
- },
- "node_modules/yargs-parser/node_modules/camelcase": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
- "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
- "engines": {
- "node": ">=6"
- }
}
},
"dependencies": {
@@ -1023,9 +1023,9 @@
}
},
"@types/express-serve-static-core": {
- "version": "4.17.28",
- "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz",
- "integrity": "sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==",
+ "version": "4.17.30",
+ "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz",
+ "integrity": "sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ==",
"requires": {
"@types/node": "*",
"@types/qs": "*",
@@ -1033,14 +1033,14 @@
}
},
"@types/mime": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",
- "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw=="
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz",
+ "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA=="
},
"@types/node": {
- "version": "17.0.21",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz",
- "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ=="
+ "version": "18.7.16",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.16.tgz",
+ "integrity": "sha512-EQHhixfu+mkqHMZl1R2Ovuvn47PUw18azMJOTwSZr9/fhzHNGXAJ0ma0dayRVchprpCj0Kc1K1xKoWaATWF1qg=="
},
"@types/qs": {
"version": "6.9.7",
@@ -1053,11 +1053,11 @@
"integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw=="
},
"@types/serve-static": {
- "version": "1.13.10",
- "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz",
- "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==",
+ "version": "1.15.0",
+ "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz",
+ "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==",
"requires": {
- "@types/mime": "^1",
+ "@types/mime": "*",
"@types/node": "*"
}
},
@@ -1094,7 +1094,7 @@
"array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
- "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
+ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
},
"body-parser": {
"version": "1.20.0",
@@ -1129,6 +1129,11 @@
"get-intrinsic": "^1.0.2"
}
},
+ "camelcase": {
+ "version": "5.3.1",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+ "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg=="
+ },
"cliui": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
@@ -1173,7 +1178,7 @@
"cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
- "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
+ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
},
"cors": {
"version": "2.8.5",
@@ -1195,7 +1200,7 @@
"decamelize": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
- "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA="
+ "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA=="
},
"depd": {
"version": "2.0.0",
@@ -1382,17 +1387,17 @@
"media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
- "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g="
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="
},
"merge-descriptors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
- "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E="
+ "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="
},
"methods": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
- "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4="
+ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="
},
"mime": {
"version": "1.6.0",
@@ -1400,22 +1405,22 @@
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="
},
"mime-db": {
- "version": "1.51.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz",
- "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g=="
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
},
"mime-types": {
- "version": "2.1.34",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz",
- "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==",
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"requires": {
- "mime-db": "1.51.0"
+ "mime-db": "1.52.0"
}
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
},
"negotiator": {
"version": "0.6.3",
@@ -1425,7 +1430,7 @@
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="
},
"object-inspect": {
"version": "1.12.2",
@@ -1474,7 +1479,7 @@
"path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
- "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
+ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"peer": {
"version": "0.6.1",
@@ -1490,13 +1495,6 @@
"uuid": "^3.4.0",
"ws": "^7.2.3",
"yargs": "^15.3.1"
- },
- "dependencies": {
- "uuid": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
- "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
- }
}
},
"proxy-addr": {
@@ -1535,7 +1533,7 @@
"require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="
},
"require-main-filename": {
"version": "2.0.0",
@@ -1593,7 +1591,7 @@
"set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
- "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
+ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
},
"setprototypeof": {
"version": "1.2.0",
@@ -1655,17 +1653,22 @@
"utils-merge": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
- "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
+ "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="
+ },
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
},
"vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
- "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw="
+ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="
},
"which-module": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
- "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho="
+ "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q=="
},
"wrap-ansi": {
"version": "6.2.0",
@@ -1678,9 +1681,9 @@
}
},
"ws": {
- "version": "7.5.7",
- "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz",
- "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==",
+ "version": "7.5.9",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz",
+ "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==",
"requires": {}
},
"y18n": {
@@ -1713,13 +1716,6 @@
"requires": {
"camelcase": "^5.0.0",
"decamelize": "^1.2.0"
- },
- "dependencies": {
- "camelcase": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
- "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg=="
- }
}
}
}
diff --git a/peers/server.js b/peers/server.js
index 39f46d4f1..0eb99a9de 100644
--- a/peers/server.js
+++ b/peers/server.js
@@ -4,15 +4,19 @@ const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./serve
const express = require('express');
const {ExpressPeerServer} = require('peer');
-const HOST = '0.0.0.0';
-const PORT = 9000;
+const debug = process.env.debug === "1";
+const heapdump = process.env.heapdump === "1";
+const HOST = process.env.LISTEN_HOST || '0.0.0.0';
+const PORT = process.env.LISTEN_PORT || 9000;
+assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
+const P_KEY = process.env.ASSIST_KEY;
const app = express();
app.use(request_logger("[app]"));
-app.use(`/${process.env.S3_KEY}/assist`, peerRouter);
-app.use(`/${process.env.S3_KEY}/heapdump`, dumps.router);
+app.use(`/${P_KEY}/assist`, peerRouter);
+heapdump && app.use(`/${P_KEY}/heapdump`, dumps.router);
const server = app.listen(PORT, HOST, () => {
console.log(`App listening on http://${HOST}:${PORT}`);
@@ -30,4 +34,10 @@ peerServer.on('disconnect', peerDisconnect);
peerServer.on('error', peerError);
app.use('/', peerServer);
app.enable('trust proxy');
-module.exports = {server};
\ No newline at end of file
+module.exports = {server};
+
+process.on('uncaughtException', err => {
+ console.log(`Uncaught Exception: ${err.message}`);
+ debug && console.log(err.stack);
+ // process.exit(1);
+});
\ No newline at end of file
diff --git a/peers/servers/peerjs-server.js b/peers/servers/peerjs-server.js
index a99ec1665..fba50cb3c 100644
--- a/peers/servers/peerjs-server.js
+++ b/peers/servers/peerjs-server.js
@@ -37,7 +37,11 @@ const peerDisconnect = (client) => {
}
const peerError = (error) => {
- console.error('error fired');
+ //https://peerjs.com/docs/#peeron-error
+ console.error('Error detected in Peers');
+ console.error('Error type:');
+ console.error(error.type);
+ console.error('Error message:');
console.error(error);
}
diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql b/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql
new file mode 100644
index 000000000..c621da9c7
--- /dev/null
+++ b/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql
@@ -0,0 +1,38 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.8.1'
+$$ LANGUAGE sql IMMUTABLE;
+
+
+INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
+ view_type)
+VALUES ('Fetch Calls with Errors', 'errors', '{
+ "col": 4,
+ "row": 2,
+ "position": 0
+}', true, true, true, 'calls_errors', 'predefined', 'table')
+ON CONFLICT (predefined_key) DO UPDATE
+ SET name=excluded.name,
+ category=excluded.category,
+ default_config=excluded.default_config,
+ is_predefined=excluded.is_predefined,
+ is_template=excluded.is_template,
+ is_public=excluded.is_public,
+ metric_type=excluded.metric_type,
+ view_type=excluded.view_type;
+
+ALTER TABLE IF EXISTS oauth_authentication
+ DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
+
+DROP INDEX IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
+
+ALTER TABLE IF EXISTS oauth_authentication
+ DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_key;
+
+DROP INDEX IF EXISTS oauth_authentication_user_id_provider_key;
+
+CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
+
+COMMIT;
\ No newline at end of file
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index c172c1d76..94bfa04e2 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
-SELECT 'v1.8.0'
+SELECT 'v1.8.1'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@@ -165,9 +165,9 @@ $$
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
provider oauth_provider NOT NULL,
provider_user_id text NOT NULL,
- token text NOT NULL,
- UNIQUE (user_id, provider)
+ token text NOT NULL
);
+ CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
-- --- projects.sql ---
@@ -1130,7 +1130,7 @@ VALUES ('Captured sessions', 'web vitals', '{
"position": 0
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{
- "col": 2,
+ "col": 4,
"row": 2,
"position": 0
}', true, true, true, 'calls_errors', 'predefined', 'table'),
diff --git a/scripts/helmcharts/build_deploy.sh b/scripts/helmcharts/build_deploy.sh
index c3fd89c0e..c5843a76a 100644
--- a/scripts/helmcharts/build_deploy.sh
+++ b/scripts/helmcharts/build_deploy.sh
@@ -5,6 +5,9 @@ set -e
# Usage: IMAGE_TAG=latest DOCKER_REPO=rg.fr-par.scw.cloud/foss bash build_deploy.sh
+# Removing local alpine:latest image
+docker rmi alpine
+
echo $DOCKER_REPO
[[ -z DOCKER_REPO ]] && {
echo Set DOCKER_REPO="your docker registry"
diff --git a/scripts/helmcharts/databases/values.yaml b/scripts/helmcharts/databases/values.yaml
index d2bacfba3..995ed29c7 100644
--- a/scripts/helmcharts/databases/values.yaml
+++ b/scripts/helmcharts/databases/values.yaml
@@ -103,7 +103,7 @@ postgresql:
# postgresqlPassword: asayerPostgres
fullnameOverride: postgresql
image:
- tag: 13.5.0-debian-10-r62
+ tag: 14.5.0
resources:
limits:
cpu: 1
@@ -128,6 +128,8 @@ minio:
memory: 128Mi
kafka:
+ image:
+ tag: 2.8.1
fullnameOverride: kafka
enabled: false
diff --git a/scripts/helmcharts/init.sh b/scripts/helmcharts/init.sh
index c204d5799..da9d171e7 100644
--- a/scripts/helmcharts/init.sh
+++ b/scripts/helmcharts/init.sh
@@ -15,7 +15,7 @@ fatal()
exit 1
}
-version="v1.8.0"
+version="v1.8.1"
usr=`whoami`
# Installing k3s
@@ -82,8 +82,8 @@ fatal 'DOMAIN_NAME variable is empty. Rerun the script `DOMAIN_NAME=openreplay.m
}
# Mac os doesn't have gnu sed, which will cause compatibility issues.
-# This wrapper will help to check the sed, and use the correct version="v1.8.0"
-# Ref: https://stackoverflow.com/questions/37639496/how-can-i-check-the-version="v1.8.0"
+# This wrapper will help to check the sed, and use the correct version="v1.8.1"
+# Ref: https://stackoverflow.com/questions/37639496/how-can-i-check-the-version="v1.8.1"
function is_gnu_sed(){
sed --version >/dev/null 2>&1
}
@@ -105,6 +105,7 @@ sed_i_wrapper -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword:
sed_i_wrapper -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"$(randomPass)\"/g" vars.yaml
sed_i_wrapper -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"$(randomPass)\"/g" vars.yaml
sed_i_wrapper -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"$(randomPass)\"/g" vars.yaml
+sed_i_wrapper -i "s/assistKey: \"SetARandomStringHere\"/assistKey: \"$(randomPass)\"/g" vars.yaml
sed_i_wrapper -i "s/domainName: \"\"/domainName: \"${DOMAIN_NAME}\"/g" vars.yaml
info "Setting proper permission for shared folder"
diff --git a/scripts/helmcharts/openreplay/Chart.yaml b/scripts/helmcharts/openreplay/Chart.yaml
index fbeb3b453..5c87334d6 100644
--- a/scripts/helmcharts/openreplay/Chart.yaml
+++ b/scripts/helmcharts/openreplay/Chart.yaml
@@ -22,7 +22,7 @@ version: 0.1.0
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
# Ref: https://github.com/helm/helm/issues/7858#issuecomment-608114589
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
dependencies:
- name: ingress-nginx
diff --git a/scripts/helmcharts/openreplay/charts/alerts/Chart.yaml b/scripts/helmcharts/openreplay/charts/alerts/Chart.yaml
index 54730a15d..1860c9115 100644
--- a/scripts/helmcharts/openreplay/charts/alerts/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/alerts/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/assets/Chart.yaml b/scripts/helmcharts/openreplay/charts/assets/Chart.yaml
index dab6e61fb..33f7dbdb2 100644
--- a/scripts/helmcharts/openreplay/charts/assets/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/assets/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/assist/Chart.yaml b/scripts/helmcharts/openreplay/charts/assist/Chart.yaml
index 75465880f..09a5ab446 100644
--- a/scripts/helmcharts/openreplay/charts/assist/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/assist/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml
index 08fb70ece..b5509775d 100644
--- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml
+++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml
@@ -42,6 +42,8 @@ spec:
{{- .Values.healthCheck | toYaml | nindent 10}}
{{- end}}
env:
+ - name: ASSIST_KEY
+ value: {{ .Values.global.assistKey }}
- name: AWS_DEFAULT_REGION
value: "{{ .Values.global.s3.region }}"
- name: S3_HOST
diff --git a/scripts/helmcharts/openreplay/charts/chalice/Chart.yaml b/scripts/helmcharts/openreplay/charts/chalice/Chart.yaml
index 95c3c126a..21427c490 100644
--- a/scripts/helmcharts/openreplay/charts/chalice/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/chalice/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/db/Chart.yaml b/scripts/helmcharts/openreplay/charts/db/Chart.yaml
index 5b437c1c3..5b59f4781 100644
--- a/scripts/helmcharts/openreplay/charts/db/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/db/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/ender/Chart.yaml b/scripts/helmcharts/openreplay/charts/ender/Chart.yaml
index 80b4efdd4..41732efc9 100644
--- a/scripts/helmcharts/openreplay/charts/ender/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/ender/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/frontend/Chart.yaml b/scripts/helmcharts/openreplay/charts/frontend/Chart.yaml
index 4064a7322..e0a7e1dd6 100644
--- a/scripts/helmcharts/openreplay/charts/frontend/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/frontend/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/heuristics/Chart.yaml b/scripts/helmcharts/openreplay/charts/heuristics/Chart.yaml
index 86026bc3f..cf70161ad 100644
--- a/scripts/helmcharts/openreplay/charts/heuristics/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/heuristics/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/http/Chart.yaml b/scripts/helmcharts/openreplay/charts/http/Chart.yaml
index 820c2ae7c..9f4d23868 100644
--- a/scripts/helmcharts/openreplay/charts/http/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/http/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/integrations/Chart.yaml b/scripts/helmcharts/openreplay/charts/integrations/Chart.yaml
index ed13e0a03..6e44cf1b3 100644
--- a/scripts/helmcharts/openreplay/charts/integrations/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/integrations/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/peers/Chart.yaml b/scripts/helmcharts/openreplay/charts/peers/Chart.yaml
index 7fc77f776..6e753e03d 100644
--- a/scripts/helmcharts/openreplay/charts/peers/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/peers/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml
index ac673fd08..361aee0e0 100644
--- a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml
+++ b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml
@@ -42,7 +42,9 @@ spec:
{{- .Values.healthCheck | toYaml | nindent 10}}
{{- end}}
env:
- - name: S3_KEY
+ - name: ASSIST_KEY
+        value: "{{ .Values.global.assistKey }}"
+      - name: S3_KEY
value: {{ .Values.global.s3.accessKey }}
{{- range $key, $val := .Values.env }}
- name: {{ $key }}
diff --git a/scripts/helmcharts/openreplay/charts/quickwit/Chart.yaml b/scripts/helmcharts/openreplay/charts/quickwit/Chart.yaml
index a43bf2224..0c2a70eb7 100644
--- a/scripts/helmcharts/openreplay/charts/quickwit/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/quickwit/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.3.1
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/quickwit/files/index-config-fetch.yaml b/scripts/helmcharts/openreplay/charts/quickwit/files/index-config-fetch.yaml
deleted file mode 100644
index 1d89f72c9..000000000
--- a/scripts/helmcharts/openreplay/charts/quickwit/files/index-config-fetch.yaml
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# Index config file for gh-archive dataset.
-#
-
-version: 0
-
-index_id: fetchevent
-
-doc_mapping:
- mode: strict
- field_mappings:
- - name: method
- type: text
- tokenizer: default
- record: position
- - name: url
- type: text
- tokenizer: default
- record: position
- - name: request
- type: text
- tokenizer: default
- record: position
- - name: response
- type: text
- tokenizer: default
- record: position
- - name: status
- type: i64
- indexed: true
- fast: true
- - name: timestamp
- type: i64
- fast: true
- - name: duration
- type: i64
- fast: true
-
-search_settings:
- default_search_fields: [url, request, response]
diff --git a/scripts/helmcharts/openreplay/charts/quickwit/files/source-fetch.yaml b/scripts/helmcharts/openreplay/charts/quickwit/files/source-fetch.yaml
deleted file mode 100644
index f562461b0..000000000
--- a/scripts/helmcharts/openreplay/charts/quickwit/files/source-fetch.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Source config file.
-#
-
-source_id: fetch-kafka
-source_type: kafka
-params:
- topic: quickwit
- client_params:
- group.id: fetch-consumer
- bootstrap.servers: '{{ .Values.global.kafka.kafkaHost }}:{{ .Values.global.kafka.kafkaPort }}'
- {{- if eq .Values.global.kafka.kafkaUseSsl "true" }}
- security.protocol: SSL
- {{- end}}
diff --git a/scripts/helmcharts/openreplay/charts/sink/Chart.yaml b/scripts/helmcharts/openreplay/charts/sink/Chart.yaml
index 4faedee18..869126d87 100644
--- a/scripts/helmcharts/openreplay/charts/sink/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/sink/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/storage/Chart.yaml b/scripts/helmcharts/openreplay/charts/storage/Chart.yaml
index 17eec19e2..09ebf3ef6 100644
--- a/scripts/helmcharts/openreplay/charts/storage/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/storage/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/utilities/Chart.yaml b/scripts/helmcharts/openreplay/charts/utilities/Chart.yaml
index 9a0796c4a..cde8fd180 100644
--- a/scripts/helmcharts/openreplay/charts/utilities/Chart.yaml
+++ b/scripts/helmcharts/openreplay/charts/utilities/Chart.yaml
@@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
-AppVersion: "v1.8.0"
+AppVersion: "v1.8.1"
diff --git a/scripts/helmcharts/openreplay/charts/utilities/templates/efs-cron.yaml b/scripts/helmcharts/openreplay/charts/utilities/templates/efs-cron.yaml
index 233025bf2..f4f3ea409 100644
--- a/scripts/helmcharts/openreplay/charts/utilities/templates/efs-cron.yaml
+++ b/scripts/helmcharts/openreplay/charts/utilities/templates/efs-cron.yaml
@@ -26,7 +26,7 @@ spec:
set -x
echo "Cleaning NFS strorage for data older than 7 days"
storage=`du -sh /mnt/efs`
- find /mnt/efs -type f -mtime +7 -delete
+ find /mnt/efs -type f -mtime +{{.Values.efsCleaner.retention}} -delete
echo "Storage before cleaning"
echo ${storage}
echo "Storage after cleaning"
diff --git a/scripts/helmcharts/openreplay/charts/utilities/values.yaml b/scripts/helmcharts/openreplay/charts/utilities/values.yaml
index 49838d774..90632b4c2 100644
--- a/scripts/helmcharts/openreplay/charts/utilities/values.yaml
+++ b/scripts/helmcharts/openreplay/charts/utilities/values.yaml
@@ -9,6 +9,7 @@ efsCleaner:
repository: "{{ .Values.global.openReplayContainerRegistry }}/alpine"
pullPolicy: Always
tag: 3.16.1
+ retention: 2
pvc:
# This can be either persistentVolumeClaim or hostPath.
# In case of pvc, you'll have to provide the pvc name.
@@ -81,5 +82,5 @@ fullnameOverride: "utilities-openreplay"
# 5 3 * * 1 “At 03:05 on Monday.”
# refer: https://crontab.guru/#5_3_*_*_1
-cron: "5 3 * * 1"
+cron: "5 3 */3 * *"
diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml
index 1652dedab..24f358283 100644
--- a/scripts/helmcharts/openreplay/values.yaml
+++ b/scripts/helmcharts/openreplay/values.yaml
@@ -113,6 +113,13 @@ storage:
fsGroup: 0
fsGroupChangePolicy: "OnRootMismatch"
+chalice:
+ podSecurityContext:
+ runAsUser: 0
+ runAsGroup: 0
+ fsGroup: 0
+ fsGroupChangePolicy: "OnRootMismatch"
+
ingress-nginx:
enabled: true
controller:
diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml
index f8b6b2f8b..dd1f36883 100644
--- a/scripts/helmcharts/vars.yaml
+++ b/scripts/helmcharts/vars.yaml
@@ -1,4 +1,4 @@
-fromVersion: "v1.8.0"
+fromVersion: "v1.8.1"
# Databases specific variables
postgresql: &postgres
# For generating passwords
@@ -99,6 +99,8 @@ global:
redis: *redis
quickwit: *quickwit
openReplayContainerRegistry: "public.ecr.aws/p1t3u8a3"
+ # secret key to inject to assist and peers service
+ assistKey: "SetARandomStringHere"
s3:
region: "us-east-1"
endpoint: "http://minio.db.svc.cluster.local:9000"
diff --git a/sourcemap-reader/.gitignore b/sourcemap-reader/.gitignore
index a4b05b411..09c49b304 100644
--- a/sourcemap-reader/.gitignore
+++ b/sourcemap-reader/.gitignore
@@ -4,3 +4,4 @@ npm-debug.log
.cache
test.html
/utils/
+mappings.wasm
diff --git a/sourcemap-reader/clean.sh b/sourcemap-reader/clean.sh
new file mode 100755
index 000000000..a0cb5c9ed
--- /dev/null
+++ b/sourcemap-reader/clean.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+rm -rf ./utils
\ No newline at end of file
diff --git a/sourcemap-reader/prepare-dev.sh b/sourcemap-reader/prepare-dev.sh
new file mode 100755
index 000000000..e057555db
--- /dev/null
+++ b/sourcemap-reader/prepare-dev.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+rsync -avr --exclude=".*" --ignore-existing ../utilities/utils ./
\ No newline at end of file
diff --git a/sourcemap-reader/run-dev.sh b/sourcemap-reader/run-dev.sh
new file mode 100755
index 000000000..3c80807dd
--- /dev/null
+++ b/sourcemap-reader/run-dev.sh
@@ -0,0 +1,3 @@
+#!/bin/zsh
+
+MAPPING_WASM=./mappings.wasm npm start
\ No newline at end of file
diff --git a/sourcemap-reader/server.js b/sourcemap-reader/server.js
index 073cb4cfc..b58128992 100644
--- a/sourcemap-reader/server.js
+++ b/sourcemap-reader/server.js
@@ -3,17 +3,24 @@ const sourcemapsReaderServer = require('./servers/sourcemaps-server');
const express = require('express');
const {request_logger} = require("./utils/helper");
-const HOST = '0.0.0.0';
-const PORT = 9000;
+const HOST = process.env.SMR_HOST || '127.0.0.1';
+const PORT = process.env.SMR_PORT || 9000;
+const PREFIX = process.env.PREFIX || process.env.prefix || ''
+const P_KEY = process.env.SMR_KEY || 'smr';
+const heapdump = process.env.heapdump === "1";
const app = express();
-app.use(request_logger("[wsapp]"));
-
-app.use('/sourcemaps', sourcemapsReaderServer);
-app.use('/heapdump', dumps.router);
+app.use(request_logger("[SR]"));
+app.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
+ res.statusCode = 200;
+ res.end("ok!");
+ }
+);
+app.use(`${PREFIX}/${P_KEY}/sourcemaps`, sourcemapsReaderServer);
+heapdump && app.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
const server = app.listen(PORT, HOST, () => {
- console.log(`WS App listening on http://${HOST}:${PORT}`);
+ console.log(`SR App listening on http://${HOST}:${PORT}`);
console.log('Press Ctrl+C to quit.');
});
module.exports = {server};
\ No newline at end of file
diff --git a/sourcemap-reader/servers/sourcemaps-handler.js b/sourcemap-reader/servers/sourcemaps-handler.js
index 25185ffa3..96e9efe01 100644
--- a/sourcemap-reader/servers/sourcemaps-handler.js
+++ b/sourcemap-reader/servers/sourcemaps-handler.js
@@ -3,23 +3,17 @@ const fs = require('fs');
const sourceMap = require('source-map');
const AWS = require('aws-sdk');
const URL = require('url');
-const wasm = fs.readFileSync('/mappings.wasm');
+const wasm = fs.readFileSync(process.env.MAPPING_WASM || '/mappings.wasm');
sourceMap.SourceMapConsumer.initialize({
"lib/mappings.wasm": wasm
});
+console.log(`>sourceMap initialised using ${process.env.MAPPING_WASM || '/mappings.wasm'}`);
+
module.exports.sourcemapReader = async event => {
let s3;
- if (event.S3_HOST) {
- s3 = new AWS.S3({
- endpoint: event.S3_HOST,
- accessKeyId: event.S3_KEY,
- secretAccessKey: event.S3_SECRET,
- region: event.region,
- s3ForcePathStyle: true, // needed with minio?
- signatureVersion: 'v4'
- });
- } else if (process.env.S3_HOST) {
+
+ if (process.env.S3_HOST) {
s3 = new AWS.S3({
endpoint: process.env.S3_HOST,
accessKeyId: process.env.S3_KEY,
@@ -40,12 +34,21 @@ module.exports.sourcemapReader = async event => {
Key: event.key
};
return new Promise(function (resolve, reject) {
+ const getObjectStart = Date.now();
s3.getObject(options, (err, data) => {
if (err) {
- console.log("Get S3 object failed");
- console.log(err);
+ console.error("[SR] Get S3 object failed");
+ console.error(err);
return reject(err);
}
+ const getObjectEnd = Date.now();
+ const fileSize = (data.ContentLength / 1024) / 1024;
+ options.fileSize = `${fileSize} Mb`;
+ const downloadTime = (getObjectEnd - getObjectStart) / 1000;
+ options.downloadTime = `${downloadTime} s`;
+ if (fileSize >= 3) {
+ console.log("[SR] large file:" + JSON.stringify(options));
+ }
let sourcemap = data.Body.toString();
return new sourceMap.SourceMapConsumer(sourcemap)
@@ -68,17 +71,15 @@ module.exports.sourcemapReader = async event => {
preview = preview.slice(start, original.line + event.padding);
}
} else {
- console.log("source not found, null preview for:");
- console.log(original.source);
+ console.log(`[SR] source not found, null preview for: ${original.source}`);
preview = []
}
url = URL.parse(original.source);
} else {
- console.log("couldn't find original position of:");
- console.log({
+ console.log("[SR] couldn't find original position of: " + JSON.stringify({
line: event.positions[i].line,
column: event.positions[i].column
- });
+ }));
}
let result = {
"absPath": url.href,
@@ -92,6 +93,12 @@ module.exports.sourcemapReader = async event => {
results.push(result);
}
consumer = undefined;
+
+ const sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000;
+ options.sourcemapProcessingTime = `${sourcemapProcessingTime} s`
+ if (fileSize >= 3 || sourcemapProcessingTime > 2) {
+ console.log("[SR] " + JSON.stringify(options));
+ }
// Use this code if you don't use the http event with the LAMBDA-PROXY integration
return resolve(results);
})
diff --git a/sourcemap-reader/servers/sourcemaps-server.js b/sourcemap-reader/servers/sourcemaps-server.js
index ced43125c..7ac6da992 100644
--- a/sourcemap-reader/servers/sourcemaps-server.js
+++ b/sourcemap-reader/servers/sourcemaps-server.js
@@ -9,7 +9,7 @@ router.post('/', (req, res) => {
});
req.on('end', function () {
data = JSON.parse(data);
- console.log("Starting parser for: " + data.key);
+ console.log("[SR] Starting parser for: " + data.key);
// process.env = {...process.env, ...data.bucket_config};
handler.sourcemapReader(data)
.then((results) => {
@@ -18,7 +18,7 @@ router.post('/', (req, res) => {
res.end(JSON.stringify(results));
})
.catch((e) => {
- console.error("Something went wrong");
+ console.error("[SR] Something went wrong");
console.error(e);
res.statusCode(500);
res.end(e);
diff --git a/tracker/tracker-assist/layout/index-chat.html b/tracker/tracker-assist/layout/index-chat.html
new file mode 100644
index 000000000..ef87c6b06
--- /dev/null
+++ b/tracker/tracker-assist/layout/index-chat.html
@@ -0,0 +1,490 @@
+
+
+
+
+
+
+ OpenReplay | Assist
+
+
+
+
+
+
+
+
+
+
+
+
The agent is requesting remote control
+
+ Grant remote access
+ Reject
+
+
+
+
+
Answer the call so the agent can assist.
+
+
+
+
+
+ Answer
+
+
Reject
+
+
+
+ Connecting...
+
+
+
+
+
+
+
+
+
+
+
+
Hey, did you get the key?
+
+ Username
+ 00:00
+
+
+
+
+ Oui, merci!
+
+
+ Username
+ 00:00
+
+
+
+
+
+
+
+
+
diff --git a/tracker/tracker-assist/layout/index.html b/tracker/tracker-assist/layout/index.html
index e541fc1e3..6c1995e3d 100644
--- a/tracker/tracker-assist/layout/index.html
+++ b/tracker/tracker-assist/layout/index.html
@@ -1,490 +1,204 @@
-
-
-
-
- OpenReplay | Assist
-
-
-
-#chat-card .chat-input {
- margin: 10px;
- border-radius: 3px;
- box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.15);
- background-color: #DDDDDD;
- position: relative;
-}
+
+
-#chat-card .chat-input .input {
- width: 100%;
- border: none;
- border-radius: 0px;
- padding: 8px 16px;
- font-size: 16px;
- color: #333;
- background-color: transparent;
-}
-.send-btn {
- width: 26px;
- height: 26px;
- background-color: #AAA;
- position: absolute;
- right: 5px;
- top: 0;
- bottom: 0;
- border-radius: 50%;
- display: flex;
- align-items: center;
- justify-content: center;
- margin: auto;
- cursor: pointer;
-}
-.send-btn:hover {
- background-color: #999;
-}
-.send-btn svg {
- fill: #DDDDDD;
-}
+
+
+
+
Connecting...
+
+
+
+
-.confirm-window .title {
- margin-bottom: 10px;
-}
-.confirm-window {
- font: 14px 'Roboto', sans-serif;
- padding: 20px;
- background-color: #F3F3F3;
- border-radius: 3px;
- /* position: absolute; */
- width: fit-content;
- color: #666666;
- display: none;
-}
-.confirm-window .actions {
- background-color: white;
- padding: 10px;
- display: flex;
- box-shadow: 0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1);
- border-radius: 6px;
-}
+
+
-.btn-lg {
- font-size: 14px;
- padding: 10px 14px;
-}
+