Merge remote-tracking branch 'origin/api-v1.7.0' into dev

This commit is contained in:
Taha Yassine Kraiem 2022-06-17 12:57:03 +02:00
commit 9e7e35769c
93 changed files with 6847 additions and 1125 deletions


@ -36,7 +36,8 @@ pg_password=asayerPostgres
pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=45
pg_minconn=20
pg_maxconn=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
put_S3_TTL=20
@ -44,6 +45,6 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://127.0.0.1:9000/
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
stage=default-foss
version_number=1.4.0
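
The pool floor drops from 45 to 20, leaving headroom under the pg_maxconn ceiling of 50. A minimal sketch of how these variables plausibly bound a psycopg2 connection pool (the env var names come from the file above; the wiring via python-decouple and ThreadedConnectionPool is an assumption, not shown in this diff):

from decouple import config
from psycopg2 import pool

# Sketch only: pg_minconn/pg_maxconn as pool bounds; minconn must not exceed maxconn.
pg_pool = pool.ThreadedConnectionPool(
    minconn=config("pg_minconn", cast=int, default=20),  # floor, was 45
    maxconn=config("pg_maxconn", cast=int, default=50),  # ceiling
    host=config("pg_host"),
    port=config("pg_port", cast=int, default=5432),
    user=config("pg_user"),
    password=config("pg_password"),
)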


@ -1,20 +1,7 @@
FROM python:3.9.10-slim
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env
ENV APP_NAME chalice
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/*
RUN cd sourcemap-reader && \
npm install
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
@ -22,5 +9,23 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/*
WORKDIR /work_tmp
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
COPY sourcemap-reader/*.json /work_tmp/
RUN cd /work_tmp && npm install
WORKDIR /work
COPY . .
RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh


@ -1,13 +1,9 @@
FROM python:3.9.10-slim
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENV pg_minconn 2
ENV APP_NAME alerts
ENV pg_minconn 2
ENV pg_maxconn 10
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
@ -15,5 +11,13 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
WORKDIR /work
COPY . .
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh


@ -1,4 +1,4 @@
FROM python:3.9.10-slim
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work
COPY . .


@ -19,10 +19,14 @@ class JWTAuth(HTTPBearer):
if not credentials.scheme == "Bearer":
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1),
tenant_id=jwt_payload.get("tenantId", -1),
jwt_iat=jwt_payload.get("iat", 100),
jwt_aud=jwt_payload.get("aud", ""))
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
or not auth_exists:
print("JWTAuth: Token issue")
if jwt_payload is not None:
print(jwt_payload)
@ -34,21 +38,19 @@ class JWTAuth(HTTPBearer):
print("JWTAuth: iat is None")
if jwt_payload is not None and jwt_payload.get("aud") is None:
print("JWTAuth: aud is None")
if jwt_payload is not None and \
not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
if jwt_payload is not None and not auth_exists:
print("JWTAuth: not users.auth_exists")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"])
user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
if user is None:
print("JWTAuth: User not found.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
request.state.authorizer_identity = "jwt"
request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"],
user_id=jwt_payload["userId"],
request.state.currentContext = CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
user_id=jwt_payload.get("userId", -1),
email=user["email"])
return request.state.currentContext
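
The refactor evaluates the users.auth_exists database check once and reads every claim with .get(...) plus a default, so a malformed payload is rejected with the 403 instead of raising KeyError. A self-contained sketch of the resulting guard, with auth_exists_check standing in for users.auth_exists:

def is_authorized(jwt_payload, auth_exists_check):
    # None payload, missing iat/aud, or a failed DB check all reject cleanly
    return (jwt_payload is not None
            and jwt_payload.get("iat") is not None
            and jwt_payload.get("aud") is not None
            and auth_exists_check(user_id=jwt_payload.get("userId", -1),
                                  tenant_id=jwt_payload.get("tenantId", -1),
                                  jwt_iat=jwt_payload.get("iat", 100),
                                  jwt_aud=jwt_payload.get("aud", "")))

assert not is_authorized(None, lambda **kw: True)
assert not is_authorized({"userId": 1}, lambda **kw: True)  # no iat/aud: rejected, no KeyError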


@ -99,10 +99,10 @@ def Build(a):
j_s = True
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = "DESC"
a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part= sessions.search_query_parts(
full_args, query_part = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value


@ -1,6 +1,7 @@
import requests
from decouple import config
import schemas
from chalicelib.core import projects
SESSION_PROJECTION_COLS = """s.project_id,
@ -19,14 +20,32 @@ SESSION_PROJECTION_COLS = """s.project_id,
"""
def get_live_sessions_ws(project_id, user_id=None):
def get_live_sessions_ws_user_id(project_id, user_id):
data = {
"filter": {"userId": user_id} if user_id else {}
}
return __get_live_sessions_ws(project_id=project_id, data=data)
def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema):
data = {
"filter": {},
"pagination": {"limit": body.limit, "page": body.page},
"sort": {"key": body.sort, "order": body.order}
}
for f in body.filters:
if f.type == schemas.LiveFilterType.metadata:
data["filter"][f.source] = f.value
else:
data["filter"][f.type.value] = f.value
return __get_live_sessions_ws(project_id=project_id, data=data)
def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id)
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
try:
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params,
timeout=config("assistTimeout", cast=int, default=5))
connected_peers = requests.post(config("assist") % config("S3_KEY") + f"/{project_key}", json=data,
timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@ -44,27 +63,19 @@ def get_live_sessions_ws(project_id, user_id=None):
except:
print("couldn't get response")
live_peers = []
for s in live_peers:
_live_peers = live_peers
if "sessions" in live_peers:
_live_peers = live_peers["sessions"]
for s in _live_peers:
s["live"] = True
s["projectId"] = project_id
live_peers = sorted(live_peers, key=lambda l: l.get("timestamp", 0), reverse=True)
return live_peers
def get_live_session_by_id(project_id, session_id):
all_live = get_live_sessions_ws(project_id)
for l in all_live:
if str(l.get("sessionID")) == str(session_id):
return l
return None
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
project_key = projects.get_project_key(project_id)
try:
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}",
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
@ -83,7 +94,61 @@ def is_live(project_id, session_id, project_key=None):
except:
print("couldn't get response")
return False
return str(session_id) in connected_peers
return connected_peers
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data")
except requests.exceptions.Timeout:
print("Timeout getting Assist response")
return False
except Exception as e:
print("issue getting Assist response")
print(str(e))
print("expected JSON, received:")
try:
print(connected_peers.text)
except:
print("couldn't get response")
return False
return str(session_id) == connected_peers
def autocomplete(project_id, q: str, key: str = None):
project_key = projects.get_project_key(project_id)
params = {"q": q}
if key:
params["key"] = key
try:
results = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print("!! issue with the peer-server")
print(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e:
print("issue getting Assist response")
print(str(e))
print("expected JSON, received:")
try:
print(results.text)
except:
print("couldn't get response")
return {"errors": ["Something went wrong wile calling assist"]}
return {"data": results}
def get_ice_servers():
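
get_live_sessions_ws now POSTs a structured search body to the assist peer-server instead of a bare userId query parameter. A runnable sketch of the payload it assembles (field names follow the diff; the body object below is a stand-in for schemas.LiveSessionsSearchPayloadSchema):

from types import SimpleNamespace

def build_ws_payload(body):
    data = {"filter": {},
            "pagination": {"limit": body.limit, "page": body.page},
            "sort": {"key": body.sort, "order": body.order}}
    for f in body.filters:
        # metadata filters are keyed by their source; all others by their type
        key = f.source if f.type == "metadata" else f.type
        data["filter"][key] = f.value
    return data

body = SimpleNamespace(limit=10, page=1, sort="timestamp", order="DESC",
                       filters=[SimpleNamespace(type="userId", value="u-42", source=None)])
print(build_ws_payload(body))
# {'filter': {'userId': 'u-42'}, 'pagination': {'limit': 10, 'page': 1},
#  'sort': {'key': 'timestamp', 'order': 'DESC'}}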


@ -2,7 +2,7 @@ import json
from typing import Union
import schemas
from chalicelib.core import sessions
from chalicelib.core import sessions, funnels, errors
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
@ -42,7 +42,66 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return results
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
return data.metric_type == schemas.MetricType.funnel
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
if len(data.series) == 0:
return {
"stages": [],
"totalDropDueToIssues": 0
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __is_errors_list(data):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.errors
def __get_errors_list(project_id, user_id, data):
if len(data.series) == 0:
return {
"total": 0,
"errors": []
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data.series[0].filter.page = data.page
data.series[0].filter.limit = data.limit
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_sessions_list(data):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.sessions
def __get_sessions_list(project_id, user_id, data):
if len(data.series) == 0:
print("empty series")
return {
"total": 0,
"sessions": []
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data.series[0].filter.page = data.page
data.series[0].filter.limit = data.limit
return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
if __is_funnel_chart(data):
return __get_funnel_chart(project_id=project_id, data=data)
elif __is_errors_list(data):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
elif __is_sessions_list(data):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
@ -75,15 +134,22 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
series_charts = __try_live(project_id=project_id, data=metric)
if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
return series_charts
results = [{}] * len(series_charts[0])
for i in range(len(results)):
for j, series_chart in enumerate(series_charts):
results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
metric.series[j].name: series_chart[i]["count"]}
return results
return merged_live(project_id=project_id, data=metric, user_id=user_id)
# if __is_funnel_chart(metric):
# return __get_funnel_chart(project_id=project_id, data=metric)
# elif __is_errors_list(metric):
# return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
#
# series_charts = __try_live(project_id=project_id, data=metric)
# if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
# return series_charts
# results = [{}] * len(series_charts[0])
# for i in range(len(results)):
# for j, series_chart in enumerate(series_charts):
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
# metric.series[j].name: series_chart[i]["count"]}
# return results
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
@ -105,6 +171,38 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
return {"seriesId": s.series_id, "seriesName": s.name,
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
return {"seriesId": s.series_id, "seriesName": s.name,
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
results = []
if data.series is None:
@ -130,12 +228,16 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
data.series = None
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
params = {"user_id": user_id, "project_id": project_id,
"default_config": json.dumps(data.config.dict()),
**data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value, metric_format)
view_type, metric_type, metric_of, metric_value,
metric_format, default_config)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, %(metric_format)s)
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
@ -396,3 +498,32 @@ def change_state(project_id, metric_id, user_id, status):
{"metric_id": metric_id, "status": status, "user_id": user_id})
)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CustomMetricSessionsPayloadSchema
# , range_value=None, start_date=None, end_date=None
):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
issues = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
for i in issues:
if i.get("issueId", "") == issue_id:
issue = i
break
return {"seriesId": s.series_id, "seriesName": s.name,
"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}


@ -6,8 +6,9 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
# category name should be lower cased
CATEGORY_DESCRIPTION = {
'overview': 'High-level metrics and web vitals.',
'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
'custom': 'Previously created custom metrics by me and my team.',
'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
'performance': 'Optimize your app\'s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
@ -33,17 +34,20 @@ def get_templates(project_id, user_id):
cur.execute(pg_query)
rows = cur.fetchall()
for r in rows:
r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "")
r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
for w in r["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.list_to_camel_case(rows)
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur:
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s)
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
RETURNING *"""
params = {"userId": user_id, "projectId": project_id, **data.dict()}
if data.metrics is not None and len(data.metrics) > 0:
@ -134,7 +138,8 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
row = cur.fetchone()
offset = row["count"]
pg_query = f"""UPDATE dashboards
SET name = %(name)s
SET name = %(name)s,
description= %(description)s
{", is_public = %(is_public)s" if data.is_public is not None else ""}
{", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
WHERE dashboards.project_id = %(projectId)s


@ -425,10 +425,9 @@ def __get_sort_key(key):
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
empty_response = {'total': 0,
'errors': []
}
platform = None
for f in data.filters:
@ -463,7 +462,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
order = schemas.SortOrderType.desc
if data.order is not None:
order = data.order
extra_join = ""
@ -544,7 +543,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
return {"count": total}
if total == 0:
rows = []
@ -592,10 +591,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
"data": {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
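
errors.search now returns the flat {total, errors} payload; the {"data": ...} envelope moves to whichever caller still needs it, so consumers like the new __get_errors_list read the result directly. Illustrative sketch:

def search_stub():
    return {"total": 0, "errors": []}    # flat, as search() now returns

enveloped = {"data": search_stub()}      # a route wanting the old shape wraps once
assert enveloped["data"]["errors"] == []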


@ -28,8 +28,8 @@ def __merge_cells(rows, start, count, replacement):
return rows
def __get_grouped_clickrage(rows, session_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage")
def __get_grouped_clickrage(rows, session_id, project_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
if len(click_rage_issues) == 0:
return rows
@ -63,7 +63,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
)
rows = cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id)
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
cur.execute(cur.mogrify("""
SELECT
@ -435,7 +435,15 @@ def __get_autocomplete_table(value, project_id):
query = cur.mogrify(" UNION ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
cur.execute(query)
try:
cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- VALUE -----------")
print(value)
print("--------------------")
raise err
results = helper.list_to_camel_case(cur.fetchall())
return results
@ -464,14 +472,13 @@ def search(text, event_type, project_id, source, key):
return {"data": rows}
def get_errors_by_session_id(session_id):
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE
er.session_id = %(session_id)s
ORDER BY timestamp;""", {"session_id": session_id}))
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])


@ -251,6 +251,22 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu
"totalDropDueToIssues": total_drop_due_to_issues}}
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
return {"stages": [], "totalDropDueToIssues": 0}
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
@ -280,6 +296,19 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe
last_stage=len(data.events)))}
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(

View file

@ -44,16 +44,18 @@ def get(project_id, issue_id):
return helper.dict_to_camel_case(data)
def get_by_session_id(session_id, issue_type=None):
def get_by_session_id(session_id, project_id, issue_type=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
SELECT *
FROM events_common.issues
INNER JOIN public.issues USING (issue_id)
WHERE session_id = %(session_id)s {"AND type = %(type)s" if issue_type is not None else ""}
WHERE session_id = %(session_id)s
AND project_id= %(project_id)s
{"AND type = %(type)s" if issue_type is not None else ""}
ORDER BY timestamp;""",
{"session_id": session_id, "type": issue_type})
{"session_id": session_id, "project_id": project_id, "type": issue_type})
)
return helper.list_to_camel_case(cur.fetchall())


@ -1,21 +1,9 @@
from chalicelib.utils import pg_client
EDITION = 'foss'
def get_status(tenant_id=None):
with pg_client.PostgresClient() as cur:
cur.execute("SELECT * FROM public.tenants;")
r = cur.fetchone()
return {
"hasActivePlan": True,
"current": {
"edition": r.get("edition", "").upper(),
"versionNumber": r.get("version_number", ""),
"license": "",
"expirationDate": -1
},
"count": {
"teamMember": r.get("t_users"),
"projects": r.get("t_projects"),
"capturedSessions": r.get("t_sessions")
}
"edition": EDITION,
"expirationDate": -1
}


@ -1,4 +1,5 @@
from elasticsearch import Elasticsearch, RequestsHttpConnection
# from elasticsearch import Elasticsearch, RequestsHttpConnection
from elasticsearch import Elasticsearch
from chalicelib.core import log_tools
import base64
import logging
@ -65,7 +66,7 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
"use_ssl": use_ssl,
"verify_certs": False,
"ca_certs": False,
"connection_class": RequestsHttpConnection,
# "connection_class": RequestsHttpConnection,
"timeout": timeout
}
if api_key_id is not None and len(api_key_id) > 0:


@ -967,7 +967,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return row
@ -1069,11 +1069,11 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
pg_sub_query.append("pages.speed_index>0")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS avg
pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS value
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY sessions.user_country
ORDER BY avg,sessions.user_country;"""
ORDER BY value, sessions.user_country;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
avg = cur.fetchone()["avg"]
else:
avg = 0
return {"avg": avg, "chart": helper.list_to_camel_case(rows)}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1126,7 +1126,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
result = {"value": avg, "chart": rows}
helper.__time_value(result)
return result
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1169,7 +1171,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
else:
quantiles = [0 for i in range(len(quantiles_keys))]
result = {
"avg": avg,
"value": avg,
"total": sum(r["count"] for r in rows),
"chart": [],
"percentiles": [{
@ -1177,7 +1179,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
"responseTime": int(quantiles[i])
} for i, v in enumerate(quantiles_keys)
],
"extremeValues": [{"count": 0}]
"extremeValues": [{"count": 0}],
"unit": schemas.TemplatePredefinedUnits.millisecond
}
rows = helper.list_to_camel_case(rows)
_99 = result["percentiles"][-1]["responseTime"]
@ -1348,7 +1351,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return row
@ -1498,7 +1501,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_sub_query_chart.append("m_issues.type = 'crash'")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
COUNT(sessions) AS count
COUNT(sessions) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT sessions.session_id
@ -1556,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
versions.append({v["version"]: v["count"] / (r["total"] / 100)})
r["versions"] = versions
return {"chart": rows, "browsers": browsers}
return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count}
def __get_neutral(rows, add_All_if_empty=True):
@ -1719,7 +1722,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT
resources.url_host AS domain,
AVG(resources.duration) AS avg
AVG(resources.duration) AS value
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.url_host
@ -1738,7 +1741,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
avg = cur.fetchone()["avg"]
else:
avg = 0
return {"avg": avg, "partition": rows}
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2241,7 +2244,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2300,7 +2303,7 @@ def __get_application_activity_avg_page_load_time(cur, project_id, startTimestam
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return row
@ -2316,7 +2319,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2369,7 +2372,7 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return row
@ -2385,7 +2388,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2442,7 +2445,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU
row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2512,7 +2515,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2645,7 +2648,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2731,7 +2734,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return helper.dict_to_camel_case(row)
@ -2772,7 +2775,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return helper.dict_to_camel_case(row)
@ -2816,7 +2819,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return helper.dict_to_camel_case(row)
@ -2857,7 +2860,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return helper.dict_to_camel_case(row)
@ -2899,7 +2902,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(row)
return helper.dict_to_camel_case(row)


@ -25,6 +25,22 @@ def get_all(tenant_id, user_id):
return rows
def get_all_count(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT COUNT(notifications.*) AS count
FROM public.notifications
LEFT JOIN (SELECT notification_id
FROM public.user_viewed_notifications
WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""",
{"user_id": user_id})
)
row = cur.fetchone()
return row
def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
return False


@ -1,8 +1,10 @@
from chalicelib.utils import helper, pg_client
from decouple import config
def get_by_session_id(session_id, project_id):
def get_by_session_id(session_id, project_id, start_ts, duration):
with pg_client.PostgresClient() as cur:
delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000
ch_query = """\
SELECT
timestamp AS datetime,
@ -16,8 +18,13 @@ def get_by_session_id(session_id, project_id):
success,
COALESCE(status, CASE WHEN success THEN 200 END) AS status
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE session_id = %(session_id)s AND project_id= %(project_id)s;"""
params = {"session_id": session_id, "project_id": project_id}
WHERE session_id = %(session_id)s
AND project_id= %(project_id)s
AND sessions.start_ts=%(start_ts)s
AND resources.timestamp>=%(res_start_ts)s
AND resources.timestamp<=%(res_end_ts)s;"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
cur.execute(cur.mogrify(ch_query, params))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
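
The query now pins the session by start_ts and clips resources to the session's span widened by events_ts_delta on both sides. Sketch of the window arithmetic, all values in milliseconds:

start_ts, duration = 1_655_000_000_000, 90_000  # example session
delta = 5 * 60 * 1000                           # events_ts_delta default: 5 minutes

res_start_ts = start_ts - delta                 # lower bound: timestamp >= this
res_end_ts = start_ts + duration + delta        # upper bound: timestamp <= this
assert res_end_ts - res_start_ts == duration + 2 * delta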


@ -85,7 +85,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if
@ -94,10 +94,12 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=data["startTs"],
duration=data["duration"])
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['issues'] = issues.get_by_session_id(session_id=session_id)
data['issues'] = issues.get_by_session_id(session_id=session_id,project_id=project_id)
data['live'] = live and assist.is_live(project_id=project_id,
session_id=session_id,
project_key=data["projectKey"])
@ -201,12 +203,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = "DESC"
data.order = schemas.SortOrderType.desc
else:
data.order = data.order.upper()
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
else:
sort = 'start_ts'
@ -230,7 +232,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
full_args)
else:
if data.order is None:
data.order = "DESC"
data.order = schemas.SortOrderType.desc
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@ -254,9 +256,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
cur.execute(main_query)
except Exception as err:
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
print(main_query)
print(main_query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(data.dict())
print(data.json())
print("--------------------")
raise err
if errors_only:
@ -1199,7 +1201,7 @@ def get_session_ids_by_user_ids(project_id, user_ids):
def delete_sessions_by_session_ids(session_ids):
with pg_client.PostgresClient(long_query=True) as cur:
with pg_client.PostgresClient(unlimited_query=True) as cur:
query = cur.mogrify(
"""\
DELETE FROM public.sessions
@ -1213,7 +1215,7 @@ def delete_sessions_by_session_ids(session_ids):
def delete_sessions_by_user_ids(project_id, user_ids):
with pg_client.PostgresClient(long_query=True) as cur:
with pg_client.PostgresClient(unlimited_query=True) as cur:
query = cur.mogrify(
"""\
DELETE FROM public.sessions
@ -1227,6 +1229,6 @@ def delete_sessions_by_user_ids(project_id, user_ids):
def count_all():
with pg_client.PostgresClient(long_query=True) as cur:
with pg_client.PostgresClient(unlimited_query=True) as cur:
row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
return row.get("count", 0)


@ -5,14 +5,23 @@ from chalicelib.utils.s3 import client
def get_web(sessionId):
return client.generate_presigned_url(
'get_object',
Params={
'Bucket': config("sessions_bucket"),
'Key': str(sessionId)
},
ExpiresIn=100000
)
return [
client.generate_presigned_url(
'get_object',
Params={
'Bucket': config("sessions_bucket"),
'Key': str(sessionId)
},
ExpiresIn=100000
),
client.generate_presigned_url(
'get_object',
Params={
'Bucket': config("sessions_bucket"),
'Key': str(sessionId) + "e"
},
ExpiresIn=100000
)]
def get_ios(sessionId):


@ -24,7 +24,6 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
"""
Add minimal timestamp
@ -293,7 +292,6 @@ def pearson_corr(x: list, y: list):
return r, confidence, False
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
"""
Returns two lists with binary values 0/1:
@ -363,7 +361,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
return transitions, errors, all_errors, n_sess_affected
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
"""
@ -415,7 +412,6 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows):
@ -467,7 +463,6 @@ def get_stages(stages, rows):
return stages_list
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
"""
@ -544,7 +539,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues
def get_top_insights(filter_d, project_id):
output = []
stages = filter_d.get("events", [])
@ -582,9 +576,8 @@ def get_top_insights(filter_d, project_id):
return stages_list, total_drop_due_to_issues
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
output = dict({'critical_issues_count': 0})
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
stages = filter_d.get("events", [])
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)


@ -67,8 +67,8 @@ def create_step1(data: schemas.UserSignupSchema):
}
query = f"""\
WITH t AS (
INSERT INTO public.tenants (name, version_number, edition)
VALUES (%(organizationName)s, (SELECT openreplay_version()), 'fos')
INSERT INTO public.tenants (name, version_number)
VALUES (%(organizationName)s, (SELECT openreplay_version()))
RETURNING api_key
),
u AS (
@ -77,8 +77,8 @@ def create_step1(data: schemas.UserSignupSchema):
RETURNING user_id,email,role,name
),
au AS (INSERT
INTO public.basic_authentication (user_id, password, generated_password)
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE)
INTO public.basic_authentication (user_id, password)
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)))
)
INSERT INTO public.projects (name, active)
VALUES (%(projectName)s, TRUE)


@ -1,13 +1,15 @@
from chalicelib.utils import pg_client
import requests
from chalicelib.core import license
def process_data(data, edition='fos'):
def process_data(data):
return {
'edition': edition,
'edition': license.EDITION,
'tracking': data["opt_out"],
'version': data["version_number"],
'user_id': data["user_id"],
'user_id': data["tenant_key"],
'tenant_key': data["tenant_key"],
'owner_email': None if data["opt_out"] else data["email"],
'organization_name': None if data["opt_out"] else data["name"],
'users_count': data["t_users"],
@ -27,7 +29,7 @@ def compute():
t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0),
t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0),
t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0)
RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out,
RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
(SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);"""
)
data = cur.fetchone()
@ -39,6 +41,7 @@ def new_client():
cur.execute(
f"""SELECT *,
(SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email
FROM public.tenants;""")
FROM public.tenants
LIMIT 1;""")
data = cur.fetchone()
requests.post('https://api.openreplay.com/os/signup', json=process_data(data))


@ -1,7 +1,7 @@
import schemas
from chalicelib.utils import pg_client
from chalicelib.utils import helper
from chalicelib.core import users
from chalicelib.core import users, license
def get_by_tenant_id(tenant_id):
@ -13,7 +13,7 @@ def get_by_tenant_id(tenant_id):
name,
api_key,
created_at,
edition,
'{license.EDITION}' AS edition,
version_number,
opt_out
FROM public.tenants
@ -67,7 +67,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"error": "unauthorized"}
return {"errors": ["unauthorized, needs admin or owner"]}
if data.name is None and data.opt_out is None:
return {"errors": ["please provide 'name' of 'optOut' attribute for update"]}
changes = {}


@ -4,6 +4,7 @@ import secrets
from decouple import config
from fastapi import BackgroundTasks
import schemas
from chalicelib.core import authorizers, metadata, projects
from chalicelib.core import tenants, assist
from chalicelib.utils import dev, email_helper
@ -21,10 +22,10 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
query = cur.mogrify(f"""\
WITH u AS (INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
RETURNING user_id,email,role,name,appearance
RETURNING user_id,email,role,name
),
au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now()))
au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now()))
RETURNING invitation_token
)
SELECT u.user_id,
@ -32,7 +33,6 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
u.email,
u.role,
u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -61,7 +61,6 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
email,
role,
name,
TRUE AS change_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
@ -73,8 +72,7 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
result = cur.fetchone()
query = cur.mogrify("""\
UPDATE public.basic_authentication
SET generated_password = TRUE,
invitation_token = %(invitation_token)s,
SET invitation_token = %(invitation_token)s,
invited_at = timezone('utc'::text, now()),
change_pwd_expire_at = NULL,
change_pwd_token = NULL
@ -132,11 +130,7 @@ def update(tenant_id, user_id, changes):
else:
sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s")
else:
if key == "appearance":
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
changes["appearance"] = json.dumps(changes[key])
else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
if len(sub_query_users) > 0:
@ -151,11 +145,9 @@ def update(tenant_id, user_id, changes):
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
{"user_id": user_id, **changes})
)
if len(sub_query_bauth) > 0:
@ -170,11 +162,9 @@ def update(tenant_id, user_id, changes):
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
{"user_id": user_id, **changes})
)
@ -244,16 +234,15 @@ def get(user_id, tenant_id):
cur.execute(
cur.mogrify(
f"""SELECT
users.user_id AS id,
users.user_id,
email,
role,
name,
basic_authentication.generated_password,
name,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance,
api_key
api_key,
TRUE AS has_password
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.user_id = %(userId)s
@ -262,7 +251,7 @@ def get(user_id, tenant_id):
{"userId": user_id})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r, ignore_keys=["appearance"])
return helper.dict_to_camel_case(r)
def generate_new_api_key(user_id):
@ -281,45 +270,39 @@ def generate_new_api_key(user_id):
return helper.dict_to_camel_case(r)
def edit(user_id_to_update, tenant_id, changes, editor_id):
ALLOW_EDIT = ["name", "email", "admin", "appearance"]
def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id):
user = get(user_id=user_id_to_update, tenant_id=tenant_id)
if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]:
if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]:
admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]}
_changes = {}
if editor_id == user_id_to_update:
if user["superAdmin"]:
changes.pop("admin")
elif user["admin"] != changes["admin"]:
return {"errors": ["cannot change your own role"]}
if changes.admin is not None:
if user["superAdmin"]:
changes.admin = None
elif changes.admin != user["admin"]:
return {"errors": ["cannot change your own role"]}
keys = list(changes.keys())
for k in keys:
if k not in ALLOW_EDIT or changes[k] is None:
changes.pop(k)
keys = list(changes.keys())
if changes.email is not None and changes.email != user["email"]:
if email_exists(changes.email):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes.email) is not None:
return {"errors": ["email previously deleted."]}
_changes["email"] = changes.email
if len(keys) > 0:
if "email" in keys and changes["email"] != user["email"]:
if email_exists(changes["email"]):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes["email"]) is not None:
return {"errors": ["email previously deleted."]}
if "admin" in keys:
changes["role"] = "admin" if changes.pop("admin") else "member"
if len(changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes)
if changes.name is not None and len(changes.name) > 0:
_changes["name"] = changes.name
return {"data": updated_user}
if changes.admin is not None:
_changes["role"] = "admin" if changes.admin else "member"
if len(_changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes)
return {"data": updated_user}
return {"data": user}
def edit_appearance(user_id, tenant_id, changes):
updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
return {"data": updated_user}
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -329,8 +312,7 @@ def get_by_email_only(email):
1 AS tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.name,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
@ -353,8 +335,7 @@ def get_by_email_reset(email, reset_token):
1 AS tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.name,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
@ -377,7 +358,7 @@ def get_members(tenant_id):
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.created_at,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -393,6 +374,7 @@ def get_members(tenant_id):
if len(r):
r = helper.list_to_camel_case(r)
for u in r:
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
if u["invitationToken"]:
u["invitationLink"] = __get_invitation_link(u.pop("invitationToken"))
else:
@ -562,28 +544,26 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
{"userId": user_id})
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
def authenticate(email, password, for_change_password=False, for_plugin=False):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT
users.user_id AS id,
users.user_id,
1 AS tenant_id,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
FROM public.users INNER JOIN public.basic_authentication USING(user_id)
WHERE users.email = %(email)s
AND basic_authentication.password = crypt(%(password)s, basic_authentication.password)
@ -597,16 +577,16 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
if r is not None:
if for_change_password:
return True
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
r = helper.dict_to_camel_case(r)
query = cur.mogrify(
f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now())
WHERE user_id = %(user_id)s
RETURNING jwt_iat;""",
{"user_id": r["id"]})
{"user_id": r["userId"]})
cur.execute(query)
return {
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'],
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
"email": email,

View file

@ -29,8 +29,12 @@ def edit_config(user_id, weekly_report):
def cron():
if not helper.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
return
with pg_client.PostgresClient(long_query=True) as cur:
params = {"3_days_ago": TimeUTC.midnight(delta_days=-3),
params = {"tomorrow": TimeUTC.midnight(delta_days=1),
"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)}
@ -43,18 +47,18 @@ def cron():
COALESCE(week_0_issues.count, 0) AS this_week_issues_count,
COALESCE(week_1_issues.count, 0) AS past_week_issues_count,
COALESCE(month_1_issues.count, 0) AS past_month_issues_count
FROM public.projects
FROM (SELECT project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects
INNER JOIN LATERAL (
SELECT sessions.project_id
FROM public.sessions
WHERE sessions.project_id = projects.project_id
AND start_ts >= %(3_days_ago)s
AND start_ts < %(tomorrow)s
LIMIT 1) AS recently_active USING (project_id)
INNER JOIN LATERAL (
SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails
FROM public.users
WHERE users.tenant_id = projects.tenant_id
AND users.deleted_at ISNULL
WHERE users.deleted_at ISNULL
AND users.weekly_report
) AS users ON (TRUE)
LEFT JOIN LATERAL (
@ -62,25 +66,25 @@ def cron():
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND issues.timestamp >= %(1_week_ago)s
AND issues.timestamp < %(tomorrow)s
) AS week_0_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT
AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(2_week_ago)s
) AS week_1_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT
) AS month_1_issues ON (TRUE)
WHERE projects.deleted_at ISNULL;"""), params)
AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(5_week_ago)s
) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall()
emails_to_send = []
for p in projects_data:

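A quick, hedged sketch of the bound computation above: TimeUTC.midnight(delta_days=n) is assumed to return the UTC midnight n days from now as epoch milliseconds, and passing precomputed bounds (including the exclusive %(tomorrow)s upper bound) keeps all three issue windows consistent and sargable.

from datetime import datetime, timedelta, timezone

def midnight(delta_days=0):
    # Assumed equivalent of TimeUTC.midnight: UTC midnight, epoch milliseconds.
    d = datetime.now(timezone.utc).date() + timedelta(days=delta_days)
    return int(datetime(d.year, d.month, d.day, tzinfo=timezone.utc).timestamp() * 1000)

params = {"tomorrow": midnight(1), "3_days_ago": midnight(-3),
          "1_week_ago": midnight(-7), "2_week_ago": midnight(-14),
          "5_week_ago": midnight(-35)}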
View file

@ -1,12 +1,13 @@
import math
import random
import re
import string
from typing import Union
import math
import requests
import schemas
from chalicelib.utils.TimeUTC import TimeUTC
local_prefix = 'local-'
from decouple import config
@ -364,10 +365,6 @@ def has_smtp():
return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0
def get_edition():
return "ee" if "ee" in config("ENTERPRISE_BUILD", default="").lower() else "foss"
def old_search_payload_to_flat(values):
# in case the old search body was passed
if values.get("events") is not None:
@ -384,3 +381,20 @@ def custom_alert_to_front(values):
if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom:
values["query"]["left"] = values["seriesId"]
return values
def __time_value(row):
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
factor = 1
if row["value"] > TimeUTC.MS_MINUTE:
row["value"] = row["value"] / TimeUTC.MS_MINUTE
row["unit"] = schemas.TemplatePredefinedUnits.minute
factor = TimeUTC.MS_MINUTE
elif row["value"] > 1 * 1000:
row["value"] = row["value"] / 1000
row["unit"] = schemas.TemplatePredefinedUnits.second
factor = 1000
if "chart" in row and factor > 1:
for r in row["chart"]:
r["value"] /= factor

View file

@ -52,7 +52,9 @@ def make_pool():
except (Exception, psycopg2.DatabaseError) as error:
print("Error while closing all connexions to PostgreSQL", error)
try:
postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), 100, **PG_CONFIG)
postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20),
config("pg_maxconn", cast=int, default=80),
**PG_CONFIG)
if (postgreSQL_pool):
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
@ -74,12 +76,17 @@ class PostgresClient:
cursor = None
long_query = False
def __init__(self, long_query=False):
def __init__(self, long_query=False, unlimited_query=False):
self.long_query = long_query
if long_query:
if unlimited_query:
long_config = dict(_PG_CONFIG)
long_config["application_name"] += "-UNLIMITED"
self.connection = psycopg2.connect(**long_config)
elif long_query:
long_config = dict(_PG_CONFIG)
long_config["application_name"] += "-LONG"
self.connection = psycopg2.connect(**_PG_CONFIG)
long_config["options"] = f"-c statement_timeout={config('pg_long_timeout', cast=int, default=5*60) * 1000}"
self.connection = psycopg2.connect(**long_config)
else:
self.connection = postgreSQL_pool.getconn()
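A hedged sketch of what the three connection variants above amount to; the DSN values below are placeholders, not this repo's real PG_CONFIG. Pooled connections are bounded by pg_minconn/pg_maxconn, a long_query connection is created outside the pool with a server-side statement_timeout, and an unlimited_query connection drops the timeout entirely.

import psycopg2

# Placeholder DSN; an assumption, not the actual PG_CONFIG.
long_config = {
    "host": "127.0.0.1", "port": 5432, "user": "postgres",
    "password": "postgres", "dbname": "app",
    "application_name": "chalice-LONG",
    # pg_long_timeout default of 5 minutes, expressed in milliseconds:
    "options": "-c statement_timeout=300000",
}
conn = psycopg2.connect(**long_config)  # dedicated connection, not from the pool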

View file

@ -5,11 +5,14 @@ import boto3
import botocore
from botocore.client import Config
client = boto3.client('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=config("sessions_region"))
if not config("S3_HOST", default=False):
client = boto3.client('s3')
else:
client = boto3.client('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=config("sessions_region"))
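The conditional above lets the same code talk to plain AWS S3 or to an S3-compatible store such as MinIO. A hedged restatement of the rule: with no S3_HOST set, boto3 falls back to its default credential and region chain; with a custom endpoint, explicit keys and s3v4 signing are required.

import boto3
from botocore.client import Config

def make_s3_client(endpoint=None, key=None, secret=None, region="us-east-1"):
    # No custom endpoint: default AWS credential chain (env vars, config, IAM role).
    if not endpoint:
        return boto3.client("s3")
    # S3-compatible endpoint (MinIO etc.): explicit credentials + s3v4 signing.
    return boto3.client("s3", endpoint_url=endpoint,
                        aws_access_key_id=key,
                        aws_secret_access_key=secret,
                        config=Config(signature_version="s3v4"),
                        region_name=region)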
def exists(bucket, key):

View file

@ -33,7 +33,9 @@ class ORRoute(APIRoute):
if isinstance(response, JSONResponse):
response: JSONResponse = response
body = json.loads(response.body.decode('utf8'))
if response.status_code == 200 and body is not None and body.get("errors") is not None:
if response.status_code == 200 \
and body is not None and isinstance(body, dict) \
and body.get("errors") is not None:
if "not found" in body["errors"][0]:
response.status_code = status.HTTP_404_NOT_FOUND
else:

View file

@ -1,15 +1,15 @@
requests==2.26.0
urllib3==1.26.6
boto3==1.16.1
pyjwt==1.7.1
psycopg2-binary==2.8.6
elasticsearch==7.9.1
jira==3.1.1
requests==2.28.0
urllib3==1.26.9
boto3==1.24.11
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.2.3
jira==3.2.0
fastapi==0.75.0
uvicorn[standard]==0.17.5
fastapi==0.78.0
uvicorn[standard]==0.17.6
python-decouple==3.6
pydantic[email]==1.8.2
apscheduler==3.8.1
pydantic[email]==1.9.1
apscheduler==3.9.1

View file

@ -1,7 +1,8 @@
from typing import Union
from typing import Union, Optional
from decouple import config
from fastapi import Depends, Body, BackgroundTasks
from fastapi import Depends, Body, BackgroundTasks, HTTPException
from starlette import status
import schemas
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
@ -13,7 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, users, \
custom_metrics, saved_search
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper
from chalicelib.utils import email_helper, helper, captcha
from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context
from routers.base import get_routers
@ -21,6 +22,34 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
if "errors" in r:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
r["smtp"] = helper.has_smtp()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r
}
}
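A hedged usage sketch for the route above; the host and prefix are placeholders, and the field names follow UserLoginSchema as used in this file.

import requests

r = requests.post("http://127.0.0.1:8000/login",  # placeholder URL
                  json={"email": "owner@example.com", "password": "secret"})
body = r.json()                  # {"jwt": "...", "data": {"user": {...}}}
token = body["jwt"]
headers = {"Authorization": f"Bearer {token}"}  # for subsequent authenticated calls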
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
@ -107,10 +136,12 @@ def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType] = None,
key: str = None,
source: str = None, context: schemas.CurrentContext = Depends(OR_context)):
key: str = None, source: str = None, live: bool = False,
context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0:
return {"data": []}
if live:
return assist.autocomplete(project_id=projectId, q=q, key=key)
if type in [schemas.FetchFilterType._url]:
type = schemas.EventType.request
elif type in [schemas.GraphqlFilterType._name]:
@ -743,8 +774,8 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"])
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None:
return {"errors": ["issue not found"]}
@ -830,7 +861,14 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, user_id=userId)
data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, body=data)
return {'data': data}
@ -903,7 +941,7 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return errors.search(data, projectId, user_id=context.user_id)
return {"data": errors.search(data, projectId, user_id=context.user_id)}
@app.get('/{projectId}/errors/stats', tags=['errors'])
@ -966,6 +1004,11 @@ def get_notifications(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/count', tags=['notifications'])
def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/{notificationId}/view', tags=['notifications'])
def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)}
@ -1071,17 +1114,10 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context
@app.put('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data.dict(),
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
editor_id=context.user_id)
@app.post('/account/appearance', tags=["account"])
@app.put('/account/appearance', tags=["account"])
def edit_account_appearance(data: schemas.EditUserAppearanceSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_appearance(tenant_id=context.tenant_id, user_id=context.user_id, changes=data.dict())
@app.post('/account/password', tags=["account"])
@app.put('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),

View file

@ -1,17 +1,15 @@
from typing import Optional
from decouple import config
from fastapi import Body, Depends, HTTPException, status, BackgroundTasks
from fastapi import Body, Depends, BackgroundTasks
from starlette.responses import RedirectResponse
import schemas
from chalicelib.core import assist
from chalicelib.core import integrations_manager
from chalicelib.core import sessions
from chalicelib.core import tenants, users, metadata, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper
from or_dependencies import OR_context
from routers.base import get_routers
@ -24,60 +22,23 @@ def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": None,
"ssoProvider": None,
"edition": helper.get_edition()}}
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]
r["iceServers"] = c["iceServers"]
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
"edition": license.EDITION}}
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
t.pop("createdAt")
t["tenantName"] = t.pop("name")
return {
'data': {
**r,
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
},
**t,
**license.get_status(context.tenant_id),
"smtp": helper.has_smtp(),
"iceServers": assist.get_ice_servers()
# "iceServers": assist.get_ice_servers()
}
}
@ -181,7 +142,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(),
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)
@ -199,29 +160,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)}
@app.get('/plans', tags=["plan"])
def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": license.get_status(context.tenant_id)
}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/client', tags=['projects'])
def get_client(context: schemas.CurrentContext = Depends(OR_context)):
r = tenants.get_by_tenant_id(context.tenant_id)
if r is not None:
r.pop("createdAt")
return {
'data': r
}
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True)}
@app.get('/limits', tags=['accounts'])
def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': {
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
},
}
}

View file

@ -1,7 +1,7 @@
from fastapi import Body, Depends
import schemas
from chalicelib.core import dashboards, custom_metrics
from chalicelib.core import dashboards, custom_metrics, funnels
from or_dependencies import OR_context
from routers.base import get_routers
@ -102,18 +102,29 @@ def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_c
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live(project_id=projectId, data=data)}
return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data}
@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.series) == 0:
return {"data": []}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter)
return {"data": data}
@app.post('/{projectId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/metrics', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@ -149,6 +160,42 @@ def get_custom_metric_sessions(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, issue_id=issueId, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
def get_custom_metric_errors_list(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),

View file

@ -12,7 +12,7 @@ def attribute_to_camel_case(snake_str):
def transform_email(email: str) -> str:
return email.lower() if isinstance(email, str) else email
return email.lower().strip() if isinstance(email, str) else email
class _Grecaptcha(BaseModel):
@ -37,16 +37,11 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
admin: Optional[bool] = Field(None)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: EmailStr = Field(...)
@ -132,13 +127,11 @@ class CreateMemberSchema(BaseModel):
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditMemberSchema(BaseModel):
class EditMemberSchema(EditUserSchema):
name: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
@ -486,6 +479,10 @@ class IssueType(str, Enum):
js_exception = 'js_exception'
class MetricFormatType(str, Enum):
session_count = 'sessionCount'
class __MixedSearchFilter(BaseModel):
is_event: bool = Field(...)
@ -618,17 +615,28 @@ class _PaginatedSchema(BaseModel):
page: int = Field(default=1, gt=0)
class SortOrderType(str, Enum):
asc = "ASC"
desc = "DESC"
class SessionsSearchPayloadSchema(_PaginatedSchema):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[SessionSearchFilterSchema] = Field([])
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
order: Literal["asc", "desc"] = Field(default="desc")
order: SortOrderType = Field(default=SortOrderType.desc)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
bookmarked: bool = Field(default=False)
@root_validator(pre=True)
def transform_order(cls, values):
if values.get("order") is not None:
values["order"] = values["order"].upper()
return values
class Config:
alias_generator = attribute_to_camel_case
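A minimal reproduction of the normalization pattern above (pydantic v1 API, matching the pin in requirements.txt): the pre-validator upper-cases whatever the client sends so it matches the SortOrderType enum values.

from enum import Enum
from pydantic import BaseModel, Field, root_validator

class SortOrderType(str, Enum):
    asc = "ASC"
    desc = "DESC"

class Payload(BaseModel):
    order: SortOrderType = Field(default=SortOrderType.desc)

    @root_validator(pre=True)
    def transform_order(cls, values):
        if values.get("order") is not None:
            values["order"] = values["order"].upper()
        return values

print(Payload(order="asc").order)  # SortOrderType.asc ("ASC")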
@ -757,8 +765,7 @@ class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema):
# class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema):
class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema, SearchErrorsSchema):
startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)
@ -790,6 +797,8 @@ class MetricTableViewType(str, Enum):
class MetricType(str, Enum):
timeseries = "timeseries"
table = "table"
predefined = "predefined"
funnel = "funnel"
class TableMetricOfType(str, Enum):
@ -800,6 +809,8 @@ class TableMetricOfType(str, Enum):
user_id = FilterType.user_id.value
issues = FilterType.issue.value
visited_url = EventType.location.value
sessions = "SESSIONS"
errors = IssueType.js_exception.value
class TimeseriesMetricOfType(str, Enum):
@ -815,7 +826,7 @@ class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema):
class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema):
density: int = Field(7)
class Config:
@ -830,7 +841,7 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
metric_value: List[IssueType] = Field([])
metric_format: Optional[str] = Field(None)
metric_format: Optional[MetricFormatType] = Field(None)
# metricFraction: float = Field(None, gt=0, lt=1)
# This is used to handle wrong values sent by the UI
@ -863,8 +874,23 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
alias_generator = attribute_to_camel_case
class CustomMetricsConfigSchema(BaseModel):
col: Optional[int] = Field(default=2)
row: Optional[int] = Field(default=2)
position: Optional[int] = Field(default=0)
class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema())
@root_validator(pre=True)
def transform_series(cls, values):
if values.get("series") is not None and len(values["series"]) > 1 and values.get(
"metric_type") == MetricType.funnel.value:
values["series"] = [values["series"][0]]
return values
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
@ -888,6 +914,7 @@ class SavedSearchSchema(FunnelSchema):
class CreateDashboardSchema(BaseModel):
name: str = Field(..., min_length=1)
description: Optional[str] = Field(default='')
is_public: bool = Field(default=False)
is_pinned: bool = Field(default=False)
metrics: Optional[List[int]] = Field(default=[])
@ -966,6 +993,7 @@ class TemplatePredefinedKeys(str, Enum):
class TemplatePredefinedUnits(str, Enum):
millisecond = "ms"
second = "s"
minute = "min"
memory = "mb"
frame = "f/s"
@ -980,3 +1008,62 @@ class CustomMetricAndTemplate(BaseModel):
class Config:
alias_generator = attribute_to_camel_case
class LiveFilterType(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
user_device = FilterType.user_device.value
user_country = FilterType.user_country.value
user_id = FilterType.user_id.value
user_anonymous_id = FilterType.user_anonymous_id.value
rev_id = FilterType.rev_id.value
platform = FilterType.platform.value
page_title = "PAGETITLE"
session_id = "SESSIONID"
metadata = "METADATA"
user_UUID = "USERUUID"
tracker_version = "TRACKERVERSION"
user_browser_version = "USERBROWSERVERSION"
user_device_type = "USERDEVICETYPE"
class LiveSessionSearchFilterSchema(BaseModel):
value: Union[List[str], str] = Field(...)
type: LiveFilterType = Field(...)
source: Optional[str] = Field(None)
@root_validator
def validator(cls, values):
if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value:
assert values.get("source") is not None, "source should not be null for METADATA type"
assert len(values.get("source")) > 0, "source should not be empty for METADATA type"
return values
class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
filters: List[LiveSessionSearchFilterSchema] = Field([])
sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP")
order: SortOrderType = Field(default=SortOrderType.desc)
@root_validator(pre=True)
def transform(cls, values):
if values.get("order") is not None:
values["order"] = values["order"].upper()
if values.get("filters") is not None:
i = 0
while i < len(values["filters"]):
if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0:
del values["filters"][i]
else:
i += 1
for i in values["filters"]:
if i.get("type") == LiveFilterType.platform.value:
i["type"] = LiveFilterType.user_device_type.value
if values.get("sort") is not None:
if values["sort"].lower() == "startts":
values["sort"] = "TIMESTAMP"
return values
class Config:
alias_generator = attribute_to_camel_case
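An illustrative input/output pair for the pre-validator above (the values are made up): filters with empty values are dropped, PLATFORM is rewritten to USERDEVICETYPE, "startTs" is mapped to the TIMESTAMP sort key, and order is upper-cased.

raw = {
    "filters": [
        {"value": [], "type": "USEROS"},             # dropped: empty value
        {"value": ["desktop"], "type": "PLATFORM"},  # type -> "USERDEVICETYPE"
    ],
    "sort": "startTs",                               # -> "TIMESTAMP"
    "order": "asc",                                  # -> "ASC"
}
# LiveSessionsSearchPayloadSchema(**raw) would keep only the second filter.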

View file

@ -45,7 +45,8 @@ pg_password=asayerPostgres
pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=45
pg_minconn=20
pg_maxconn=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
put_S3_TTL=20
@ -53,6 +54,6 @@ sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://127.0.0.1:9000/
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
stage=default-ee
version_number=1.0.0

ee/api/.gitignore
View file

@ -207,7 +207,6 @@ Pipfile
/chalicelib/core/mobile.py
/chalicelib/core/sessions.py
/chalicelib/core/sessions_assignments.py
/chalicelib/core/sessions_favorite_viewed.py
/chalicelib/core/sessions_metas.py
/chalicelib/core/sessions_mobs.py
/chalicelib/core/significance.py
@ -215,7 +214,6 @@ Pipfile
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
/chalicelib/core/weekly_report.py
/chalicelib/saml
/chalicelib/utils/html/
/chalicelib/utils/__init__.py

View file

@ -1,21 +1,8 @@
FROM python:3.9.10-slim
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/*
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env
ENV APP_NAME chalice
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/* && \
cd sourcemap-reader && \
npm install
RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/*
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
@ -23,5 +10,23 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/*
WORKDIR /work_tmp
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
COPY sourcemap-reader/*.json /work_tmp/
RUN cd /work_tmp && npm install
WORKDIR /work
COPY . .
RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh

View file

@ -1,13 +1,10 @@
FROM python:3.9.10-slim
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/*
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENV pg_minconn 2
ENV APP_NAME alerts
ENV pg_minconn 2
ENV pg_maxconn 10
# Add Tini
# Startup daemon
@ -16,5 +13,13 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
WORKDIR /work
COPY . .
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh

View file

@ -16,7 +16,7 @@ from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.subs import dashboard, insights, metrics, v1_api_ee
app = FastAPI()
app = FastAPI(root_path="/api")
@app.middleware('http')

View file

@ -52,7 +52,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None):
key=config("jwt_secret"),
algorithm=config("jwt_algorithm")
)
return token.decode("utf-8")
return token
def api_key_authorizer(token):
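The dropped .decode("utf-8") tracks the PyJWT upgrade in requirements.txt: jwt.encode returned bytes in 1.x but returns str from 2.0 on. A small check, assuming HS256 and a throwaway secret:

import jwt  # pyjwt >= 2.0 (pinned here as pyjwt==2.4.0)

token = jwt.encode({"sub": "user-1"}, key="secret", algorithm="HS256")
assert isinstance(token, str)  # was bytes on pyjwt 1.x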

View file

@ -83,7 +83,7 @@ def __rearrange_chart_details(start_at, end_at, density, chart):
for i in range(len(chart)):
chart[i] = {"timestamp": chart[i][0], "count": chart[i][1]}
chart = metrics.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density,
neutral={"count": 0})
neutral={"count": 0})
return chart
@ -466,10 +466,9 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
empty_response = {'total': 0,
'errors': []
}
platform = None
for f in data.filters:
@ -585,7 +584,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
return {"count": total}
if total == 0:
rows = []
@ -633,10 +632,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
"data": {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
@ -790,8 +787,8 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo
for i in range(len(r["chart"])):
r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
r["chart"] = metrics.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
end_time=data.endDate,
density=data.density, neutral={"count": 0})
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1

View file

@ -1,27 +1,12 @@
from decouple import config
from chalicelib.core import unlock
from chalicelib.utils import pg_client
EDITION = 'ee'
def get_status(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id}))
r = cur.fetchone()
license = unlock.get_license()
return {
"hasActivePlan": unlock.is_valid(),
"current": {
"edition": r.get("edition", "").lower(),
"versionNumber": r.get("version_number", ""),
"license": license[0:2] + "*" * (len(license) - 4) + license[-2:],
"expirationDate": unlock.get_expiration_date(),
"teamMember": config("numberOfSeats", cast=int, default=0)
},
"count": {
"teamMember": r.get("t_users"),
"projects": r.get("t_projects"),
"capturedSessions": r.get("t_sessions")
}
"edition": EDITION,
"expirationDate": unlock.get_expiration_date()
}

View file

@ -943,11 +943,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
results = {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})}
helper.__time_value(results)
return results
def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1044,11 +1046,11 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS avg
ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY pages.user_country
ORDER BY avg,pages.user_country;"""
ORDER BY value, pages.user_country;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
@ -1057,7 +1059,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"avg": avg, "chart": helper.list_to_camel_case(rows)}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1088,11 +1090,12 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
results = {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})}
helper.__time_value(results)
return results
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1130,7 +1133,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
result = {
"avg": avg,
"value": avg,
"total": sum(r["count"] for r in rows),
"chart": [],
"percentiles": [{
@ -1139,7 +1142,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
quantiles[0]["values"][i] if quantiles[0]["values"][i] is not None and not math.isnan(
quantiles[0]["values"][i]) else 0)} for i, v in enumerate(quantiles_keys)
],
"extremeValues": [{"count": 0}]
"extremeValues": [{"count": 0}],
"unit": schemas.TemplatePredefinedUnits.millisecond
}
if len(rows) > 0:
rows = helper.list_to_camel_case(rows)
@ -1288,10 +1292,11 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp, density=density,
neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
results = {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp, density=density,
neutral={"value": 0})}
helper.__time_value(results)
return results
def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1456,7 +1461,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(sessions.session_id) AS count
COUNT(sessions.session_id) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1510,8 +1515,9 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
result = {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"count": 0}),
"browsers": browsers}
neutral={"value": 0}),
"browsers": browsers,
"unit": schemas.TemplatePredefinedUnits.count}
return result
@ -1657,7 +1663,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url_host AS domain,
COALESCE(avgOrNull(resources.duration),0) AS avg
COALESCE(avgOrNull(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.url_host
@ -1671,7 +1677,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"avg": avg, "partition": rows}
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2102,7 +2108,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2179,7 +2185,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2255,7 +2261,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2334,7 +2340,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2395,7 +2401,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2529,7 +2535,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2608,7 +2614,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
end_time=endTimestamp,
density=density, neutral={"value": 0})
results["chart"] = rows
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return helper.dict_to_camel_case(results)
@ -2684,7 +2690,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return helper.dict_to_camel_case(results)
@ -2726,7 +2732,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return results
@ -2768,7 +2774,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return helper.dict_to_camel_case(results)
@ -2810,5 +2816,5 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
helper.__time_value(results)
return helper.dict_to_camel_case(results)
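After these changes the ClickHouse widgets converge on one payload shape, sketched below with made-up numbers: a scalar "value", a chart densified by __complete_missing_steps, and a display "unit" chosen by helper.__time_value.

example_widget = {
    "value": 1.5,                 # average, rescaled by __time_value
    "unit": "s",                  # a TemplatePredefinedUnits member
    "chart": [
        {"timestamp": 1654041600000, "value": 0.9},
        {"timestamp": 1654128000000, "value": 2.1},
    ],
}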

View file

@ -26,6 +26,25 @@ def get_all(tenant_id, user_id):
return rows
def get_all_count(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT COUNT(notifications.*)
FROM public.notifications
LEFT JOIN (SELECT notification_id
FROM public.user_viewed_notifications
WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
WHERE (notifications.tenant_id =%(tenant_id)s
OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""",
{"tenant_id": tenant_id, "user_id": user_id})
)
row = cur.fetchone()
return row["count"]
def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
return False

View file

@ -1,16 +1,22 @@
from chalicelib.utils import helper
from chalicelib.utils import ch_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
def get_by_session_id(session_id, project_id):
def get_by_session_id(session_id, project_id, start_ts, duration):
with ch_client.ClickHouseClient() as ch:
delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000
ch_query = """\
SELECT
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s) AND project_id=%(project_id)s;"""
params = {"session_id": session_id, "project_id": project_id}
WHERE session_id = toUInt64(%(session_id)s)
AND project_id=%(project_id)s
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
rows = ch.execute(query=ch_query, params=params)
results = []
for r in rows:

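The window arithmetic above, spelled out with the default events_ts_delta of 5 minutes (the values below are placeholders): resources are only scanned between session start minus the delta and session end plus the delta, which lets ClickHouse prune by datetime instead of scanning every row for a session_id.

delta = 5 * 60 * 1000                      # events_ts_delta default, in ms
start_ts, duration = 1_654_041_600_000, 120_000
res_start_ts = start_ts - delta            # 5 min before session start
res_end_ts = start_ts + duration + delta   # 5 min after session end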
View file

@ -0,0 +1,74 @@
from chalicelib.core import sessions
from chalicelib.utils import pg_client, s3_extra
from decouple import config
def add_favorite_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s);""",
{"userId": user_id, "sessionId": session_id})
)
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
include_fav_viewed=True)
def remove_favorite_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
AND session_id = %(sessionId)s;""",
{"userId": user_id, "sessionId": session_id})
)
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
include_fav_viewed=True)
def add_viewed_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.user_viewed_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
def favorite_session(project_id, user_id, session_id):
if favorite_session_exists(user_id=user_id, session_id=session_id):
s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_D_VALUE', default='default'))
s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_D_VALUE', default='default'))
return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_L_VALUE', default='vault'))
s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_L_VALUE', default='vault'))
return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
def view_session(project_id, user_id, session_id):
return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id)
def favorite_session_exists(user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT
session_id
FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
AND session_id = %(sessionId)s""",
{"userId": user_id, "sessionId": session_id})
)
r = cur.fetchone()
return r is not None
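A hedged sketch of the retention toggle driving favorite_session above: both the session mob file (the session id) and its events file (the id plus "e") are re-tagged so an S3 lifecycle rule either keeps them (RETENTION_L_VALUE, 'vault') or lets them expire (RETENTION_D_VALUE, 'default'). The tag key and bucket handling below are illustrative assumptions about what s3_extra.tag_file wraps.

import boto3

def tag_file(bucket, key, tag_value, tag_key="retention"):
    # Replaces the object's tag set; a lifecycle rule filters on this tag.
    boto3.client("s3").put_object_tagging(
        Bucket=bucket, Key=key,
        Tagging={"TagSet": [{"Key": tag_key, "Value": tag_value}]})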

View file

@ -64,8 +64,8 @@ def create_step1(data: schemas.UserSignupSchema):
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()})}
query = """\
WITH t AS (
INSERT INTO public.tenants (name, version_number, edition)
VALUES (%(companyName)s, (SELECT openreplay_version()), 'ee')
INSERT INTO public.tenants (name, version_number)
VALUES (%(companyName)s, (SELECT openreplay_version()))
RETURNING tenant_id, api_key
),
r AS (
@ -80,8 +80,8 @@ def create_step1(data: schemas.UserSignupSchema):
RETURNING user_id,email,role,name,role_id
),
au AS (
INSERT INTO public.basic_authentication (user_id, password, generated_password)
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE)
INSERT INTO public.basic_authentication (user_id, password)
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)))
)
INSERT INTO public.projects (tenant_id, name, active)
VALUES ((SELECT t.tenant_id FROM t), %(projectName)s, TRUE)

View file

@ -1,13 +1,15 @@
from chalicelib.utils import pg_client
from chalicelib.core import license
import requests
def process_data(data, edition='fos'):
def process_data(data):
return {
'edition': edition,
'edition': license.EDITION,
'tracking': data["opt_out"],
'version': data["version_number"],
'user_id': data["user_id"],
'user_id': data["tenant_key"],
'tenant_key': data["tenant_key"],
'owner_email': None if data["opt_out"] else data["email"],
'organization_name': None if data["opt_out"] else data["name"],
'users_count': data["t_users"],
@ -50,21 +52,22 @@ def compute():
FROM public.tenants
) AS all_tenants
WHERE tenants.tenant_id = all_tenants.tenant_id
RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out,
RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
(SELECT openreplay_version()) AS version_number,
(SELECT email FROM public.users WHERE role = 'owner' AND users.tenant_id=tenants.tenant_id LIMIT 1);"""
)
data = cur.fetchall()
requests.post('https://api.openreplay.com/os/telemetry',
json={"stats": [process_data(d, edition='ee') for d in data]})
json={"stats": [process_data(d) for d in data]})
def new_client(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""SELECT *,
(SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s) AS email
(SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s AND role='owner' LIMIT 1) AS email
FROM public.tenants
WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id}))
WHERE tenant_id=%(tenant_id)s
LIMIT 1;""", {"tenant_id": tenant_id}))
data = cur.fetchone()
requests.post('https://api.openreplay.com/os/signup', json=process_data(data, edition='ee'))
requests.post('https://api.openreplay.com/os/signup', json=process_data(data))

View file

@ -1,4 +1,4 @@
from chalicelib.core import users
from chalicelib.core import users, license
from chalicelib.utils import helper
from chalicelib.utils import pg_client
@ -12,13 +12,13 @@ def get_by_tenant_key(tenant_key):
t.name,
t.api_key,
t.created_at,
t.edition,
'{license.EDITION}' AS edition,
t.version_number,
t.opt_out
FROM public.tenants AS t
WHERE t.user_id = %(user_id)s AND t.deleted_at ISNULL
WHERE t.tenant_key = %(tenant_key)s AND t.deleted_at ISNULL
LIMIT 1;""",
{"user_id": tenant_key})
{"tenant_key": tenant_key})
)
return helper.dict_to_camel_case(cur.fetchone())
@ -32,10 +32,10 @@ def get_by_tenant_id(tenant_id):
t.name,
t.api_key,
t.created_at,
t.edition,
'{license.EDITION}' AS edition,
t.version_number,
t.opt_out,
t.user_id AS tenant_key
t.tenant_key
FROM public.tenants AS t
WHERE t.tenant_id = %(tenantId)s AND t.deleted_at ISNULL
LIMIT 1;""",
@ -90,7 +90,7 @@ def update(tenant_id, user_id, data):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"error": "unauthorized"}
return {"errors": ["unauthorized, needs admin or owner"]}
if "name" not in data and "optOut" not in data:
return {"errors": ["please provide 'name' of 'optOut' attribute for update"]}
changes = {}

View file

@ -9,7 +9,9 @@ from pydantic import BaseModel, Field
from starlette.background import BackgroundTask
import app as main_app
from chalicelib.utils import pg_client
import schemas
import schemas_ee
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from schemas import CurrentContext
@ -151,6 +153,53 @@ async def process_traces_queue():
await write_traces_batch(traces)
def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema):
with pg_client.PostgresClient() as cur:
conditions = ["traces.tenant_id=%(tenant_id)s",
"traces.created_at>=%(startDate)s",
"traces.created_at<=%(endDate)s"]
params = {"tenant_id": tenant_id,
"startDate": data.startDate,
"endDate": data.endDate,
"p_start": (data.page - 1) * data.limit,
"p_end": data.page * data.limit,
**data.dict()}
if data.user_id is not None:
conditions.append("user_id=%(user_id)s")
if data.action is not None:
conditions.append("action=%(action)s")
if data.query is not None and len(data.query) > 0:
conditions.append("users.name ILIKE %(query)s")
conditions.append("users.tenant_id = %(tenant_id)s")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
cur.execute(
cur.mogrify(
f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(full_traces ORDER BY rn)
FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions
FROM (SELECT traces.*,users.email,users.name AS username,
ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn
FROM traces LEFT JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
ORDER BY traces.created_at {data.order}) AS full_traces;""", params)
)
rows = cur.fetchone()
return helper.dict_to_camel_case(rows)
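The query above pages entirely in SQL: ROW_NUMBER() assigns rn in the requested order, JSONB_AGG ... FILTER keeps only rows with p_start < rn <= p_end, and COUNT(*) still reflects the full match set. The window arithmetic, restated as a standalone sketch:

def page_window(page: int, limit: int):
    # page is 1-based; rows kept satisfy p_start < rn <= p_end
    p_start = (page - 1) * limit
    p_end = page * limit
    return p_start, p_end

assert page_window(1, 200) == (0, 200)   # rn 1..200
assert page_window(3, 50) == (100, 150)  # rn 101..150
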
def get_available_actions(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(
f"""SELECT DISTINCT action
FROM traces
WHERE tenant_id=%(tenant_id)s
ORDER BY 1""",
{"tenant_id": tenant_id}))
rows = cur.fetchall()
return [r["action"] for r in rows]
cron_jobs = [
{"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60),
"misfire_grace_time": 20}

View file

@ -4,6 +4,8 @@ import secrets
from decouple import config
from fastapi import BackgroundTasks
import schemas
import schemas_ee
from chalicelib.core import authorizers, metadata, projects, roles
from chalicelib.core import tenants, assist
from chalicelib.utils import dev, SAML2_helper
@ -25,10 +27,10 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal
(SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
(SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
(SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1))))
RETURNING tenant_id,user_id,email,role,name,appearance, role_id
RETURNING tenant_id,user_id,email,role,name, role_id
),
au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now()))
au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now()))
RETURNING invitation_token
)
SELECT u.user_id AS id,
@ -36,7 +38,6 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal
u.email,
u.role,
u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -74,7 +75,6 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own
email,
role,
name,
TRUE AS change_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -88,8 +88,7 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own
result = cur.fetchone()
query = cur.mogrify("""\
UPDATE public.basic_authentication
SET generated_password = TRUE,
invitation_token = %(invitation_token)s,
SET invitation_token = %(invitation_token)s,
invited_at = timezone('utc'::text, now()),
change_pwd_expire_at = NULL,
change_pwd_token = NULL
@ -147,10 +146,7 @@ def update(tenant_id, user_id, changes):
else:
sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s")
else:
if key == "appearance":
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
changes["appearance"] = json.dumps(changes[key])
elif helper.key_to_snake_case(key) == "role_id":
if helper.key_to_snake_case(key) == "role_id":
sub_query_users.append("""role_id=(SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
(SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
(SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))""")
@ -171,11 +167,9 @@ def update(tenant_id, user_id, changes):
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
users.role_id;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)
@ -192,11 +186,9 @@ def update(tenant_id, user_id, changes):
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
users.role_id;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)
@ -272,12 +264,10 @@ def get(user_id, tenant_id):
users.user_id AS id,
email,
role,
users.name,
basic_authentication.generated_password,
users.name,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance,
api_key,
origin,
role_id,
@ -296,7 +286,7 @@ def get(user_id, tenant_id):
{"userId": user_id, "tenant_id": tenant_id})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r, ignore_keys=["appearance"])
return helper.dict_to_camel_case(r)
def generate_new_api_key(user_id):
@ -315,45 +305,47 @@ def generate_new_api_key(user_id):
return helper.dict_to_camel_case(r)
def edit(user_id_to_update, tenant_id, changes, editor_id):
ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"]
def edit(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id):
user = get(user_id=user_id_to_update, tenant_id=tenant_id)
if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]:
if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]:
admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]}
_changes = {}
if editor_id == user_id_to_update:
if user["superAdmin"]:
changes.pop("admin")
elif user["admin"] != changes["admin"]:
return {"errors": ["cannot change your own role"]}
if changes.admin is not None:
if user["superAdmin"]:
changes.admin = None
elif changes.admin != user["admin"]:
return {"errors": ["cannot change your own role"]}
if changes.roleId is not None:
if user["superAdmin"]:
changes.roleId = None
elif changes.roleId != user["roleId"]:
return {"errors": ["cannot change your own role"]}
keys = list(changes.keys())
for k in keys:
if k not in ALLOW_EDIT or changes[k] is None:
changes.pop(k)
keys = list(changes.keys())
if changes.email is not None and changes.email != user["email"]:
if email_exists(changes.email):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes.email) is not None:
return {"errors": ["email previously deleted."]}
_changes["email"] = changes.email
if len(keys) > 0:
if "email" in keys and changes["email"] != user["email"]:
if email_exists(changes["email"]):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes["email"]) is not None:
return {"errors": ["email previously deleted."]}
if "admin" in keys:
changes["role"] = "admin" if changes.pop("admin") else "member"
if len(changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes)
if changes.name is not None and len(changes.name) > 0:
_changes["name"] = changes.name
return {"data": updated_user}
if changes.admin is not None:
_changes["role"] = "admin" if changes.admin else "member"
if changes.roleId is not None:
_changes["roleId"] = changes.roleId
if len(_changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes)
return {"data": updated_user}
return {"data": user}
def edit_appearance(user_id, tenant_id, changes):
updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
return {"data": updated_user}
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -363,8 +355,7 @@ def get_by_email_only(email):
users.tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.name,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -389,8 +380,7 @@ def get_by_email_reset(email, reset_token):
users.tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.name,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
@ -414,7 +404,7 @@ def get_members(tenant_id):
users.email,
users.role,
users.name,
basic_authentication.generated_password,
users.created_at,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -435,6 +425,7 @@ def get_members(tenant_id):
if len(r):
r = helper.list_to_camel_case(r)
for u in r:
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
if u["invitationToken"]:
u["invitationLink"] = __get_invitation_link(u.pop("invitationToken"))
else:
@ -611,13 +602,13 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
{"userId": user_id, "tenant_id": tenant_id})
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
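The re-indented predicate reads as: a token is live if its iat matches the stored jwt_iat to within one second, or, for plugin tokens (aud starting with "plugin"), if it was issued at or after the last credential change. Restated as a standalone sketch (unit conventions inferred from the // 1000 divisions above):

from typing import Optional

def token_is_live(jwt_iat: int, stored_iat_ms: Optional[int], jwt_aud: str,
                  changed_at_ms: Optional[int]) -> bool:
    # stored values arrive from Postgres in milliseconds; JWT iat is in seconds
    if stored_iat_ms is None:
        return False
    if abs(jwt_iat - stored_iat_ms // 1000) <= 1:
        return True
    return jwt_aud.startswith("plugin") and (
            changed_at_ms is None or jwt_iat >= changed_at_ms // 1000)
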
def change_jwt_iat(user_id):
@ -636,15 +627,13 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT
users.user_id AS id,
users.user_id,
users.tenant_id,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
users.origin,
users.role_id,
roles.name AS role_name,
@ -676,10 +665,10 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
if r is not None:
if for_change_password:
return True
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
jwt_iat = change_jwt_iat(r['id'])
r = helper.dict_to_camel_case(r)
jwt_iat = change_jwt_iat(r['userId'])
return {
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'],
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(jwt_iat),
aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
"email": email,
@ -692,15 +681,13 @@ def authenticate_sso(email, internal_id, exp=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT
users.user_id AS id,
users.user_id,
users.tenant_id,
users.role,
users.name,
False AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
origin,
role_id
FROM public.users AS users
@ -711,9 +698,9 @@ def authenticate_sso(email, internal_id, exp=None):
r = cur.fetchone()
if r is not None:
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id']))
return authorizers.generate_jwt(r['id'], r['tenantId'],
r = helper.dict_to_camel_case(r)
jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId']))
return authorizers.generate_jwt(r['userId'], r['tenantId'],
jwt_iat, aud=f"front:{helper.get_stage_name()}",
exp=(exp + jwt_iat // 1000) if exp is not None else None)
return None
@ -738,11 +725,9 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=
u.email,
u.role,
u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
u.appearance,
origin
FROM u;""",
{"tenant_id": tenant_id, "email": email, "internal_id": internal_id,
@ -772,7 +757,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
created_at= default,
api_key= default,
jwt_iat= NULL,
appearance= default,
weekly_report= default
WHERE user_id = %(user_id)s
RETURNING *
@ -780,7 +764,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
au AS (
UPDATE public.basic_authentication
SET password= default,
generated_password= default,
invitation_token= default,
invited_at= default,
change_pwd_token= default,
@ -793,11 +776,9 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
u.email,
u.role,
u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
u.appearance,
origin
FROM u;""",
{"tenant_id": tenant_id, "email": email, "internal_id": internal_id,

View file

@ -0,0 +1,245 @@
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import get_issue_title
LOWEST_BAR_VALUE = 3
def get_config(user_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT users.weekly_report
FROM public.users
WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s
LIMIT 1;""", {"user_id": user_id}))
result = cur.fetchone()
return helper.dict_to_camel_case(result)
def edit_config(user_id, weekly_report):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
UPDATE public.users
SET weekly_report= %(weekly_report)s
WHERE users.deleted_at ISNULL
AND users.user_id=%(user_id)s
RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report}))
result = cur.fetchone()
return helper.dict_to_camel_case(result)
def cron():
if not helper.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
return
with pg_client.PostgresClient(long_query=True) as cur:
params = {"tomorrow": TimeUTC.midnight(delta_days=1),
"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)}
cur.execute(cur.mogrify("""\
SELECT project_id,
name AS project_name,
users.emails AS emails,
TO_CHAR(DATE_TRUNC('day', now()) - INTERVAL '1 week', 'Mon. DDth, YYYY') AS period_start,
TO_CHAR(DATE_TRUNC('day', now()), 'Mon. DDth, YYYY') AS period_end,
COALESCE(week_0_issues.count, 0) AS this_week_issues_count,
COALESCE(week_1_issues.count, 0) AS past_week_issues_count,
COALESCE(month_1_issues.count, 0) AS past_month_issues_count
FROM (SELECT tenant_id, project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects
INNER JOIN LATERAL (
SELECT sessions.project_id
FROM public.sessions
WHERE sessions.project_id = projects.project_id
AND start_ts >= %(3_days_ago)s
AND start_ts < %(tomorrow)s
LIMIT 1) AS recently_active USING (project_id)
INNER JOIN LATERAL (
SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails
FROM public.users
WHERE users.tenant_id = projects.tenant_id
AND users.deleted_at ISNULL
AND users.weekly_report
) AS users ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp >= %(1_week_ago)s
AND issues.timestamp < %(tomorrow)s
) AS week_0_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(2_week_ago)s
) AS week_1_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(5_week_ago)s
) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall()
emails_to_send = []
for p in projects_data:
params["project_id"] = p["project_id"]
print(f"checking {p['project_name']} : {p['project_id']}")
if len(p["emails"]) == 0 \
or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0:
print('ignore')
continue
print("valid")
p["past_week_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
p["past_month_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1)
cur.execute(cur.mogrify("""
SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
(
SELECT COUNT(*)
FROM events_common.issues INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s
AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT
AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT
) AS issues_count
FROM generate_series(
DATE_TRUNC('day', now()) - INTERVAL '7 days',
DATE_TRUNC('day', now()) - INTERVAL '1 day',
'1 day'::INTERVAL
) AS timestamp_i
ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall()
max_days_partition = max(x['issues_count'] for x in days_partition)
for d in days_partition:
if max_days_partition <= 0:
d["value"] = LOWEST_BAR_VALUE
else:
d["value"] = d["issues_count"] * 100 / max_days_partition
d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify("""\
SELECT type, COUNT(*) AS count
FROM events_common.issues INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s
AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT
GROUP BY type
ORDER BY count DESC, type
LIMIT 4;""", params))
issues_by_type = cur.fetchall()
max_issues_by_type = sum(i["count"] for i in issues_by_type)
for i in issues_by_type:
i["type"] = get_issue_title(i["type"])
if max_issues_by_type <= 0:
i["value"] = LOWEST_BAR_VALUE
else:
i["value"] = i["count"] * 100 / max_issues_by_type
cur.execute(cur.mogrify("""\
SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
COALESCE((SELECT JSONB_AGG(sub)
FROM (
SELECT type, COUNT(*) AS count
FROM events_common.issues
INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s
AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT
AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT
GROUP BY type
ORDER BY count
) AS sub), '[]'::JSONB) AS partition
FROM generate_series(
DATE_TRUNC('day', now()) - INTERVAL '7 days',
DATE_TRUNC('day', now()) - INTERVAL '1 day',
'1 day'::INTERVAL
) AS timestamp_i
GROUP BY timestamp_i
ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall()
for i in issues_breakdown_by_day:
i["sum"] = sum(x["count"] for x in i["partition"])
for j in i["partition"]:
j["type"] = get_issue_title(j["type"])
max_days_partition = max(i["sum"] for i in issues_breakdown_by_day)
for i in issues_breakdown_by_day:
for j in i["partition"]:
if max_days_partition <= 0:
j["value"] = LOWEST_BAR_VALUE
else:
j["value"] = j["count"] * 100 / max_days_partition
j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify("""
SELECT type,
COUNT(*) AS issue_count,
COUNT(DISTINCT session_id) AS sessions_count,
(SELECT COUNT(DISTINCT sessions.session_id)
FROM public.sessions
INNER JOIN events_common.issues AS sci USING (session_id)
INNER JOIN public.issues AS si USING (issue_id)
WHERE si.project_id = %(project_id)s
AND sessions.project_id = %(project_id)s
AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 weeks') * 1000)::BIGINT
AND si.type = mi.type
AND sessions.duration IS NOT NULL
) AS last_week_sessions_count,
(SELECT COUNT(DISTINCT sci.session_id)
FROM public.sessions
INNER JOIN events_common.issues AS sci USING (session_id)
INNER JOIN public.issues AS si USING (issue_id)
WHERE si.project_id = %(project_id)s
AND sessions.project_id = %(project_id)s
AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 weeks') * 1000)::BIGINT
AND si.type = mi.type
AND sessions.duration IS NOT NULL
) AS last_month_sessions_count
FROM events_common.issues
INNER JOIN public.issues AS mi USING (issue_id)
INNER JOIN public.sessions USING (session_id)
WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
GROUP BY type
ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall()
if len(issues_breakdown_list) > 4:
others = {"type": "Others",
"sessions_count": sum(i["sessions_count"] for i in issues_breakdown_list[4:]),
"issue_count": sum(i["issue_count"] for i in issues_breakdown_list[4:]),
"last_week_sessions_count": sum(
i["last_week_sessions_count"] for i in issues_breakdown_list[4:]),
"last_month_sessions_count": sum(
i["last_month_sessions_count"] for i in issues_breakdown_list[4:])}
issues_breakdown_list = issues_breakdown_list[:4]
issues_breakdown_list.append(others)
for i in issues_breakdown_list:
i["type"] = get_issue_title(i["type"])
i["last_week_sessions_evolution"] = helper.__decimal_limit(
helper.__progress(i["sessions_count"], i["last_week_sessions_count"]), 1)
i["last_month_sessions_evolution"] = helper.__decimal_limit(
helper.__progress(i["sessions_count"], i["last_month_sessions_count"]), 1)
i["sessions_count"] = f'{i["sessions_count"]:,}'
keep_types = [i["type"] for i in issues_breakdown_list]
for i in issues_breakdown_by_day:
keep = []
for j in i["partition"]:
if j["type"] in keep_types:
keep.append(j)
i["partition"] = keep
emails_to_send.append({"email": p.pop("emails"),
"data": {
**p,
"days_partition": days_partition,
"issues_by_type": issues_by_type,
"issues_breakdown_by_day": issues_breakdown_by_day,
"issues_breakdown_list": issues_breakdown_list
}})
print(f">>> Sending weekly report to {len(emails_to_send)} email-group")
for e in emails_to_send:
email_helper.weekly_report2(recipients=e["email"], data=e["data"])
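The *_evolution fields feed the up/down arrows in the report email. helper.__progress and helper.__decimal_limit are not shown in this diff; assuming __progress is a percent change against the previous period and __decimal_limit truncates to a fixed number of decimals, the evolution numbers behave roughly like:

def progress(current: int, previous: int) -> float:
    # assumed semantics: percentage change vs. the previous period
    if previous == 0:
        return 0.0 if current == 0 else 100.0
    return (current - previous) * 100 / previous

def decimal_limit(value: float, digits: int) -> float:
    factor = 10 ** digits
    return int(value * factor) / factor

assert decimal_limit(progress(30, 20), 1) == 50.0  # +50.0% week over week
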

View file

@ -26,7 +26,7 @@ class ClickHouseClient:
return self.__client
def format(self, query, params):
return self.__client.substitute_params(query, params)
return self.__client.substitute_params(query, params, self.__client.connection.context)
def __exit__(self, *args):
pass
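This one-line fix threads the connection context through because clickhouse-driver 0.2.4 (upgraded below) takes it as a third argument to Client.substitute_params. Assuming the class defines __enter__ returning itself (the __exit__ above suggests context-manager use), callers are unchanged:

# hypothetical usage sketch
with ClickHouseClient() as ch:
    q = ch.format("SELECT count() FROM sessions WHERE project_id=%(pid)s",
                  {"pid": 42})
    # q is a fully substituted SQL string, safe to log or execute
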

View file

@ -0,0 +1,30 @@
from chalicelib.utils.s3 import client
from decouple import config
def tag_file(session_id, tag_key='retention', tag_value='vault'):
return client.put_object_tagging(
Bucket=config("sessions_bucket"),
Key=session_id,
# VersionId='string',
# ContentMD5='string',
# ChecksumAlgorithm='CRC32'|'CRC32C'|'SHA1'|'SHA256',
Tagging={
'TagSet': [
{
'Key': tag_key,
'Value': tag_value
},
]
},
# ExpectedBucketOwner='string',
# RequestPayer='requester'
)
# generate_presigned_url(
# 'put_object',
# Params={
# 'Bucket': bucket,
# 'Key': key
# },
# ExpiresIn=expires_in
# )
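put_object_tagging replaces the object's whole tag set, so the retention=vault pair marks a session recording for the vault lifecycle without rewriting the object itself. A hypothetical call, assuming session mobs are keyed by session_id in the sessions bucket:

from chalicelib.utils import vault  # assumed module name for the file above

# An S3 lifecycle rule keyed on retention=vault can then skip expiring this object
vault.tag_file(session_id="5379620493984747")
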

View file

@ -31,7 +31,6 @@ rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/sessions.py
rm -rf ./chalicelib/core/sessions_assignments.py
rm -rf ./chalicelib/core/sessions_favorite_viewed.py
rm -rf ./chalicelib/core/sessions_metas.py
rm -rf ./chalicelib/core/sessions_mobs.py
rm -rf ./chalicelib/core/significance.py
@ -39,7 +38,6 @@ rm -rf ./chalicelib/core/slack.py
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
rm -rf ./chalicelib/core/weekly_report.py
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/html/
rm -rf ./chalicelib/utils/__init__.py

View file

@ -1,16 +1,16 @@
requests==2.26.0
urllib3==1.26.6
boto3==1.16.1
pyjwt==1.7.1
psycopg2-binary==2.8.6
elasticsearch==7.9.1
jira==3.1.1
clickhouse-driver==0.2.2
requests==2.28.0
urllib3==1.26.9
boto3==1.24.11
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.2.3
jira==3.2.0
clickhouse-driver==0.2.4
python3-saml==1.12.0
fastapi==0.75.0
fastapi==0.78.0
python-multipart==0.0.5
uvicorn[standard]==0.17.5
uvicorn[standard]==0.17.6
python-decouple==3.6
pydantic[email]==1.8.2
apscheduler==3.8.1
pydantic[email]==1.9.1
apscheduler==3.9.1
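The pyjwt 1.7.1 -> 2.4.0 bump is the breaking one in this list: in PyJWT 2.x, jwt.encode returns a str (not bytes) and jwt.decode requires an explicit algorithms list. Any authorizer code doing roughly the following needed checking:

import jwt

token = jwt.encode({"userId": 1, "aud": "front:default"}, "secret",
                   algorithm="HS256")           # str in 2.x, bytes in 1.x
payload = jwt.decode(token, "secret", algorithms=["HS256"],
                     audience="front:default")  # algorithms is mandatory in 2.x
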

View file

@ -1,17 +1,17 @@
from typing import Optional
from decouple import config
from fastapi import Body, Depends, HTTPException, status, BackgroundTasks
from fastapi import Body, Depends, BackgroundTasks
from starlette.responses import RedirectResponse
import schemas
import schemas_ee
from chalicelib.core import integrations_manager
from chalicelib.core import sessions
from chalicelib.core import tenants, users, metadata, projects, license, assist
from chalicelib.core import tenants, users, metadata, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha, SAML2_helper
from chalicelib.utils import SAML2_helper
from chalicelib.utils import helper
from or_dependencies import OR_context
from routers.base import get_routers
@ -24,66 +24,24 @@ def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": SAML2_helper.is_saml2_available(),
"ssoProvider": SAML2_helper.get_saml2_provider(),
"edition": helper.get_edition()}}
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
if "errors" in r:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]
r["iceServers"] = c["iceServers"]
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
"edition": license.EDITION}}
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
t.pop("createdAt")
t["tenantName"] = t.pop("name")
return {
'data': {
**r,
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
},
**t,
**license.get_status(context.tenant_id),
"smtp": helper.has_smtp(),
"saml2": SAML2_helper.is_saml2_available(),
"iceServers": assist.get_ice_servers()
# "iceServers": assist.get_ice_servers()
}
}
@ -187,7 +145,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(),
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)
@ -209,30 +167,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)}
@app.get('/plans', tags=["plan"])
def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": license.get_status(context.tenant_id)
}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/client', tags=['projects'])
def get_client(context: schemas.CurrentContext = Depends(OR_context)):
r = tenants.get_by_tenant_id(context.tenant_id)
if r is not None:
r.pop("createdAt")
return {
'data': r
}
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, user_id=context.user_id)}
@app.get('/limits', tags=['accounts'])
def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': {
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
}
}
}

View file

@ -1,6 +1,7 @@
from chalicelib.core import roles
from chalicelib.core import roles, traces
from chalicelib.core import unlock
from chalicelib.utils import assist_helper
from chalicelib.utils.TimeUTC import TimeUTC
unlock.check()
@ -58,3 +59,16 @@ def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_contex
@app.get('/assist/credentials', tags=["assist"])
def get_assist_credentials():
return {"data": assist_helper.get_full_config()}
@app.post('/trails', tags=["traces", "trails"])
def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': traces.get_all(tenant_id=context.tenant_id, data=data)
}
@app.post('/trails/actions', tags=["traces", "trails"])
def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)):
return {'data': traces.get_available_actions(tenant_id=context.tenant_id)}

View file

@ -1,8 +1,9 @@
from typing import Optional, List
from typing import Optional, List, Literal
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, EmailStr
import schemas
from chalicelib.utils.TimeUTC import TimeUTC
class RolePayloadSchema(BaseModel):
@ -20,5 +21,25 @@ class CreateMemberSchema(schemas.CreateMemberSchema):
roleId: Optional[int] = Field(None)
class EditMemberSchema(schemas.EditMemberSchema):
class EditUserSchema(schemas.EditUserSchema):
roleId: Optional[int] = Field(None)
class EditMemberSchema(EditUserSchema):
name: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
roleId: int = Field(...)
class TrailSearchPayloadSchema(schemas._PaginatedSchema):
limit: int = Field(default=200, gt=0)
startDate: int = Field(default=TimeUTC.now(-7))
endDate: int = Field(default=TimeUTC.now(1))
user_id: Optional[int] = Field(default=None)
query: Optional[str] = Field(default=None)
action: Optional[str] = Field(default=None)
order: Literal["asc", "desc"] = Field(default="desc")
class Config:
alias_generator = schemas.attribute_to_camel_case
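With attribute_to_camel_case as the alias generator, the POST /trails route added earlier accepts camelCase keys, so snake_case fields like user_id are sent as userId. An illustrative request body using only the fields defined above:

payload = {
    "startDate": 1654041600000,  # ms epoch; defaults to now() - 7 days
    "endDate": 1655251200000,    # ms epoch; defaults to now() + 1 day
    "userId": 12,                # aliased from user_id
    "action": "login",
    "query": "jane",             # ILIKE match on users.name in get_all()
    "order": "desc",
    "limit": 50,
    "page": 1,                   # assumed to come from schemas._PaginatedSchema
}
# POST /trails with this JSON body -> {"data": {"count": ..., "sessions": [...]}}
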

View file

@ -0,0 +1,258 @@
ALTER TABLE sessions
DROP COLUMN pages_count;
CREATE TABLE projects_metadata
(
project_id UInt32,
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id)
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS events_s
(
session_id UInt64,
project_id UInt32,
event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8),
datetime DateTime,
label Nullable(String),
hesitation_time Nullable(UInt32),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
message Nullable(String),
error_id Nullable(String),
duration Nullable(UInt16),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)),
container_id Nullable(String),
container_name Nullable(String),
container_src Nullable(String),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String) MATERIALIZED lower(pathFull(url)),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
dom_content_loaded_event_start Nullable(UInt16),
dom_content_loaded_event_end Nullable(UInt16),
load_event_start Nullable(UInt16),
load_event_end Nullable(UInt16),
first_paint Nullable(UInt16),
first_contentful_paint Nullable(UInt16),
speed_index Nullable(UInt16),
visually_complete Nullable(UInt16),
time_to_interactive Nullable(UInt16),
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
minus(response_start, request_start), Null),
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
minus(response_end, request_start), Null),
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
minus(response_end, response_start), Null),
dom_building_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_start, response_end),
minus(dom_content_loaded_event_start, response_end), Null),
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
minus(load_event_end, load_event_start), Null),
min_fps Nullable(UInt8),
avg_fps Nullable(UInt8),
max_fps Nullable(UInt8),
min_cpu Nullable(UInt8),
avg_cpu Nullable(UInt8),
max_cpu Nullable(UInt8),
min_total_js_heap_size Nullable(UInt64),
avg_total_js_heap_size Nullable(UInt64),
max_total_js_heap_size Nullable(UInt64),
min_used_js_heap_size Nullable(UInt64),
avg_used_js_heap_size Nullable(UInt64),
max_used_js_heap_size Nullable(UInt64),
type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)),
header_size Nullable(UInt16),
encoded_body_size Nullable(UInt32),
decoded_body_size Nullable(UInt32),
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
success Nullable(UInt8),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
_timestamp DateTime DEFAULT now()
) ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id)
TTL datetime + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS sessions
(
session_id UInt64,
project_id UInt32,
tracker_version LowCardinality(String),
rev_id LowCardinality(Nullable(String)),
user_uuid UUID,
user_os LowCardinality(String),
user_os_version LowCardinality(Nullable(String)),
user_browser LowCardinality(String),
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String),
user_id Nullable(String),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS autocomplete
(
project_id UInt32 NOT NULL,
type LowCardinality(String) NOT NULL,
value String NOT NULL,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE MATERIALIZED VIEW sessions_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
SETTINGS index_granularity = 512
POPULATE
AS
SELECT *
FROM massive_split.sessions_s
WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)
AND duration > 0;
CREATE MATERIALIZED VIEW events_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT *
FROM massive_split.events_s
WHERE datetime >= now() - INTERVAL 7 DAY;
CREATE MATERIALIZED VIEW sessions_info_l1m_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512
POPULATE
AS
SELECT project_id,
session_id,
datetime,
now() AS _timestamp,
toJSONString(map('project_id', toString(project_id),
'session_id', toString(session_id),
'user_uuid', toString(user_uuid),
'user_id', user_id,
'user_os', user_os,
'user_browser', user_browser,
'user_device', user_device,
--'user_device_type', user_device_type,
--'user_country', user_country,
'start_ts', toString(datetime),
'duration', toString(duration),
'events_count', toString(events_count),
'pages_count', toString(pages_count),
'errors_count', toString(errors_count),
-- 'user_anonymous_id', user_anonymous_id,
-- 'platform', platform,
-- 'issue_score', issue_score,
-- issue_types,
-- favorite,
-- viewed,
'metadata', CAST((arrayFilter(x->isNotNull(x),
arrayMap(
x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[1]),
NULL),
[
[projects_meta.metadata_1,sessions.metadata_1],
[projects_meta.metadata_2,sessions.metadata_2],
[projects_meta.metadata_3,sessions.metadata_3],
[projects_meta.metadata_4,sessions.metadata_4],
[projects_meta.metadata_5,sessions.metadata_5],
[projects_meta.metadata_6,sessions.metadata_6],
[projects_meta.metadata_7,sessions.metadata_7],
[projects_meta.metadata_8,sessions.metadata_8],
[projects_meta.metadata_9,sessions.metadata_9],
[projects_meta.metadata_10,sessions.metadata_10]
])),
arrayFilter(x->isNotNull(x),
arrayMap(
x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[2]),
NULL),
[
[projects_meta.metadata_1,sessions.metadata_1],
[projects_meta.metadata_2,sessions.metadata_2],
[projects_meta.metadata_3,sessions.metadata_3],
[projects_meta.metadata_4,sessions.metadata_4],
[projects_meta.metadata_5,sessions.metadata_5],
[projects_meta.metadata_6,sessions.metadata_6],
[projects_meta.metadata_7,sessions.metadata_7],
[projects_meta.metadata_8,sessions.metadata_8],
[projects_meta.metadata_9,sessions.metadata_9],
[projects_meta.metadata_10,sessions.metadata_10]
]))), 'Map(String,String)')
)) AS info
FROM massive_split.sessions
INNER JOIN projects_metadata USING (project_id)
WHERE datetime >= now() - INTERVAL 1 MONTH
AND isNotNull(duration)
AND duration > 0;
CREATE MATERIALIZED VIEW sessions_info_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
SETTINGS index_granularity = 512
POPULATE
AS
SELECT *
FROM sessions_info_l1m_mv
WHERE datetime >= now() - INTERVAL 7 DAY;
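All of these views are ReplacingMergeTree tables ordered on (project_id, datetime, session_id) with short TTLs, so last-7-day and last-month queries hit a small pre-filtered dataset instead of the base massive_split tables. A hedged sketch of reading one of them from the API side with clickhouse-driver:

from clickhouse_driver import Client

ch = Client(host="localhost")  # assumed connection details
rows = ch.execute(
    "SELECT session_id, duration "
    "FROM sessions_l7d_mv "
    "WHERE project_id = %(project_id)s AND datetime >= now() - INTERVAL 1 DAY",
    {"project_id": 2460})
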

File diff suppressed because it is too large

View file

@ -0,0 +1,983 @@
-- Q1
SELECT session_id
-- FROM massive2.events7
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
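windowFunnel(window)(timestamp, cond1, cond2, ...) returns the length of the longest prefix of conditions matched in order within `window` seconds, so HAVING ... = 2 keeps only sessions where a matching CLICK is later followed by a matching REQUEST. A pure-Python reference of the two-step case, for intuition only:

def window_funnel_2(events, window, cond1, cond2):
    # events: list of (ts, event) sorted by ts; returns the funnel level 0..2
    level, first_hits = 0, []
    for ts, ev in events:
        if cond1(ev):
            level = max(level, 1)
            first_hits.append(ts)
        if cond2(ev) and any(ts - t <= window for t in first_hits):
            return 2
    return level

# GROUP BY session_id HAVING windowFunnel(...) = 2 corresponds to keeping the
# sessions whose event list yields window_funnel_2(...) == 2.
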
-- Q1.1
SELECT session_id
FROM massive2.events7
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q1.2
SELECT session_id
FROM
-- massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
events_l24h_mv
INNER JOIN metadata_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q1.2.1
SELECT session_id
FROM
-- massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- events_l7d_mv AS events_s
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
events_l24h_mv AS events_s
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events_s.project_id = 2460
AND events_s.datetime >= '2022-04-02 00:00:00'
AND events_s.datetime <= '2022-04-03 00:00:00'
-- AND events_s.datetime <= '2022-04-10 00:00:00'
-- AND events_s.datetime <= '2022-05-02 00:00:00'
AND metadata_s.project_id = 2460
AND metadata_s.datetime >= '2022-04-02 00:00:00'
AND metadata_s.datetime <= '2022-04-03 00:00:00'
-- AND metadata_s.datetime <= '2022-04-10 00:00:00'
-- AND metadata_s.datetime <= '2022-05-02 00:00:00'
AND metadata_s.user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q1.3
SELECT session_id
FROM
-- massive_split.events_s
-- events_l7d_mv
events_l24h_mv
INNER JOIN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q1.4
SELECT session_id
FROM (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta
-- INNER JOIN massive_split.events_s USING (session_id)
-- INNER JOIN events_l7d_mv USING (session_id)
INNER JOIN events_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q1.5
SELECT session_id
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive2.events7
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
)
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.1
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
FROM massive2.events7
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.2
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv
-- INNER JOIN metadata_l7d_mv USING (session_id)
FROM events_l24h_mv
INNER JOIN metadata_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.2.1
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv AS events_s
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
FROM events_l24h_mv AS events_s
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events_s.project_id = 2460
AND events_s.datetime >= '2022-04-02 00:00:00'
AND events_s.datetime <= '2022-04-03 00:00:00'
-- AND events_s.datetime <= '2022-04-10 00:00:00'
-- AND events_s.datetime <= '2022-05-02 00:00:00'
AND metadata_s.project_id = 2460
AND metadata_s.datetime >= '2022-04-02 00:00:00'
AND metadata_s.datetime <= '2022-04-03 00:00:00'
-- AND metadata_s.datetime <= '2022-04-10 00:00:00'
-- AND metadata_s.datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.3
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
INNER JOIN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
)
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.4
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
FROM (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta
-- INNER JOIN massive_split.events_s USING (session_id)
-- INNER JOIN events_l7d_mv USING (session_id)
INNER JOIN events_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
)
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q2.5
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'))
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3
SELECT session_id
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.1
SELECT session_id
FROM massive2.events7
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.2
SELECT session_id
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv
-- INNER JOIN metadata_l7d_mv USING (session_id)
FROM events_l24h_mv
INNER JOIN metadata_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.2.1
SELECT session_id
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv AS events_s
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
FROM events_l24h_mv AS events_s
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events_s.project_id = 2460
AND events_s.datetime >= '2022-04-02 00:00:00'
AND events_s.datetime <= '2022-04-03 00:00:00'
-- AND events_s.datetime <= '2022-04-10 00:00:00'
-- AND events_s.datetime <= '2022-05-02 00:00:00'
AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST')
AND metadata_s.project_id = 2460
AND metadata_s.datetime >= '2022-04-02 00:00:00'
AND metadata_s.datetime <= '2022-04-03 00:00:00'
-- AND metadata_s.datetime <= '2022-04-10 00:00:00'
-- AND metadata_s.datetime <= '2022-05-02 00:00:00'
AND metadata_s.user_id = 'uucUZvTpPd'
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.3
SELECT session_id
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
INNER JOIN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.4
SELECT session_id
FROM (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta
-- INNER JOIN massive_split.events_s USING (session_id)
-- INNER JOIN events_l7d_mv USING (session_id)
INNER JOIN events_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q3.5
SELECT session_id
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST'))
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.1
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
FROM massive2.events7
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.2
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv
-- INNER JOIN metadata_l7d_mv USING (session_id)
FROM events_l24h_mv
INNER JOIN metadata_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND user_id = 'uucUZvTpPd')
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.2.1
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv AS events_s
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
FROM events_l24h_mv AS events_s
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events_s.project_id = 2460
AND events_s.datetime >= '2022-04-02 00:00:00'
AND events_s.datetime <= '2022-04-03 00:00:00'
-- AND events_s.datetime <= '2022-04-10 00:00:00'
-- AND events_s.datetime <= '2022-05-02 00:00:00'
AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST')
AND metadata_s.user_id = 'uucUZvTpPd'
AND metadata_s.project_id = 2460
AND metadata_s.datetime >= '2022-04-02 00:00:00'
AND metadata_s.datetime <= '2022-04-03 00:00:00'
-- AND metadata_s.datetime <= '2022-04-10 00:00:00'
-- AND metadata_s.datetime <= '2022-05-02 00:00:00'
)
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.3
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
-- FROM massive_split.events_s
-- FROM events_l7d_mv
FROM events_l24h_mv
INNER JOIN (SELECT DISTINCT session_id
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST'))
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.4
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
FROM (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta
-- INNER JOIN massive_split.events_s USING (session_id)
-- INNER JOIN events_l7d_mv USING (session_id)
INNER JOIN events_l24h_mv USING (session_id)
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST'))
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- Q4.5
SELECT session_id
FROM (SELECT session_id,
datetime,
event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1,
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2
FROM events_l24h_mv
-- FROM events_l7d_mv
-- FROM massive_split.events_s
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND (event_type = 'CLICK' OR event_type = 'REQUEST')
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'))
GROUP BY session_id
HAVING windowFunnel(99999)(datetime, c1, c2) = 2
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id, session_id
FROM massive2.events7 AS events
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.1
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id, session_id
FROM massive2.events7 AS events
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.2
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv AS events
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
FROM events_l24h_mv AS events
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.3
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- INNER JOIN massive_split.metadata_s USING (session_id)
-- FROM events_l7d_mv AS events
-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id)
FROM events_l24h_mv AS events
INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
        AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.4
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- FROM events_l7d_mv AS events
FROM events_l24h_mv AS events
INNER JOIN (SELECT DISTINCT session_id,
user_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.4-A
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- FROM events_l7d_mv AS events
FROM events_l24h_mv AS events
INNER JOIN (SELECT DISTINCT session_id,
user_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
) AS meta USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.5
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id, session_id
FROM (SELECT DISTINCT session_id,
user_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta
-- INNER JOIN massive_split.events_s AS events USING (session_id)
-- INNER JOIN events_l7d_mv AS events USING (session_id)
INNER JOIN events_l24h_mv AS events USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.6
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- FROM events_l7d_mv AS events
FROM events_l24h_mv AS events
INNER JOIN (SELECT DISTINCT session_id,
user_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd') AS meta USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
AND user_id = 'uucUZvTpPd')
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QU1.6-A
SELECT user_id, COUNT(session_id)
FROM (SELECT user_id,
session_id
-- FROM massive_split.events_s AS events
-- FROM events_l7d_mv AS events
FROM events_l24h_mv AS events
INNER JOIN (SELECT DISTINCT session_id,
user_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
) AS meta USING (session_id)
WHERE events.project_id = 2460
AND events.datetime >= '2022-04-02 00:00:00'
AND events.datetime <= '2022-04-03 00:00:00'
-- AND events.datetime <= '2022-04-10 00:00:00'
-- AND events.datetime <= '2022-05-02 00:00:00'
AND session_id IN (SELECT DISTINCT session_id
-- FROM massive_split.metadata_s
-- FROM metadata_l7d_mv
FROM metadata_l24h_mv
WHERE project_id = 2460
AND datetime >= '2022-04-02 00:00:00'
AND datetime <= '2022-04-03 00:00:00'
-- AND datetime <= '2022-04-10 00:00:00'
-- AND datetime <= '2022-05-02 00:00:00'
)
GROUP BY user_id, session_id
HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%',
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2
      ) AS filtered_sessions
GROUP BY user_id
LIMIT 10
SETTINGS
max_threads = 4;
-- QM4:
SELECT timestamp,
groupArray([toString(t.type), toString(t.count)]) AS types
FROM (SELECT toUnixTimestamp(toStartOfInterval(events7.datetime, INTERVAL 37565 second)) * 1000 AS timestamp,
events7.type,
COUNT(events7.session_id) AS count
-- FROM massive_split.events_s AS events7
-- FROM events_l7d_mv AS events7
FROM events_l24h_mv AS events7
WHERE events7.project_id = toUInt32(2460)
AND toStartOfInterval(events7.datetime, INTERVAL 37565 second) >= '2022-04-02 00:00:00'
AND events7.datetime <= '2022-04-03 00:00:00'
-- AND events7.datetime <= '2022-04-10 00:00:00'
-- AND events7.datetime < '2022-05-02 00:00:00'
AND events7.event_type = 'RESOURCE'
GROUP BY timestamp, events7.type
ORDER BY timestamp) AS t
GROUP BY timestamp
SETTINGS
max_threads = 4;
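-- Note on the funnel variants above: ClickHouse's windowFunnel(window)(timestamp,
-- cond_1, ..., cond_n) returns the length of the longest ordered prefix of
-- conditions matched within `window` seconds, so "HAVING windowFunnel(99999)(...) = 2"
-- keeps only sessions where the CLICK step is later followed by the matching
-- REQUEST step. Minimal illustration, kept commented out so it does not affect
-- the benchmark runs:
-- SELECT session_id
-- FROM events_l24h_mv
-- WHERE project_id = 2460
-- GROUP BY session_id
-- HAVING windowFunnel(3600)(datetime, event_type = 'CLICK', event_type = 'REQUEST') = 2;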

View file

@ -16,6 +16,6 @@ CREATE TABLE IF NOT EXISTS clicks
label String,
hesitation_time Nullable(UInt32)
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;
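-- Note: every event table in this changeset switches PARTITION BY from
-- toDate(datetime) to toStartOfWeek(datetime), cutting the partition count
-- roughly 7x under the 1-month TTL. A quick way to inspect the resulting
-- partitions (sketch, swap in the table of interest):
-- SELECT partition, sum(rows) AS total_rows
-- FROM system.parts
-- WHERE active AND table = 'clicks'
-- GROUP BY partition
-- ORDER BY partition;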

View file

@ -17,6 +17,6 @@ CREATE TABLE IF NOT EXISTS customs
payload Nullable(String),
level Enum8('info'=0, 'error'=1) DEFAULT 'info'
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -18,6 +18,6 @@ CREATE TABLE IF NOT EXISTS errors
message String,
error_id String
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -15,6 +15,6 @@ CREATE TABLE IF NOT EXISTS inputs
datetime DateTime,
label String
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -20,7 +20,7 @@ CREATE TABLE IF NOT EXISTS longtasks
container_name String,
container_src String
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -35,6 +35,6 @@ CREATE TABLE IF NOT EXISTS pages
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(load_event_end, load_event_start), minus(load_event_end, load_event_start), Null)
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS performance
avg_used_js_heap_size UInt64,
max_used_js_heap_size UInt64
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -27,6 +27,6 @@ CREATE TABLE IF NOT EXISTS resources
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16)
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -14,13 +14,12 @@ CREATE TABLE IF NOT EXISTS sessions
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String)
) ENGINE = ReplacingMergeTree(duration)
PARTITION BY toDate(datetime)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS sessions_metadata
metadata_9 Nullable(String),
metadata_10 Nullable(String)
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
ORDER BY (session_id)
PARTITION BY toStartOfWeek(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;
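-- Note: besides the weekly partitioning, sessions_metadata also changes its
-- ORDER BY from (session_id) to (project_id, datetime), aligning its sorting
-- key with the other event tables so project/date-range scans stay cheap.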

View file

@ -0,0 +1,192 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.6.1-ee'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS dashboards
    ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT '';
CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at);
CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action);
CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops);
ALTER TABLE users
DROP COLUMN IF EXISTS appearance;
ALTER TABLE basic_authentication
DROP COLUMN IF EXISTS generated_password;
ALTER TABLE tenants
DROP COLUMN IF EXISTS edition;
ALTER TABLE dashboards
ALTER COLUMN user_id DROP NOT NULL;
DO
$$
BEGIN
IF EXISTS(SELECT *
FROM information_schema.columns
WHERE table_name = 'tenants'
and column_name = 'user_id')
THEN
ALTER TABLE tenants
RENAME COLUMN user_id TO tenant_key;
END IF;
END
$$;
COMMIT;
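-- CREATE INDEX CONCURRENTLY cannot run inside a transaction block, and
-- ALTER TYPE ... ADD VALUE has the same restriction on PostgreSQL versions
-- before 12, so both statements below are deliberately placed after COMMIT.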
CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel';
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_fps', 'predefined', 'overview')
ON CONFLICT (predefined_key) DO UPDATE
SET name=excluded.name,
category=excluded.category,
default_config=excluded.default_config,
is_predefined=excluded.is_predefined,
is_template=excluded.is_template,
is_public=excluded.is_public,
metric_type=excluded.metric_type,
view_type=excluded.view_type;
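-- Note: ON CONFLICT (predefined_key) DO UPDATE makes this seed idempotent:
-- re-running it inserts nothing new but re-categorizes the existing predefined
-- metrics in place (here from 'overview' to 'web vitals').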
BEGIN;
DO
$$
BEGIN
IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND
EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL))
THEN
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL;
WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter)
SELECT project_id, user_id, name, 'funnel', is_public, filter
FROM funnels
WHERE deleted_at ISNULL
                   RETURNING metric_id, _funnel_filter)
            INSERT INTO metric_series(metric_id, name, filter, index)
SELECT metric_id, 'Series 1', _funnel_filter, 0
FROM f_t_m;
ALTER TABLE IF EXISTS metrics
DROP COLUMN IF EXISTS _funnel_filter;
END IF;
END
$$;
COMMIT;
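-- Post-migration sanity checks (sketch; the two counts match only if no funnel
-- metrics existed before this migration ran):
-- SELECT openreplay_version();                           -- expect 'v1.6.1-ee'
-- SELECT COUNT(*) FROM metrics WHERE metric_type = 'funnel';
-- SELECT COUNT(*) FROM funnels WHERE deleted_at IS NULL;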

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.6.0-ee'
SELECT 'v1.7.0-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -142,12 +142,11 @@ $$
CREATE TABLE IF NOT EXISTS tenants
(
tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
user_id text NOT NULL DEFAULT generate_api_key(20),
tenant_key text NOT NULL DEFAULT generate_api_key(20),
name text NOT NULL,
api_key text UNIQUE default generate_api_key(20) not null,
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
edition varchar(3) NOT NULL,
version_number text NOT NULL,
license text NULL,
opt_out bool NOT NULL DEFAULT FALSE,
@ -187,67 +186,6 @@ $$
name text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
appearance jsonb NOT NULL default '{
"role": "dev",
"dashboard": {
"cpu": true,
"fps": false,
"avgCpu": true,
"avgFps": true,
"errors": true,
"crashes": true,
"overview": true,
"sessions": true,
"topMetrics": true,
"callsErrors": true,
"pageMetrics": true,
"performance": true,
"timeToRender": false,
"userActivity": false,
"avgFirstPaint": false,
"countSessions": true,
"errorsPerType": true,
"slowestImages": true,
"speedLocation": true,
"slowestDomains": true,
"avgPageLoadTime": true,
"avgTillFirstBit": false,
"avgTimeToRender": true,
"avgVisitedPages": false,
"avgImageLoadTime": true,
"busiestTimeOfDay": true,
"errorsPerDomains": true,
"missingResources": true,
"resourcesByParty": true,
"sessionsFeedback": false,
"slowestResources": true,
"avgUsedJsHeapSize": true,
"domainsErrors_4xx": true,
"domainsErrors_5xx": true,
"memoryConsumption": true,
"pagesDomBuildtime": false,
"pagesResponseTime": true,
"avgRequestLoadTime": true,
"avgSessionDuration": false,
"sessionsPerBrowser": false,
"applicationActivity": true,
"sessionsFrustration": false,
"avgPagesDomBuildtime": true,
"avgPagesResponseTime": false,
"avgTimeToInteractive": true,
"resourcesCountByType": true,
"resourcesLoadingTime": true,
"avgDomContentLoadStart": true,
"avgFirstContentfulPixel": false,
"resourceTypeVsResponseEnd": true,
"impactedSessionsByJsErrors": true,
"impactedSessionsBySlowPages": true,
"resourcesVsVisuallyComplete": true,
"pagesResponseTimeDistribution": true
},
"sessionsLive": false,
"sessionsDevtools": true
}'::jsonb,
api_key text UNIQUE default generate_api_key(20) not null,
jwt_iat timestamp without time zone NULL DEFAULT NULL,
data jsonb NOT NULL DEFAULT'{}'::jsonb,
@ -257,17 +195,17 @@ $$
internal_id text NULL DEFAULT NULL
);
CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops);
CREATE TABLE IF NOT EXISTS basic_authentication
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
password text DEFAULT NULL,
generated_password boolean NOT NULL DEFAULT false,
invitation_token text NULL DEFAULT NULL,
invited_at timestamp without time zone NULL DEFAULT NULL,
change_pwd_token text NULL DEFAULT NULL,
change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL,
password text DEFAULT NULL,
invitation_token text NULL DEFAULT NULL,
invited_at timestamp without time zone NULL DEFAULT NULL,
change_pwd_token text NULL DEFAULT NULL,
change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL,
changed_at timestamp,
UNIQUE (user_id)
);
@ -320,6 +258,7 @@ $$
CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key);
CREATE INDEX IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
DROP TRIGGER IF EXISTS on_insert_or_update ON projects;
CREATE TRIGGER on_insert_or_update
AFTER INSERT OR UPDATE
@ -785,8 +724,10 @@ $$
);
CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id);
CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id);
CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at);
CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action);
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined');
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined','funnel');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map');
CREATE TABLE IF NOT EXISTS metrics
(
@ -836,8 +777,9 @@ $$
(
dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
description text NOT NULL DEFAULT '',
is_public boolean NOT NULL DEFAULT TRUE,
is_pinned boolean NOT NULL DEFAULT FALSE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
@ -1265,102 +1207,102 @@ LANGUAGE plpgsql;
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'overview', '{
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'overview', '{
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'overview', '{
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'overview', '{
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'overview', '{
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'overview', '{
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'overview', '{
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'overview', '{
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'overview', '{
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'overview', '{
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'overview', '{
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'overview', '{
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'overview', '{
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'overview', '{
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'overview', '{
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'overview', '{
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'overview', '{
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'overview', '{
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'overview', '{
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'overview', '{
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0

View file

@ -10,6 +10,7 @@ build.sh
servers/peerjs-server.js
servers/sourcemaps-handler.js
servers/sourcemaps-server.js
#servers/websocket.js
/utils
/Dockerfile
/utils/geoIP.js
/utils/HeapSnapshot.js
/utils/helper.js

ee/utilities/clean.sh Executable file
View file

@ -0,0 +1,8 @@
rm -rf ./utils/geoIP.js
rm -rf ./utils/HeapSnapshot.js
rm -rf ./utils/helper.js
rm -rf servers/peerjs-server.js
rm -rf servers/sourcemaps-handler.js
rm -rf servers/sourcemaps-server.js
rm -rf build.sh

View file

@ -13,9 +13,9 @@
"@socket.io/redis-adapter": "^7.1.0",
"express": "^4.17.1",
"redis": "^4.0.3",
"socket.io": "^4.4.1",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0"
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0"
}
},
"node_modules/@maxmind/geoip2-node": {
@ -83,14 +83,6 @@
"@node-redis/client": "^1.0.0"
}
},
"node_modules/@socket.io/base64-arraybuffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
"integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==",
"engines": {
"node": ">= 0.6.0"
}
},
"node_modules/@socket.io/redis-adapter": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz",
@ -121,9 +113,9 @@
"integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw=="
},
"node_modules/@types/node": {
"version": "17.0.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz",
"integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w=="
"version": "17.0.42",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz",
"integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ=="
},
"node_modules/accepts": {
"version": "1.3.8",
@ -332,9 +324,9 @@
}
},
"node_modules/engine.io": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz",
"integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==",
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz",
"integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==",
"dependencies": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@ -352,12 +344,9 @@
}
},
"node_modules/engine.io-parser": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz",
"integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==",
"dependencies": {
"@socket.io/base64-arraybuffer": "~1.0.2"
},
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz",
"integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==",
"engines": {
"node": ">=10.0.0"
}
@ -667,7 +656,7 @@
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
"engines": {
"node": ">=0.10.0"
}
@ -869,15 +858,15 @@
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"node_modules/socket.io": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz",
"integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==",
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz",
"integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==",
"dependencies": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"debug": "~4.3.2",
"engine.io": "~6.1.0",
"socket.io-adapter": "~2.3.3",
"engine.io": "~6.2.0",
"socket.io-adapter": "~2.4.0",
"socket.io-parser": "~4.0.4"
},
"engines": {
@ -902,6 +891,11 @@
"node": ">=10.0.0"
}
},
"node_modules/socket.io/node_modules/socket.io-adapter": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz",
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
},
"node_modules/statuses": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
@ -1092,11 +1086,6 @@
"integrity": "sha512-HGQ8YooJ8Mx7l28tD7XjtB3ImLEjlUxG1wC1PAjxu6hPJqjPshUZxAICzDqDjtIbhDTf48WXXUcx8TQJB1XTKA==",
"requires": {}
},
"@socket.io/base64-arraybuffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
"integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ=="
},
"@socket.io/redis-adapter": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz",
@ -1124,9 +1113,9 @@
"integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw=="
},
"@types/node": {
"version": "17.0.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz",
"integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w=="
"version": "17.0.42",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz",
"integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ=="
},
"accepts": {
"version": "1.3.8",
@ -1281,9 +1270,9 @@
"integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k="
},
"engine.io": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz",
"integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==",
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz",
"integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==",
"requires": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@ -1298,12 +1287,9 @@
}
},
"engine.io-parser": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz",
"integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==",
"requires": {
"@socket.io/base64-arraybuffer": "~1.0.2"
}
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz",
"integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg=="
},
"escape-html": {
"version": "1.0.3",
@ -1546,7 +1532,7 @@
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="
},
"on-finished": {
"version": "2.3.0",
@ -1696,16 +1682,23 @@
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"socket.io": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz",
"integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==",
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz",
"integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==",
"requires": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"debug": "~4.3.2",
"engine.io": "~6.1.0",
"socket.io-adapter": "~2.3.3",
"engine.io": "~6.2.0",
"socket.io-adapter": "~2.4.0",
"socket.io-parser": "~4.0.4"
},
"dependencies": {
"socket.io-adapter": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz",
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
}
}
},
"socket.io-adapter": {
@ -1774,7 +1767,7 @@
},
"uWebSockets.js": {
"version": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#a58e810e47a23696410f6073c8c905dc38f75da5",
"from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.6.0"
"from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.10.0"
},
"vary": {
"version": "1.1.2",

View file

@ -22,8 +22,8 @@
"@socket.io/redis-adapter": "^7.1.0",
"express": "^4.17.1",
"redis": "^4.0.3",
"socket.io": "^4.4.1",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0"
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0"
}
}

ee/utilities/prepare-dev.sh Executable file
View file

@ -0,0 +1,2 @@
#!/bin/bash
rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./

View file

@ -16,8 +16,9 @@ const PREFIX = process.env.prefix || `/assist`
if (process.env.uws !== "true") {
let wsapp = express();
wsapp.use(express.json());
wsapp.use(express.urlencoded({extended: true}));
wsapp.use(request_logger("[wsapp]"));
wsapp.use(request_logger("[app]"));
wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => {
res.statusCode = 200;
res.end("ok!");
@ -73,10 +74,18 @@ if (process.env.uws !== "true") {
}
}
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
socket.start(uapp);

View file

@ -1,7 +1,19 @@
const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const {extractPeerId} = require('../utils/helper');
const {
extractPeerId,
hasFilters,
isValidSession,
sortPaginate,
getValidAttributes,
uniqueAutocomplete
} = require('../utils/helper');
const {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest
} = require('../utils/helper-ee');
const {geoip} = require('../utils/geoIP');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
@ -59,33 +71,6 @@ const uniqueSessions = function (data) {
return resArr;
}
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.of('/').adapter.allRooms();
}
@ -103,7 +88,7 @@ const respond = function (res, data) {
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
@ -111,10 +96,11 @@ const socketsList = async function (req, res) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -125,22 +111,23 @@ const socketsList = async function (req, res) {
}
respond(res, liveSessions);
}
wsRouter.get(`/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey) {
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -149,13 +136,15 @@ const socketsListByProject = async function (req, res) {
}
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? liveSessions[_projectKey]
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
@ -165,8 +154,8 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -177,25 +166,25 @@ const socketsLive = async function (req, res) {
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
}
}
respond(res, liveSessions);
respond(res, sortPaginate(liveSessions, filters));
}
wsRouter.get(`/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -206,9 +195,33 @@ const socketsLiveByProject = async function (req, res) {
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []);
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
const autocomplete = async function (req, res) {
debug && console.log("[WS]autocomplete");
let _projectKey = extractProjectKeyFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)];
}
}
}
}
}
respond(res, uniqueAutocomplete(results));
}
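// Note: user filtering is now payload-based. extractPayloadFromRequest
// (../utils/helper-ee) replaces the old ?userId= query parameter, and the
// filter/sort/pagination semantics live in the shared helpers (hasFilters,
// isValidSession, sortPaginate, getValidAttributes, uniqueAutocomplete),
// whose implementations are outside this diff.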
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
const findSessionSocketId = async (io, peerId) => {
const connected_sockets = await io.in(peerId).fetchSockets();
@ -281,6 +294,21 @@ function extractSessionInfo(socket) {
}
}
wsRouter.get(`/sockets-list`, socketsList);
wsRouter.post(`/sockets-list`, socketsList);
wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject);
wsRouter.get(`/sockets-live`, socketsLive);
wsRouter.post(`/sockets-live`, socketsLive);
wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject);
module.exports = {
wsRouter,
start: (server, prefix) => {
@ -409,6 +437,7 @@ module.exports = {
socketsList,
socketsListByProject,
socketsLive,
socketsLiveByProject
socketsLiveByProject,
autocomplete
}
};

View file

@ -1,7 +1,19 @@
const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const {extractPeerId} = require('../utils/helper');
const {
extractPeerId,
hasFilters,
isValidSession,
sortPaginate,
getValidAttributes,
uniqueAutocomplete
} = require('../utils/helper');
const {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest,
} = require('../utils/helper-ee');
const {geoip} = require('../utils/geoIP');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
@ -42,33 +54,6 @@ const createSocketIOServer = function (server, prefix) {
}
}
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
@ -86,18 +71,18 @@ const respond = function (res, data) {
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -108,22 +93,23 @@ const socketsList = async function (req, res) {
}
respond(res, liveSessions);
}
wsRouter.get(`/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey) {
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -132,13 +118,15 @@ const socketsListByProject = async function (req, res) {
}
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
@ -148,8 +136,8 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -159,25 +147,25 @@ const socketsLive = async function (req, res) {
}
}
}
respond(res, liveSessions);
respond(res, sortPaginate(liveSessions, filters));
}
wsRouter.get(`/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -187,9 +175,33 @@ const socketsLiveByProject = async function (req, res) {
}
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
const autocomplete = async function (req, res) {
debug && console.log("[WS]autocomplete");
let _projectKey = extractProjectKeyFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)];
}
}
}
}
}
respond(res, uniqueAutocomplete(results));
}
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
const findSessionSocketId = async (io, peerId) => {
const connected_sockets = await io.in(peerId).fetchSockets();
@ -260,6 +272,21 @@ function extractSessionInfo(socket) {
}
}
wsRouter.get(`/sockets-list`, socketsList);
wsRouter.post(`/sockets-list`, socketsList);
wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject);
wsRouter.get(`/sockets-live`, socketsLive);
wsRouter.post(`/sockets-live`, socketsLive);
wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject);
module.exports = {
wsRouter,
start: (server, prefix) => {
@ -374,6 +401,7 @@ module.exports = {
socketsList,
socketsListByProject,
socketsLive,
socketsLiveByProject
socketsLiveByProject,
autocomplete
}
};

View file

@ -0,0 +1,98 @@
const helper = require('./helper');
let debug = process.env.debug === "1" || false;
const getBodyFromUWSResponse = async function (res) {
return new Promise(((resolve, reject) => {
let buffer;
res.onData((ab, isLast) => {
let chunk = Buffer.from(ab);
if (buffer) {
buffer = Buffer.concat([buffer, chunk]);
} else {
buffer = Buffer.concat([chunk]);
}
if (isLast) {
let json;
try {
json = JSON.parse(buffer);
} catch (e) {
console.error(e);
/* res.close calls onAborted */
// try {
// res.close();
// } catch (e2) {
// console.error(e2);
// }
json = {};
}
resolve(json);
}
});
}));
}
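// Usage sketch: a hypothetical uWS route ("uwsApp" and the path are assumptions,
// not part of this module). uWebSockets.js expects onAborted() to be registered
// before the response is used asynchronously, and the helper above resolves to {}
// when the body is not valid JSON.
//
// uwsApp.post('/echo', async (res, req) => {
//     res.onAborted(() => { res.aborted = true; });
//     const body = await getBodyFromUWSResponse(res);
//     if (!res.aborted) {
//         res.writeHeader('Content-Type', 'application/json');
//         res.end(JSON.stringify(body));
//     }
// });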
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else {
return helper.extractProjectKeyFromRequest(req);
}
return undefined;
}
const extractSessionIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(1)) {
debug && console.log(`[WS]where sessionId=${req.getParameter(1)}`);
return req.getParameter(1);
}
} else {
return helper.extractSessionIdFromRequest(req);
}
return undefined;
}
const extractPayloadFromRequest = async function (req, res) {
let filters = {
"query": {},
"filter": {}
};
if (process.env.uws === "true") {
if (req.getQuery("q")) {
debug && console.log(`[WS]where q=${req.getQuery("q")}`);
filters.query.value = req.getQuery("q");
}
if (req.getQuery("key")) {
debug && console.log(`[WS]where key=${req.getQuery("key")}`);
filters.query.key = req.getQuery("key");
}
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
filters.filter.userID = [req.getQuery("userId")];
}
if (!filters.query.value) {
let body = await getBodyFromUWSResponse(res);
filters = {
...filters,
"sort": {
"key": body.sort && body.sort.key ? body.sort.key : undefined,
"order": body.sort && body.sort.order === "DESC"
},
"pagination": {
"limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined,
"page": body.pagination && body.pagination.page ? body.pagination.page : undefined
}
}
filters.filter = {...filters.filter, ...(body.filter || {})};
}
} else {
return helper.extractPayloadFromRequest(req);
}
filters.filter = helper.objectToObjectOfArrays(filters.filter);
debug && console.log("payload/filters:" + JSON.stringify(filters))
return Object.keys(filters).length > 0 ? filters : undefined;
}
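// For reference, a sketch of the shape this returns for a hypothetical uws
// request GET ...?q=chr&key=browser&userId=jdoe (all values illustrative):
//
// {
//     query: {value: "chr", key: "browser"},
//     filter: {userID: ["jdoe"]}   // objectToObjectOfArrays wraps scalars in arrays
// }
//
// When no "q" is given, sort and pagination are read from the JSON body, e.g.
// {query: {}, filter: {...}, sort: {key: "timestamp", order: true},
//  pagination: {limit: 10, page: 2}}, where order=true means DESC.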
module.exports = {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest
};

View file

@ -0,0 +1,179 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.7.0'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS dashboards
ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT '';
ALTER TABLE users
DROP COLUMN IF EXISTS appearance;
ALTER TABLE basic_authentication
DROP COLUMN IF EXISTS generated_password;
ALTER TABLE tenants
DROP COLUMN IF EXISTS edition;
ALTER TABLE dashboards
ALTER COLUMN user_id DROP NOT NULL;
DO
$$
BEGIN
IF EXISTS(SELECT *
FROM information_schema.columns
WHERE table_name = 'tenants'
and column_name = 'user_id')
THEN
ALTER TABLE tenants
RENAME COLUMN user_id TO tenant_key;
END IF;
END
$$;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined';
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_fps', 'predefined', 'overview')
ON CONFLICT (predefined_key) DO UPDATE
SET name=excluded.name,
category=excluded.category,
default_config=excluded.default_config,
is_predefined=excluded.is_predefined,
is_template=excluded.is_template,
is_public=excluded.is_public,
metric_type=excluded.metric_type,
view_type=excluded.view_type;
BEGIN;
DO
$$
BEGIN
IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND
EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL))
THEN
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL;
WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter)
SELECT project_id, user_id, name, 'funnel', is_public, filter
FROM funnels
WHERE deleted_at ISNULL
RETURNING metric_id,_funnel_filter)
INSERT
INTO metric_series(metric_id, name, filter, index)
SELECT metric_id, 'Series 1', _funnel_filter, 0
FROM f_t_m;
ALTER TABLE IF EXISTS metrics
DROP COLUMN IF EXISTS _funnel_filter;
END IF;
END
$$;
COMMIT;

View file

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.6.0'
SELECT 'v1.7.0'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@ -117,11 +117,10 @@ $$
CREATE TABLE tenants
(
tenant_id integer NOT NULL DEFAULT 1,
user_id text NOT NULL DEFAULT generate_api_key(20),
tenant_key text NOT NULL DEFAULT generate_api_key(20),
name text NOT NULL,
api_key text NOT NULL DEFAULT generate_api_key(20),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
edition varchar(3) NOT NULL,
version_number text NOT NULL,
license text NULL,
opt_out bool NOT NULL DEFAULT FALSE,
@ -142,67 +141,6 @@ $$
name text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
appearance jsonb NOT NULL default '{
"role": "dev",
"dashboard": {
"cpu": true,
"fps": false,
"avgCpu": true,
"avgFps": true,
"errors": true,
"crashes": true,
"overview": true,
"sessions": true,
"topMetrics": true,
"callsErrors": true,
"pageMetrics": true,
"performance": true,
"timeToRender": false,
"userActivity": false,
"avgFirstPaint": false,
"countSessions": true,
"errorsPerType": true,
"slowestImages": true,
"speedLocation": true,
"slowestDomains": true,
"avgPageLoadTime": true,
"avgTillFirstBit": false,
"avgTimeToRender": true,
"avgVisitedPages": false,
"avgImageLoadTime": true,
"busiestTimeOfDay": true,
"errorsPerDomains": true,
"missingResources": true,
"resourcesByParty": true,
"sessionsFeedback": false,
"slowestResources": true,
"avgUsedJsHeapSize": true,
"domainsErrors_4xx": true,
"domainsErrors_5xx": true,
"memoryConsumption": true,
"pagesDomBuildtime": false,
"pagesResponseTime": true,
"avgRequestLoadTime": true,
"avgSessionDuration": false,
"sessionsPerBrowser": false,
"applicationActivity": true,
"sessionsFrustration": false,
"avgPagesDomBuildtime": true,
"avgPagesResponseTime": false,
"avgTimeToInteractive": true,
"resourcesCountByType": true,
"resourcesLoadingTime": true,
"avgDomContentLoadStart": true,
"avgFirstContentfulPixel": false,
"resourceTypeVsResponseEnd": true,
"impactedSessionsByJsErrors": true,
"impactedSessionsBySlowPages": true,
"resourcesVsVisuallyComplete": true,
"pagesResponseTimeDistribution": true
},
"sessionsLive": false,
"sessionsDevtools": true
}'::jsonb,
api_key text UNIQUE default generate_api_key(20) not null,
jwt_iat timestamp without time zone NULL DEFAULT NULL,
data jsonb NOT NULL DEFAULT '{}'::jsonb,
@ -212,12 +150,11 @@ $$
CREATE TABLE basic_authentication
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
password text DEFAULT NULL,
generated_password boolean NOT NULL DEFAULT false,
invitation_token text NULL DEFAULT NULL,
invited_at timestamp without time zone NULL DEFAULT NULL,
change_pwd_token text NULL DEFAULT NULL,
change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL,
password text DEFAULT NULL,
invitation_token text NULL DEFAULT NULL,
invited_at timestamp without time zone NULL DEFAULT NULL,
change_pwd_token text NULL DEFAULT NULL,
change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL,
changed_at timestamp,
UNIQUE (user_id)
);
@ -264,6 +201,8 @@ $$
);
CREATE INDEX projects_project_key_idx ON public.projects (project_key);
CREATE INDEX projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
CREATE TRIGGER on_insert_or_update
AFTER INSERT OR UPDATE
ON projects
@ -940,7 +879,7 @@ $$
CREATE INDEX jobs_start_at_idx ON jobs (start_at);
CREATE INDEX jobs_project_id_idx ON jobs (project_id);
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined');
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined', 'funnel');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map');
CREATE TABLE metrics
(
@ -990,8 +929,9 @@ $$
(
dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
description text NOT NULL DEFAULT '',
is_public boolean NOT NULL DEFAULT TRUE,
is_pinned boolean NOT NULL DEFAULT FALSE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
@ -1059,102 +999,102 @@ LANGUAGE plpgsql;
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'overview', '{
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'overview', '{
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'overview', '{
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'overview', '{
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'overview', '{
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'overview', '{
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'overview', '{
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'overview', '{
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'overview', '{
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'overview', '{
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'overview', '{
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'overview', '{
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'overview', '{
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'overview', '{
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'overview', '{
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'overview', '{
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'overview', '{
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'overview', '{
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'overview', '{
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'overview', '{
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0

scripts/vagrant/README.md
View file

@ -0,0 +1,69 @@
<aside>
💡 As of today, VirtualBox is not supported on M1 Macs. You can try VMware as the Vagrant backend, but this has not been tested.
</aside>
### Installation
- Vagrant: [https://www.vagrantup.com/downloads](https://www.vagrantup.com/downloads)
- VirtualBox: [https://www.virtualbox.org/wiki/Downloads](https://www.virtualbox.org/wiki/Downloads)
### Configuration
```bash
mkdir openreplay-contributions
cd openreplay-contributions
git clone https://github.com/openreplay/openreplay -b dev
cp -rf openreplay/scripts/vagrant/ .
vagrant up
```
### To access OpenReplay instance
Add the IP address from the output above to your local resolver.
**Mac/Linux**
Copy-paste the command from the vagrant output.
**Windows**
Use the following instructions if you're running Windows 10 or Windows 8:
1. Press the Windows key.
2. Type Notepad in the search field.
3. In the search results, right-click Notepad and select Run as administrator.
4. From Notepad, open the following file: `c:\Windows\System32\Drivers\etc\hosts`
5. Add the line below to the hosts file:
   `<ip address from vagrant output> openreplay.local`
6. Select File > Save to save your changes.
**Open browser**
http://openreplay.local
### To start developing
- [Frontend](../../frontend/development.md)
- [API](../../api/development.md)
- [Backend](../../backend/development.md)
### Notes
It'll be good practice to take a snapshot once the initial setup is complete, so that if something is not working as expected, you can always fall back to a known stable version.
```bash
cd openreplay-contributions
vagrant snapshot save <openreplay-version-base>
# For example
vagrant snapshot save openreplay-160-base
```
```bash
# To restore the snapshot
cd openreplay-contributions
vagrant snapshot restore openreplay-160-base
```
<aside>
💡 If the base VM is deleted, the snapshot won't be available.
</aside>

scripts/vagrant/Vagrantfile
View file

@ -0,0 +1,129 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure("2") do |config|
# The most common configuration options are documented and commented below.
# For a complete reference, please see the online documentation at
# https://docs.vagrantup.com.
# Every Vagrant development environment requires a box. You can search for
# boxes at https://vagrantcloud.com/search.
config.vm.box = "peru/ubuntu-20.04-server-amd64"
config.vm.define "openreplay-dev"
# Disable automatic box update checking. If you disable this, then
# boxes will only be checked for updates when the user runs
# `vagrant box outdated`. This is not recommended.
# config.vm.box_check_update = false
# Create a forwarded port mapping which allows access to a specific port
# within the machine from a port on the host machine. In the example below,
# accessing "localhost:8080" will access port 80 on the guest machine.
# NOTE: This will enable public access to the opened port
# config.vm.network "forwarded_port", guest: 80, host: 8080
# Create a forwarded port mapping which allows access to a specific port
# within the machine from a port on the host machine and only allow access
# via 127.0.0.1 to disable public access
# config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"
# Create a private network, which allows host-only access to the machine
# using a specific IP.
config.vm.network "private_network", type: "dhcp"
# Create a public network, which generally matched to bridged network.
# Bridged networks make the machine appear as another physical device on
# your network.
# config.vm.network "public_network"
# Share an additional folder to the guest VM. The first argument is
# the path on the host to the actual folder. The second argument is
# the path on the guest to mount the folder. And the optional third
# argument is a set of non-required options.
config.vm.synced_folder "./", "/home/vagrant/openreplay-dev/"
# Provider-specific configuration so you can fine-tune various
# backing providers for Vagrant. These expose provider-specific options.
# Example for VirtualBox:
#
config.vm.provider "virtualbox" do |vb|
# Display the VirtualBox GUI when booting the machine
vb.gui = false
# Customize the amount of memory on the VM:
vb.cpus = "2"
vb.memory = "4096"
end
#
# View the documentation for the provider you are using for more
# information on available options.
# Enable provisioning with a shell script. Additional provisioners such as
# Ansible, Chef, Docker, Puppet and Salt are also available. Please see the
# documentation for more information about their specific syntax and use.
config.vm.provision "shell", inline: <<-SHELL
set -x
IP_ADDR=`ip r | tail -n1 | awk '{print $NF}'`
# Updating host domainName
grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts
apt-get update
apt-get install -y git curl
curl -fsSL https://get.docker.com | sh -
usermod -aG docker vagrant
git clone https://github.com/openreplay/openreplay infra
cd infra/scripts/helmcharts
# changing container runtime for k3s to docker
sudo -u vagrant git checkout -- init.sh
sed -i 's/INSTALL_K3S_EXEC=\\(.*\\)\\\"/INSTALL_K3S_EXEC=\\1 --docker\\\"/g' init.sh
DOMAIN_NAME=openreplay.local bash init.sh
cp -rf /root/.kube /home/vagrant/
cp -rf /home/vagrant/infra/scripts/helmcharts/vars.yaml /home/vagrant/openreplay-dev/openreplay/scripts/helmcharts/vars.yaml
chown -R vagrant:vagrant /home/vagrant
cat <<EOF
################################################
Openreplay Dev environment preparation completed.
################################################
Steps to do:
Add the IP address from the output above to your local resolver
## Mac (Paste the following command in terminal)
sudo -- sh -c 'grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts'
## Linux (Paste the following command in terminal)
sudo -- sh -c 'grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts'
## Windows
Use the following instructions if you're running Windows 10 or Windows 8:
Press the Windows key.
Type Notepad in the search field.
In the search results, right-click Notepad and select Run as administrator.
From Notepad, open the following file:
c:\\Windows\\System32\\Drivers\\etc\\hosts
Add the line below to the hosts file:
$IP_ADDR openreplay.local
Select File > Save to save your changes.
To access OpenReplay:
- Open your browser and go to "http://openreplay.local"
EOF
SHELL
end

View file

@ -11,7 +11,7 @@
"dependencies": {
"@maxmind/geoip2-node": "^3.4.0",
"express": "^4.17.1",
"socket.io": "^4.4.1",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2"
}
},
@ -26,14 +26,6 @@
"maxmind": "^4.2.0"
}
},
"node_modules/@socket.io/base64-arraybuffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
"integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==",
"engines": {
"node": ">= 0.6.0"
}
},
"node_modules/@types/component-emitter": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz",
@ -50,9 +42,9 @@
"integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw=="
},
"node_modules/@types/node": {
"version": "17.0.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz",
"integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w=="
"version": "17.0.42",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz",
"integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ=="
},
"node_modules/accepts": {
"version": "1.3.8",
@ -232,9 +224,9 @@
}
},
"node_modules/engine.io": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz",
"integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==",
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz",
"integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==",
"dependencies": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@ -252,12 +244,9 @@
}
},
"node_modules/engine.io-parser": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz",
"integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==",
"dependencies": {
"@socket.io/base64-arraybuffer": "~1.0.2"
},
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz",
"integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==",
"engines": {
"node": ">=10.0.0"
}
@ -549,7 +538,7 @@
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
"engines": {
"node": ">=0.10.0"
}
@ -706,15 +695,15 @@
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"node_modules/socket.io": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz",
"integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==",
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz",
"integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==",
"dependencies": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"debug": "~4.3.2",
"engine.io": "~6.1.0",
"socket.io-adapter": "~2.3.3",
"engine.io": "~6.2.0",
"socket.io-adapter": "~2.4.0",
"socket.io-parser": "~4.0.4"
},
"engines": {
@ -722,9 +711,9 @@
}
},
"node_modules/socket.io-adapter": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz",
"integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ=="
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz",
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
},
"node_modules/socket.io-parser": {
"version": "4.0.4",
@ -916,11 +905,6 @@
"maxmind": "^4.2.0"
}
},
"@socket.io/base64-arraybuffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
"integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ=="
},
"@types/component-emitter": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz",
@ -937,9 +921,9 @@
"integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw=="
},
"@types/node": {
"version": "17.0.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz",
"integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w=="
"version": "17.0.42",
"resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz",
"integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ=="
},
"accepts": {
"version": "1.3.8",
@ -1074,9 +1058,9 @@
"integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k="
},
"engine.io": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz",
"integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==",
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz",
"integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==",
"requires": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@ -1106,12 +1090,9 @@
}
},
"engine.io-parser": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz",
"integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==",
"requires": {
"@socket.io/base64-arraybuffer": "~1.0.2"
}
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz",
"integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg=="
},
"escape-html": {
"version": "1.0.3",
@ -1314,7 +1295,7 @@
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="
},
"on-finished": {
"version": "2.3.0",
@ -1423,15 +1404,15 @@
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"socket.io": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz",
"integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==",
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz",
"integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==",
"requires": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"debug": "~4.3.2",
"engine.io": "~6.1.0",
"socket.io-adapter": "~2.3.3",
"engine.io": "~6.2.0",
"socket.io-adapter": "~2.4.0",
"socket.io-parser": "~4.0.4"
},
"dependencies": {
@ -1451,9 +1432,9 @@
}
},
"socket.io-adapter": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz",
"integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ=="
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz",
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
},
"socket.io-parser": {
"version": "4.0.4",

View file

@ -20,7 +20,7 @@
"dependencies": {
"@maxmind/geoip2-node": "^3.4.0",
"express": "^4.17.1",
"socket.io": "^4.4.1",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2"
}
}

View file

@ -7,6 +7,8 @@ const HOST = '0.0.0.0';
const PORT = 9001;
const wsapp = express();
wsapp.use(express.json());
wsapp.use(express.urlencoded({extended: true}));
wsapp.use(request_logger("[wsapp]"));
wsapp.use(`/assist/${process.env.S3_KEY}`, socket.wsRouter);

View file

@ -1,7 +1,17 @@
const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const {extractPeerId} = require('../utils/helper');
const {
extractPeerId,
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
hasFilters,
isValidSession,
extractPayloadFromRequest,
sortPaginate,
getValidAttributes,
uniqueAutocomplete
} = require('../utils/helper');
const {geoip} = require('../utils/geoIP');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
@ -28,22 +38,6 @@ const createSocketIOServer = function (server, prefix) {
});
}
const extractUserIdFromRequest = function (req) {
if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
@ -57,18 +51,18 @@ const respond = function (res, data) {
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -79,22 +73,23 @@ const socketsList = async function (req, res) {
}
respond(res, liveSessions);
}
wsRouter.get(`/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey) {
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (hasFilters(filters)) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(sessionId);
}
}
@ -103,13 +98,15 @@ const socketsListByProject = async function (req, res) {
}
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? liveSessions[_projectKey]
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
@ -119,8 +116,8 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -130,25 +127,25 @@ const socketsLive = async function (req, res) {
}
}
}
respond(res, liveSessions);
respond(res, sortPaginate(liveSessions, filters));
}
wsRouter.get(`/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
if (hasFilters(filters)) {
if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
@ -158,9 +155,34 @@ const socketsLiveByProject = async function (req, res) {
}
}
}
respond(res, liveSessions[_projectKey] || []);
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
const autocomplete = async function (req, res) {
debug && console.log("[WS]autocomplete");
let _projectKey = extractProjectKeyFromRequest(req);
let filters = extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)];
}
}
}
}
}
respond(res, uniqueAutocomplete(results));
}
const findSessionSocketId = async (io, peerId) => {
const connected_sockets = await io.in(peerId).fetchSockets();
@ -231,6 +253,20 @@ function extractSessionInfo(socket) {
}
}
wsRouter.get(`/sockets-list`, socketsList);
wsRouter.post(`/sockets-list`, socketsList);
wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject);
wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-live`, socketsLive);
wsRouter.post(`/sockets-live`, socketsLive);
wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject);
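// Client sketch for the filtered endpoints (URL prefix, port and key are
// placeholders: server.js mounts this router under /assist/<S3_KEY>, and the
// POST body follows the shape parsed by extractPayloadFromRequest):
//
// const resp = await fetch(`http://localhost:9001/assist/${S3_KEY}/sockets-live/${projectKey}`, {
//     method: 'POST',
//     headers: {'Content-Type': 'application/json'},
//     body: JSON.stringify({
//         filter: {userID: ['jane'], country: ['FR']},
//         sort: {key: 'timestamp', order: 'DESC'},
//         pagination: {limit: 10, page: 1}
//     })
// });
// // List endpoints respond with sortPaginate's {total, sessions}
// // (the exact envelope depends on respond()).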
module.exports = {
wsRouter,
start: (server, prefix) => {

View file

@ -24,7 +24,176 @@ const request_logger = (identity) => {
next();
}
};
const extractProjectKeyFromRequest = function (req) {
if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const extractSessionIdFromRequest = function (req) {
if (req.params.sessionId) {
debug && console.log(`[WS]where sessionId=${req.params.sessionId}`);
return req.params.sessionId;
}
return undefined;
}
const isValidSession = function (sessionInfo, filters) {
let foundAll = true;
for (const [key, values] of Object.entries(filters)) {
let found = false;
if (values !== undefined && values !== null) {
for (const [skey, svalue] of Object.entries(sessionInfo)) {
if (svalue !== undefined && svalue !== null) {
if (typeof (svalue) === "object") {
if (isValidSession(svalue, {[key]: values})) {
found = true;
break;
}
} else if (skey.toLowerCase() === key.toLowerCase()) {
for (let v of values) {
if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) {
found = true;
break;
}
}
if (found) {
break;
}
}
}
}
}
foundAll &&= found;
if (!found) {
break;
}
}
return foundAll;
}
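// Matching sketch (hypothetical session data): filter values are arrays of
// strings, matching is a case-insensitive substring test, and nested objects
// are searched recursively.
//
// const info = {userID: "jane.doe@example.com", metadata: {plan: "Enterprise"}};
// isValidSession(info, {userid: ["JANE"], plan: ["enter"]}); // => true
// isValidSession(info, {userid: ["john"]});                  // => false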
const getValidAttributes = function (sessionInfo, query) {
let matches = [];
let deduplicate = [];
for (const [skey, svalue] of Object.entries(sessionInfo)) {
if (svalue !== undefined && svalue !== null) {
if (typeof (svalue) === "object") {
matches = [...matches, ...getValidAttributes(svalue, query)]
} else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase())
&& String(svalue).toLowerCase().indexOf(query.value.toLowerCase()) >= 0
&& deduplicate.indexOf(skey + '_' + svalue) < 0) {
matches.push({"type": skey, "value": svalue});
deduplicate.push(skey + '_' + svalue);
}
}
}
return matches;
}
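// Autocomplete sketch (hypothetical data): without query.key every attribute,
// nested objects included, is searched for the query value; uniqueAutocomplete
// later dedupes the merged results across sessions.
//
// getValidAttributes({userID: "jane", metadata: {team: "janitors"}}, {value: "jan"});
// // => [{type: "userID", value: "jane"}, {type: "team", value: "janitors"}]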
const hasFilters = function (filters) {
return filters && filters.filter && Object.keys(filters.filter).length > 0;
}
const objectToObjectOfArrays = function (obj) {
let _obj = {}
if (obj) {
for (let k of Object.keys(obj)) {
if (obj[k] !== undefined && obj[k] !== null) {
_obj[k] = obj[k];
if (!Array.isArray(_obj[k])) {
_obj[k] = [_obj[k]];
}
for (let i = 0; i < _obj[k].length; i++) {
_obj[k][i] = String(_obj[k][i]);
}
}
}
}
return _obj;
}
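// e.g. objectToObjectOfArrays({userID: "jane", pages: [1, 2], empty: null})
// // => {userID: ["jane"], pages: ["1", "2"]}
// // (null/undefined entries are dropped, every value becomes an array of strings)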
const extractPayloadFromRequest = function (req) {
let filters = {
"query": {},
"filter": {},
"sort": {
"key": req.body.sort && req.body.sort.key ? req.body.sort.key : undefined,
"order": req.body.sort && req.body.sort.order === "DESC"
},
"pagination": {
"limit": req.body.pagination && req.body.pagination.limit ? req.body.pagination.limit : undefined,
"page": req.body.pagination && req.body.pagination.page ? req.body.pagination.page : undefined
}
};
if (req.query.q) {
debug && console.log(`[WS]where q=${req.query.q}`);
filters.query.value = req.query.q;
}
if (req.query.key) {
debug && console.log(`[WS]where key=${req.query.key}`);
filters.query.key = req.query.key;
}
if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
filters.filter.userID = [req.query.userId];
}
filters.filter = {...filters.filter, ...(req.body.filter || {})};
filters.filter = objectToObjectOfArrays(filters.filter);
debug && console.log("payload/filters:" + JSON.stringify(filters))
return filters;
}
const getValue = function (obj, key) {
if (obj !== undefined && obj !== null) {
let val;
for (let k of Object.keys(obj)) {
if (typeof (obj[k]) === "object") {
val = getValue(obj[k], key);
} else if (k.toLowerCase() === key.toLowerCase()) {
val = obj[k];
}
if (val !== undefined) {
return val;
}
}
}
return undefined;
}
const sortPaginate = function (list, filters) {
const total = list.length;
list.sort((a, b) => {
const vA = getValue(a, filters.sort.key || "timestamp");
const vB = getValue(b, filters.sort.key || "timestamp");
return vA > vB ? 1 : vA < vB ? -1 : 0;
});
if (filters.sort.order) {
list.reverse();
}
if (filters.pagination.page && filters.pagination.limit) {
list = list.slice((filters.pagination.page - 1) * filters.pagination.limit,
filters.pagination.page * filters.pagination.limit);
}
return {"total": total, "sessions": list};
}
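// Pagination sketch (illustrative values): sessions are sorted by
// filters.sort.key (falling back to "timestamp"), reversed when order is true
// (i.e. DESC), then sliced to the requested page.
//
// sortPaginate(
//     [{id: "a", timestamp: 3}, {id: "b", timestamp: 1}, {id: "c", timestamp: 2}],
//     {sort: {key: "timestamp", order: true}, pagination: {limit: 2, page: 1}}
// );
// // => {total: 3, sessions: [{id: "a", timestamp: 3}, {id: "c", timestamp: 2}]}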
const uniqueAutocomplete = function (list) {
let _list = [];
let deduplicate = [];
for (let e of list) {
if (deduplicate.indexOf(e.type + "_" + e.value) < 0) {
_list.push(e);
deduplicate.push(e.type + "_" + e.value)
}
}
return _list;
}
module.exports = {
extractPeerId, request_logger
extractPeerId,
request_logger,
getValidAttributes,
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
isValidSession,
hasFilters,
objectToObjectOfArrays,
extractPayloadFromRequest,
sortPaginate,
uniqueAutocomplete
};