* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps (see the sketch after this list)

* feat(chalice): autocomplete returns top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* refactor(chalice): removed unused code & endpoints

* refactor(chalice): upgraded dependencies
refactor(alerts): upgraded dependencies
refactor(crons): upgraded dependencies

* refactor(chalice): support userState filter for assist

* refactor(chalice): removed unused code
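
As a rough illustration of the heatmap change flagged in the list above: a session only qualifies for a heatmap if it recorded at least one location event. The following is a minimal sketch of that idea, assuming the public.sessions and events.pages tables that appear elsewhere in this diff; it is not the actual query from the commit.

# hedged sketch: keep only sessions with at least one location (page)
# event, so heatmaps are never built on sessions with nothing to render
def search_heatmap_candidate_sessions(cur, project_id):
    cur.execute(cur.mogrify(
        """SELECT s.session_id
           FROM public.sessions AS s
           WHERE s.project_id = %(project_id)s
             AND EXISTS(SELECT 1
                        FROM events.pages AS p
                        WHERE p.session_id = s.session_id);""",
        {"project_id": project_id}))
    return cur.fetchall()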
Commit d1de937ed2 (parent 06667df5cd)
Authored by Kraiem Taha Yassine on 2024-11-06 16:36:53 +01:00; committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
26 changed files with 22 additions and 1341 deletions

@@ -6,10 +6,10 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.32.3"
boto3 = "==1.35.51"
boto3 = "==1.35.54"
pyjwt = "==2.9.0"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
elasticsearch = "==8.15.1"
jira = "==3.8.0"
cachetools = "==5.5.0"

@@ -196,11 +196,6 @@ def autocomplete(project_id, q: str, key: str = None):
return {"data": results}
def get_ice_servers():
return config("iceServers") if config("iceServers", default=None) is not None \
and len(config("iceServers")) > 0 else None
def __get_efs_path():
efs_path = config("FS_DIR")
if not path_exists(efs_path):

@@ -1,15 +1,13 @@
import json
import logging
from decouple import config
from fastapi import HTTPException, status
import schemas
-from chalicelib.core import sessions, funnels, errors, issues, heatmaps, sessions_mobs, product_analytics, \
+from chalicelib.core import sessions, funnels, errors, issues, heatmaps, product_analytics, \
custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import StorageClient
logger = logging.getLogger(__name__)
PIE_CHART_GROUP = 5
@@ -178,23 +176,6 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
# def __merge_metric_with_data(metric: schemas.CardSchema,
# data: schemas.CardSessionsSchema) -> schemas.CardSchema:
# metric.startTimestamp = data.startTimestamp
# metric.endTimestamp = data.endTimestamp
# metric.page = data.page
# metric.limit = data.limit
# metric.density = data.density
# if data.series is not None and len(data.series) > 0:
# metric.series = data.series
#
# # if len(data.filters) > 0:
# # for s in metric.series:
# # s.filter.filters += data.filters
# # metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
# return metric
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
@@ -212,38 +193,6 @@ def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSe
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if raw_metric is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:
return {"seriesId": s.series_id, "seriesName": s.name,
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if raw_metric is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:
return {"seriesId": s.series_id, "seriesName": s.name,
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if len(data.series) == 0:
@@ -259,15 +208,6 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
return results
def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
if len(data.series) == 0:
return []
data.series[0].filter.startTimestamp = data.startTimestamp
data.series[0].filter.endTimestamp = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
return data
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.filters) > 0 or len(data.series) > 0:
filters = [f.model_dump(by_alias=True) for f in data.filters] \
@@ -309,7 +249,6 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.FUNNEL: __get_funnel_issues,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
}
@@ -343,9 +282,8 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
params["default_config"] = json.dumps(data.default_config.model_dump())
params["card_info"] = None
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data,
"default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))

@@ -149,30 +149,6 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
return helper.dict_to_camel_case(row)
def get_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur:
pg_query = """SELECT metrics.*, metric_series.series
FROM dashboard_widgets
INNER JOIN dashboards USING (dashboard_id)
INNER JOIN metrics USING (metric_id)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE dashboard_id = %(dashboard_id)s
AND widget_id = %(widget_id)s
AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
AND dashboards.deleted_at IS NULL
AND metrics.deleted_at ISNULL
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
with pg_client.PostgresClient() as cur:
pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)

@@ -295,118 +295,6 @@ def get_details(project_id, error_id, user_id, **data):
return {"data": helper.dict_to_camel_case(row)}
def get_details_chart(project_id, error_id, user_id, **data):
pg_sub_query = __get_basic_constraints()
pg_sub_query.append("error_id = %(error_id)s")
pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
pg_sub_query_chart.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
else:
data["startDate"] = int(data["startDate"])
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
else:
data["endDate"] = int(data["endDate"])
density = int(data.get("density", 7))
step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"project_id": project_id,
"userId": user_id,
"step_size": step_size,
"error_id": error_id}
main_pg_query = f"""\
SELECT %(error_id)s AS error_id,
browsers_partition,
os_partition,
device_partition,
country_partition,
chart
FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
FROM (SELECT *
FROM (SELECT user_browser AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_browser
ORDER BY count DESC) AS count_per_browser_query
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
FROM (SELECT user_browser_version AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_browser = count_per_browser_query.name
GROUP BY user_browser_version
ORDER BY count DESC) AS count_per_version_details) AS browesr_version_details
ON (TRUE)) AS browser_details) AS browser_details
INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
FROM (SELECT *
FROM (SELECT user_os AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_os
ORDER BY count DESC) AS count_per_os_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_os = count_per_os_details.name
GROUP BY user_os_version
ORDER BY count DESC) AS count_per_version_query
) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
FROM (SELECT *
FROM (SELECT user_device_type AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_device_type
ORDER BY count DESC) AS count_per_device_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
FROM (SELECT CASE
WHEN user_device = '' OR user_device ISNULL
THEN 'unknown'
ELSE user_device END AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_device_type = count_per_device_details.name
GROUP BY user_device_type, user_device
ORDER BY count DESC) AS count_per_device_details
) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
FROM (SELECT user_country AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_country
ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)}
) AS chart_details ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone()
if row is None:
return {"errors": ["error not found"]}
row["tags"] = __process_tags(row)
return {"data": helper.dict_to_camel_case(row)}
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):

@@ -67,10 +67,6 @@ def __check_database_pg(*_):
}
def __not_supported(*_):
return {"errors": ["not supported"]}
def __always_healthy(*_):
return {
"health": True,

@@ -62,20 +62,6 @@ def get_by_session_id(session_id, project_id, issue_type=None):
return helper.list_to_camel_case(cur.fetchall())
def get_types_by_project(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""SELECT type,
{ORDER_QUERY}>=0 AS visible,
{ORDER_QUERY} AS order,
{NAME_QUERY} AS name
FROM (SELECT DISTINCT type
FROM public.issues
WHERE project_id = %(project_id)s) AS types
ORDER BY "order";""", {"project_id": project_id}))
return helper.list_to_camel_case(cur.fetchall())
def get_all_types():
return [
{

@@ -19,16 +19,6 @@ def __find_groups(client, token):
return response["logGroups"] + __find_groups(client, response["nextToken"])
def __make_stream_filter(start_time, end_time):
def __valid_stream(stream):
return "firstEventTimestamp" in stream and not (
stream['firstEventTimestamp'] <= start_time and stream["lastEventTimestamp"] <= start_time
or stream['firstEventTimestamp'] >= end_time and stream["lastEventTimestamp"] >= end_time
)
return __valid_stream
def __find_streams(project_id, log_group, client, token, stream_filter):
d_args = {"logGroupName": log_group, "orderBy": 'LastEventTime', 'limit': 50}
if token is not None and len(token) > 0:

@@ -60,20 +60,6 @@ def get(project_id, integration):
return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))
def get_all_by_type(integration):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""\
SELECT integrations.*
FROM public.integrations INNER JOIN public.projects USING(project_id)
WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""",
{"provider": integration})
)
r = cur.fetchall()
return helper.list_to_camel_case(r, flatten=True)
def edit(project_id, integration, changes):
if "projectId" in changes:
changes.pop("project_id")

@@ -187,33 +187,6 @@ def search(tenant_id, project_id, key, value):
return {"data": [k[key] for k in value]}
def get_available_keys(project_id):
all_metas = get(project_id=project_id)
return [k["key"] for k in all_metas]
def get_by_session_id(project_id, session_id):
all_metas = get(project_id=project_id)
if len(all_metas) == 0:
return []
keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s
AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
cur.execute(query=query)
session_metas = cur.fetchall()
results = []
for m in session_metas:
r = {}
for k in m.keys():
r[keys[k]] = m[k]
results.append(r)
return results
def get_keys_by_projects(project_ids):
if project_ids is None or len(project_ids) == 0:
return {}

@@ -3,7 +3,6 @@ import math
import schemas
from chalicelib.core import metadata
from chalicelib.utils import args_transformer
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
@@ -12,25 +11,6 @@ from chalicelib.utils.metrics_helper import __get_step_size
logger = logging.getLogger(__name__)
# Written by David Aznaurov, inspired by numpy.quantile
def __quantiles(a, q, interpolation='higher'):
arr = a.copy()
arr = sorted(arr)
if isinstance(q, list):
ind = [qi * (len(arr) - 1) for qi in q]
else:
ind = q * (len(arr) - 1)
if interpolation == 'higher':
if isinstance(q, list):
ind = [math.ceil(i) for i in ind]
else:
ind = math.ceil(ind)
if isinstance(q, list):
return [arr[i] for i in ind]
else:
return arr[ind]
def __get_constraints(project_id, time_constraint=True, chart=False, duration=True, project=True,
project_identifier="project_id",
main_table="sessions", time_column="start_ts", data={}):
@@ -174,300 +154,6 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return results
def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors",
time_constraint=False)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False,
chart=True, data=args, main_table="errors", time_column="timestamp",
project=False, duration=False)
pg_sub_query_subset.append("m_errors.source = 'js_exception'")
pg_sub_query_subset.append("errors.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("errors.timestamp<%(endTimestamp)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH errors AS (SELECT DISTINCT session_id, timestamp
FROM events.errors
INNER JOIN public.errors AS m_errors USING (error_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(sessions), 0) AS count
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT session_id
FROM errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
results = {
"count": 0 if len(rows) == 0 else \
__count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_sub_query_subset),
"impactedSessions": sum([r["count"] for r in rows]),
"chart": rows
}
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
count = __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_sub_query_subset, **args)
results["progress"] = helper.__progress(old_val=count, new_val=results["count"])
return results
def __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_sub_query, **args):
pg_query = f"""WITH errors AS (SELECT DISTINCT error_id
FROM events.errors
INNER JOIN public.errors AS m_errors USING (error_id)
WHERE {" AND ".join(pg_sub_query)})
SELECT COALESCE(COUNT(*), 0) AS count
FROM errors;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
return cur.fetchone()["count"]
def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=False,
chart=False, data=args, main_table="m_errors", duration=False)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="errors_subsest", time_column="timestamp",
duration=False)
pg_sub_query_subset.append("errors.timestamp >= %(startTimestamp)s")
pg_sub_query_subset.append("errors.timestamp < %(endTimestamp)s")
pg_sub_query_chart.append("errors_subsest.error_id = top_errors.error_id")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH errors_subsest AS (SELECT session_id, error_id, timestamp
FROM events.errors
INNER JOIN public.errors AS m_errors USING (error_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT *
FROM (SELECT error_id, COUNT(sub_errors) AS count, count(DISTINCT session_id) AS sessions_count
FROM (SELECT error_id, session_id
FROM events.errors
INNER JOIN public.errors AS m_errors USING (error_id)
WHERE {" AND ".join(pg_sub_query_subset)}) AS sub_errors
GROUP BY error_id
ORDER BY sessions_count DESC, count DESC
LIMIT 10) AS top_errors
INNER JOIN LATERAL (SELECT message AS error
FROM public.errors
WHERE project_id = %(project_id)s
AND errors.error_id = top_errors.error_id) AS errors_details ON(TRUE)
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence_at,
MIN(timestamp) AS first_occurrence_at
FROM events.errors
WHERE error_id = top_errors.error_id
GROUP BY error_id) AS errors_time ON (TRUE)
INNER JOIN LATERAL (SELECT jsonb_agg(chart) AS chart
FROM (SELECT generated_timestamp AS timestamp, COALESCE(COUNT(sessions), 0) AS count
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT DISTINCT session_id
FROM errors_subsest
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart) AS chart ON (TRUE);"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
for i in range(len(rows)):
rows[i] = helper.dict_to_camel_case(rows[i])
rows[i]["sessions"] = rows[i].pop("sessionsCount")
rows[i]["error_id"] = rows[i]["errorId"]
rows[i]["startTimestamp"] = startTimestamp
rows[i]["endTimestamp"] = endTimestamp
return rows
def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
rows = __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
results = helper.dict_to_camel_case(rows[0])
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
rows = __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
for key in previous.keys():
results[key + "Progress"] = helper.__progress(old_val=previous[key], new_val=results[key])
return results
def __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
pg_sub_query.append("pages.timestamp<%(endTimestamp)s")
pg_sub_query.append("(pages.dom_content_loaded_time > 0 OR pages.first_contentful_paint_time > 0)")
pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_time, 0)), 0) AS avg_dom_content_load_start,
COALESCE(AVG(NULLIF(pages.first_contentful_paint_time, 0)), 0) AS avg_first_contentful_pixel
FROM (SELECT pages.dom_content_loaded_time, pages.first_contentful_paint_time
FROM events.pages
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
) AS pages;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return rows
def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
row = __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args)
results = helper.dict_to_camel_case(row)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
row = __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
for key in previous:
results[key + "Progress"] = helper.__progress(old_val=previous[key], new_val=results[key])
return results
def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
pg_query = f"""SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return row
RESOURCS_TYPE_TO_DB_TYPE = {
"img": "IMG",
"fetch": "REQUEST",
"stylesheet": "STYLESHEET",
"script": "SCRIPT",
"other": "OTHER",
"media": "MEDIA"
}
def __get_resource_type_from_db_type(db_type):
db_type = db_type.lower()
return RESOURCS_TYPE_TO_DB_TYPE.get(db_type, db_type)
def __get_resource_db_type_from_type(resource_type):
resource_type = resource_type.upper()
return {v: k for k, v in RESOURCS_TYPE_TO_DB_TYPE.items()}.get(resource_type, resource_type)
KEYS = {
'startTimestamp': args_transformer.int_arg,
'endTimestamp': args_transformer.int_arg,
'density': args_transformer.int_arg,
'performanceDensity': args_transformer.int_arg,
'platform': args_transformer.string
}
def dashboard_args(params):
args = {}
if params is not None:
for key in params.keys():
if key in KEYS.keys():
args[key] = KEYS[key](params.get(key))
return args
def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT user_country, COUNT(session_id) AS count
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_country
ORDER BY user_country;"""
cur.execute(cur.mogrify(pg_query,
{"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
rows = cur.fetchall()
return {"count": sum(i["count"] for i in rows), "chart": helper.list_to_camel_case(rows)}
def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH pages AS (SELECT pages.response_time,
pages.first_paint_time,
pages.dom_content_loaded_time,
pages.ttfb,
pages.time_to_interactive
FROM events.pages
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND pages.timestamp >= %(startTimestamp)s
AND pages.timestamp < %(endTimestamp)s
AND (pages.response_time > 0
OR pages.first_paint_time > 0
OR pages.dom_content_loaded_time > 0
OR pages.ttfb > 0
OR pages.time_to_interactive > 0
))
SELECT (SELECT COALESCE(AVG(pages.response_time), 0)
FROM pages
WHERE pages.response_time > 0) AS avg_response_time,
(SELECT COALESCE(AVG(pages.first_paint_time), 0)
FROM pages
WHERE pages.first_paint_time > 0) AS avg_first_paint,
(SELECT COALESCE(AVG(pages.dom_content_loaded_time), 0)
FROM pages
WHERE pages.dom_content_loaded_time > 0) AS avg_dom_content_loaded,
(SELECT COALESCE(AVG(pages.ttfb), 0)
FROM pages
WHERE pages.ttfb > 0) AS avg_till_first_bit,
(SELECT COALESCE(AVG(pages.time_to_interactive), 0)
FROM pages
WHERE pages.time_to_interactive > 0) AS avg_time_to_interactive,
(SELECT COUNT(pages.session_id)
FROM events.pages
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}) AS count_requests;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def __get_neutral(rows, add_All_if_empty=True):
neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}
if add_All_if_empty and len(neutral.keys()) <= 1:
@@ -481,56 +167,6 @@ def __merge_rows_with_neutral(rows, neutral):
return rows
def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="requests", time_column="timestamp", project=False,
duration=False)
pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("requests.timestamp<%(endTimestamp)s")
pg_sub_query_subset.append("requests.status/100 = %(status_code)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH requests AS(SELECT requests.host, timestamp
FROM events_common.requests INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(JSONB_AGG(requests) FILTER ( WHERE requests IS NOT NULL ), '[]'::JSONB) AS keys
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT requests.host, COUNT(*) AS count
FROM requests
WHERE {" AND ".join(pg_sub_query_chart)}
GROUP BY host
ORDER BY count DESC
LIMIT 5
) AS requests ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 4, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows, key="host")
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
result = {"4xx": rows}
params["status_code"] = 5
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows, key="host")
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
result["5xx"] = rows
return result
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -656,20 +292,6 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now
return helper.list_to_camel_case(rows)
def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
return __get_calls_errors_4xx_or_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
return __get_calls_errors_4xx_or_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -869,41 +491,6 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return rows
def get_performance_avg_page_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=19, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
location_constraints = []
location_constraints_vals = {}
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="pages", time_column="timestamp",
duration=False)
pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
pg_query = f"""WITH pages AS(SELECT pages.load_time, timestamp
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
)
SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.load_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT pages.load_time
FROM pages
WHERE {" AND ".join(pg_sub_query_chart)}
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {**params, **location_constraints_vals, **__get_constraint_values(args)}))
rows = cur.fetchall()
return rows
def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:

@@ -1,22 +0,0 @@
import requests
from decouple import config
from chalicelib.core import projects
def start_replay(project_id, session_id, device, os_version, mob_url):
r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={
"projectId": project_id,
"projectKey": projects.get_project_key(project_id),
"session_id": session_id,
"device": device,
"osVersion": os_version,
"mobUrl": mob_url
})
if r.status_code != 200:
print("failed replay middleware")
print("status code: %s" % r.status_code)
print(r.text)
return r.text
result = r.json()
result["url"] = config("IOS_MIDDLEWARE")
return result

@@ -1,10 +0,0 @@
def int_arg(x):
return int(x) if x is not None else int(0)
def float_arg(x):
return float(x) if x is not None else float(0)
def string(x):
return x

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.32.3
-boto3==1.35.51
+boto3==1.35.54
pyjwt==2.9.0
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.3

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.32.3
-boto3==1.35.51
+boto3==1.35.54
pyjwt==2.9.0
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.3

@@ -138,22 +138,6 @@ def get_card_sessions(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if metric_id.isnumeric():
metric_id = int(metric_id)
else:
return {"errors": ["invalid card_id"]}
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CardSessionsSchema = Body(...),
@@ -165,17 +149,6 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
def get_card_errors_list(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):

@@ -345,25 +345,6 @@ class MetadataSchema(BaseModel):
_transform_key = field_validator('key', mode='before')(remove_whitespace)
class EmailPayloadSchema(BaseModel):
auth: str = Field(...)
email: EmailStr = Field(...)
link: str = Field(...)
message: str = Field(...)
_transform_email = field_validator('email', mode='before')(transform_email)
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
email: EmailStr = Field(...)
invitation_link: str = Field(...)
client_id: str = Field(...)
sender_name: str = Field(...)
_transform_email = field_validator('email', mode='before')(transform_email)
class _AlertMessageSchema(BaseModel):
type: str = Field(...)
value: str = Field(...)
@@ -1373,6 +1354,7 @@ class LiveFilterType(str, Enum):
USER_BROWSER = FilterType.USER_BROWSER.value
USER_DEVICE = FilterType.USER_DEVICE.value
USER_COUNTRY = FilterType.USER_COUNTRY.value
+USER_STATE = FilterType.USER_STATE.value
USER_ID = FilterType.USER_ID.value
USER_ANONYMOUS_ID = FilterType.USER_ANONYMOUS_ID.value
REV_ID = FilterType.REV_ID.value
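
USER_STATE is the single line this hunk adds, wiring the commit's "support userState filter for assist" into LiveFilterType. A hedged example of the live-session filter it enables follows; the payload field names are assumed from the schema conventions in this file, not shown in the diff.

# illustrative only: search live (assist) sessions by user state
live_filter = {"filters": [{"type": "userState",
                            "value": ["California"],
                            "operator": "is"}]}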

@@ -6,10 +6,10 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.32.3"
boto3 = "==1.35.51"
boto3 = "==1.35.54"
pyjwt = "==2.9.0"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
elasticsearch = "==8.15.1"
jira = "==3.8.0"
cachetools = "==5.5.0"

@@ -0,0 +1,6 @@
from decouple import config
def get_ice_servers():
return config("iceServers") if config("iceServers", default=None) is not None \
and len(config("iceServers")) > 0 else None

@@ -9,7 +9,7 @@ from chalicelib.core import funnels, issues, heatmaps, sessions_insights, sessio
product_analytics, custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.storage import StorageClient, extra
+from chalicelib.utils.storage import extra
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
logging.info(">>> Using experimental error search")
@@ -198,23 +198,6 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
# def __merge_metric_with_data(metric: schemas.CardSchema,
# data: schemas.CardSessionsSchema) -> schemas.CardSchema:
# metric.startTimestamp = data.startTimestamp
# metric.endTimestamp = data.endTimestamp
# metric.page = data.page
# metric.limit = data.limit
# metric.density = data.density
# if data.series is not None and len(data.series) > 0:
# metric.series = data.series
#
# # if len(data.filters) > 0:
# # for s in metric.series:
# # s.filter.filters += data.filters
# # metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
# return metric
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
@@ -232,38 +215,6 @@ def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSe
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if raw_metric is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:
return {"seriesId": s.series_id, "seriesName": s.name,
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if raw_metric is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:
return {"seriesId": s.series_id, "seriesName": s.name,
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if len(data.series) == 0:
@@ -279,15 +230,6 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
return results
def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
if len(data.series) == 0:
return []
data.series[0].filter.startTimestamp = data.startTimestamp
data.series[0].filter.endTimestamp = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
return data
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.filters) > 0 or len(data.series) > 0:
filters = [f.model_dump(by_alias=True) for f in data.filters] \
@@ -329,7 +271,6 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.FUNNEL: __get_funnel_issues,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
}
@@ -377,9 +318,8 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
params["default_config"] = json.dumps(data.default_config.model_dump())
params["card_info"] = None
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data,
"default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))

@@ -302,118 +302,6 @@ def get_details(project_id, error_id, user_id, **data):
return {"data": helper.dict_to_camel_case(row)}
def get_details_chart(project_id, error_id, user_id, **data):
pg_sub_query = __get_basic_constraints()
pg_sub_query.append("error_id = %(error_id)s")
pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
pg_sub_query_chart.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
else:
data["startDate"] = int(data["startDate"])
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
else:
data["endDate"] = int(data["endDate"])
density = int(data.get("density", 7))
step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"project_id": project_id,
"userId": user_id,
"step_size": step_size,
"error_id": error_id}
main_pg_query = f"""\
SELECT %(error_id)s AS error_id,
browsers_partition,
os_partition,
device_partition,
country_partition,
chart
FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
FROM (SELECT *
FROM (SELECT user_browser AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_browser
ORDER BY count DESC) AS count_per_browser_query
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
FROM (SELECT user_browser_version AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_browser = count_per_browser_query.name
GROUP BY user_browser_version
ORDER BY count DESC) AS count_per_version_details) AS browesr_version_details
ON (TRUE)) AS browser_details) AS browser_details
INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
FROM (SELECT *
FROM (SELECT user_os AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_os
ORDER BY count DESC) AS count_per_os_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_os = count_per_os_details.name
GROUP BY user_os_version
ORDER BY count DESC) AS count_per_version_query
) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
FROM (SELECT *
FROM (SELECT user_device_type AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_device_type
ORDER BY count DESC) AS count_per_device_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
FROM (SELECT CASE
WHEN user_device = '' OR user_device ISNULL
THEN 'unknown'
ELSE user_device END AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_device_type = count_per_device_details.name
GROUP BY user_device_type, user_device
ORDER BY count DESC) AS count_per_device_details
) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
FROM (SELECT user_country AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_country
ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)}
) AS chart_details ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone()
if row is None:
return {"errors": ["error not found"]}
row["tags"] = __process_tags(row)
return {"data": helper.dict_to_camel_case(row)}
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):

@@ -68,10 +68,6 @@ def __check_database_pg(*_):
}
def __not_supported(*_):
return {"errors": ["not supported"]}
def __always_healthy(*_):
return {
"health": True,

@@ -1,9 +1,7 @@
import logging
import math
from math import isnan
import schemas
from chalicelib.utils import args_transformer
from chalicelib.utils import ch_client
from chalicelib.utils import exp_ch_helper
from chalicelib.utils import helper
@@ -208,288 +206,6 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return results
def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="errors", data=args)
ch_sub_query.append("errors.event_type = 'ERROR'")
ch_sub_query.append("errors.source = 'js_exception'")
ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args)
ch_sub_query_chart.append("errors.event_type = 'ERROR'")
ch_sub_query_chart.append("errors.source = 'js_exception'")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""\
SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(DISTINCT errors.session_id) AS count
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;\
"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
results = {
"count": 0 if len(rows) == 0 else __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp,
ch_sub_query),
"impactedSessions": sum([r["count"] for r in rows]),
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
density=density,
neutral={"count": 0})
}
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
count = __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp, ch_sub_query,
meta=len(meta_condition) > 0, **args)
results["progress"] = helper.__progress(old_val=count, new_val=results["count"])
return results
def __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp, ch_sub_query, meta=False, **args):
ch_query = f"""\
SELECT
COUNT(DISTINCT errors.message) AS count
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
WHERE {" AND ".join(ch_sub_query)};"""
count = ch.execute(query=ch_query,
params={"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
if count is not None and len(count) > 0:
return count[0]["count"]
return 0
def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="errors", data=args)
ch_sub_query.append("errors.event_type='ERROR'")
ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args)
ch_sub_query_chart.append("errors.event_type='ERROR'")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT *
FROM (SELECT errors.error_id AS error_id,
errors.message AS error,
COUNT(1) AS count,
COUNT(DISTINCT errors.session_id) AS sessions
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
WHERE {" AND ".join(ch_sub_query)}
GROUP BY errors.error_id, errors.message) AS errors_chart
INNER JOIN (SELECT error_id AS error_id,
toUnixTimestamp(MAX(datetime))*1000 AS lastOccurrenceAt,
toUnixTimestamp(MIN(datetime))*1000 AS firstOccurrenceAt
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
WHERE event_type='ERROR' AND project_id=%(project_id)s
GROUP BY error_id) AS errors_time USING(error_id)
ORDER BY sessions DESC, count DESC LIMIT 10;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
# print(f"got {len(rows)} rows")
if len(rows) == 0:
return []
error_ids = [r["error_id"] for r in rows]
ch_sub_query.append("error_id = %(error_id)s")
errors = {}
for error_id in error_ids:
ch_query = f"""\
SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(1) AS count
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
params["error_id"] = error_id
errors[error_id] = ch.execute(query=ch_query, params=params)
for row in rows:
row["startTimestamp"] = startTimestamp
row["endTimestamp"] = endTimestamp
row["chart"] = __complete_missing_steps(rows=errors[row["error_id"]], start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"count": 0})
return rows
def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with ch_client.ClickHouseClient() as ch:
rows = __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
results = helper.dict_to_camel_case(rows[0])
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
rows = __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
for key in previous.keys():
results[key + "Progress"] = helper.__progress(old_val=previous[key], new_val=results[key])
return results
def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query.append("pages.event_type='LOCATION'")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("(pages.dom_content_loaded_event_end>0 OR pages.first_contentful_paint_time>0)")
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""SELECT COALESCE(avgOrNull(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
COALESCE(avgOrNull(NULLIF(pages.first_contentful_paint_time,0)),0) AS avg_first_contentful_pixel
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return rows
def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
results = {}
with ch_client.ClickHouseClient() as ch:
rows = __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
results = helper.dict_to_camel_case(rows[0])
for key in results:
if isnan(results[key]):
results[key] = 0
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
rows = __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
for key in previous:
results[key + "Progress"] = helper.__progress(old_val=previous[key], new_val=results[key])
return results
def __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(avgOrNull(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return rows
RESOURCS_TYPE_TO_DB_TYPE = {
"img": "IMG",
"fetch": "REQUEST",
"stylesheet": "STYLESHEET",
"script": "SCRIPT",
"other": "OTHER",
"media": "MEDIA"
}
def __get_resource_type_from_db_type(db_type):
db_type = db_type.lower()
return RESOURCS_TYPE_TO_DB_TYPE.get(db_type, db_type)
def __get_resource_db_type_from_type(resource_type):
resource_type = resource_type.upper()
return {v: k for k, v in RESOURCS_TYPE_TO_DB_TYPE.items()}.get(resource_type, resource_type)
KEYS = {
'startTimestamp': args_transformer.int_arg,
'endTimestamp': args_transformer.int_arg,
'density': args_transformer.int_arg,
'performanceDensity': args_transformer.int_arg,
'platform': args_transformer.string
}


def dashboard_args(params):
    args = {}
    if params is not None:
        for key in params:
            if key in KEYS:
                args[key] = KEYS[key](params.get(key))
    return args
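

# Illustrative call (assumes args_transformer.int_arg parses numeric strings
# to ints and args_transformer.string passes values through; both come from an
# import outside this hunk):
#     dashboard_args({"density": "7", "platform": "web", "other": "x"})
#     -> {"density": 7, "platform": "web"}
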
def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT user_country, COUNT(1) AS count
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
WHERE {" AND ".join(ch_sub_query)}
GROUP BY user_country
ORDER BY user_country;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return {"count": sum(i["count"] for i in rows), "chart": helper.list_to_camel_case(rows)}
def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query.append("pages.event_type='LOCATION'")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
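        # Note: AND binds tighter than OR in SQL, so the filter below groups as
        # "(isNotNull(x) AND x > 0) OR (isNotNull(y) AND y > 0) OR ...",
        # i.e. keep pages that report at least one positive timing metric.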
ch_query = f"""SELECT COALESCE(avgOrNull(if(pages.response_time>0,pages.response_time,null)),0) AS avg_response_time,
COALESCE(avgOrNull(if(pages.first_paint>0,pages.first_paint,null)),0) AS avg_first_paint,
COALESCE(avgOrNull(if(pages.dom_content_loaded_event_time>0,pages.dom_content_loaded_event_time,null)),0) AS avg_dom_content_loaded,
COALESCE(avgOrNull(if(pages.ttfb>0,pages.ttfb,null)),0) AS avg_till_first_bit,
COALESCE(avgOrNull(if(pages.time_to_interactive>0,pages.time_to_interactive,null)),0) AS avg_time_to_interactive,
(SELECT COUNT(1) FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages WHERE {" AND ".join(ch_sub_query)}) AS count_requests
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
WHERE {" AND ".join(ch_sub_query)}
AND (isNotNull(pages.response_time) AND pages.response_time>0 OR
isNotNull(pages.first_paint) AND pages.first_paint>0 OR
isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0 OR
isNotNull(pages.ttfb) AND pages.ttfb>0 OR
isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0);"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.dict_to_camel_case(rows[0])


def __get_domains_errors_neutral(rows):
    neutral = {key: 0 for row in rows for key in row}
    if len(neutral) == 0:
@@ -503,51 +219,6 @@ def __merge_rows_with_neutral(rows, neutral):
    return rows


def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                       endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
ch_sub_query.append("requests.event_type='REQUEST'")
ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
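        # "LIMIT 5 BY timestamp" is ClickHouse's per-group limit: it keeps only
        # the five busiest domains within each time bucket before aggregation.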
ch_query = f"""SELECT timestamp,
groupArray([domain, toString(count)]) AS keys
FROM (SELECT toUnixTimestamp(toStartOfInterval(requests.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
requests.url_host AS domain, COUNT(1) AS count
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
WHERE {" AND ".join(ch_sub_query)}
GROUP BY timestamp,requests.url_host
ORDER BY timestamp, count DESC
LIMIT 5 BY timestamp) AS domain_stats
GROUP BY timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 4, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
rows = __nested_array_to_dict_array(rows)
neutral = __get_domains_errors_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
result = {"4xx": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral=neutral)}
params["status_code"] = 5
rows = ch.execute(query=ch_query, params=params)
rows = __nested_array_to_dict_array(rows)
neutral = __get_domains_errors_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
result["5xx"] = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral=neutral)
return result
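

# intDiv(requests.status, 100) buckets status codes by class, which is why the
# same query serves both 4xx (%(status_code)s = 4) and 5xx (= 5); the Python
# equivalent of that bucketing, for illustration:
def _status_class_sketch(status: int) -> int:
    return status // 100  # 404 -> 4, 503 -> 5


assert _status_class_sketch(404) == 4 and _status_class_sketch(503) == 5
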
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
@@ -669,20 +340,6 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now
return helper.list_to_camel_case(rows)


def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                         platform=None, **args):
    return __get_calls_errors_4xx_or_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
                                         endTimestamp=endTimestamp,
                                         platform=platform, **args)


def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                         platform=None, **args):
    return __get_calls_errors_4xx_or_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
                                         endTimestamp=endTimestamp,
                                         platform=platform, **args)


def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                        platform=None, density=7, **args):
    step_size = __get_step_size(startTimestamp, endTimestamp, density)
@@ -804,50 +461,6 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
                                                   "third_party": 0}))


def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                       endTimestamp=TimeUTC.now(),
                                       density=19, resources=None, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
location_constraints = []
meta_condition = __get_meta_constraint(args)
location_constraints_vals = {}
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "LOCATION":
location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s")
location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True,
data=args)
ch_sub_query_chart.append("pages.event_type='LOCATION'")
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("pages.load_event_end>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(avgOrNull(pages.load_event_end),0) AS value
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
WHERE {" AND ".join(ch_sub_query_chart)}
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **location_constraints_vals, **__get_constraint_values(args)})
pages = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
return pages
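

# __complete_missing_steps is defined outside this hunk; a hypothetical sketch
# of the contract it appears to fulfil: one row per density bucket, with the
# neutral row filling buckets the query returned nothing for (bucket
# boundaries here are assumed, not taken from the source):
def _complete_missing_steps_sketch(rows, start_time, end_time, density, neutral):
    step = (end_time - start_time) // density
    by_ts = {r["timestamp"]: r for r in rows}
    return [by_ts.get(start_time + i * step, {"timestamp": start_time + i * step, **neutral})
            for i in range(density)]
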
def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
results = {}

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.32.3
boto3==1.35.51
boto3==1.35.54
pyjwt==2.9.0
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.3

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.32.3
boto3==1.35.51
boto3==1.35.54
pyjwt==2.9.0
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.3

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.32.3
boto3==1.35.51
boto3==1.35.54
pyjwt==2.9.0
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.3