* refactor(chalice): upgraded dependencies
* feat(chalice): support heatmaps

* feat(chalice): support predefined metric users-count

* feat(chalice): support timeseries of users-count

* refactor(sourcemap-uploader): refactored code
Author: Kraiem Taha Yassine, 2024-06-21 14:26:18 +02:00 (committed by GitHub)
parent fca98c8c85
commit 7b6c02a955
7 changed files with 209 additions and 73 deletions


@@ -56,6 +56,7 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
                 schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
                 schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
                 schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-                schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
+                schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type,
+                schemas.MetricOfWebVitals.count_users: metrics.get_unique_users, }
    return supported.get(key, lambda *args: None)(project_id=project_id, **data)
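
Note on the dispatch above: unknown keys fall back to a no-op lambda, so unsupported metric keys return None instead of raising. One subtlety: the call site passes keyword arguments, so the fallback has to tolerate them (a bare lambda *args: None raises TypeError when invoked with project_id=...). A minimal sketch of the pattern, with stand-in names rather than the real schemas/metrics modules:

    def get_unique_users(project_id, **data):
        return {"value": 0}  # stand-in handler, not the real implementation

    SUPPORTED = {
        "countUsers": get_unique_users,  # the entry this commit adds
    }

    def get_metric(key, project_id, data):
        # The fallback accepts **kwargs because the call site passes
        # arguments by name.
        handler = SUPPORTED.get(key, lambda *args, **kwargs: None)
        return handler(project_id=project_id, **data)

    assert get_metric("countUsers", 1, {}) == {"value": 0}  # known key dispatches
    assert get_metric("unknown", 1, {}) is None             # unknown key is a no-op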


@@ -2913,3 +2913,52 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
row["chart"] = rows
row["unit"] = schemas.TemplatePredefinedUnits.count
return helper.dict_to_camel_case(row)
def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query.append("user_id IS NOT NULL")
pg_sub_query.append("user_id != ''")
pg_sub_query_chart.append("user_id IS NOT NULL")
pg_sub_query_chart.append("user_id != ''")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(sessions), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT DISTINCT user_id
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
results = {
"value": sum([r["value"] for r in rows]),
"chart": rows
}
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
pg_query = f"""SELECT COUNT(DISTINCT sessions.user_id) AS count
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
count = cur.fetchone()["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
return results
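
Why the chart query is shaped this way: generate_series emits one row per time bucket, and LEFT JOIN LATERAL keeps buckets that match no sessions, so the chart comes back zero-filled instead of gappy. A standalone sketch of the pattern, assuming a simplified sessions(project_id, start_ts, user_id) table and plain psycopg2 in place of the project's pg_client wrapper:

    import psycopg2

    # Table and column names are simplified assumptions, not the project's
    # actual schema helpers.
    BUCKETED_UNIQUE_USERS = """
    SELECT generated_timestamp AS timestamp,
           COALESCE(COUNT(sessions), 0) AS value
    FROM generate_series(%(start_ts)s, %(end_ts)s, %(step_size)s) AS generated_timestamp
         LEFT JOIN LATERAL (SELECT DISTINCT user_id
                            FROM sessions
                            WHERE project_id = %(project_id)s
                              AND start_ts >= generated_timestamp
                              AND start_ts < generated_timestamp + %(step_size)s
                              AND user_id IS NOT NULL
                              AND user_id != ''
                           ) AS sessions ON (TRUE)
    GROUP BY generated_timestamp
    ORDER BY generated_timestamp;
    """

    conn = psycopg2.connect("dbname=example")  # connection string is a placeholder
    with conn, conn.cursor() as cur:
        cur.execute(BUCKETED_UNIQUE_USERS,
                    {"project_id": 1, "start_ts": 0, "end_ts": 86_400_000, "step_size": 3_600_000})
        rows = cur.fetchall()  # every bucket is present; value is 0 where nothing matched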


@@ -189,7 +189,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
    with pg_client.PostgresClient() as cur:
        if metric_type == schemas.MetricType.timeseries:
            if view_type == schemas.MetricTimeseriesViewType.line_chart:
-               main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
+               if metric_of == schemas.MetricOfTimeseries.session_count:
+                   # main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
+                   main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.session_id, s.start_ts
{query_part})
SELECT generated_timestamp AS timestamp,
COUNT(s) AS count
@@ -200,6 +202,22 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;""", full_args)
                elif metric_of == schemas.MetricOfTimeseries.user_count:
                    main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.user_id, s.start_ts
{query_part}
AND s.user_id IS NOT NULL
AND s.user_id != '')
SELECT generated_timestamp AS timestamp,
COUNT(s) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT DISTINCT user_id AS s
FROM full_sessions
WHERE start_ts >= generated_timestamp
AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;""", full_args)
                else:
                    raise Exception(f"Unsupported metricOf:{metric_of}")
            else:
                main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count
                                             {query_part};""", full_args)
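
A subtlety worth spelling out: the distinct-user count is applied per bucket, so a user active in several buckets contributes to each of them; bucket values deliberately do not sum to a global unique-user count. A plain-Python model of the same bucketing, illustrative only (simplified to half-open buckets, while the SQL above is inclusive on both ends):

    from collections import defaultdict

    def user_count_series(events, start, end, step):
        """events: iterable of (timestamp, user_id) pairs."""
        buckets = defaultdict(set)
        for ts, user_id in events:
            if user_id and start <= ts <= end:
                buckets[start + ((ts - start) // step) * step].add(user_id)
        return [{"timestamp": t, "count": len(buckets[t])}
                for t in range(start, end + 1, step)]

    # u1 is active in two buckets and is counted in both.
    series = user_count_series([(0, "u1"), (5, "u2"), (12, "u1")], start=0, end=19, step=10)
    assert series == [{"timestamp": 0, "count": 2}, {"timestamp": 10, "count": 1}]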
@@ -726,7 +744,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
event.value,
value_key=e_k))
elif event_type == events.EventType.TAG.ui_type:
@@ -750,7 +769,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
event.value,
value_key=e_k))


@@ -994,6 +994,7 @@ class MetricOfWebVitals(str, Enum):
    avg_visited_pages = "avgVisitedPages"
    count_requests = "countRequests"
    count_sessions = "countSessions"
    count_users = "countUsers"


class MetricOfTable(str, Enum):
@@ -1012,6 +1013,7 @@ class MetricOfTable(str, Enum):
class MetricOfTimeseries(str, Enum):
    session_count = "sessionCount"
    user_count = "userCount"


class MetricOfFunnels(str, Enum):
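
With count_users and user_count registered above, a client can select the new metric by its camelCase value. A hypothetical payload sketch; the wrapper field names (metricType, viewType) and every value except "userCount"/"countUsers" are assumptions, not confirmed by this diff:

    timeseries_payload = {
        "metricType": "timeseries",    # assumed string value of schemas.MetricType.timeseries
        "viewType": "lineChart",       # assumed string value of MetricTimeseriesViewType.line_chart
        "metricOf": "userCount",       # schemas.MetricOfTimeseries.user_count, added in this commit
        "startTimestamp": 1718928000000,
        "endTimestamp": 1719014400000,
    }

    predefined_payload = {"metricOf": "countUsers"}  # schemas.MetricOfWebVitals.count_users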


@@ -2798,3 +2798,54 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
neutral={"value": 0}))
    helper.__time_value(results)
    return helper.dict_to_camel_case(results)


def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                     endTimestamp=TimeUTC.now(),
                     density=7, **args):
    step_size = __get_step_size(startTimestamp, endTimestamp, density)
    ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
    ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
    meta_condition = __get_meta_constraint(args)
    ch_sub_query += meta_condition
    ch_sub_query_chart += meta_condition
    # Unique users only make sense for sessions that carry a real user_id.
    ch_sub_query_chart.append("isNotNull(sessions.user_id)")
    ch_sub_query_chart.append("sessions.user_id!=''")
    with ch_client.ClickHouseClient() as ch:
        ch_query = f"""\
            SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
                   COUNT(DISTINCT sessions.user_id) AS value
            FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
            WHERE {" AND ".join(ch_sub_query_chart)}
            GROUP BY timestamp
            ORDER BY timestamp;"""
        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
        rows = ch.execute(query=ch_query, params=params)
        results = {
            "value": sum([r["value"] for r in rows]),
            # ClickHouse only returns buckets that contain data; zero-fill the rest.
            "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
                                              density=density,
                                              neutral={"value": 0})
        }
        # Shift the window back by its own length to get the previous-period baseline.
        diff = endTimestamp - startTimestamp
        endTimestamp = startTimestamp
        startTimestamp = endTimestamp - diff
        ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count
                       FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
                       WHERE {" AND ".join(ch_sub_query)};"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
                  **__get_constraint_values(args)}
        count = ch.execute(query=ch_query, params=params)
        count = count[0]["count"]
    results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.count
    return results
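
Both backends finish by comparing the current window against the immediately preceding window of equal length (the start/end swap above). helper.__progress itself is not part of this diff; a common definition, stated here as an assumption, is percent change with a zero-baseline guard:

    # Assumed shape of the progress helper; the real helper.__progress is not
    # shown in this diff and may differ.
    def progress(old_val, new_val):
        if old_val == 0:
            return 0 if new_val == 0 else 100
        return round((new_val - old_val) / old_val * 100, 2)

    # Example: 120 unique users now vs 100 in the previous window -> 20.0 (% growth).
    assert progress(old_val=100, new_val=120) == 20.0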


@@ -269,15 +269,30 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
    with ch_client.ClickHouseClient() as cur:
        if metric_type == schemas.MetricType.timeseries:
            if view_type == schemas.MetricTimeseriesViewType.line_chart:
                if metric_of == schemas.MetricOfTimeseries.session_count:
                    query = f"""SELECT toUnixTimestamp(
                                        toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
                                       ) * 1000 AS timestamp,
                                       COUNT(processed_sessions.session_id) AS count
-                               FROM (SELECT DISTINCT ON(s.session_id) s.session_id AS session_id,
+                               FROM (SELECT s.session_id AS session_id,
s.datetime AS datetime
{query_part}) AS processed_sessions
GROUP BY timestamp
ORDER BY timestamp;"""
                elif metric_of == schemas.MetricOfTimeseries.user_count:
                    query = f"""SELECT toUnixTimestamp(
toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
) * 1000 AS timestamp,
COUNT(DISTINCT processed_sessions.user_id) AS count
FROM (SELECT s.user_id AS user_id,
s.datetime AS datetime
{query_part}
WHERE isNotNull(s.user_id)
AND s.user_id != '') AS processed_sessions
GROUP BY timestamp
ORDER BY timestamp;"""
                else:
                    raise Exception(f"Unsupported metricOf:{metric_of}")
                main_query = cur.format(query, full_args)
            else:
                main_query = cur.format(f"""SELECT count(DISTINCT s.session_id) AS count
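
On the ClickHouse side, bucketing happens in the database: toStartOfInterval floors each session's datetime to the step boundary, and the * 1000 converts seconds to the millisecond timestamps the chart expects. The equivalent arithmetic in plain Python, for illustration (ignoring the timezone handling ClickHouse applies to DateTime values):

    def bucket_start_ms(ts_ms: int, step_seconds: int) -> int:
        step_ms = step_seconds * 1000
        return (ts_ms // step_ms) * step_ms

    # Example: with 60-second buckets, 12:00:59.900 floors to 12:00:00.000.
    assert bucket_start_ms(1_718_971_259_900, step_seconds=60) == 1_718_971_200_000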


@@ -77,9 +77,7 @@ console.log(command);
server,
)
)
-    .then((sourceFiles) => console.log('asd') ||
-      sourceFiles.length > 0
-        ? console.log(
+    .then((sourceFiles) => sourceFiles.length > 0 ? console.log(
`Successfully uploaded ${sourceFiles.length} sourcemap file${
sourceFiles.length > 1 ? 's' : ''
} for: \n` + sourceFiles.join('\t\n'),
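
The dropped console.log('asd') || was leftover debug output: console.log returns undefined, so the || always fell through to the sourceFiles.length > 0 check, and removing it changes nothing but the noise on stdout.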