diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py
index 360cac96c..47b4237b0 100644
--- a/api/chalicelib/core/dashboard.py
+++ b/api/chalicelib/core/dashboard.py
@@ -2782,11 +2782,15 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
 
 
 
 def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                       endTimestamp=TimeUTC.now(), value=None, **args):
+                                       endTimestamp=TimeUTC.now(), value=None, density=20, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
+                                           chart=True, data=args)
     if value is not None:
         pg_sub_query.append("pages.path = %(value)s")
+        pg_sub_query_chart.append("pages.path = %(value)s")
     with pg_client.PostgresClient() as cur:
         pg_query = f"""SELECT COALESCE(AVG(pages.ttfb), 0) AS value
                        FROM events.pages
@@ -2795,11 +2799,25 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
                          AND pages.timestamp >= %(startTimestamp)s
                          AND pages.timestamp < %(endTimestamp)s
                          AND pages.ttfb > 0;"""
-        cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
-                                           "startTimestamp": startTimestamp,
-                                           "endTimestamp": endTimestamp,
-                                           "value": value, **__get_constraint_values(args)}))
+        params = {"step_size": step_size, "project_id": project_id,
+                  "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp,
+                  "value": value, **__get_constraint_values(args)}
+        cur.execute(cur.mogrify(pg_query, params))
         row = cur.fetchone()
+        pg_query = f"""SELECT generated_timestamp AS timestamp,
+                              COALESCE(AVG(pages.ttfb),0) AS value
+                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                            LEFT JOIN LATERAL (
+                               SELECT ttfb
+                               FROM events.pages INNER JOIN public.sessions USING (session_id)
+                               WHERE {" AND ".join(pg_sub_query_chart)} AND pages.ttfb > 0
+                            ) AS pages ON (TRUE)
+                       GROUP BY generated_timestamp
+                       ORDER BY generated_timestamp ASC;"""
+        cur.execute(cur.mogrify(pg_query, params))
+        rows = cur.fetchall()
+        row["chart"] = helper.list_to_camel_case(rows)
         row["unit"] = schemas.TemplatePredefinedUnits.millisecond
     return helper.dict_to_camel_case(row)
 
diff --git a/ee/api/chalicelib/core/dashboard.py b/ee/api/chalicelib/core/dashboard.py
index f94cd99b9..092b7c536 100644
--- a/ee/api/chalicelib/core/dashboard.py
+++ b/ee/api/chalicelib/core/dashboard.py
@@ -2658,23 +2658,41 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
 
 
 
 def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                       endTimestamp=TimeUTC.now(), value=None, **args):
-    ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+                                       endTimestamp=TimeUTC.now(), value=None, density=20, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
     meta_condition = __get_meta_constraint(args)
+    ch_sub_query_chart += meta_condition
+
+    ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
     ch_sub_query += meta_condition
     if value is not None:
         ch_sub_query.append("pages.url_path = %(value)s")
+        ch_sub_query_chart.append("pages.url_path = %(value)s")
     with ch_client.ClickHouseClient() as ch:
         ch_query = f"""SELECT COALESCE(AVG(pages.ttfb),0) AS value
                        FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
                        WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0;"""
-        rows = ch.execute(query=ch_query,
-                          params={"project_id": project_id,
-                                  "startTimestamp": startTimestamp,
-                                  "endTimestamp": endTimestamp,
-                                  "value": value, **__get_constraint_values(args)})
-        return helper.dict_to_camel_case(rows[0])
+        params = {"step_size": step_size, "project_id": project_id,
+                  "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp,
+                  "value": value, **__get_constraint_values(args)}
+        rows = ch.execute(query=ch_query, params=params)
+        results = rows[0]
+        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
+                              COALESCE(AVG(NULLIF(pages.ttfb ,0)),0) AS value
+                       FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+                       WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.ttfb) AND pages.ttfb>0
+                       GROUP BY timestamp
+                       ORDER BY timestamp;"""
+        rows = ch.execute(query=ch_query, params=params)
+        results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
+                                                                              end_time=endTimestamp,
+                                                                              density=density,
+                                                                              neutral={"value": 0}))
+        results["unit"] = schemas.TemplatePredefinedUnits.millisecond
+        return helper.dict_to_camel_case(results)
 
 
 def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
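
Note (not part of the patch above): the ClickHouse branch relies on __complete_missing_steps to pad the GROUP BY output so the chart always carries `density` points, while the PostgreSQL branch gets the same guarantee from generate_series emitting every bucket up front. The helper's real implementation is defined elsewhere in dashboard.py and is not shown here; the sketch below, with the hypothetical name fill_missing_steps, only illustrates the gap-filling idea under the assumption that each row carries millisecond "timestamp"/"value" keys, and its exact-match lookup is a simplification.

def fill_missing_steps(rows, start_time, end_time, density, neutral):
    # Bucket width in milliseconds; plays the role of the %(step_size)s used in the queries.
    step = (end_time - start_time) // density
    by_ts = {r["timestamp"]: r for r in rows}
    filled = []
    for i in range(density):
        ts = start_time + i * step
        # Keep the queried bucket when present, otherwise emit a neutral point.
        filled.append(by_ts.get(ts, {"timestamp": ts, **neutral}))
    return filled


# Two real buckets, density=4: two neutral points are filled in for the empty buckets.
print(fill_missing_steps(rows=[{"timestamp": 0, "value": 120.0},
                               {"timestamp": 200, "value": 90.0}],
                         start_time=0, end_time=400, density=4, neutral={"value": 0}))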