feat(api): dashboard split old metrics grouped data 2/3

commit b3018f9f76
parent 9f2db6f42e

3 changed files with 168 additions and 24 deletions
@@ -992,13 +992,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
                                 FROM public.sessions
                                 INNER JOIN events.pages USING (session_id)
                                 WHERE {" AND ".join(pg_sub_query_subset)})
-                SELECT COALESCE(avg, 0) AS avg, chart
+                SELECT COALESCE(avg, 0) AS value, chart
                 FROM (SELECT AVG(dom_building_time) FROM pages) AS avg
                 LEFT JOIN
                      (SELECT jsonb_agg(chart) AS chart
                       FROM (
                             SELECT generated_timestamp AS timestamp,
-                                   COALESCE(AVG(dom_building_time), 0) AS avg
+                                   COALESCE(AVG(dom_building_time), 0) AS value
                             FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                             LEFT JOIN LATERAL ( SELECT pages.dom_building_time
                                                 FROM pages
@@ -1154,7 +1154,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
         pg_sub_query_chart.append(f"url = %(value)s")
     with pg_client.PostgresClient() as cur:
         pg_query = f"""SELECT generated_timestamp AS timestamp,
-                              COALESCE(AVG(pages.response_time),0) AS avg
+                              COALESCE(AVG(pages.response_time),0) AS value
                        FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                        LEFT JOIN LATERAL (
                                SELECT response_time
@@ -1175,7 +1175,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
                        WHERE {" AND ".join(pg_sub_query)};"""
         cur.execute(cur.mogrify(pg_query, params))
         avg = cur.fetchone()["avg"]
-    return {"avg": avg, "chart": rows}
+    return {"value": avg, "chart": rows}


 @dev.timed
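The three hunks above are a pure rename of the reported key from "avg" to "value", in both the SQL projections and the returned payload, so consumers reading the old key have to migrate. A minimal sketch of the new response shape (only the key names come from the diff; the numbers are illustrative):

# Shape of get_pages_response_time's result after this commit.
response = {
    "value": 238.5,  # top-level metric, previously exposed as "avg"
    "chart": [       # per-bucket points use the same renamed key
        {"timestamp": 1609459200000, "value": 241.0},
        {"timestamp": 1609462800000, "value": 236.1},
    ],
}
value = response["value"]  # response["avg"] now raises KeyError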
@@ -2509,3 +2509,133 @@ def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now
         rows = cur.fetchall()

     return rows
+
+
+def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                                endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        rows = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args)
+        if len(rows) > 0:
+            results = helper.dict_to_camel_case(rows[0])
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        rows = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args)
+        if len(rows) > 0:
+            previous = helper.dict_to_camel_case(rows[0])
+            results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+    return results
+
+
+def __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
+    pg_sub_query.append("pages.timestamp<%(endTimestamp)s")
+    pg_sub_query.append("pages.dom_content_loaded_time > 0")
+    pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_time, 0)), 0) AS value
+                   FROM (SELECT pages.dom_content_loaded_time
+                         FROM events.pages
+                         INNER JOIN public.sessions USING (session_id)
+                         WHERE {" AND ".join(pg_sub_query)}
+                        ) AS pages;"""
+    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+              **__get_constraint_values(args)}
+    cur.execute(cur.mogrify(pg_query, params))
+    rows = cur.fetchall()
+    return rows
+
+
+def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                                endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args)
+        if len(rows) > 0:
+            results = helper.dict_to_camel_case(rows[0])
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args)
+        if len(rows) > 0:
+            previous = helper.dict_to_camel_case(rows[0])
+            results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+    return results
+
+
+def __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
+    pg_sub_query.append("pages.timestamp<%(endTimestamp)s")
+    pg_sub_query.append("pages.first_contentful_paint_time > 0")
+    pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.first_contentful_paint_time, 0)), 0) AS value
+                   FROM (SELECT pages.first_contentful_paint_time
+                         FROM events.pages
+                         INNER JOIN public.sessions USING (session_id)
+                         WHERE {" AND ".join(pg_sub_query)}
+                        ) AS pages;"""
+    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+              **__get_constraint_values(args)}
+    cur.execute(cur.mogrify(pg_query, params))
+    rows = cur.fetchall()
+    return rows
+
+
+
+
+def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                        endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        row = __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args)
+        results = helper.dict_to_camel_case(row)
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        row = __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args)
+
+        previous = helper.dict_to_camel_case(row)
+        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+    return results
+
+
+def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+
+    pg_query = f"""\
+        SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS value
+        FROM public.sessions
+        WHERE {" AND ".join(pg_sub_query)};"""
+    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+              **__get_constraint_values(args)}
+
+    cur.execute(cur.mogrify(pg_query, params))
+    row = cur.fetchone()
+    return row
+
+
+def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                           endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        row = __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args)
+        results = helper.dict_to_camel_case(row)
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        row = __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args)
+
+        previous = helper.dict_to_camel_case(row)
+        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+    return results
+
+
+def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+
+    pg_query = f"""\
+        SELECT COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS value
+        FROM public.sessions
+        WHERE {" AND ".join(pg_sub_query)};"""
+    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+              **__get_constraint_values(args)}
+
+    cur.execute(cur.mogrify(pg_query, params))
+    row = cur.fetchone()
+    return row
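Every public wrapper added above follows the same current-versus-previous-window pattern: query the metric for the requested range, slide the range back by its own length, query again, and attach a relative figure via helper.__progress (not part of this diff). A condensed, self-contained sketch of the pattern, assuming __progress computes a simple percentage change:

# Condensed sketch of the pattern shared by the new get_page_metrics_* /
# get_user_activity_* wrappers; fetch_value stands in for the __get_* helpers.
def metric_with_progress(fetch_value, start_ts, end_ts):
    results = {"value": fetch_value(start_ts, end_ts)}
    diff = end_ts - start_ts                      # window length
    end_ts, start_ts = start_ts, start_ts - diff  # slide back one full window
    previous = fetch_value(start_ts, end_ts)
    # assumed semantics of helper.__progress: percent change, guarding old_val == 0
    results["progress"] = 0 if previous == 0 else (results["value"] - previous) * 100 / previous
    return results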
@@ -326,17 +326,18 @@ def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPa
 @app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
 def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
     return {"data": [
-        *helper.explode_widget(key="count_sessions",
-                               data=dashboard.get_processed_sessions(project_id=projectId, **data.dict())),
+        {"key": "count_sessions",
+         "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
         *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()),
                                      "chart": dashboard.get_performance(project_id=projectId, **data.dict())
                                     .get("chart", [])}),
         *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())),
         *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())),
-        *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()),
-                               key="avg_pages_dom_buildtime"),
-        *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
-                               key="avg_pages_response_time"),
+        {"key": "avg_pages_dom_buildtime",
+         "data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())},
+        {"key": "avg_pages_response_time",
+         "data": dashboard.get_pages_response_time(project_id=projectId, **data.dict())
+         },
         *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())),
         *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()),
                                key="avg_time_to_render"),
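The overview route now mixes two styles: helper.explode_widget(...), which flattens a multi-metric payload into one {"key", "data"} entry per metric, and hand-written {"key": ..., "data": ...} entries for calls that already return a single widget. explode_widget's implementation is not in this diff; a hypothetical sketch of the behaviour the explicit entries replace:

# Hypothetical sketch of helper.explode_widget (not shown in this diff):
# with an explicit key it wraps the whole payload as one widget, otherwise
# it emits one widget per top-level field of the dict.
def explode_widget(data, key=None):
    if key is not None:
        return [{"key": key, "data": data}]
    return [{"key": k, "data": v} for k, v in data.items()]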
@@ -345,25 +346,32 @@ def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body
         *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **data.dict())),
     ]}


-@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
+@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
 def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
     return {"data": [
-        {"key": schemas.TemplateKeys.count_sessions,
-         "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
-        {"key": schemas.TemplateKeys.avg_image_load_time,
-         "data": dashboard.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
-        {"key": schemas.TemplateKeys.avg_page_load_time,
-         "data": dashboard.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
-        {"key": schemas.TemplateKeys.avg_request_load_time,
-         "data": dashboard.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.count_sessions,
+        #  "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_image_load_time,
+        #  "data": dashboard.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_page_load_time,
+        #  "data": dashboard.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_request_load_time,
+        #  "data": dashboard.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_dom_content_load_start,
+        #  "data": dashboard.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_first_contentful_pixel,
+        #  "data": dashboard.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())}
+        # {"key": schemas.TemplateKeys.avg_visited_pages,
+        #  "data": dashboard.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
+        # {"key": schemas.TemplateKeys.avg_session_duration,
+        #  "data": dashboard.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())}
+        # {"key": schemas.TemplateKeys.avg_pages_dom_buildtime,
+        #  "data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())},

         # *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())),
         # *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())),
         # *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()),
         #                        key="avg_pages_dom_buildtime"),
-        # *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
-        #                        key="avg_pages_response_time"),
+        *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
+                               key="avg_pages_response_time"),
-        # *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())),
-        # *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()),
-        #                        key="avg_time_to_render"),
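Note that overview2 moves from @app.post to @app.get while keeping the schemas.MetricPayloadSchema = Body(...) parameter. FastAPI accepts that declaration, but a GET request carrying a JSON body is unusual and some clients and proxies drop such bodies. A client sketch that can still exercise the endpoint (URL and payload fields are illustrative, not taken from this diff):

import httpx

# httpx.request allows a body on any HTTP method, unlike the httpx.get helper.
resp = httpx.request(
    "GET",
    "http://localhost:8000/1/dashboard/overview2",
    json={"startDate": 1609459200000, "endDate": 1609545600000},  # assumed field names
)
print(resp.json()["data"])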
@@ -908,3 +908,9 @@ class TemplateKeys(str, Enum):
     avg_request_load_time = "avg_request_load_time"
     avg_page_load_time = "avg_page_load_time"
     avg_image_load_time = "avg_image_load_time"
+    avg_dom_content_load_start = "avg_dom_content_load_start"
+    avg_first_contentful_pixel = "avg_first_contentful_pixel"
+    avg_visited_pages = "avg_visited_pages"
+    avg_session_duration = "avg_session_duration"
+    avg_pages_dom_buildtime = "avg_pages_dom_buildtime"
+    avg_pages_response_time = "avg_pages_response_time"
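Because TemplateKeys subclasses both str and Enum, each member compares and serializes as its plain string value, which is why the members can be used directly as the "key" field in the overview2 payload above:

from enum import Enum

class TemplateKeys(str, Enum):
    avg_pages_response_time = "avg_pages_response_time"

# str/Enum mixin: members behave as plain strings in comparisons and JSON output
assert TemplateKeys.avg_pages_response_time == "avg_pages_response_time"
assert TemplateKeys.avg_pages_response_time.value == "avg_pages_response_time"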