feat(api): dashboard: use a single DB connection for the chart and data of the same widget

Taha Yassine Kraiem 2022-04-13 12:42:04 +02:00
parent 98109ee9a3
commit ae9580b345
2 changed files with 145 additions and 148 deletions
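
In short: each get_application_activity_avg_* endpoint already opens one DB connection to fetch the widget's aggregate value, but its get_performance_avg_* chart helper used to open a second connection of its own. This commit threads the already-open cursor (PostgreSQL) or client (ClickHouse) into the chart helper, so both queries of a widget share one connection. A minimal sketch of the pattern, using psycopg2 directly; the project's pg_client wrapper and the table/column names below are illustrative stand-ins, not the actual schema:

# Minimal sketch of the refactor this commit applies. psycopg2 is used directly
# here, whereas the project wraps it in pg_client.PostgresClient; the "events"
# table and its columns are illustrative, not the project's schema.
import psycopg2
import psycopg2.extras


def get_chart(cur, project_id, start_ts, end_ts):
    # After the refactor: the chart helper receives the caller's open cursor
    # instead of opening a second connection of its own.
    cur.execute("""SELECT timestamp, AVG(duration) AS value
                   FROM events
                   WHERE project_id = %(project_id)s
                     AND timestamp >= %(start_ts)s AND timestamp < %(end_ts)s
                   GROUP BY timestamp ORDER BY timestamp;""",
                {"project_id": project_id, "start_ts": start_ts, "end_ts": end_ts})
    return cur.fetchall()


def get_widget(project_id, start_ts, end_ts):
    # One connection now serves both the widget's aggregate row and its chart.
    with psycopg2.connect("dbname=app") as conn, \
            conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute("""SELECT AVG(duration) AS value FROM events
                       WHERE project_id = %(project_id)s
                         AND timestamp >= %(start_ts)s AND timestamp < %(end_ts)s;""",
                    {"project_id": project_id, "start_ts": start_ts, "end_ts": end_ts})
        results = cur.fetchone()
        results["chart"] = get_chart(cur, project_id, start_ts, end_ts)  # shared cursor
    return results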

File 1 of 2 — PostgreSQL metrics implementation

@@ -2265,7 +2265,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         results = row
-        results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2276,7 +2276,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
     return results


-def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_image_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                         endTimestamp=TimeUTC.now(),
                                         density=19, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
@@ -2286,34 +2286,33 @@ def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(d
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with pg_client.PostgresClient() as cur:
-        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
-                                                chart=False, data=args)
-        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
-                                               chart=True, data=args, main_table="resources", time_column="timestamp",
-                                               duration=False)
-        pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
-        pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
+    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
+                                            chart=False, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
+                                           chart=True, data=args, main_table="resources", time_column="timestamp",
+                                           duration=False)
+    pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
+    pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
-        pg_query = f"""WITH resources AS (SELECT resources.duration, resources.timestamp
-                                          FROM events.resources INNER JOIN public.sessions USING (session_id)
-                                          WHERE {" AND ".join(pg_sub_query_subset)}
-                                                AND resources.type = 'img' AND resources.duration>0
-                                                {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COALESCE(AVG(resources.duration),0) AS value
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL (
-                                SELECT resources.duration
-                                FROM resources
-                                WHERE {" AND ".join(pg_sub_query_chart)}
-                            ) AS resources ON (TRUE)
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        cur.execute(cur.mogrify(pg_query, {**params, **img_constraints_vals, **__get_constraint_values(args)}))
-        rows = cur.fetchall()
-        rows = helper.list_to_camel_case(rows)
+    pg_query = f"""WITH resources AS (SELECT resources.duration, resources.timestamp
+                                      FROM events.resources INNER JOIN public.sessions USING (session_id)
+                                      WHERE {" AND ".join(pg_sub_query_subset)}
+                                            AND resources.type = 'img' AND resources.duration>0
+                                            {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
+                   )
+                   SELECT generated_timestamp AS timestamp,
+                          COALESCE(AVG(resources.duration),0) AS value
+                   FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                        LEFT JOIN LATERAL (
+                            SELECT resources.duration
+                            FROM resources
+                            WHERE {" AND ".join(pg_sub_query_chart)}
+                        ) AS resources ON (TRUE)
+                   GROUP BY timestamp
+                   ORDER BY timestamp;"""
+    cur.execute(cur.mogrify(pg_query, {**params, **img_constraints_vals, **__get_constraint_values(args)}))
+    rows = cur.fetchall()
+    rows = helper.list_to_camel_case(rows)
     return rows
@@ -2341,7 +2340,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         results = row
-        results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2352,7 +2351,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
     return results


-def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_page_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(),
                                        density=19, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
@@ -2360,31 +2359,30 @@ def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(de
     location_constraints_vals = {}
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with pg_client.PostgresClient() as cur:
-        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
-                                                chart=False, data=args)
-        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
-                                               chart=True, data=args, main_table="pages", time_column="timestamp",
-                                               duration=False)
-        pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
-        pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
-        pg_query = f"""WITH pages AS(SELECT pages.load_time, timestamp
-                                     FROM events.pages INNER JOIN public.sessions USING (session_id)
-                                     WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
-                                           {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COALESCE(AVG(pages.load_time),0) AS value
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL ( SELECT pages.load_time
-                                                FROM pages
-                                                WHERE {" AND ".join(pg_sub_query_chart)}
-                                                      {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
-                            ) AS pages ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        cur.execute(cur.mogrify(pg_query, {**params, **location_constraints_vals, **__get_constraint_values(args)}))
-        rows = cur.fetchall()
+    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
+                                            chart=False, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
+                                           chart=True, data=args, main_table="pages", time_column="timestamp",
+                                           duration=False)
+    pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
+    pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
+    pg_query = f"""WITH pages AS(SELECT pages.load_time, timestamp
+                                 FROM events.pages INNER JOIN public.sessions USING (session_id)
+                                 WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
+                                       {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+                   )
+                   SELECT generated_timestamp AS timestamp,
+                          COALESCE(AVG(pages.load_time),0) AS value
+                   FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                        LEFT JOIN LATERAL ( SELECT pages.load_time
+                                            FROM pages
+                                            WHERE {" AND ".join(pg_sub_query_chart)}
+                                                  {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+                        ) AS pages ON (TRUE)
+                   GROUP BY generated_timestamp
+                   ORDER BY generated_timestamp;"""
+    cur.execute(cur.mogrify(pg_query, {**params, **location_constraints_vals, **__get_constraint_values(args)}))
+    rows = cur.fetchall()
     return rows
@@ -2411,7 +2409,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         results = row
-        results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2422,7 +2420,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
     return results


-def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_request_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                           endTimestamp=TimeUTC.now(),
                                           density=19, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
@@ -2431,33 +2429,33 @@ def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with pg_client.PostgresClient() as cur:
-        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
-                                                chart=False, data=args)
-        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
-                                               chart=True, data=args, main_table="resources", time_column="timestamp",
-                                               duration=False)
-        pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
-        pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
-        pg_query = f"""WITH resources AS(SELECT resources.duration, resources.timestamp
-                                         FROM events.resources INNER JOIN public.sessions USING (session_id)
-                                         WHERE {" AND ".join(pg_sub_query_subset)}
-                                               AND resources.type = 'fetch' AND resources.duration>0
-                                               {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COALESCE(AVG(resources.duration),0) AS value
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL (
-                                SELECT resources.duration
-                                FROM resources
-                                WHERE {" AND ".join(pg_sub_query_chart)}
-                            ) AS resources ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        cur.execute(cur.mogrify(pg_query, {**params, **request_constraints_vals, **__get_constraint_values(args)}))
-        rows = cur.fetchall()
+    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
+                                            chart=False, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
+                                           chart=True, data=args, main_table="resources", time_column="timestamp",
+                                           duration=False)
+    pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
+    pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
+    pg_query = f"""WITH resources AS(SELECT resources.duration, resources.timestamp
+                                     FROM events.resources INNER JOIN public.sessions USING (session_id)
+                                     WHERE {" AND ".join(pg_sub_query_subset)}
+                                           AND resources.type = 'fetch' AND resources.duration>0
+                                           {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
+                   )
+                   SELECT generated_timestamp AS timestamp,
+                          COALESCE(AVG(resources.duration),0) AS value
+                   FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                        LEFT JOIN LATERAL (
+                            SELECT resources.duration
+                            FROM resources
+                            WHERE {" AND ".join(pg_sub_query_chart)}
+                        ) AS resources ON (TRUE)
+                   GROUP BY generated_timestamp
+                   ORDER BY generated_timestamp;"""
+    cur.execute(cur.mogrify(pg_query, {**params, **request_constraints_vals, **__get_constraint_values(args)}))
+    rows = cur.fetchall()
     return rows
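
One detail of the PostgreSQL queries above worth calling out: the chart series is bucketed in SQL by joining generate_series against the event rows through LEFT JOIN LATERAL, so buckets with no rows still come back as value 0 via COALESCE. A standalone sketch of that shape, assuming a hypothetical events(timestamp, duration) table in place of events.resources/events.pages:

# Standalone sketch of the generate_series + LEFT JOIN LATERAL bucketing used
# by the queries above; the "events" table is an illustrative stand-in.
BUCKETED_AVG = """
SELECT generated_timestamp AS timestamp,
       COALESCE(AVG(ev.duration), 0) AS value
FROM generate_series(%(start)s, %(end)s, %(step)s) AS generated_timestamp
     LEFT JOIN LATERAL (
         SELECT duration
         FROM events
         WHERE events.timestamp >= generated_timestamp
           AND events.timestamp < generated_timestamp + %(step)s
     ) AS ev ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;
"""


def avg_per_bucket(cur, start_ms, end_ms, density=19):
    # density buckets between start and end; a simple integer step is assumed
    # here, the project computes it via its private __get_step_size helper.
    step = max((end_ms - start_ms) // density, 1)
    cur.execute(BUCKETED_AVG, {"start": start_ms, "end": end_ms, "step": step})
    return cur.fetchall()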

File 2 of 2 — ClickHouse metrics implementation

@@ -2097,7 +2097,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
     with ch_client.ClickHouseClient() as ch:
         row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         results = helper.dict_to_camel_case(row)
-        results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2126,7 +2126,7 @@ def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp
     return result


-def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(),
                                        density=19, resources=None, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
@@ -2143,30 +2143,30 @@ def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(de
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with ch_client.ClickHouseClient() as ch:
-        ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True,
-                                                     data=args)
-        ch_sub_query_chart += meta_condition
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
-                              AVG(NULLIF(pages.load_event_end ,0)) AS value
-                       FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                             {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        rows = ch.execute(query=ch_query,
-                          params={**params, **location_constraints_vals, **__get_constraint_values(args)})
-        pages = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
-                 __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                          end_time=endTimestamp,
-                                          density=density, neutral={"value": 0})]
-        for s in pages:
-            for k in s:
-                if s[k] is None:
-                    s[k] = 0
+    ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True,
+                                                 data=args)
+    ch_sub_query_chart += meta_condition
+    ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+                          AVG(NULLIF(pages.load_event_end ,0)) AS value
+                   FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+                   WHERE {" AND ".join(ch_sub_query_chart)}
+                         {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+                   GROUP BY timestamp
+                   ORDER BY timestamp;"""
+    rows = ch.execute(query=ch_query,
+                      params={**params, **location_constraints_vals, **__get_constraint_values(args)})
+    pages = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+             __complete_missing_steps(rows=rows, start_time=startTimestamp,
+                                      end_time=endTimestamp,
+                                      density=density, neutral={"value": 0})]
+    for s in pages:
+        for k in s:
+            if s[k] is None:
+                s[k] = 0
     return pages
@@ -2175,7 +2175,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
     with ch_client.ClickHouseClient() as ch:
         row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         results = helper.dict_to_camel_case(row)
-        results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2204,7 +2204,7 @@ def __get_application_activity_avg_image_load_time(ch, project_id, startTimestam
     return result


-def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                         endTimestamp=TimeUTC.now(),
                                         density=19, resources=None, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
@@ -2223,25 +2223,24 @@ def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(d
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
-                              AVG(NULLIF(resources.duration,0)) AS value
-                       FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                             AND resources.type = 'img'
-                             {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        rows = ch.execute(query=ch_query, params={**params, **img_constraints_vals, **__get_constraint_values(args)})
-        images = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
-                  __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                           end_time=endTimestamp,
-                                           density=density, neutral={"value": 0})]
-        for s in images:
-            for k in s:
-                if s[k] is None:
-                    s[k] = 0
+    ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+                          AVG(NULLIF(resources.duration,0)) AS value
+                   FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+                   WHERE {" AND ".join(ch_sub_query_chart)}
+                         AND resources.type = 'img'
+                         {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
+                   GROUP BY timestamp
+                   ORDER BY timestamp;"""
+    rows = ch.execute(query=ch_query, params={**params, **img_constraints_vals, **__get_constraint_values(args)})
+    images = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+              __complete_missing_steps(rows=rows, start_time=startTimestamp,
+                                       end_time=endTimestamp,
+                                       density=density, neutral={"value": 0})]
+    for s in images:
+        for k in s:
+            if s[k] is None:
+                s[k] = 0
     return images
@@ -2250,7 +2249,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
     with ch_client.ClickHouseClient() as ch:
         row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         results = helper.dict_to_camel_case(row)
-        results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
+        results["chart"] = get_performance_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
@@ -2279,7 +2278,7 @@ def __get_application_activity_avg_request_load_time(ch, project_id, startTimest
     return result


-def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                           endTimestamp=TimeUTC.now(),
                                           density=19, resources=None, **args):
     step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
@@ -2297,26 +2296,26 @@ def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now
             request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
-                              AVG(NULLIF(resources.duration,0)) AS value
-                       FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                             AND resources.type = 'fetch'
-                             {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        rows = ch.execute(query=ch_query,
-                          params={**params, **request_constraints_vals, **__get_constraint_values(args)})
-        requests = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
-                    __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                             end_time=endTimestamp, density=density,
-                                             neutral={"value": 0})]
-        for s in requests:
-            for k in s:
-                if s[k] is None:
-                    s[k] = 0
+    ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+                          AVG(NULLIF(resources.duration,0)) AS value
+                   FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+                   WHERE {" AND ".join(ch_sub_query_chart)}
+                         AND resources.type = 'fetch'
+                         {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
+                   GROUP BY timestamp
+                   ORDER BY timestamp;"""
+    rows = ch.execute(query=ch_query,
+                      params={**params, **request_constraints_vals, **__get_constraint_values(args)})
+    requests = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+                __complete_missing_steps(rows=rows, start_time=startTimestamp,
+                                         end_time=endTimestamp, density=density,
+                                         neutral={"value": 0})]
+    for s in requests:
+        for k in s:
+            if s[k] is None:
+                s[k] = 0
     return requests
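
The ClickHouse variants bucket with toStartOfInterval instead, which only emits buckets that actually contain rows; the gaps are then padded in Python by __complete_missing_steps. That helper is private to this codebase, so the following is an illustrative stand-in for what it plausibly does (the exact bucket alignment may differ):

# Illustrative stand-in for __complete_missing_steps: pad the buckets that
# ClickHouse's toStartOfInterval never emitted because they contained no rows.
# Assumption: buckets align to start_time with a simple integer step.
def complete_missing_steps(rows, start_time, end_time, density, neutral):
    step = max((end_time - start_time) // density, 1)
    by_ts = {r["timestamp"]: r for r in rows}
    return [by_ts.get(ts, {"timestamp": ts, **neutral})
            for ts in range(start_time, end_time, step)]


# Example: ClickHouse returned two non-empty buckets out of five.
rows = [{"timestamp": 0, "value": 12.5}, {"timestamp": 40, "value": 3.0}]
print(complete_missing_steps(rows, start_time=0, end_time=100, density=5,
                             neutral={"value": 0}))
# -> buckets at 0, 20, 40, 60, 80; 20, 60 and 80 are filled with {"value": 0}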