Dev (#3022)
* refactor(chalice): refactored code
* refactor(chalice): removed support for the domainsErrors4xx & domainsErrors5xx predefined cards because the UI no longer shows them
* refactor(chalice): removed support for the processed_sessions & count_requests predefined cards because the UI no longer shows them
* fix(chalice): fixed the errors table (CH)
* fix(chalice): removed support for the errorsPerDomains & errorsPerType predefined cards because the UI no longer shows them
* fix(chalice): removed support for the speedLocation predefined card because the UI no longer shows it
This commit is contained in:
parent 4819907635
commit b09becdcb7
9 changed files with 25 additions and 805 deletions
@@ -109,7 +109,7 @@ def __get_sort_key(key):
     }.get(key, 'max_datetime')


-def search(data: schemas.SearchErrorsSchema, project_id, user_id):
+def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, user_id):
     MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startTimestamp)
     MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startTimestamp)

@@ -251,7 +251,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
         elif filter_type == schemas.FilterType.METADATA:
             # get metadata list only if you need it
             if meta_keys is None:
-                meta_keys = metadata.get(project_id=project_id)
+                meta_keys = metadata.get(project_id=project.project_id)
                 meta_keys = {m["key"]: m["index"] for m in meta_keys}
             if f.source in meta_keys.keys():
                 if is_any:

@@ -328,7 +328,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
         **params,
         "startDate": data.startTimestamp,
         "endDate": data.endTimestamp,
-        "project_id": project_id,
+        "project_id": project.project_id,
         "userId": user_id,
         "step_size": step_size}
     if data.limit is not None and data.page is not None:

@@ -1,5 +1,4 @@
 import logging
-from typing import Union

 import schemas
 from chalicelib.core.metrics import metrics
@@ -7,19 +6,10 @@ from chalicelib.core.metrics import metrics
 logger = logging.getLogger(__name__)


-def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors], project_id: int, data: dict):
+def get_metric(key: schemas.MetricOfWebVitals, project_id: int, data: dict):
     supported = {
-        schemas.MetricOfWebVitals.COUNT_SESSIONS: metrics.get_processed_sessions,
         schemas.MetricOfWebVitals.AVG_VISITED_PAGES: metrics.get_user_activity_avg_visited_pages,
-        schemas.MetricOfWebVitals.COUNT_REQUESTS: metrics.get_top_metrics_count_requests,
-        schemas.MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS: metrics.get_impacted_sessions_by_js_errors,
-        schemas.MetricOfErrors.DOMAINS_ERRORS_4XX: metrics.get_domains_errors_4xx,
-        schemas.MetricOfErrors.DOMAINS_ERRORS_5XX: metrics.get_domains_errors_5xx,
-        schemas.MetricOfErrors.ERRORS_PER_DOMAINS: metrics.get_errors_per_domains,
-        schemas.MetricOfErrors.ERRORS_PER_TYPE: metrics.get_errors_per_type,
-        schemas.MetricOfErrors.RESOURCES_BY_PARTY: metrics.get_resources_by_party,
-        schemas.MetricOfWebVitals.COUNT_USERS: metrics.get_unique_users,
-        schemas.MetricOfWebVitals.SPEED_LOCATION: metrics.get_speed_index_location,
+        schemas.MetricOfWebVitals.COUNT_USERS: metrics.get_unique_users
     }

     return supported.get(key, lambda *args: None)(project_id=project_id, **data)

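The surviving get_metric body above is a plain dictionary dispatch from enum keys to metric functions. A self-contained sketch of the same pattern follows; the enum values mirror the ones kept in the diff, while the function bodies and the final print call are illustrative stand-ins, not the project's real implementations:

from enum import Enum


class MetricOfWebVitals(str, Enum):
    AVG_VISITED_PAGES = "avgVisitedPages"
    COUNT_USERS = "userCount"


def get_unique_users(project_id: int, **data):
    # stand-in for metrics.get_unique_users; the real function queries the datastore
    return {"project_id": project_id, "value": 0, **data}


def get_metric(key: MetricOfWebVitals, project_id: int, data: dict):
    supported = {
        MetricOfWebVitals.AVG_VISITED_PAGES: lambda project_id, **kw: {"avg": 0},
        MetricOfWebVitals.COUNT_USERS: get_unique_users,
    }
    # unknown keys fall back to a no-op callable, similar to the supported.get default above
    return supported.get(key, lambda **kwargs: None)(project_id=project_id, **data)


print(get_metric(MetricOfWebVitals.COUNT_USERS, 1,
                 {"startTimestamp": 0, "endTimestamp": 1}))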
@@ -86,51 +86,6 @@ def __get_meta_constraint(project_id, data):
     return constraints


-def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(),
-                           density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
-    pg_sub_query = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
-                                           chart=True, data=args)
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT generated_timestamp AS timestamp,
-                              COALESCE(COUNT(sessions), 0) AS value
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL ( SELECT 1
-                                                FROM public.sessions
-                                                WHERE {" AND ".join(pg_sub_query_chart)}
-                               ) AS sessions ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        results = {
-            "value": sum([r["value"] for r in rows]),
-            "chart": rows
-        }
-
-        diff = endTimestamp - startTimestamp
-        endTimestamp = startTimestamp
-        startTimestamp = endTimestamp - diff
-
-        pg_query = f"""SELECT COUNT(sessions.session_id) AS count
-                       FROM public.sessions
-                       WHERE {" AND ".join(pg_sub_query)};"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
-                  **__get_constraint_values(args)}
-
-        cur.execute(cur.mogrify(pg_query, params))
-
-        count = cur.fetchone()["count"]
-
-        results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
-        results["unit"] = schemas.TemplatePredefinedUnits.COUNT
-        return results
-
-
 def __get_neutral(rows, add_All_if_empty=True):
     neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}
     if add_All_if_empty and len(neutral.keys()) <= 1:
@@ -144,58 +99,6 @@ def __merge_rows_with_neutral(rows, neutral):
     return rows


-def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                     endTimestamp=TimeUTC.now(), density=6, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
-    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
-                                           data=args, main_table="requests", time_column="timestamp", project=False,
-                                           duration=False)
-    pg_sub_query_subset.append("requests.status_code/100 = %(status_code)s")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""WITH requests AS (SELECT host, timestamp
-                                         FROM events_common.requests INNER JOIN public.sessions USING (session_id)
-                                         WHERE {" AND ".join(pg_sub_query_subset)}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COALESCE(JSONB_AGG(requests) FILTER ( WHERE requests IS NOT NULL ), '[]'::JSONB) AS keys
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL ( SELECT requests.host, COUNT(*) AS count
-                                                FROM requests
-                                                WHERE {" AND ".join(pg_sub_query_chart)}
-                                                GROUP BY host
-                                                ORDER BY count DESC
-                                                LIMIT 5
-                               ) AS requests ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        params = {"project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp,
-                  "step_size": step_size,
-                  "status_code": status, **__get_constraint_values(args)}
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = __nested_array_to_dict_array(rows, key="host")
-        neutral = __get_neutral(rows)
-        rows = __merge_rows_with_neutral(rows, neutral)
-
-        return rows
-
-
-def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=6, **args):
-    return __get_domains_errors_4xx_and_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
-                                            endTimestamp=endTimestamp, density=density, **args)
-
-
-def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=6, **args):
-    return __get_domains_errors_4xx_and_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
-                                            endTimestamp=endTimestamp, density=density, **args)
-
-
 def __nested_array_to_dict_array(rows, key="url_host", value="count"):
     for r in rows:
         for i in range(len(r["keys"])):
@@ -204,243 +107,6 @@ def __nested_array_to_dict_array(rows, key="url_host", value="count"):
     return rows


-def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query.append("requests.success = FALSE")
-    params = {"project_id": project_id,
-              "startTimestamp": startTimestamp,
-              "endTimestamp": endTimestamp,
-              "limit_s": (page - 1) * limit,
-              "limit_e": page * limit,
-              **__get_constraint_values(args)}
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT COALESCE(SUM(errors_count),0)::INT AS count,
-                              COUNT(raw.domain) AS total,
-                              jsonb_agg(raw) FILTER ( WHERE rn > %(limit_s)s
-                                                        AND rn <= %(limit_e)s ) AS values
-                       FROM (SELECT requests.host AS domain,
-                                    COUNT(requests.session_id) AS errors_count,
-                                    row_number() over (ORDER BY COUNT(requests.session_id) DESC ) AS rn
-                             FROM events_common.requests
-                                  INNER JOIN sessions USING (session_id)
-                             WHERE {" AND ".join(pg_sub_query)}
-                             GROUP BY requests.host
-                             ORDER BY errors_count DESC) AS raw;"""
-        pg_query = cur.mogrify(pg_query, params)
-        logger.debug("-----------")
-        logger.debug(pg_query)
-        logger.debug("-----------")
-        cur.execute(pg_query)
-        row = cur.fetchone()
-        if row:
-            row["values"] = row["values"] or []
-            for r in row["values"]:
-                r.pop("rn")
-
-    return helper.dict_to_camel_case(row)
-
-
-def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
-                        platform=None, density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
-
-    pg_sub_query_subset = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")
-    pg_sub_query_subset.append("requests.timestamp<%(endTimestamp)s")
-    pg_sub_query_subset.append("requests.status_code > 200")
-
-    pg_sub_query_subset_e = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors",
-                                              time_constraint=False)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False,
-                                           chart=True, data=args, main_table="", time_column="timestamp",
-                                           project=False, duration=False)
-    pg_sub_query_subset_e.append("timestamp>=%(startTimestamp)s")
-    pg_sub_query_subset_e.append("timestamp<%(endTimestamp)s")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""WITH requests AS (SELECT status_code AS status, timestamp
-                                         FROM events_common.requests
-                                              INNER JOIN public.sessions USING (session_id)
-                                         WHERE {" AND ".join(pg_sub_query_subset)}
-                       ),
-                       errors_integ AS (SELECT timestamp
-                                        FROM events.errors
-                                             INNER JOIN public.errors AS m_errors USING (error_id)
-                                        WHERE {" AND ".join(pg_sub_query_subset_e)}
-                                          AND source != 'js_exception'
-                       ),
-                       errors_js AS (SELECT timestamp
-                                     FROM events.errors
-                                          INNER JOIN public.errors AS m_errors USING (error_id)
-                                     WHERE {" AND ".join(pg_sub_query_subset_e)}
-                                       AND source = 'js_exception'
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COALESCE(SUM(CASE WHEN status / 100 = 4 THEN 1 ELSE 0 END), 0) AS _4xx,
-                              COALESCE(SUM(CASE WHEN status / 100 = 5 THEN 1 ELSE 0 END), 0) AS _5xx,
-                              COALESCE((SELECT COUNT(*)
-                                        FROM errors_js
-                                        WHERE {" AND ".join(pg_sub_query_chart)}
-                                       ), 0) AS js,
-                              COALESCE((SELECT COUNT(*)
-                                        FROM errors_integ
-                                        WHERE {" AND ".join(pg_sub_query_chart)}
-                                       ), 0) AS integrations
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL (SELECT status
-                                               FROM requests
-                                               WHERE {" AND ".join(pg_sub_query_chart)}
-                               ) AS errors_partition ON (TRUE)
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        params = {"step_size": step_size,
-                  "project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = helper.list_to_camel_case(rows)
-    return rows
-
-
-def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                       endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
-    pg_sub_query = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
-                                           chart=True, data=args)
-    pg_sub_query.append("m_errors.source = 'js_exception'")
-    pg_sub_query.append("m_errors.project_id = %(project_id)s")
-    pg_sub_query.append("errors.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("errors.timestamp < %(endTimestamp)s")
-    pg_sub_query_chart.append("m_errors.source = 'js_exception'")
-    pg_sub_query_chart.append("m_errors.project_id = %(project_id)s")
-    pg_sub_query_chart.append("errors.timestamp >= generated_timestamp")
-    pg_sub_query_chart.append("errors.timestamp < generated_timestamp+ %(step_size)s")
-
-    pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors",
-                                            time_constraint=False)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False,
-                                           chart=True, data=args, main_table="errors", time_column="timestamp",
-                                           project=False, duration=False)
-    pg_sub_query_subset.append("m_errors.source = 'js_exception'")
-    pg_sub_query_subset.append("errors.timestamp>=%(startTimestamp)s")
-    pg_sub_query_subset.append("errors.timestamp<%(endTimestamp)s")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""WITH errors AS (SELECT DISTINCT ON (session_id,timestamp) session_id, timestamp
-                                       FROM events.errors
-                                            INNER JOIN public.errors AS m_errors USING (error_id)
-                                       WHERE {" AND ".join(pg_sub_query_subset)}
-                       )
-                       SELECT *
-                       FROM (SELECT COUNT(DISTINCT session_id) AS sessions_count
-                             FROM errors) AS counts
-                            LEFT JOIN
-                            (SELECT jsonb_agg(chart) AS chart
-                             FROM (SELECT generated_timestamp AS timestamp,
-                                          COALESCE(COUNT(session_id), 0) AS sessions_count
-                                   FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                                        LEFT JOIN LATERAL ( SELECT DISTINCT session_id
-                                                            FROM errors
-                                                            WHERE {" AND ".join(pg_sub_query_chart)}
-                                           ) AS sessions ON (TRUE)
-                                   GROUP BY generated_timestamp
-                                   ORDER BY generated_timestamp) AS chart) AS chart ON (TRUE);"""
-        cur.execute(cur.mogrify(pg_query, {"step_size": step_size,
-                                           "project_id": project_id,
-                                           "startTimestamp": startTimestamp,
-                                           "endTimestamp": endTimestamp,
-                                           **__get_constraint_values(args)}))
-        row_sessions = cur.fetchone()
-        pg_query = f"""WITH errors AS ( SELECT DISTINCT ON(errors.error_id,timestamp) errors.error_id,timestamp
-                                        FROM events.errors
-                                             INNER JOIN public.errors AS m_errors USING (error_id)
-                                        WHERE {" AND ".join(pg_sub_query_subset)}
-                       )
-                       SELECT *
-                       FROM (SELECT COUNT(DISTINCT errors.error_id) AS errors_count
-                             FROM errors) AS counts
-                            LEFT JOIN
-                            (SELECT jsonb_agg(chart) AS chart
-                             FROM (SELECT generated_timestamp AS timestamp,
-                                          COALESCE(COUNT(error_id), 0) AS errors_count
-                                   FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                                        LEFT JOIN LATERAL ( SELECT DISTINCT errors.error_id
-                                                            FROM errors
-                                                            WHERE {" AND ".join(pg_sub_query_chart)}
-                                           ) AS errors ON (TRUE)
-                                   GROUP BY generated_timestamp
-                                   ORDER BY generated_timestamp) AS chart) AS chart ON (TRUE);"""
-        cur.execute(cur.mogrify(pg_query, {"step_size": step_size,
-                                           "project_id": project_id,
-                                           "startTimestamp": startTimestamp,
-                                           "endTimestamp": endTimestamp,
-                                           **__get_constraint_values(args)}))
-        row_errors = cur.fetchone()
-        chart = __merge_charts(row_sessions.pop("chart"), row_errors.pop("chart"))
-        row_sessions = helper.dict_to_camel_case(row_sessions)
-        row_errors = helper.dict_to_camel_case(row_errors)
-        return {**row_sessions, **row_errors, "chart": chart}
-
-
-def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
-    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
-                                            chart=False, data=args)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
-                                           chart=True, data=args, main_table="requests", time_column="timestamp",
-                                           duration=False)
-    pg_sub_query_subset.append("requests.timestamp >= %(startTimestamp)s")
-    pg_sub_query_subset.append("requests.timestamp < %(endTimestamp)s")
-    # pg_sub_query_subset.append("resources.type IN ('fetch', 'script')")
-    pg_sub_query_subset.append("requests.success = FALSE")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""WITH requests AS (
-                                SELECT requests.host, timestamp
-                                FROM events_common.requests
-                                     INNER JOIN public.sessions USING (session_id)
-                                WHERE {" AND ".join(pg_sub_query_subset)}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              SUM(CASE WHEN first.host = sub_requests.host THEN 1 ELSE 0 END) AS first_party,
-                              SUM(CASE WHEN first.host != sub_requests.host THEN 1 ELSE 0 END) AS third_party
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN (
-                               SELECT requests.host,
-                                      COUNT(requests.session_id) AS count
-                               FROM events_common.requests
-                                    INNER JOIN public.sessions USING (session_id)
-                               WHERE sessions.project_id = '1'
-                                 AND sessions.start_ts > (EXTRACT(EPOCH FROM now() - INTERVAL '31 days') * 1000)::BIGINT
-                                 AND sessions.start_ts < (EXTRACT(EPOCH FROM now()) * 1000)::BIGINT
-                                 AND requests.timestamp > (EXTRACT(EPOCH FROM now() - INTERVAL '31 days') * 1000)::BIGINT
-                                 AND requests.timestamp < (EXTRACT(EPOCH FROM now()) * 1000)::BIGINT
-                                 AND sessions.duration>0
-                               GROUP BY requests.host
-                               ORDER BY count DESC
-                               LIMIT 1
-                               ) AS first ON (TRUE)
-                            LEFT JOIN LATERAL (
-                               SELECT requests.host
-                               FROM requests
-                               WHERE {" AND ".join(pg_sub_query_chart)}
-                               ) AS sub_requests ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        cur.execute(cur.mogrify(pg_query, {"step_size": step_size,
-                                           "project_id": project_id,
-                                           "startTimestamp": startTimestamp,
-                                           "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
-
-        rows = cur.fetchall()
-        return rows
-
-
 def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                         endTimestamp=TimeUTC.now(), **args):
     with pg_client.PostgresClient() as cur:
@@ -504,49 +170,6 @@ def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp,
     return rows


-def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                   endTimestamp=TimeUTC.now(), value=None, density=20, **args):
-    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
-    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
-              "endTimestamp": endTimestamp}
-    pg_sub_query = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
-                                           chart=True, data=args, main_table="pages", time_column="timestamp",
-                                           duration=False)
-
-    if value is not None:
-        pg_sub_query.append("pages.path = %(value)s")
-        pg_sub_query_chart.append("pages.path = %(value)s")
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT COUNT(pages.session_id) AS value
-                       FROM events.pages INNER JOIN public.sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)};"""
-        cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
-                                           "startTimestamp": startTimestamp,
-                                           "endTimestamp": endTimestamp,
-                                           "value": value, **__get_constraint_values(args)}))
-        row = cur.fetchone()
-        pg_query = f"""WITH pages AS(SELECT pages.timestamp
-                                     FROM events.pages INNER JOIN public.sessions USING (session_id)
-                                     WHERE {" AND ".join(pg_sub_query)}
-                       )
-                       SELECT generated_timestamp AS timestamp,
-                              COUNT(pages.*) AS value
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                            LEFT JOIN LATERAL (
-                               SELECT 1
-                               FROM pages
-                               WHERE {" AND ".join(pg_sub_query_chart)}
-                               ) AS pages ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
-        rows = cur.fetchall()
-        row["chart"] = rows
-    row["unit"] = schemas.TemplatePredefinedUnits.COUNT
-    return helper.dict_to_camel_case(row)
-
-
 def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                      endTimestamp=TimeUTC.now(),
                      density=7, **args):
@@ -594,31 +217,3 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
     results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
     results["unit"] = schemas.TemplatePredefinedUnits.COUNT
     return results
-
-
-def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                             endTimestamp=TimeUTC.now(), **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args)
-    pg_sub_query.append("pages.speed_index IS NOT NULL")
-    pg_sub_query.append("pages.speed_index>0")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS value
-                       FROM events.pages INNER JOIN public.sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                       GROUP BY sessions.user_country
-                       ORDER BY value, sessions.user_country;"""
-        params = {"project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        if len(rows) > 0:
-            pg_query = f"""SELECT AVG(pages.speed_index) AS avg
-                           FROM events.pages INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query)};"""
-            cur.execute(cur.mogrify(pg_query, params))
-            avg = cur.fetchone()["avg"]
-        else:
-            avg = 0
-    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND}

@@ -27,7 +27,8 @@ def __get_basic_constraints(table_name=None, time_constraint=True, round_start=F
     return ch_sub_query + __get_generic_constraint(data=data, table_name=table_name)


-def __get_basic_constraints_events(table_name=None, time_constraint=True, round_start=False, data={}, identifier="project_id"):
+def __get_basic_constraints_events(table_name=None, time_constraint=True, round_start=False, data={},
+                                   identifier="project_id"):
     if table_name:
         table_name += "."
     else:
@@ -155,282 +156,6 @@ def __get_generic_constraint(data, table_name):
     return __get_constraint(data=data, fields=SESSIONS_META_FIELDS, table_name=table_name)


-def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(),
-                           density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density)
-    ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
-    ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query += meta_condition
-    ch_sub_query_chart += meta_condition
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""\
-                SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
-                       COUNT(DISTINCT sessions.session_id) AS value
-                FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
-                WHERE {" AND ".join(ch_sub_query_chart)}
-                GROUP BY timestamp
-                ORDER BY timestamp;\
-        """
-        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-
-        rows = ch.execute(query=ch_query, parameters=params)
-
-        results = {
-            "value": sum([r["value"] for r in rows]),
-            "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
-                                              density=density,
-                                              neutral={"value": 0})
-        }
-
-        diff = endTimestamp - startTimestamp
-        endTimestamp = startTimestamp
-        startTimestamp = endTimestamp - diff
-
-        ch_query = f""" SELECT COUNT(1) AS count
-                        FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
-                        WHERE {" AND ".join(ch_sub_query)};"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
-                  **__get_constraint_values(args)}
-
-        count = ch.execute(query=ch_query, parameters=params)
-
-        count = count[0]["count"]
-
-        results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
-        results["unit"] = schemas.TemplatePredefinedUnits.COUNT
-        return results
-
-
-def __get_domains_errors_neutral(rows):
-    neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}
-    if len(neutral.keys()) == 0:
-        neutral = {"All": 0}
-    return neutral
-
-
-def __merge_rows_with_neutral(rows, neutral):
-    for i in range(len(rows)):
-        rows[i] = {**neutral, **rows[i]}
-    return rows
-
-
-def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                     endTimestamp=TimeUTC.now(), density=6, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density)
-    ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
-    ch_sub_query.append("requests.event_type='REQUEST'")
-    ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query += meta_condition
-
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT timestamp,
-                              groupArray([domain, toString(count)]) AS keys
-                       FROM (SELECT toUnixTimestamp(toStartOfInterval(requests.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
-                                    requests.url_host AS domain, COUNT(1) AS count
-                             FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
-                             WHERE {" AND ".join(ch_sub_query)}
-                             GROUP BY timestamp,requests.url_host
-                             ORDER BY timestamp, count DESC
-                             LIMIT 5 BY timestamp) AS domain_stats
-                       GROUP BY timestamp;"""
-        params = {"project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp,
-                  "step_size": step_size,
-                  "status_code": status, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        rows = __nested_array_to_dict_array(rows)
-        neutral = __get_domains_errors_neutral(rows)
-        rows = __merge_rows_with_neutral(rows, neutral)
-
-        return __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                        end_time=endTimestamp,
-                                        density=density, neutral=neutral)
-
-
-def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=6, **args):
-    return __get_domains_errors_4xx_and_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
-                                            endTimestamp=endTimestamp, density=density, **args)
-
-
-def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=6, **args):
-    return __get_domains_errors_4xx_and_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
-                                            endTimestamp=endTimestamp, density=density, **args)
-
-
-def __nested_array_to_dict_array(rows):
-    for r in rows:
-        for i in range(len(r["keys"])):
-            r[r["keys"][i][0]] = int(r["keys"][i][1])
-        r.pop("keys")
-    return rows
-
-
-def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), **args):
-    ch_sub_query = __get_basic_constraints(table_name="requests", data=args)
-    ch_sub_query.append("requests.event_type = 'REQUEST'")
-    ch_sub_query.append("requests.success = 0")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query += meta_condition
-    params = {"project_id": project_id,
-              "startTimestamp": startTimestamp,
-              "endTimestamp": endTimestamp,
-              **__get_constraint_values(args),
-              "limit_s": (page - 1) * limit,
-              "limit": limit}
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT
-                           requests.url_host AS domain,
-                           COUNT(1) AS errors_count,
-                           COUNT(1) OVER () AS total,
-                           SUM(errors_count) OVER () AS count
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
-                       WHERE {" AND ".join(ch_sub_query)}
-                       GROUP BY requests.url_host
-                       ORDER BY errors_count DESC
-                       LIMIT %(limit)s OFFSET %(limit_s)s;"""
-        logger.debug("-----------")
-        logger.debug(ch.format(query=ch_query, parameters=params))
-        logger.debug("-----------")
-        rows = ch.execute(query=ch_query, parameters=params)
-        response = {"count": 0, "total": 0, "values": []}
-        if len(rows) > 0:
-            response["count"] = rows[0]["count"]
-            response["total"] = rows[0]["total"]
-            rows = helper.list_to_camel_case(rows)
-            for r in rows:
-                r.pop("count")
-                r.pop("total")
-
-    return response
-
-
-def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
-                        platform=None, density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density)
-    ch_sub_query_chart = __get_basic_constraints(table_name="events", round_start=True,
-                                                 data=args)
-    ch_sub_query_chart.append("(events.event_type = 'REQUEST' OR events.event_type = 'ERROR')")
-    ch_sub_query_chart.append("(events.status>200 OR events.event_type = 'ERROR')")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query_chart += meta_condition
-
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
-                              SUM(events.event_type = 'REQUEST' AND intDiv(events.status, 100) == 4) AS _4xx,
-                              SUM(events.event_type = 'REQUEST' AND intDiv(events.status, 100) == 5) AS _5xx,
-                              SUM(events.event_type = 'ERROR' AND events.source == 'js_exception') AS js,
-                              SUM(events.event_type = 'ERROR' AND events.source != 'js_exception') AS integrations
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS events
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        params = {"step_size": step_size,
-                  "project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        rows = helper.list_to_camel_case(rows)
-
-    return __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                    end_time=endTimestamp,
-                                    density=density,
-                                    neutral={"4xx": 0, "5xx": 0, "js": 0, "integrations": 0})
-
-
-def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                       endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density)
-    ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args)
-    ch_sub_query_chart.append("errors.event_type='ERROR'")
-    ch_sub_query_chart.append("errors.source == 'js_exception'")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query_chart += meta_condition
-
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
-                              COUNT(DISTINCT errors.session_id) AS sessions_count,
-                              COUNT(DISTINCT errors.error_id) AS errors_count
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;;"""
-        rows = ch.execute(query=ch_query,
-                          params={"step_size": step_size,
-                                  "project_id": project_id,
-                                  "startTimestamp": startTimestamp,
-                                  "endTimestamp": endTimestamp, **__get_constraint_values(args)})
-        ch_query = f"""SELECT COUNT(DISTINCT errors.session_id) AS sessions_count,
-                              COUNT(DISTINCT errors.error_id) AS errors_count
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
-                       WHERE {" AND ".join(ch_sub_query_chart)};"""
-        counts = ch.execute(query=ch_query,
-                            params={"step_size": step_size,
-                                    "project_id": project_id,
-                                    "startTimestamp": startTimestamp,
-                                    "endTimestamp": endTimestamp, **__get_constraint_values(args)})
-        return {"sessionsCount": counts[0]["sessions_count"],
-                "errorsCount": counts[0]["errors_count"],
-                "chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                                                            end_time=endTimestamp,
-                                                                            density=density,
-                                                                            neutral={"sessions_count": 0,
-                                                                                     "errors_count": 0}))}
-
-
-def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                           endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = get_step_size(startTimestamp, endTimestamp, density)
-    ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
-    ch_sub_query.append("requests.event_type='REQUEST'")
-    ch_sub_query.append("requests.success = 0")
-    sch_sub_query = ["rs.project_id =toUInt16(%(project_id)s)", "rs.event_type='REQUEST'"]
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query += meta_condition
-    # sch_sub_query += meta_condition
-
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sub_requests.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
-                              SUM(first.url_host = sub_requests.url_host) AS first_party,
-                              SUM(first.url_host != sub_requests.url_host) AS third_party
-                       FROM
-                       (
-                           SELECT requests.datetime, requests.url_host
-                           FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
-                           WHERE {" AND ".join(ch_sub_query)}
-                       ) AS sub_requests
-                       CROSS JOIN
-                       (
-                           SELECT
-                               rs.url_host,
-                               COUNT(1) AS count
-                           FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS rs
-                           WHERE {" AND ".join(sch_sub_query)}
-                           GROUP BY rs.url_host
-                           ORDER BY count DESC
-                           LIMIT 1
-                       ) AS first
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        params = {"step_size": step_size,
-                  "project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        return helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                                                  end_time=endTimestamp,
-                                                                  density=density,
-                                                                  neutral={"first_party": 0,
-                                                                           "third_party": 0}))
-
-
 def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                         endTimestamp=TimeUTC.now(), **args):
     results = {}
@@ -503,49 +228,8 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
     return rows


-def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                                   endTimestamp=TimeUTC.now(), value=None, density=20, **args):
-    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
-    ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
-    ch_sub_query_chart.append("pages.event_type='LOCATION'")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query_chart += meta_condition
-    ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
-    ch_sub_query.append("pages.event_type='LOCATION'")
-    ch_sub_query += meta_condition
-
-    if value is not None:
-        ch_sub_query.append("pages.url_path = %(value)s")
-        ch_sub_query_chart.append("pages.url_path = %(value)s")
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT COUNT(1) AS value
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
-                       WHERE {" AND ".join(ch_sub_query)};"""
-        params = {"step_size": step_size, "project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp,
-                  "value": value, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        result = rows[0]
-        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
-                              COUNT(1) AS value
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
-                       WHERE {" AND ".join(ch_sub_query_chart)}
-                       GROUP BY timestamp
-                       ORDER BY timestamp;"""
-        params = {**params, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
-                                        end_time=endTimestamp,
-                                        density=density, neutral={"value": 0})
-        result["chart"] = rows
-    result["unit"] = schemas.TemplatePredefinedUnits.COUNT
-    return helper.dict_to_camel_case(result)
-
-
 def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                     endTimestamp=TimeUTC.now(),
-                     density=7, **args):
+                     endTimestamp=TimeUTC.now(), density=7, **args):
     step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
     ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
@@ -592,30 +276,3 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
     results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
     results["unit"] = schemas.TemplatePredefinedUnits.COUNT
     return results
-
-
-def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-                             endTimestamp=TimeUTC.now(), **args):
-    ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
-    ch_sub_query.append("pages.event_type='LOCATION'")
-    ch_sub_query.append("isNotNull(pages.speed_index)")
-    ch_sub_query.append("pages.speed_index>0")
-    meta_condition = __get_meta_constraint(args)
-    ch_sub_query += meta_condition
-
-    with ch_client.ClickHouseClient() as ch:
-        ch_query = f"""SELECT sessions.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS value
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
-                            INNER JOIN {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions USING (session_id)
-                       WHERE {" AND ".join(ch_sub_query)}
-                       GROUP BY sessions.user_country
-                       ORDER BY value ,sessions.user_country;"""
-        params = {"project_id": project_id,
-                  "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        rows = ch.execute(query=ch_query, parameters=params)
-        ch_query = f"""SELECT COALESCE(avgOrNull(pages.speed_index),0) AS avg
-                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
-                       WHERE {" AND ".join(ch_sub_query)};"""
-        avg = ch.execute(query=ch_query, parameters=params)[0]["avg"] if len(rows) > 0 else 0
-    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND}

@@ -2,8 +2,8 @@ import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata, projects
-from chalicelib.core.sessions import sessions_favorite, performance_event
+from chalicelib.core import events, metadata
+from chalicelib.core.sessions import performance_event
 from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh
@@ -1002,7 +1002,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
     return full_args, query_part


-
 def get_user_sessions(project_id, user_id, start_date, end_date):
     with pg_client.PostgresClient() as cur:
         constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@@ -1112,4 +1111,3 @@ def check_recording_status(project_id: int) -> dict:
         "recordingStatus": row["recording_status"],
         "sessionsCount": row["sessions_count"]
     }
-

@@ -880,27 +880,12 @@ class MetricType(str, Enum):
     HEAT_MAP = "heatMap"


-class MetricOfErrors(str, Enum):
-    DOMAINS_ERRORS_4XX = "domainsErrors4xx"
-    DOMAINS_ERRORS_5XX = "domainsErrors5xx"
-    ERRORS_PER_DOMAINS = "errorsPerDomains"
-    ERRORS_PER_TYPE = "errorsPerType"
-    IMPACTED_SESSIONS_BY_JS_ERRORS = "impactedSessionsByJsErrors"
-    RESOURCES_BY_PARTY = "resourcesByParty"
-
-
 class MetricOfWebVitals(str, Enum):
-    AVG_SESSION_DURATION = "avgSessionDuration"
-    AVG_USED_JS_HEAP_SIZE = "avgUsedJsHeapSize"
     AVG_VISITED_PAGES = "avgVisitedPages"
-    COUNT_REQUESTS = "countRequests"
-    COUNT_SESSIONS = "countSessions"
     COUNT_USERS = "userCount"
-    SPEED_LOCATION = "speedLocation"


 class MetricOfTable(str, Enum):
-    USER_OS = FilterType.USER_OS.value
     USER_BROWSER = FilterType.USER_BROWSER.value
     USER_DEVICE = FilterType.USER_DEVICE.value
     USER_COUNTRY = FilterType.USER_COUNTRY.value
@@ -1125,23 +1110,6 @@ class CardFunnel(__CardSchema):
         return self


-class CardErrors(__CardSchema):
-    metric_type: Literal[MetricType.ERRORS]
-    metric_of: MetricOfErrors = Field(default=MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS)
-    view_type: MetricOtherViewType = Field(...)
-
-    @model_validator(mode="before")
-    @classmethod
-    def __enforce_default(cls, values):
-        values["series"] = []
-        return values
-
-    @model_validator(mode="after")
-    def __transform(self):
-        self.metric_of = MetricOfErrors(self.metric_of)
-        return self
-
-
 class CardWebVital(__CardSchema):
     metric_type: Literal[MetricType.WEB_VITAL]
     metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.AVG_VISITED_PAGES)
@@ -1249,8 +1217,7 @@ class CardPathAnalysis(__CardSchema):
 # Union of cards-schemas that doesn't change between FOSS and EE
 __cards_union_base = Union[
     CardTimeSeries, CardTable, CardFunnel,
-    CardErrors, CardWebVital, CardHeatMap,
-    CardPathAnalysis]
+    CardWebVital, CardHeatMap, CardPathAnalysis]

 CardSchema = ORUnion(__cards_union_base, discriminator='metric_type')

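The union edit above works because the card schemas form a discriminated union on metric_type: once CardErrors is gone from __cards_union_base, a payload tagged "errors" fails tag validation instead of selecting a model. A reduced, self-contained sketch of that behaviour with simplified stand-in models (assuming pydantic v2, which the surrounding model_validator usage suggests; these are not the project's real schemas):

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, ValidationError


class CardWebVital(BaseModel):
    metric_type: Literal["webVital"]
    metric_of: str = "avgVisitedPages"


class CardHeatMap(BaseModel):
    metric_type: Literal["heatMap"]


class Envelope(BaseModel):
    # discriminated union: the metric_type tag selects the concrete card model
    card: Annotated[Union[CardWebVital, CardHeatMap], Field(discriminator="metric_type")]


print(Envelope(card={"metric_type": "webVital"}).card)  # still accepted
try:
    Envelope(card={"metric_type": "errors"})  # tag no longer present in the union
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # e.g. 'union_tag_invalid'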
@@ -26,7 +26,6 @@ def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.Cu
                                                          role=user["role"],
                                                          permissions=user["permissions"],
                                                          serviceAccount=user["serviceAccount"])
-    print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>VC")
     return request.state.currentContext

@@ -33,6 +33,13 @@ ALTER TABLE IF EXISTS public.sessions_notes
     ADD COLUMN updated_at timestamp DEFAULT NULL,
     ALTER COLUMN message DROP NOT NULL;

+DELETE
+FROM public.metrics
+WHERE metric_of IN ('domainsErrors4xx', 'domainsErrors5xx', 'countSessions',
+                    'countRequests', 'errorsPerDomains', 'errorsPerType',
+                    'impactedSessionsByJsErrors', 'resourcesByParty', 'userOs',
+                    'speedLocation');
+
 COMMIT;

 \elif :is_next

@@ -33,6 +33,13 @@ ALTER TABLE IF EXISTS public.sessions_notes
     ADD COLUMN updated_at timestamp DEFAULT NULL,
     ALTER COLUMN message DROP NOT NULL;

+DELETE
+FROM public.metrics
+WHERE metric_of IN ('domainsErrors4xx', 'domainsErrors5xx', 'countSessions',
+                    'countRequests', 'errorsPerDomains', 'errorsPerType',
+                    'impactedSessionsByJsErrors', 'resourcesByParty', 'userOs',
+                    'speedLocation');
+
 COMMIT;

 \elif :is_next