From 1e2b6d11788d1649e5127610565b50a6fb2e00bf Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 24 Jan 2023 16:27:22 +0100
Subject: [PATCH] feat(chalice): insights fixed no-sessions logic

---
 ee/api/chalicelib/core/sessions_insights.py | 48 +++++++++++++++------
 1 file changed, 34 insertions(+), 14 deletions(-)

diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py
index 643a3bf1f..f2f85ed3a 100644
--- a/ee/api/chalicelib/core/sessions_insights.py
+++ b/ee/api/chalicelib/core/sessions_insights.py
@@ -20,14 +20,14 @@ def _table_where(table, index, value):
 
 
 def _sum_table_index(table, index):
-    print(f'index {index}')
+    # print(f'index {index}')
     s = 0
     count = 0
     for row in table:
         v = row[index]
         if v is None:
             continue
-        print(v)
+        # print(v)
         s += v
         count += 1
     return s
@@ -44,8 +44,8 @@ def _sort_table_index(table, index, reverse=False):
 
 
 def _select_rec(l, selector):
-    print('selector:', selector)
-    print('list:', l)
+    # print('selector:', selector)
+    # print('list:', l)
     if len(selector) == 1:
         return l[selector[0]]
     else:
@@ -95,7 +95,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
     conditions = ["event_type = 'REQUEST'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.success) as success_rate, T2.url_host as names,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.success) as success_rate, T2.url_host as names,
                        T2.url_path as source, avg(T2.duration) as avg_duration
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                     LEFT JOIN (SELECT session_id, url_host, url_path, success, message, duration, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -107,12 +107,17 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+        if res is None or sum([r.get("sessions") for r in res]) == 0:
+            return []
 
     table_hh1, table_hh2, columns, this_period_hosts, last_period_hosts = __get_two_values(res, time_index='hh',
                                                                                            name_index='source')
     test = [k[4] for k in table_hh1]
-    print(f'length {len(test)}, uniques {len(set(test))}')
+    # print(f'length {len(test)}, uniques {len(set(test))}')
     del res
 
     new_hosts = [x for x in this_period_hosts if x not in last_period_hosts]
@@ -195,7 +200,7 @@ def query_most_errors_by_period(project_id, start_time, end_time,
     conditions = ["event_type = 'ERROR'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, T2.name as names,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, T2.name as names,
                        groupUniqArray(T2.source) as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                     LEFT JOIN (SELECT session_id, name, source, message, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -210,15 +215,20 @@ def query_most_errors_by_period(project_id, start_time, end_time,
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+        if res is None or sum([r.get("sessions") for r in res]) == 0:
+            return []
 
     table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh',
                                                                                              name_index='names')
     del res
-    print(table_hh1)
-    print('\n')
-    print(table_hh2)
-    print('\n')
+    # print(table_hh1)
+    # print('\n')
+    # print(table_hh2)
+    # print('\n')
 
     new_errors = [x for x in this_period_errors if x not in last_period_errors]
     common_errors = [x for x in this_period_errors if x not in new_errors]
@@ -283,7 +293,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     conditions = ["event_type = 'PERFORMANCE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.avg_cpu) as cpu_used,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.avg_cpu) as cpu_used,
                        avg(T2.avg_used_js_heap_size) as memory_used, T2.url_host as names, groupUniqArray(T2.url_path) as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                     LEFT JOIN (SELECT session_id, url_host, url_path, avg_used_js_heap_size, avg_cpu, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -295,7 +305,12 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+        if res is None or sum([r.get("sessions") for r in res]) == 0:
+            return []
 
     table_hh1, table_hh2, columns, this_period_resources, last_period_resources = __get_two_values(res, time_index='hh',
                                                                                                    name_index='names')
@@ -308,7 +323,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     mem_oldvalue = _mean_table_index(table_hh2, memory_idx)
     cpu_newvalue = _mean_table_index(table_hh2, cpu_idx)
     cpu_oldvalue = _mean_table_index(table_hh2, cpu_idx)
-    # TODO: what if _tmp=0 ?
+
     mem_oldvalue = 1 if mem_oldvalue == 0 else mem_oldvalue
     cpu_oldvalue = 1 if cpu_oldvalue == 0 else cpu_oldvalue
     return [{'category': schemas_ee.InsightCategories.resources,
@@ -338,7 +353,7 @@ def query_click_rage_by_period(project_id, start_time, end_time,
     conditions = ["issue_type = 'click_rage'", "event_type = 'ISSUE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                     LEFT JOIN (SELECT session_id, url_host, url_path, toStartOfInterval(datetime, INTERVAL %(step_size)s second ) as dtime
                                FROM experimental.events
@@ -351,7 +366,12 @@ def query_click_rage_by_period(project_id, start_time, end_time,
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
        query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+        if res is None or sum([r.get("sessions") for r in res]) == 0:
+            return []
 
     table_hh1, table_hh2, columns, this_period_rage, last_period_rage = __get_two_values(res, time_index='hh',
                                                                                          name_index='sources')
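
Note (illustration only, not part of the patch): the no-sessions guard added after each conn.execute(query=query) call above can be exercised in isolation. The sketch below assumes the ClickHouse client returns rows as a list of dicts keyed by the SELECT aliases; insights_or_empty, empty_period and busy_period are hypothetical names introduced here purely for the example.

from typing import Any, Dict, List, Optional


def insights_or_empty(res: Optional[List[Dict[str, Any]]]) -> List[Dict[str, Any]]:
    # A ClickHouse LEFT JOIN fills non-matching buckets with default values, so the
    # per-bucket countIf(T2.session_id != 0) evaluates to 0 for intervals without
    # real sessions. When every bucket is 0 (or the query returned nothing), there
    # is nothing to analyse and the endpoint should report no insights.
    if res is None or sum([r.get("sessions", 0) for r in res]) == 0:
        return []
    return res  # the patched functions continue with __get_two_values(res, ...)


if __name__ == "__main__":
    empty_period = [{"hh": "2023-01-24 15:00:00", "sessions": 0},
                    {"hh": "2023-01-24 16:00:00", "sessions": 0}]
    busy_period = [{"hh": "2023-01-24 15:00:00", "sessions": 4},
                   {"hh": "2023-01-24 16:00:00", "sessions": 9}]
    print(insights_or_empty(empty_period))  # [] -> no-sessions case, nothing to report
    print(insights_or_empty(busy_period))   # rows pass through for insight computation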