From 72bee8e8943cd1a650c9a5c1968fbf1dc93daede Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 26 Apr 2022 18:10:25 +0200 Subject: [PATCH 001/221] feat(api): round time metrics --- api/chalicelib/core/metrics.py | 34 ++++++++++--------- api/chalicelib/utils/helper.py | 20 +++++++++++- api/schemas.py | 1 + ee/api/chalicelib/core/metrics.py | 54 +++++++++++++++++-------------- 4 files changed, 67 insertions(+), 42 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index fb8241440..05c5233f8 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -967,7 +967,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -1126,7 +1126,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} + result = {"value": avg, "chart": rows} + helper.__time_value(result) + return result def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1348,7 +1350,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2241,7 +2243,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = 
helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2300,7 +2302,7 @@ def __get_application_activity_avg_page_load_time(cur, project_id, startTimestam cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2316,7 +2318,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2369,7 +2371,7 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes "endTimestamp": endTimestamp, **__get_constraint_values(args)})) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2385,7 +2387,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2442,7 +2444,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - 
results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2512,7 +2514,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2645,7 +2647,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2731,7 +2733,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2772,7 +2774,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2816,7 +2818,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2857,7 +2859,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de cur.execute(cur.mogrify(pg_query, params)) rows = 
cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2899,7 +2901,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 8cfab8a3f..042b2a94b 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -1,12 +1,13 @@ +import math import random import re import string from typing import Union -import math import requests import schemas +from chalicelib.utils.TimeUTC import TimeUTC local_prefix = 'local-' from decouple import config @@ -384,3 +385,20 @@ def custom_alert_to_front(values): if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom: values["query"]["left"] = values["seriesId"] return values + + +def __time_value(row): + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + factor = 1 + if row["value"] > TimeUTC.MS_MINUTE: + row["value"] = row["value"] / TimeUTC.MS_MINUTE + row["unit"] = schemas.TemplatePredefinedUnits.minute + factor = TimeUTC.MS_MINUTE + elif row["value"] > 1 * 1000: + row["value"] = row["value"] / 1000 + row["unit"] = schemas.TemplatePredefinedUnits.second + factor = 1000 + + if "chart" in row and factor > 1: + for r in row["chart"]: + r["value"] /= factor diff --git a/api/schemas.py b/api/schemas.py index f1daef481..bb697d03f 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -966,6 +966,7 @@ class TemplatePredefinedKeys(str, Enum): class TemplatePredefinedUnits(str, Enum): millisecond = "ms" + second = "s" minute = "min" memory = "mb" frame = "f/s" diff --git 
a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 65889e28d..b634a7051 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -943,11 +943,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, - "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, - density=density, neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + + results = {"value": avg, + "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0})} + helper.__time_value(results) + return results def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1088,11 +1090,12 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, - "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, - density=density, neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + results = {"value": avg, + "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0})} + helper.__time_value(results) + return results def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1288,10 +1291,11 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), FROM 
pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, density=density, - neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + results = {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, density=density, + neutral={"value": 0})} + helper.__time_value(results) + return results def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -2102,7 +2106,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2179,7 +2183,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2255,7 +2259,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], 
new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2334,7 +2338,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2395,7 +2399,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2523,7 +2527,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2602,7 +2606,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del end_time=endTimestamp, density=density, neutral={"value": 0}) results["chart"] = rows - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2678,7 +2682,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2720,7 +2724,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no end_time=endTimestamp, density=density, 
neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2762,7 +2766,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de end_time=endTimestamp, density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2804,5 +2808,5 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n end_time=endTimestamp, density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) From f8f70b1006042ed3d08a9fcadfe812a4c1f64894 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Apr 2022 14:07:28 +0200 Subject: [PATCH 002/221] feat(api): EE fixed No of pages count widget --- ee/api/chalicelib/core/metrics.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index b634a7051..111671a01 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -2469,13 +2469,15 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args): - ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("sessions.pages_count>0") - ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(sessions.pages_count)),0) AS value - FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)};""" + + ch_query = f"""SELECT 
COALESCE(CEIL(avgOrNull(count)),0) AS value + FROM (SELECT COUNT(session_id) AS count + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)}) AS groupped_data + WHERE count>0;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} @@ -2486,19 +2488,22 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) - ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} - ch_sub_query_chart.append("sessions.pages_count>0") - ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - COALESCE(avgOrNull(sessions.pages_count),0) AS value - FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query_chart)} - GROUP BY timestamp - ORDER BY timestamp;""" + ch_query = f"""SELECT timestamp, COALESCE(avgOrNull(count), 0) AS value + FROM (SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + session_id, COUNT(pages.session_id) AS count + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp,session_id + ORDER BY timestamp) AS groupped_data + 
WHERE count>0 + GROUP BY timestamp + ORDER BY timestamp;""" rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, From aef702603458a6ba94492db2107409b2d25a510f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Apr 2022 14:59:05 +0200 Subject: [PATCH 003/221] feat(api): EE fixed No of pages count widget --- ee/api/chalicelib/core/metrics.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 111671a01..2d6aa7201 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -2476,7 +2476,8 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(count)),0) AS value FROM (SELECT COUNT(session_id) AS count FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)}) AS groupped_data + WHERE {" AND ".join(ch_sub_query)} + GROUP BY session_id) AS groupped_data WHERE count>0;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} From 1a73b978dc79232767bae1adf3e0c35f66362b3e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Apr 2022 15:29:45 +0200 Subject: [PATCH 004/221] feat(db): EE remove pages_count column --- ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 1 + ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql new file mode 100644 index 000000000..412f3ae2a --- /dev/null +++ 
b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -0,0 +1 @@ +ALTER TABLE sessions DROP COLUMN pages_count; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql index 22cc6b876..712cbd6d4 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql @@ -14,7 +14,6 @@ CREATE TABLE IF NOT EXISTS sessions user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 
'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), datetime DateTime, duration UInt32, - pages_count UInt16, events_count UInt16, errors_count UInt16, utm_source Nullable(String), From 35b9d6ebafabd42fe437654d0eaf4e39de1049e0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 13:40:57 +0200 Subject: [PATCH 005/221] feat(api): s3 helper detect environment feat(api): support description for dashboards --- api/chalicelib/core/dashboards.py | 7 ++++--- api/chalicelib/utils/s3.py | 13 ++++++++----- api/schemas.py | 1 + .../helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql | 12 ++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 1 + scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql | 12 ++++++++++++ scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 + 7 files changed, 39 insertions(+), 8 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql create mode 100644 scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 7b7bfe252..bce5d3ad0 100644 --- 
a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -42,8 +42,8 @@ def get_templates(project_id, user_id): def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): with pg_client.PostgresClient() as cur: - pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned) - VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s) + pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description) + VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s) RETURNING *""" params = {"userId": user_id, "projectId": project_id, **data.dict()} if data.metrics is not None and len(data.metrics) > 0: @@ -134,7 +134,8 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo row = cur.fetchone() offset = row["count"] pg_query = f"""UPDATE dashboards - SET name = %(name)s + SET name = %(name)s, + description= %(description)s {", is_public = %(is_public)s" if data.is_public is not None else ""} {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""} WHERE dashboards.project_id = %(projectId)s diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index 67e1eafd2..b6575ccb5 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -5,11 +5,14 @@ import boto3 import botocore from botocore.client import Config -client = boto3.client('s3', endpoint_url=config("S3_HOST"), - aws_access_key_id=config("S3_KEY"), - aws_secret_access_key=config("S3_SECRET"), - config=Config(signature_version='s3v4'), - region_name=config("sessions_region")) +if not config("S3_HOST", default=False): + client = boto3.client('s3') +else: + client = boto3.client('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), + config=Config(signature_version='s3v4'), + region_name=config("sessions_region")) def exists(bucket, 
key): diff --git a/api/schemas.py b/api/schemas.py index bb697d03f..105ead87e 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,6 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) + description: str = Field(default=None) is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql new file mode 100644 index 000000000..e94ccc4e1 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -0,0 +1,12 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.6.1-ee' +$$ LANGUAGE sql IMMUTABLE; + + +ALTER TABLE IF EXISTS dashboards + ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 461a414fc..7d6bdece7 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -838,6 +838,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, + description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, is_pinned boolean NOT NULL DEFAULT FALSE, created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql new file mode 100644 index 000000000..c61efae19 --- /dev/null +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -0,0 +1,12 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.6.1' +$$ LANGUAGE 
sql IMMUTABLE; + + +ALTER TABLE IF EXISTS dashboards + ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 5a01226f1..a4b41fefe 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -992,6 +992,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, + description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, is_pinned boolean NOT NULL DEFAULT FALSE, created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), From 8fa4632ee4063186588444299d02dcf1b8ef82b5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 26 Apr 2022 16:30:48 +0200 Subject: [PATCH 006/221] feat(alerts): changed build script --- api/build_alerts.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/build_alerts.sh b/api/build_alerts.sh index f333c8dc8..1b0b96f7e 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -27,7 +27,7 @@ function make_submodule() { mkdir -p ./alerts/chalicelib/ cp -R ./chalicelib/__init__.py ./alerts/chalicelib/ mkdir -p ./alerts/chalicelib/core/ - cp -R ./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,roles,assist,events_ios,sessions_mobs,errors,dashboard,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/ + cp -R 
./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,roles,assist,events_ios,sessions_mobs,errors,metrics,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/ mkdir -p ./alerts/chalicelib/utils/ cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/ # -- end of generated part From 4f44edeb3979c065e8cff647cc62a418c54a287e Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 27 Apr 2022 12:54:40 +0000 Subject: [PATCH 007/221] Vagrant for local contribution (#434) * chore(vagrant): initial vagrantfile * chore(vagrant): adding instructions after installation * chore(vagrant): Adding vagrant user to docker group * chore(vagrant): use local docker daemon for k3s * chore(vagrant): fix comment * chore(vagrant): adding hostname in /etc/hosts * chore(vagrant): fix doc * chore(vagrant): limiting cpu * chore(frontend): initialize dev env * chore(docker): adding dockerignore * chore(dockerfile): using cache for fasten build * chore(dockerignore): update * chore(docker): build optimizations * chore(build): all components build option * chore(build): utilities build fix * chore(scrpt): remove debug message * chore(vagrant): provision using stable branch always Signed-off-by: rjshrjndrn --- api/.dockerignore | 6 ++ api/Dockerfile | 6 +- api/build.sh | 10 ++- api/build_alerts.sh | 3 +- backend/.dockerignore | 6 ++ backend/build.sh | 6 +- frontend/build.sh | 1 + frontend/dev-init.sh | 2 + peers/.dockerignore | 6 ++ peers/build.sh | 4 +- scripts/helmcharts/build_deploy.sh | 22 +++++ scripts/helmcharts/local_deploy.sh | 105 +++++++++++++++++++++++ scripts/vagrant/Vagrantfile | 129 +++++++++++++++++++++++++++++ utilities/.dockerignore | 6 ++ 
utilities/build.sh | 4 +- 15 files changed, 302 insertions(+), 14 deletions(-) create mode 100644 api/.dockerignore create mode 100644 backend/.dockerignore create mode 100644 frontend/dev-init.sh create mode 100644 peers/.dockerignore create mode 100644 scripts/helmcharts/build_deploy.sh create mode 100644 scripts/helmcharts/local_deploy.sh create mode 100644 scripts/vagrant/Vagrantfile create mode 100644 utilities/.dockerignore diff --git a/api/.dockerignore b/api/.dockerignore new file mode 100644 index 000000000..b6aaccd33 --- /dev/null +++ b/api/.dockerignore @@ -0,0 +1,6 @@ +# ignore .git and .cache folders +.git +.cache +**/build.sh +**/build_*.sh +**/*deploy.sh diff --git a/api/Dockerfile b/api/Dockerfile index 0673ab2b5..f3b5e85f5 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -11,8 +11,8 @@ RUN apt update && apt install -y curl && \ curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ apt install -y nodejs && \ apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* && \ - cd sourcemap-reader && \ + rm -rf /var/lib/apt/lists/* +RUN cd sourcemap-reader && \ npm install # Add Tini @@ -23,4 +23,4 @@ ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini ENTRYPOINT ["/tini", "--"] -CMD ./entrypoint.sh \ No newline at end of file +CMD ./entrypoint.sh diff --git a/api/build.sh b/api/build.sh index cec7525f5..8c735ff3c 100644 --- a/api/build.sh +++ b/api/build.sh @@ -12,9 +12,9 @@ envarg="default-foss" check_prereq() { which docker || { echo "Docker not installed, please install docker." 
- exit=1 + exit 1 } - [[ exit -eq 1 ]] && exit 1 + return } function build_api(){ @@ -32,9 +32,11 @@ function build_api(){ docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest -} + } + echo "api docker build completed" } check_prereq build_api $1 -IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 \ No newline at end of file +echo buil_complete +IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 diff --git a/api/build_alerts.sh b/api/build_alerts.sh index 1b0b96f7e..2a7d88a1e 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -64,7 +64,8 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest } +echo "completed alerts build" } check_prereq -build_api $1 \ No newline at end of file +build_api $1 diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 000000000..b6aaccd33 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,6 @@ +# ignore .git and .cache folders +.git +.cache +**/build.sh +**/build_*.sh +**/*deploy.sh diff --git a/backend/build.sh b/backend/build.sh index c760c1b9b..c3c40dd33 100644 --- a/backend/build.sh +++ b/backend/build.sh @@ -13,9 +13,9 @@ ee="false" check_prereq() { which docker || { echo "Docker not installed, please install docker." 
- exit=1 + exit 1 } - [[ exit -eq 1 ]] && exit 1 + return } function build_api(){ @@ -30,6 +30,7 @@ function build_api(){ [[ $PUSH_IMAGE -eq 1 ]] && { docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1} } + echo "build completed for http" return } for image in $(ls services); @@ -40,6 +41,7 @@ function build_api(){ } echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}" done + echo "backend build completed" } check_prereq diff --git a/frontend/build.sh b/frontend/build.sh index 7b656bc8f..652be2acf 100644 --- a/frontend/build.sh +++ b/frontend/build.sh @@ -20,6 +20,7 @@ check_prereq() { function build(){ # Run docker as the same user, else we'll run in to permission issues. docker run --rm -v /etc/passwd:/etc/passwd -u `id -u`:`id -g` -v $(pwd):/home/${USER} -w /home/${USER} --name node_build node:14-stretch-slim /bin/bash -c "npm install && npm run build:oss" + echo "frotend build completed" } check_prereq diff --git a/frontend/dev-init.sh b/frontend/dev-init.sh new file mode 100644 index 000000000..e32647ff0 --- /dev/null +++ b/frontend/dev-init.sh @@ -0,0 +1,2 @@ +#!/bin/bash +npm install --legacy-peer-deps diff --git a/peers/.dockerignore b/peers/.dockerignore new file mode 100644 index 000000000..b6aaccd33 --- /dev/null +++ b/peers/.dockerignore @@ -0,0 +1,6 @@ +# ignore .git and .cache folders +.git +.cache +**/build.sh +**/build_*.sh +**/*deploy.sh diff --git a/peers/build.sh b/peers/build.sh index c15921ea8..381189927 100644 --- a/peers/build.sh +++ b/peers/build.sh @@ -10,9 +10,8 @@ git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} check_prereq() { which docker || { echo "Docker not installed, please install docker." 
- exit=1 + exit 1 } - [[ exit -eq 1 ]] && exit 1 } function build_api(){ @@ -27,6 +26,7 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/peers:${git_sha1} ${DOCKER_REPO:-'local'}/peers:latest docker push ${DOCKER_REPO:-'local'}/peers:latest } + echo "peer docker build complted" } check_prereq diff --git a/scripts/helmcharts/build_deploy.sh b/scripts/helmcharts/build_deploy.sh new file mode 100644 index 000000000..7a75fad8b --- /dev/null +++ b/scripts/helmcharts/build_deploy.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -e + +# This script will build and push docker image to registry + +# Usage: IMAGE_TAG=latest DOCKER_REPO=rg.fr-par.scw.cloud/foss bash build_deploy.sh + +echo $DOCKER_REPO +[[ -z DOCKER_REPO ]] && { + echo Set DOCKER_REPO="your docker registry" + exit 1 +} || { + docker login $DOCKER_REPO + cd ../../api + PUSH_IMAGE=1 bash build.sh $@ + cd ../backend + PUSH_IMAGE=1 bash build.sh $@ + cd ../utilities + PUSH_IMAGE=1 bash build.sh $@ + cd ../peers + PUSH_IMAGE=1 bash build.sh $@ +} diff --git a/scripts/helmcharts/local_deploy.sh b/scripts/helmcharts/local_deploy.sh new file mode 100644 index 000000000..c8c82ceb4 --- /dev/null +++ b/scripts/helmcharts/local_deploy.sh @@ -0,0 +1,105 @@ +#!/bin/bash +set -e + +# This script will build and push docker image to registry + +# Usage: IMAGE_TAG=latest DOCKER_REPO=rg.fr-par.scw.cloud/foss bash build_deploy.sh + +export DOCKER_REPO="rg.fr-par.scw.cloud/foss" +export IMAGE_TAG=`grep fromVersion vars.yaml | awk '{print $NF}'|xargs` + + +apps=( + api + assets + db + ender + http + integrations + sink + storage + assist + peers + all +) +help(){ + cat <> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts + + apt-get update + apt-get install -y git curl + curl -fsSL https://get.docker.com | sh - + usermod -aG docker vagrant + + git clone https://github.com/openreplay/openreplay infra + cd infra/scripts/helmcharts + + # changing container 
runtime for k3s to docker + sudo -u vagrant git checkout -- init.sh + sed -i 's/INSTALL_K3S_EXEC=\\(.*\\)\\\"/INSTALL_K3S_EXEC=\\1 --docker\\\"/g' init.sh + + DOMAIN_NAME=openreplay.local bash init.sh + cp -rf /root/.kube /home/vagrant/ + cp -rf /home/vagrant/infra/scripts/helmcharts/vars.yaml /home/vagrant/openreplay-dev/openreplay/scripts/helmcharts/vars.yaml + chown -R vagrant:vagrant /home/vagrant + + cat <> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts' + + ## Linux (Paste the following command in terminal) + + sudo -- sh -c 'grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts' + + ## Windows + + Use the following instructions if you’re running Windows 10 or Windows 8: + + Press the Windows key. + Type Notepad in the search field. + In the search results, right-click Notepad and select Run as administrator. + From Notepad, open the following file: + c:\\Windows\\System32\\Drivers\\etc\\hosts + add the below line in the hosts file + $IP_ADDR openreplay.local + Select File > Save to save your changes. + + To Access Openreplay: + - Open your browser and go to "http://openreplay.local" + + EOF + SHELL +end diff --git a/utilities/.dockerignore b/utilities/.dockerignore new file mode 100644 index 000000000..b6aaccd33 --- /dev/null +++ b/utilities/.dockerignore @@ -0,0 +1,6 @@ +# ignore .git and .cache folders +.git +.cache +**/build.sh +**/build_*.sh +**/*deploy.sh diff --git a/utilities/build.sh b/utilities/build.sh index f7d003ed3..4a290768d 100644 --- a/utilities/build.sh +++ b/utilities/build.sh @@ -10,9 +10,8 @@ git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} check_prereq() { which docker || { echo "Docker not installed, please install docker." 
- exit=1 + exit 1 } - [[ exit -eq 1 ]] && exit 1 } function build_api(){ @@ -26,6 +25,7 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/assist:${git_sha1} ${DOCKER_REPO:-'local'}/assist:latest docker push ${DOCKER_REPO:-'local'}/assist:latest } + echo "build completed for assist" } check_prereq From bdb6a75d7c123b1f2b8361cfe28a382cdb41f1e2 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 26 Apr 2022 17:05:02 +0200 Subject: [PATCH 008/221] fix(nginx): proper x-forward-for proxying Signed-off-by: rjshrjndrn --- scripts/helmcharts/vars.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index 5cc50af13..297017254 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -55,7 +55,6 @@ ingress-nginx: &ingress-nginx default-ssl-certificate: "app/openreplay-ssl" config: enable-real-ip: true - forwarded-for-header: "proxy_protocol" # Ref: https://kubernetes.github.io/ingress-nginx/user-guide/nginx-configuration/configmap/#max-worker-connections max-worker-connections: 0 # SSL redirection From 1b93f8a4530142884c33f0ad9c42ec97f6265f46 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 25 Apr 2022 23:09:52 +0200 Subject: [PATCH 009/221] gofmt --- backend/pkg/dev/profiling/profiling.go | 25 +- backend/pkg/env/aws.go | 2 +- backend/pkg/env/vars.go | 7 +- backend/pkg/env/worker-id.go | 4 +- backend/pkg/flakeid/flakeid.go | 2 +- backend/pkg/hashid/hashid.go | 1 - backend/pkg/intervals/intervals.go | 6 +- backend/pkg/log/queue.go | 99 +- backend/pkg/messages/facade.go | 2 +- backend/pkg/messages/filters.go | 3 +- backend/pkg/messages/get-timestamp.go | 118 +- .../pkg/messages/legacy-message-transform.go | 15 +- backend/pkg/messages/messages.go | 2304 ++++++------- .../pkg/messages/performance/performance.go | 3 +- backend/pkg/messages/primitives.go | 6 +- backend/pkg/messages/read-message.go | 2846 ++++++++--------- backend/pkg/redisstream/producer.go | 13 +- 
backend/pkg/redisstream/redis.go | 8 +- backend/pkg/storage/s3.go | 41 +- backend/pkg/url/assets/css.go | 2 +- backend/pkg/url/method.go | 10 +- backend/services/assets/jsexception.go | 8 +- backend/services/assets/main.go | 1 + backend/services/db/heuristics/anr.go | 13 +- backend/services/db/heuristics/clickrage.go | 17 +- backend/services/db/heuristics/heuristics.go | 13 +- backend/services/db/heuristics/performance.go | 25 +- .../db/heuristics/readyMessageStore.go | 5 +- backend/services/db/heuristics/session.go | 12 +- backend/services/db/stats.go | 20 +- backend/services/ender/builder/builderMap.go | 9 +- .../ender/builder/clikRageDetector.go | 26 +- .../services/ender/builder/cpuIssueFinder.go | 29 +- .../ender/builder/deadClickDetector.go | 24 +- .../services/ender/builder/domDropDetector.go | 11 +- .../ender/builder/inputEventBuilder.go | 11 +- .../ender/builder/memoryIssueFinder.go | 24 +- .../ender/builder/pageEventBuilder.go | 16 +- .../builder/performanceTrackAggrBuilder.go | 23 +- backend/services/http/assets.go | 4 +- backend/services/http/handlers-depricated.go | 2 +- backend/services/http/ios-device.go | 256 +- backend/services/http/uuid.go | 2 +- .../integrations/clientManager/manager.go | 24 +- .../integrations/integration/cloudwatch.go | 43 +- .../integrations/integration/elasticsearch.go | 6 +- .../integrations/integration/rollbar.go | 66 +- .../integrations/integration/utils.go | 19 +- backend/services/storage/gzip.go | 19 +- 49 files changed, 3097 insertions(+), 3148 deletions(-) diff --git a/backend/pkg/dev/profiling/profiling.go b/backend/pkg/dev/profiling/profiling.go index 139aaeac6..c05c47549 100644 --- a/backend/pkg/dev/profiling/profiling.go +++ b/backend/pkg/dev/profiling/profiling.go @@ -1,24 +1,23 @@ package profiling import ( - "log" - "net/http" - "github.com/gorilla/mux" - _ "net/http/pprof" -) + "github.com/gorilla/mux" + "log" + "net/http" + _ "net/http/pprof" +) func Profile() { go func() { - router := mux.NewRouter() - 
router.PathPrefix("/debug/pprof/").Handler(http.DefaultServeMux) - log.Println("Starting profiler...") - if err := http.ListenAndServe(":6060", router); err != nil { - panic(err) - } + router := mux.NewRouter() + router.PathPrefix("/debug/pprof/").Handler(http.DefaultServeMux) + log.Println("Starting profiler...") + if err := http.ListenAndServe(":6060", router); err != nil { + panic(err) + } }() } - /* docker run -p 6060:6060 -e REQUIRED_ENV=http://value -e ANOTHER_ENV=anothervalue workername @@ -34,4 +33,4 @@ go tool pprof http://localhost:6060/debug/pprof/profile?seconds=30 THEN https://www.speedscope.app/ -*/ \ No newline at end of file +*/ diff --git a/backend/pkg/env/aws.go b/backend/pkg/env/aws.go index 6573c8551..cb7445797 100644 --- a/backend/pkg/env/aws.go +++ b/backend/pkg/env/aws.go @@ -19,7 +19,7 @@ func AWSSessionOnRegion(region string) *_session.Session { if AWS_ENDPOINT != "" { config.Endpoint = aws.String(AWS_ENDPOINT) config.DisableSSL = aws.Bool(true) - config.S3ForcePathStyle = aws.Bool(true) + config.S3ForcePathStyle = aws.Bool(true) } aws_session, err := _session.NewSession(config) if err != nil { diff --git a/backend/pkg/env/vars.go b/backend/pkg/env/vars.go index 33ae9da3c..eb88b3c6b 100644 --- a/backend/pkg/env/vars.go +++ b/backend/pkg/env/vars.go @@ -22,7 +22,7 @@ func Uint64(key string) uint64 { v := String(key) n, err := strconv.ParseUint(v, 10, 64) if err != nil { - log.Fatalln(key + " has a wrong value. ", err) + log.Fatalln(key+" has a wrong value. ", err) } return n } @@ -31,12 +31,13 @@ func Uint16(key string) uint16 { v := String(key) n, err := strconv.ParseUint(v, 10, 16) if err != nil { - log.Fatalln(key + " has a wrong value. ", err) + log.Fatalln(key+" has a wrong value. 
", err) } return uint16(n) } const MAX_INT = uint64(^uint(0) >> 1) + func Int(key string) int { val := Uint64(key) if val > MAX_INT { @@ -54,4 +55,4 @@ func Bool(key string) bool { return true } return false -} \ No newline at end of file +} diff --git a/backend/pkg/env/worker-id.go b/backend/pkg/env/worker-id.go index 47fdffc43..22d077832 100644 --- a/backend/pkg/env/worker-id.go +++ b/backend/pkg/env/worker-id.go @@ -5,9 +5,9 @@ import ( ) func hashHostname(hostname string) uint16 { - var h uint16 ; + var h uint16 for i, b := range hostname { - h += uint16(i+1)*uint16(b) + h += uint16(i+1) * uint16(b) } return h } diff --git a/backend/pkg/flakeid/flakeid.go b/backend/pkg/flakeid/flakeid.go index 13e064896..c54b990a5 100644 --- a/backend/pkg/flakeid/flakeid.go +++ b/backend/pkg/flakeid/flakeid.go @@ -8,7 +8,7 @@ const ( TIMESTAMP_MAX = 1< m.Timestamp || prt.mints == 0 { - prt.mints = m.Timestamp - } - prt.lastts = m.Timestamp - prt.lastID = m.ID - prt.count += 1 + if prt.maxts < m.Timestamp { + prt.maxts = m.Timestamp + } + if prt.mints > m.Timestamp || prt.mints == 0 { + prt.mints = m.Timestamp + } + prt.lastts = m.Timestamp + prt.lastID = m.ID + prt.count += 1 - - select { - case <-qs.tick: - qs.LogThenReset() - default: - } + select { + case <-qs.tick: + qs.LogThenReset() + default: + } } - func (qs *queueStats) LogThenReset() { - s := "Queue Statistics: " - for i, p := range qs.prts { - s = fmt.Sprintf("%v | %v:: lastTS %v, lastID %v, count %v, maxTS %v, minTS %v", - s, i, p.lastts, p.lastID, p.count, p.maxts, p.mints) - } - log.Println(s) - // reset - qs.prts = make(map[int32]*partitionStats) + s := "Queue Statistics: " + for i, p := range qs.prts { + s = fmt.Sprintf("%v | %v:: lastTS %v, lastID %v, count %v, maxTS %v, minTS %v", + s, i, p.lastts, p.lastID, p.count, p.maxts, p.mints) + } + log.Println(s) + // reset + qs.prts = make(map[int32]*partitionStats) } - // TODO: list of message id to log (mb filter function with callback in messages/utils.go or 
something) func LogMessage(s string, sessionID uint64, msg messages.Message, m *types.Meta) { - log.Printf("%v | SessionID: %v, Queue info: %v, Message: %v", s, sessionID, m, msg) + log.Printf("%v | SessionID: %v, Queue info: %v, Message: %v", s, sessionID, m, msg) } - diff --git a/backend/pkg/messages/facade.go b/backend/pkg/messages/facade.go index 91d896d19..5c024f2f6 100644 --- a/backend/pkg/messages/facade.go +++ b/backend/pkg/messages/facade.go @@ -36,6 +36,6 @@ func Encode(msg Message) []byte { // } func GetMessageTypeID(b []byte) (uint64, error) { - reader := bytes.NewReader(b) + reader := bytes.NewReader(b) return ReadUint(reader) } diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index f43f40142..44b2c7959 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -1,9 +1,8 @@ // Auto-generated, do not edit package messages - func IsReplayerType(id uint64) bool { - return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id + return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id } func IsIOSType(id uint64) bool { diff 
--git a/backend/pkg/messages/get-timestamp.go b/backend/pkg/messages/get-timestamp.go index c8e42f756..8b44764a7 100644 --- a/backend/pkg/messages/get-timestamp.go +++ b/backend/pkg/messages/get-timestamp.go @@ -1,65 +1,63 @@ // Auto-generated, do not edit package messages - func GetTimestamp(message Message) uint64 { - switch msg := message.(type) { - - case *IOSBatchMeta: - return msg.Timestamp - - case *IOSSessionStart: - return msg.Timestamp - - case *IOSSessionEnd: - return msg.Timestamp - - case *IOSMetadata: - return msg.Timestamp - - case *IOSCustomEvent: - return msg.Timestamp - - case *IOSUserID: - return msg.Timestamp - - case *IOSUserAnonymousID: - return msg.Timestamp - - case *IOSScreenChanges: - return msg.Timestamp - - case *IOSCrash: - return msg.Timestamp - - case *IOSScreenEnter: - return msg.Timestamp - - case *IOSScreenLeave: - return msg.Timestamp - - case *IOSClickEvent: - return msg.Timestamp - - case *IOSInputEvent: - return msg.Timestamp - - case *IOSPerformanceEvent: - return msg.Timestamp - - case *IOSLog: - return msg.Timestamp - - case *IOSInternalError: - return msg.Timestamp - - case *IOSNetworkCall: - return msg.Timestamp - - case *IOSIssueEvent: - return msg.Timestamp - - } - return uint64(message.Meta().Timestamp) -} + switch msg := message.(type) { + case *IOSBatchMeta: + return msg.Timestamp + + case *IOSSessionStart: + return msg.Timestamp + + case *IOSSessionEnd: + return msg.Timestamp + + case *IOSMetadata: + return msg.Timestamp + + case *IOSCustomEvent: + return msg.Timestamp + + case *IOSUserID: + return msg.Timestamp + + case *IOSUserAnonymousID: + return msg.Timestamp + + case *IOSScreenChanges: + return msg.Timestamp + + case *IOSCrash: + return msg.Timestamp + + case *IOSScreenEnter: + return msg.Timestamp + + case *IOSScreenLeave: + return msg.Timestamp + + case *IOSClickEvent: + return msg.Timestamp + + case *IOSInputEvent: + return msg.Timestamp + + case *IOSPerformanceEvent: + return msg.Timestamp + + case *IOSLog: 
+ return msg.Timestamp + + case *IOSInternalError: + return msg.Timestamp + + case *IOSNetworkCall: + return msg.Timestamp + + case *IOSIssueEvent: + return msg.Timestamp + + } + return uint64(message.Meta().Timestamp) +} diff --git a/backend/pkg/messages/legacy-message-transform.go b/backend/pkg/messages/legacy-message-transform.go index 637f8d443..031c4444a 100644 --- a/backend/pkg/messages/legacy-message-transform.go +++ b/backend/pkg/messages/legacy-message-transform.go @@ -1,21 +1,20 @@ package messages - func transformDepricated(msg Message) Message { switch m := msg.(type) { case *MouseClickDepricated: - meta := m.Meta() + meta := m.Meta() meta.TypeID = 33 return &MouseClick{ - meta: meta, - ID: m.ID, + meta: meta, + ID: m.ID, HesitationTime: m.HesitationTime, - Label: m.Label, + Label: m.Label, // Selector: '', } // case *FetchDepricated: // return &Fetch { - // Method: m.Method, + // Method: m.Method, // URL: m.URL, // Request: m.Request, // Response: m.Response, @@ -25,8 +24,6 @@ func transformDepricated(msg Message) Message { // // Headers: '' // } default: - return msg + return msg } } - - diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 38a1f61ba..e9aec5788 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -2,1693 +2,1693 @@ package messages type Message interface { - Encode() []byte - Meta() *meta + Encode() []byte + Meta() *meta } type meta struct { - Timestamp int64 - Index uint64 - TypeID uint64 + Timestamp int64 + Index uint64 + TypeID uint64 } // Might also implement Encode() here (?) 
func (m *meta) Meta() *meta { - return m + return m } type BatchMeta struct { - *meta - PageNo uint64 - FirstIndex uint64 - Timestamp int64 + *meta + PageNo uint64 + FirstIndex uint64 + Timestamp int64 } func (msg *BatchMeta) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 80 - p := 1 - p = WriteUint(msg.PageNo, buf, p) - p = WriteUint(msg.FirstIndex, buf, p) - p = WriteInt(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 80 + p := 1 + p = WriteUint(msg.PageNo, buf, p) + p = WriteUint(msg.FirstIndex, buf, p) + p = WriteInt(msg.Timestamp, buf, p) + return buf[:p] } type Timestamp struct { - *meta - Timestamp uint64 + *meta + Timestamp uint64 } func (msg *Timestamp) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 0 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 0 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + return buf[:p] } type SessionStart struct { - *meta - Timestamp uint64 - ProjectID uint64 - TrackerVersion string - RevID string - UserUUID string - UserAgent string - UserOS string - UserOSVersion string - UserBrowser string - UserBrowserVersion string - UserDevice string - UserDeviceType string - UserDeviceMemorySize uint64 - UserDeviceHeapSize uint64 - UserCountry string - UserID string + *meta + Timestamp uint64 + ProjectID uint64 + TrackerVersion string + RevID string + UserUUID string + UserAgent string + UserOS string + UserOSVersion string + UserBrowser string + UserBrowserVersion string + UserDevice string + UserDeviceType string + UserDeviceMemorySize uint64 + UserDeviceHeapSize uint64 + UserCountry string + UserID string } func (msg *SessionStart) Encode() []byte { - buf := make([]byte, 161+len(msg.TrackerVersion)+len(msg.RevID)+len(msg.UserUUID)+len(msg.UserAgent)+len(msg.UserOS)+len(msg.UserOSVersion)+len(msg.UserBrowser)+len(msg.UserBrowserVersion)+len(msg.UserDevice)+len(msg.UserDeviceType)+len(msg.UserCountry)+len(msg.UserID)) - buf[0] = 1 - p := 1 
- p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.ProjectID, buf, p) - p = WriteString(msg.TrackerVersion, buf, p) - p = WriteString(msg.RevID, buf, p) - p = WriteString(msg.UserUUID, buf, p) - p = WriteString(msg.UserAgent, buf, p) - p = WriteString(msg.UserOS, buf, p) - p = WriteString(msg.UserOSVersion, buf, p) - p = WriteString(msg.UserBrowser, buf, p) - p = WriteString(msg.UserBrowserVersion, buf, p) - p = WriteString(msg.UserDevice, buf, p) - p = WriteString(msg.UserDeviceType, buf, p) - p = WriteUint(msg.UserDeviceMemorySize, buf, p) - p = WriteUint(msg.UserDeviceHeapSize, buf, p) - p = WriteString(msg.UserCountry, buf, p) - p = WriteString(msg.UserID, buf, p) - return buf[:p] + buf := make([]byte, 161+len(msg.TrackerVersion)+len(msg.RevID)+len(msg.UserUUID)+len(msg.UserAgent)+len(msg.UserOS)+len(msg.UserOSVersion)+len(msg.UserBrowser)+len(msg.UserBrowserVersion)+len(msg.UserDevice)+len(msg.UserDeviceType)+len(msg.UserCountry)+len(msg.UserID)) + buf[0] = 1 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.ProjectID, buf, p) + p = WriteString(msg.TrackerVersion, buf, p) + p = WriteString(msg.RevID, buf, p) + p = WriteString(msg.UserUUID, buf, p) + p = WriteString(msg.UserAgent, buf, p) + p = WriteString(msg.UserOS, buf, p) + p = WriteString(msg.UserOSVersion, buf, p) + p = WriteString(msg.UserBrowser, buf, p) + p = WriteString(msg.UserBrowserVersion, buf, p) + p = WriteString(msg.UserDevice, buf, p) + p = WriteString(msg.UserDeviceType, buf, p) + p = WriteUint(msg.UserDeviceMemorySize, buf, p) + p = WriteUint(msg.UserDeviceHeapSize, buf, p) + p = WriteString(msg.UserCountry, buf, p) + p = WriteString(msg.UserID, buf, p) + return buf[:p] } type SessionDisconnect struct { - *meta - Timestamp uint64 + *meta + Timestamp uint64 } func (msg *SessionDisconnect) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 2 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 2 + p := 1 + p = 
WriteUint(msg.Timestamp, buf, p) + return buf[:p] } type SessionEnd struct { - *meta - Timestamp uint64 + *meta + Timestamp uint64 } func (msg *SessionEnd) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 3 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 3 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + return buf[:p] } type SetPageLocation struct { - *meta - URL string - Referrer string - NavigationStart uint64 + *meta + URL string + Referrer string + NavigationStart uint64 } func (msg *SetPageLocation) Encode() []byte { - buf := make([]byte, 31+len(msg.URL)+len(msg.Referrer)) - buf[0] = 4 - p := 1 - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Referrer, buf, p) - p = WriteUint(msg.NavigationStart, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.URL)+len(msg.Referrer)) + buf[0] = 4 + p := 1 + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Referrer, buf, p) + p = WriteUint(msg.NavigationStart, buf, p) + return buf[:p] } type SetViewportSize struct { - *meta - Width uint64 - Height uint64 + *meta + Width uint64 + Height uint64 } func (msg *SetViewportSize) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 5 - p := 1 - p = WriteUint(msg.Width, buf, p) - p = WriteUint(msg.Height, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 5 + p := 1 + p = WriteUint(msg.Width, buf, p) + p = WriteUint(msg.Height, buf, p) + return buf[:p] } type SetViewportScroll struct { - *meta - X int64 - Y int64 + *meta + X int64 + Y int64 } func (msg *SetViewportScroll) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 6 - p := 1 - p = WriteInt(msg.X, buf, p) - p = WriteInt(msg.Y, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 6 + p := 1 + p = WriteInt(msg.X, buf, p) + p = WriteInt(msg.Y, buf, p) + return buf[:p] } type CreateDocument struct { - *meta + *meta } func (msg *CreateDocument) Encode() []byte { - buf := make([]byte, 1) - buf[0] = 7 - p := 1 + buf := 
make([]byte, 1) + buf[0] = 7 + p := 1 - return buf[:p] + return buf[:p] } type CreateElementNode struct { - *meta - ID uint64 - ParentID uint64 - index uint64 - Tag string - SVG bool + *meta + ID uint64 + ParentID uint64 + index uint64 + Tag string + SVG bool } func (msg *CreateElementNode) Encode() []byte { - buf := make([]byte, 51+len(msg.Tag)) - buf[0] = 8 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.ParentID, buf, p) - p = WriteUint(msg.index, buf, p) - p = WriteString(msg.Tag, buf, p) - p = WriteBoolean(msg.SVG, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Tag)) + buf[0] = 8 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.ParentID, buf, p) + p = WriteUint(msg.index, buf, p) + p = WriteString(msg.Tag, buf, p) + p = WriteBoolean(msg.SVG, buf, p) + return buf[:p] } type CreateTextNode struct { - *meta - ID uint64 - ParentID uint64 - Index uint64 + *meta + ID uint64 + ParentID uint64 + Index uint64 } func (msg *CreateTextNode) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 9 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.ParentID, buf, p) - p = WriteUint(msg.Index, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 9 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.ParentID, buf, p) + p = WriteUint(msg.Index, buf, p) + return buf[:p] } type MoveNode struct { - *meta - ID uint64 - ParentID uint64 - Index uint64 + *meta + ID uint64 + ParentID uint64 + Index uint64 } func (msg *MoveNode) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 10 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.ParentID, buf, p) - p = WriteUint(msg.Index, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 10 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.ParentID, buf, p) + p = WriteUint(msg.Index, buf, p) + return buf[:p] } type RemoveNode struct { - *meta - ID uint64 + *meta + ID uint64 } func (msg *RemoveNode) Encode() []byte { - buf := make([]byte, 11) - 
buf[0] = 11 - p := 1 - p = WriteUint(msg.ID, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 11 + p := 1 + p = WriteUint(msg.ID, buf, p) + return buf[:p] } type SetNodeAttribute struct { - *meta - ID uint64 - Name string - Value string + *meta + ID uint64 + Name string + Value string } func (msg *SetNodeAttribute) Encode() []byte { - buf := make([]byte, 31+len(msg.Name)+len(msg.Value)) - buf[0] = 12 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Name)+len(msg.Value)) + buf[0] = 12 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type RemoveNodeAttribute struct { - *meta - ID uint64 - Name string + *meta + ID uint64 + Name string } func (msg *RemoveNodeAttribute) Encode() []byte { - buf := make([]byte, 21+len(msg.Name)) - buf[0] = 13 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Name, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Name)) + buf[0] = 13 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Name, buf, p) + return buf[:p] } type SetNodeData struct { - *meta - ID uint64 - Data string + *meta + ID uint64 + Data string } func (msg *SetNodeData) Encode() []byte { - buf := make([]byte, 21+len(msg.Data)) - buf[0] = 14 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Data, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Data)) + buf[0] = 14 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Data, buf, p) + return buf[:p] } type SetCSSData struct { - *meta - ID uint64 - Data string + *meta + ID uint64 + Data string } func (msg *SetCSSData) Encode() []byte { - buf := make([]byte, 21+len(msg.Data)) - buf[0] = 15 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Data, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Data)) + buf[0] = 15 + p := 1 + p = 
WriteUint(msg.ID, buf, p) + p = WriteString(msg.Data, buf, p) + return buf[:p] } type SetNodeScroll struct { - *meta - ID uint64 - X int64 - Y int64 + *meta + ID uint64 + X int64 + Y int64 } func (msg *SetNodeScroll) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 16 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteInt(msg.X, buf, p) - p = WriteInt(msg.Y, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 16 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteInt(msg.X, buf, p) + p = WriteInt(msg.Y, buf, p) + return buf[:p] } type SetInputTarget struct { - *meta - ID uint64 - Label string + *meta + ID uint64 + Label string } func (msg *SetInputTarget) Encode() []byte { - buf := make([]byte, 21+len(msg.Label)) - buf[0] = 17 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Label, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Label)) + buf[0] = 17 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Label, buf, p) + return buf[:p] } type SetInputValue struct { - *meta - ID uint64 - Value string - Mask int64 + *meta + ID uint64 + Value string + Mask int64 } func (msg *SetInputValue) Encode() []byte { - buf := make([]byte, 31+len(msg.Value)) - buf[0] = 18 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Value, buf, p) - p = WriteInt(msg.Mask, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Value)) + buf[0] = 18 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Value, buf, p) + p = WriteInt(msg.Mask, buf, p) + return buf[:p] } type SetInputChecked struct { - *meta - ID uint64 - Checked bool + *meta + ID uint64 + Checked bool } func (msg *SetInputChecked) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 19 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteBoolean(msg.Checked, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 19 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteBoolean(msg.Checked, buf, p) + return buf[:p] } type MouseMove struct { - *meta - 
X uint64 - Y uint64 + *meta + X uint64 + Y uint64 } func (msg *MouseMove) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 20 - p := 1 - p = WriteUint(msg.X, buf, p) - p = WriteUint(msg.Y, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 20 + p := 1 + p = WriteUint(msg.X, buf, p) + p = WriteUint(msg.Y, buf, p) + return buf[:p] } type MouseClickDepricated struct { - *meta - ID uint64 - HesitationTime uint64 - Label string + *meta + ID uint64 + HesitationTime uint64 + Label string } func (msg *MouseClickDepricated) Encode() []byte { - buf := make([]byte, 31+len(msg.Label)) - buf[0] = 21 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.HesitationTime, buf, p) - p = WriteString(msg.Label, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Label)) + buf[0] = 21 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.HesitationTime, buf, p) + p = WriteString(msg.Label, buf, p) + return buf[:p] } type ConsoleLog struct { - *meta - Level string - Value string + *meta + Level string + Value string } func (msg *ConsoleLog) Encode() []byte { - buf := make([]byte, 21+len(msg.Level)+len(msg.Value)) - buf[0] = 22 - p := 1 - p = WriteString(msg.Level, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Level)+len(msg.Value)) + buf[0] = 22 + p := 1 + p = WriteString(msg.Level, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type PageLoadTiming struct { - *meta - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 - DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 + *meta + RequestStart uint64 + ResponseStart uint64 + ResponseEnd uint64 + DomContentLoadedEventStart uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 } func (msg *PageLoadTiming) Encode() []byte { - buf := 
make([]byte, 91) - buf[0] = 23 - p := 1 - p = WriteUint(msg.RequestStart, buf, p) - p = WriteUint(msg.ResponseStart, buf, p) - p = WriteUint(msg.ResponseEnd, buf, p) - p = WriteUint(msg.DomContentLoadedEventStart, buf, p) - p = WriteUint(msg.DomContentLoadedEventEnd, buf, p) - p = WriteUint(msg.LoadEventStart, buf, p) - p = WriteUint(msg.LoadEventEnd, buf, p) - p = WriteUint(msg.FirstPaint, buf, p) - p = WriteUint(msg.FirstContentfulPaint, buf, p) - return buf[:p] + buf := make([]byte, 91) + buf[0] = 23 + p := 1 + p = WriteUint(msg.RequestStart, buf, p) + p = WriteUint(msg.ResponseStart, buf, p) + p = WriteUint(msg.ResponseEnd, buf, p) + p = WriteUint(msg.DomContentLoadedEventStart, buf, p) + p = WriteUint(msg.DomContentLoadedEventEnd, buf, p) + p = WriteUint(msg.LoadEventStart, buf, p) + p = WriteUint(msg.LoadEventEnd, buf, p) + p = WriteUint(msg.FirstPaint, buf, p) + p = WriteUint(msg.FirstContentfulPaint, buf, p) + return buf[:p] } type PageRenderTiming struct { - *meta - SpeedIndex uint64 - VisuallyComplete uint64 - TimeToInteractive uint64 + *meta + SpeedIndex uint64 + VisuallyComplete uint64 + TimeToInteractive uint64 } func (msg *PageRenderTiming) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 24 - p := 1 - p = WriteUint(msg.SpeedIndex, buf, p) - p = WriteUint(msg.VisuallyComplete, buf, p) - p = WriteUint(msg.TimeToInteractive, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 24 + p := 1 + p = WriteUint(msg.SpeedIndex, buf, p) + p = WriteUint(msg.VisuallyComplete, buf, p) + p = WriteUint(msg.TimeToInteractive, buf, p) + return buf[:p] } type JSException struct { - *meta - Name string - Message string - Payload string + *meta + Name string + Message string + Payload string } func (msg *JSException) Encode() []byte { - buf := make([]byte, 31+len(msg.Name)+len(msg.Message)+len(msg.Payload)) - buf[0] = 25 - p := 1 - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Message, buf, p) - p = WriteString(msg.Payload, buf, p) - return 
buf[:p] + buf := make([]byte, 31+len(msg.Name)+len(msg.Message)+len(msg.Payload)) + buf[0] = 25 + p := 1 + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Message, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type RawErrorEvent struct { - *meta - Timestamp uint64 - Source string - Name string - Message string - Payload string + *meta + Timestamp uint64 + Source string + Name string + Message string + Payload string } func (msg *RawErrorEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Source)+len(msg.Name)+len(msg.Message)+len(msg.Payload)) - buf[0] = 26 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Source, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Message, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Source)+len(msg.Name)+len(msg.Message)+len(msg.Payload)) + buf[0] = 26 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Source, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Message, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type RawCustomEvent struct { - *meta - Name string - Payload string + *meta + Name string + Payload string } func (msg *RawCustomEvent) Encode() []byte { - buf := make([]byte, 21+len(msg.Name)+len(msg.Payload)) - buf[0] = 27 - p := 1 - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Name)+len(msg.Payload)) + buf[0] = 27 + p := 1 + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type UserID struct { - *meta - ID string + *meta + ID string } func (msg *UserID) Encode() []byte { - buf := make([]byte, 11+len(msg.ID)) - buf[0] = 28 - p := 1 - p = WriteString(msg.ID, buf, p) - return buf[:p] + buf := make([]byte, 11+len(msg.ID)) + buf[0] = 28 + p := 1 + p = WriteString(msg.ID, buf, p) + return buf[:p] } type UserAnonymousID struct { - 
*meta - ID string + *meta + ID string } func (msg *UserAnonymousID) Encode() []byte { - buf := make([]byte, 11+len(msg.ID)) - buf[0] = 29 - p := 1 - p = WriteString(msg.ID, buf, p) - return buf[:p] + buf := make([]byte, 11+len(msg.ID)) + buf[0] = 29 + p := 1 + p = WriteString(msg.ID, buf, p) + return buf[:p] } type Metadata struct { - *meta - Key string - Value string + *meta + Key string + Value string } func (msg *Metadata) Encode() []byte { - buf := make([]byte, 21+len(msg.Key)+len(msg.Value)) - buf[0] = 30 - p := 1 - p = WriteString(msg.Key, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Key)+len(msg.Value)) + buf[0] = 30 + p := 1 + p = WriteString(msg.Key, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type PageEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - URL string - Referrer string - Loaded bool - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 - DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 - SpeedIndex uint64 - VisuallyComplete uint64 - TimeToInteractive uint64 + *meta + MessageID uint64 + Timestamp uint64 + URL string + Referrer string + Loaded bool + RequestStart uint64 + ResponseStart uint64 + ResponseEnd uint64 + DomContentLoadedEventStart uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 + SpeedIndex uint64 + VisuallyComplete uint64 + TimeToInteractive uint64 } func (msg *PageEvent) Encode() []byte { - buf := make([]byte, 171+len(msg.URL)+len(msg.Referrer)) - buf[0] = 31 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Referrer, buf, p) - p = WriteBoolean(msg.Loaded, buf, p) - p = WriteUint(msg.RequestStart, buf, p) - p = WriteUint(msg.ResponseStart, buf, p) - 
p = WriteUint(msg.ResponseEnd, buf, p) - p = WriteUint(msg.DomContentLoadedEventStart, buf, p) - p = WriteUint(msg.DomContentLoadedEventEnd, buf, p) - p = WriteUint(msg.LoadEventStart, buf, p) - p = WriteUint(msg.LoadEventEnd, buf, p) - p = WriteUint(msg.FirstPaint, buf, p) - p = WriteUint(msg.FirstContentfulPaint, buf, p) - p = WriteUint(msg.SpeedIndex, buf, p) - p = WriteUint(msg.VisuallyComplete, buf, p) - p = WriteUint(msg.TimeToInteractive, buf, p) - return buf[:p] + buf := make([]byte, 171+len(msg.URL)+len(msg.Referrer)) + buf[0] = 31 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Referrer, buf, p) + p = WriteBoolean(msg.Loaded, buf, p) + p = WriteUint(msg.RequestStart, buf, p) + p = WriteUint(msg.ResponseStart, buf, p) + p = WriteUint(msg.ResponseEnd, buf, p) + p = WriteUint(msg.DomContentLoadedEventStart, buf, p) + p = WriteUint(msg.DomContentLoadedEventEnd, buf, p) + p = WriteUint(msg.LoadEventStart, buf, p) + p = WriteUint(msg.LoadEventEnd, buf, p) + p = WriteUint(msg.FirstPaint, buf, p) + p = WriteUint(msg.FirstContentfulPaint, buf, p) + p = WriteUint(msg.SpeedIndex, buf, p) + p = WriteUint(msg.VisuallyComplete, buf, p) + p = WriteUint(msg.TimeToInteractive, buf, p) + return buf[:p] } type InputEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Value string - ValueMasked bool - Label string + *meta + MessageID uint64 + Timestamp uint64 + Value string + ValueMasked bool + Label string } func (msg *InputEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Value)+len(msg.Label)) - buf[0] = 32 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Value, buf, p) - p = WriteBoolean(msg.ValueMasked, buf, p) - p = WriteString(msg.Label, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Value)+len(msg.Label)) + buf[0] = 32 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = 
WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Value, buf, p) + p = WriteBoolean(msg.ValueMasked, buf, p) + p = WriteString(msg.Label, buf, p) + return buf[:p] } type ClickEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - HesitationTime uint64 - Label string - Selector string + *meta + MessageID uint64 + Timestamp uint64 + HesitationTime uint64 + Label string + Selector string } func (msg *ClickEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Label)+len(msg.Selector)) - buf[0] = 33 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.HesitationTime, buf, p) - p = WriteString(msg.Label, buf, p) - p = WriteString(msg.Selector, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Label)+len(msg.Selector)) + buf[0] = 33 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.HesitationTime, buf, p) + p = WriteString(msg.Label, buf, p) + p = WriteString(msg.Selector, buf, p) + return buf[:p] } type ErrorEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Source string - Name string - Message string - Payload string + *meta + MessageID uint64 + Timestamp uint64 + Source string + Name string + Message string + Payload string } func (msg *ErrorEvent) Encode() []byte { - buf := make([]byte, 61+len(msg.Source)+len(msg.Name)+len(msg.Message)+len(msg.Payload)) - buf[0] = 34 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Source, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Message, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 61+len(msg.Source)+len(msg.Name)+len(msg.Message)+len(msg.Payload)) + buf[0] = 34 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Source, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Message, buf, p) + p = 
WriteString(msg.Payload, buf, p) + return buf[:p] } type ResourceEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 - EncodedBodySize uint64 - DecodedBodySize uint64 - URL string - Type string - Success bool - Method string - Status uint64 + *meta + MessageID uint64 + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 + EncodedBodySize uint64 + DecodedBodySize uint64 + URL string + Type string + Success bool + Method string + Status uint64 } func (msg *ResourceEvent) Encode() []byte { - buf := make([]byte, 121+len(msg.URL)+len(msg.Type)+len(msg.Method)) - buf[0] = 35 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteUint(msg.TTFB, buf, p) - p = WriteUint(msg.HeaderSize, buf, p) - p = WriteUint(msg.EncodedBodySize, buf, p) - p = WriteUint(msg.DecodedBodySize, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Type, buf, p) - p = WriteBoolean(msg.Success, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteUint(msg.Status, buf, p) - return buf[:p] + buf := make([]byte, 121+len(msg.URL)+len(msg.Type)+len(msg.Method)) + buf[0] = 35 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteUint(msg.TTFB, buf, p) + p = WriteUint(msg.HeaderSize, buf, p) + p = WriteUint(msg.EncodedBodySize, buf, p) + p = WriteUint(msg.DecodedBodySize, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Type, buf, p) + p = WriteBoolean(msg.Success, buf, p) + p = WriteString(msg.Method, buf, p) + p = WriteUint(msg.Status, buf, p) + return buf[:p] } type CustomEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Name string - Payload string + *meta + MessageID uint64 + Timestamp uint64 + Name string + Payload string } func (msg *CustomEvent) Encode() []byte { - buf := make([]byte, 
41+len(msg.Name)+len(msg.Payload)) - buf[0] = 36 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Name)+len(msg.Payload)) + buf[0] = 36 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type CSSInsertRule struct { - *meta - ID uint64 - Rule string - Index uint64 + *meta + ID uint64 + Rule string + Index uint64 } func (msg *CSSInsertRule) Encode() []byte { - buf := make([]byte, 31+len(msg.Rule)) - buf[0] = 37 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Rule, buf, p) - p = WriteUint(msg.Index, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Rule)) + buf[0] = 37 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Rule, buf, p) + p = WriteUint(msg.Index, buf, p) + return buf[:p] } type CSSDeleteRule struct { - *meta - ID uint64 - Index uint64 + *meta + ID uint64 + Index uint64 } func (msg *CSSDeleteRule) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 38 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.Index, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 38 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.Index, buf, p) + return buf[:p] } type Fetch struct { - *meta - Method string - URL string - Request string - Response string - Status uint64 - Timestamp uint64 - Duration uint64 + *meta + Method string + URL string + Request string + Response string + Status uint64 + Timestamp uint64 + Duration uint64 } func (msg *Fetch) Encode() []byte { - buf := make([]byte, 71+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) - buf[0] = 39 - p := 1 - p = WriteString(msg.Method, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Request, buf, p) - p = WriteString(msg.Response, 
buf, p) - p = WriteUint(msg.Status, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - return buf[:p] + buf := make([]byte, 71+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) + buf[0] = 39 + p := 1 + p = WriteString(msg.Method, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Request, buf, p) + p = WriteString(msg.Response, buf, p) + p = WriteUint(msg.Status, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + return buf[:p] } type Profiler struct { - *meta - Name string - Duration uint64 - Args string - Result string + *meta + Name string + Duration uint64 + Args string + Result string } func (msg *Profiler) Encode() []byte { - buf := make([]byte, 41+len(msg.Name)+len(msg.Args)+len(msg.Result)) - buf[0] = 40 - p := 1 - p = WriteString(msg.Name, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteString(msg.Args, buf, p) - p = WriteString(msg.Result, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Name)+len(msg.Args)+len(msg.Result)) + buf[0] = 40 + p := 1 + p = WriteString(msg.Name, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteString(msg.Args, buf, p) + p = WriteString(msg.Result, buf, p) + return buf[:p] } type OTable struct { - *meta - Key string - Value string + *meta + Key string + Value string } func (msg *OTable) Encode() []byte { - buf := make([]byte, 21+len(msg.Key)+len(msg.Value)) - buf[0] = 41 - p := 1 - p = WriteString(msg.Key, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Key)+len(msg.Value)) + buf[0] = 41 + p := 1 + p = WriteString(msg.Key, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type StateAction struct { - *meta - Type string + *meta + Type string } func (msg *StateAction) Encode() []byte { - buf := make([]byte, 11+len(msg.Type)) - buf[0] = 42 - p := 1 - p = WriteString(msg.Type, buf, p) - return buf[:p] + buf := make([]byte, 11+len(msg.Type)) + 
buf[0] = 42 + p := 1 + p = WriteString(msg.Type, buf, p) + return buf[:p] } type StateActionEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Type string + *meta + MessageID uint64 + Timestamp uint64 + Type string } func (msg *StateActionEvent) Encode() []byte { - buf := make([]byte, 31+len(msg.Type)) - buf[0] = 43 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Type, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Type)) + buf[0] = 43 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Type, buf, p) + return buf[:p] } type Redux struct { - *meta - Action string - State string - Duration uint64 + *meta + Action string + State string + Duration uint64 } func (msg *Redux) Encode() []byte { - buf := make([]byte, 31+len(msg.Action)+len(msg.State)) - buf[0] = 44 - p := 1 - p = WriteString(msg.Action, buf, p) - p = WriteString(msg.State, buf, p) - p = WriteUint(msg.Duration, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Action)+len(msg.State)) + buf[0] = 44 + p := 1 + p = WriteString(msg.Action, buf, p) + p = WriteString(msg.State, buf, p) + p = WriteUint(msg.Duration, buf, p) + return buf[:p] } type Vuex struct { - *meta - Mutation string - State string + *meta + Mutation string + State string } func (msg *Vuex) Encode() []byte { - buf := make([]byte, 21+len(msg.Mutation)+len(msg.State)) - buf[0] = 45 - p := 1 - p = WriteString(msg.Mutation, buf, p) - p = WriteString(msg.State, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Mutation)+len(msg.State)) + buf[0] = 45 + p := 1 + p = WriteString(msg.Mutation, buf, p) + p = WriteString(msg.State, buf, p) + return buf[:p] } type MobX struct { - *meta - Type string - Payload string + *meta + Type string + Payload string } func (msg *MobX) Encode() []byte { - buf := make([]byte, 21+len(msg.Type)+len(msg.Payload)) - buf[0] = 46 - p := 1 - p = WriteString(msg.Type, buf, 
p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Type)+len(msg.Payload)) + buf[0] = 46 + p := 1 + p = WriteString(msg.Type, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type NgRx struct { - *meta - Action string - State string - Duration uint64 + *meta + Action string + State string + Duration uint64 } func (msg *NgRx) Encode() []byte { - buf := make([]byte, 31+len(msg.Action)+len(msg.State)) - buf[0] = 47 - p := 1 - p = WriteString(msg.Action, buf, p) - p = WriteString(msg.State, buf, p) - p = WriteUint(msg.Duration, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Action)+len(msg.State)) + buf[0] = 47 + p := 1 + p = WriteString(msg.Action, buf, p) + p = WriteString(msg.State, buf, p) + p = WriteUint(msg.Duration, buf, p) + return buf[:p] } type GraphQL struct { - *meta - OperationKind string - OperationName string - Variables string - Response string + *meta + OperationKind string + OperationName string + Variables string + Response string } func (msg *GraphQL) Encode() []byte { - buf := make([]byte, 41+len(msg.OperationKind)+len(msg.OperationName)+len(msg.Variables)+len(msg.Response)) - buf[0] = 48 - p := 1 - p = WriteString(msg.OperationKind, buf, p) - p = WriteString(msg.OperationName, buf, p) - p = WriteString(msg.Variables, buf, p) - p = WriteString(msg.Response, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.OperationKind)+len(msg.OperationName)+len(msg.Variables)+len(msg.Response)) + buf[0] = 48 + p := 1 + p = WriteString(msg.OperationKind, buf, p) + p = WriteString(msg.OperationName, buf, p) + p = WriteString(msg.Variables, buf, p) + p = WriteString(msg.Response, buf, p) + return buf[:p] } type PerformanceTrack struct { - *meta - Frames int64 - Ticks int64 - TotalJSHeapSize uint64 - UsedJSHeapSize uint64 + *meta + Frames int64 + Ticks int64 + TotalJSHeapSize uint64 + UsedJSHeapSize uint64 } func (msg *PerformanceTrack) Encode() []byte { - buf := make([]byte, 41) - buf[0] = 
49 - p := 1 - p = WriteInt(msg.Frames, buf, p) - p = WriteInt(msg.Ticks, buf, p) - p = WriteUint(msg.TotalJSHeapSize, buf, p) - p = WriteUint(msg.UsedJSHeapSize, buf, p) - return buf[:p] + buf := make([]byte, 41) + buf[0] = 49 + p := 1 + p = WriteInt(msg.Frames, buf, p) + p = WriteInt(msg.Ticks, buf, p) + p = WriteUint(msg.TotalJSHeapSize, buf, p) + p = WriteUint(msg.UsedJSHeapSize, buf, p) + return buf[:p] } type GraphQLEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - OperationKind string - OperationName string - Variables string - Response string + *meta + MessageID uint64 + Timestamp uint64 + OperationKind string + OperationName string + Variables string + Response string } func (msg *GraphQLEvent) Encode() []byte { - buf := make([]byte, 61+len(msg.OperationKind)+len(msg.OperationName)+len(msg.Variables)+len(msg.Response)) - buf[0] = 50 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.OperationKind, buf, p) - p = WriteString(msg.OperationName, buf, p) - p = WriteString(msg.Variables, buf, p) - p = WriteString(msg.Response, buf, p) - return buf[:p] + buf := make([]byte, 61+len(msg.OperationKind)+len(msg.OperationName)+len(msg.Variables)+len(msg.Response)) + buf[0] = 50 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.OperationKind, buf, p) + p = WriteString(msg.OperationName, buf, p) + p = WriteString(msg.Variables, buf, p) + p = WriteString(msg.Response, buf, p) + return buf[:p] } type FetchEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Method string - URL string - Request string - Response string - Status uint64 - Duration uint64 + *meta + MessageID uint64 + Timestamp uint64 + Method string + URL string + Request string + Response string + Status uint64 + Duration uint64 } func (msg *FetchEvent) Encode() []byte { - buf := make([]byte, 81+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) - buf[0] = 
51 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Request, buf, p) - p = WriteString(msg.Response, buf, p) - p = WriteUint(msg.Status, buf, p) - p = WriteUint(msg.Duration, buf, p) - return buf[:p] + buf := make([]byte, 81+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) + buf[0] = 51 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Method, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Request, buf, p) + p = WriteString(msg.Response, buf, p) + p = WriteUint(msg.Status, buf, p) + p = WriteUint(msg.Duration, buf, p) + return buf[:p] } type DOMDrop struct { - *meta - Timestamp uint64 + *meta + Timestamp uint64 } func (msg *DOMDrop) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 52 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 52 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + return buf[:p] } type ResourceTiming struct { - *meta - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 - EncodedBodySize uint64 - DecodedBodySize uint64 - URL string - Initiator string + *meta + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 + EncodedBodySize uint64 + DecodedBodySize uint64 + URL string + Initiator string } func (msg *ResourceTiming) Encode() []byte { - buf := make([]byte, 81+len(msg.URL)+len(msg.Initiator)) - buf[0] = 53 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteUint(msg.TTFB, buf, p) - p = WriteUint(msg.HeaderSize, buf, p) - p = WriteUint(msg.EncodedBodySize, buf, p) - p = WriteUint(msg.DecodedBodySize, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Initiator, buf, p) - return buf[:p] + buf := make([]byte, 81+len(msg.URL)+len(msg.Initiator)) + buf[0] = 53 + p := 1 + p = 
WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteUint(msg.TTFB, buf, p) + p = WriteUint(msg.HeaderSize, buf, p) + p = WriteUint(msg.EncodedBodySize, buf, p) + p = WriteUint(msg.DecodedBodySize, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Initiator, buf, p) + return buf[:p] } type ConnectionInformation struct { - *meta - Downlink uint64 - Type string + *meta + Downlink uint64 + Type string } func (msg *ConnectionInformation) Encode() []byte { - buf := make([]byte, 21+len(msg.Type)) - buf[0] = 54 - p := 1 - p = WriteUint(msg.Downlink, buf, p) - p = WriteString(msg.Type, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Type)) + buf[0] = 54 + p := 1 + p = WriteUint(msg.Downlink, buf, p) + p = WriteString(msg.Type, buf, p) + return buf[:p] } type SetPageVisibility struct { - *meta - hidden bool + *meta + hidden bool } func (msg *SetPageVisibility) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 55 - p := 1 - p = WriteBoolean(msg.hidden, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 55 + p := 1 + p = WriteBoolean(msg.hidden, buf, p) + return buf[:p] } type PerformanceTrackAggr struct { - *meta - TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 - MinTotalJSHeapSize uint64 - AvgTotalJSHeapSize uint64 - MaxTotalJSHeapSize uint64 - MinUsedJSHeapSize uint64 - AvgUsedJSHeapSize uint64 - MaxUsedJSHeapSize uint64 + *meta + TimestampStart uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 + MinTotalJSHeapSize uint64 + AvgTotalJSHeapSize uint64 + MaxTotalJSHeapSize uint64 + MinUsedJSHeapSize uint64 + AvgUsedJSHeapSize uint64 + MaxUsedJSHeapSize uint64 } func (msg *PerformanceTrackAggr) Encode() []byte { - buf := make([]byte, 141) - buf[0] = 56 - p := 1 - p = WriteUint(msg.TimestampStart, buf, p) - p = WriteUint(msg.TimestampEnd, buf, 
p) - p = WriteUint(msg.MinFPS, buf, p) - p = WriteUint(msg.AvgFPS, buf, p) - p = WriteUint(msg.MaxFPS, buf, p) - p = WriteUint(msg.MinCPU, buf, p) - p = WriteUint(msg.AvgCPU, buf, p) - p = WriteUint(msg.MaxCPU, buf, p) - p = WriteUint(msg.MinTotalJSHeapSize, buf, p) - p = WriteUint(msg.AvgTotalJSHeapSize, buf, p) - p = WriteUint(msg.MaxTotalJSHeapSize, buf, p) - p = WriteUint(msg.MinUsedJSHeapSize, buf, p) - p = WriteUint(msg.AvgUsedJSHeapSize, buf, p) - p = WriteUint(msg.MaxUsedJSHeapSize, buf, p) - return buf[:p] + buf := make([]byte, 141) + buf[0] = 56 + p := 1 + p = WriteUint(msg.TimestampStart, buf, p) + p = WriteUint(msg.TimestampEnd, buf, p) + p = WriteUint(msg.MinFPS, buf, p) + p = WriteUint(msg.AvgFPS, buf, p) + p = WriteUint(msg.MaxFPS, buf, p) + p = WriteUint(msg.MinCPU, buf, p) + p = WriteUint(msg.AvgCPU, buf, p) + p = WriteUint(msg.MaxCPU, buf, p) + p = WriteUint(msg.MinTotalJSHeapSize, buf, p) + p = WriteUint(msg.AvgTotalJSHeapSize, buf, p) + p = WriteUint(msg.MaxTotalJSHeapSize, buf, p) + p = WriteUint(msg.MinUsedJSHeapSize, buf, p) + p = WriteUint(msg.AvgUsedJSHeapSize, buf, p) + p = WriteUint(msg.MaxUsedJSHeapSize, buf, p) + return buf[:p] } type LongTask struct { - *meta - Timestamp uint64 - Duration uint64 - Context uint64 - ContainerType uint64 - ContainerSrc string - ContainerId string - ContainerName string + *meta + Timestamp uint64 + Duration uint64 + Context uint64 + ContainerType uint64 + ContainerSrc string + ContainerId string + ContainerName string } func (msg *LongTask) Encode() []byte { - buf := make([]byte, 71+len(msg.ContainerSrc)+len(msg.ContainerId)+len(msg.ContainerName)) - buf[0] = 59 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteUint(msg.Context, buf, p) - p = WriteUint(msg.ContainerType, buf, p) - p = WriteString(msg.ContainerSrc, buf, p) - p = WriteString(msg.ContainerId, buf, p) - p = WriteString(msg.ContainerName, buf, p) - return buf[:p] + buf := make([]byte, 
71+len(msg.ContainerSrc)+len(msg.ContainerId)+len(msg.ContainerName)) + buf[0] = 59 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteUint(msg.Context, buf, p) + p = WriteUint(msg.ContainerType, buf, p) + p = WriteString(msg.ContainerSrc, buf, p) + p = WriteString(msg.ContainerId, buf, p) + p = WriteString(msg.ContainerName, buf, p) + return buf[:p] } type SetNodeAttributeURLBased struct { - *meta - ID uint64 - Name string - Value string - BaseURL string + *meta + ID uint64 + Name string + Value string + BaseURL string } func (msg *SetNodeAttributeURLBased) Encode() []byte { - buf := make([]byte, 41+len(msg.Name)+len(msg.Value)+len(msg.BaseURL)) - buf[0] = 60 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Value, buf, p) - p = WriteString(msg.BaseURL, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Name)+len(msg.Value)+len(msg.BaseURL)) + buf[0] = 60 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Value, buf, p) + p = WriteString(msg.BaseURL, buf, p) + return buf[:p] } type SetCSSDataURLBased struct { - *meta - ID uint64 - Data string - BaseURL string + *meta + ID uint64 + Data string + BaseURL string } func (msg *SetCSSDataURLBased) Encode() []byte { - buf := make([]byte, 31+len(msg.Data)+len(msg.BaseURL)) - buf[0] = 61 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Data, buf, p) - p = WriteString(msg.BaseURL, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Data)+len(msg.BaseURL)) + buf[0] = 61 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Data, buf, p) + p = WriteString(msg.BaseURL, buf, p) + return buf[:p] } type IssueEvent struct { - *meta - MessageID uint64 - Timestamp uint64 - Type string - ContextString string - Context string - Payload string + *meta + MessageID uint64 + Timestamp uint64 + Type string + ContextString string + Context string + Payload string } 
func (msg *IssueEvent) Encode() []byte { - buf := make([]byte, 61+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload)) - buf[0] = 62 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Type, buf, p) - p = WriteString(msg.ContextString, buf, p) - p = WriteString(msg.Context, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 61+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload)) + buf[0] = 62 + p := 1 + p = WriteUint(msg.MessageID, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Type, buf, p) + p = WriteString(msg.ContextString, buf, p) + p = WriteString(msg.Context, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type TechnicalInfo struct { - *meta - Type string - Value string + *meta + Type string + Value string } func (msg *TechnicalInfo) Encode() []byte { - buf := make([]byte, 21+len(msg.Type)+len(msg.Value)) - buf[0] = 63 - p := 1 - p = WriteString(msg.Type, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Type)+len(msg.Value)) + buf[0] = 63 + p := 1 + p = WriteString(msg.Type, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type CustomIssue struct { - *meta - Name string - Payload string + *meta + Name string + Payload string } func (msg *CustomIssue) Encode() []byte { - buf := make([]byte, 21+len(msg.Name)+len(msg.Payload)) - buf[0] = 64 - p := 1 - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 21+len(msg.Name)+len(msg.Payload)) + buf[0] = 64 + p := 1 + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type PageClose struct { - *meta + *meta } func (msg *PageClose) Encode() []byte { - buf := make([]byte, 1) - buf[0] = 65 - p := 1 + buf := make([]byte, 1) + buf[0] = 65 + p := 1 - return buf[:p] + return buf[:p] } type 
AssetCache struct { - *meta - URL string + *meta + URL string } func (msg *AssetCache) Encode() []byte { - buf := make([]byte, 11+len(msg.URL)) - buf[0] = 66 - p := 1 - p = WriteString(msg.URL, buf, p) - return buf[:p] + buf := make([]byte, 11+len(msg.URL)) + buf[0] = 66 + p := 1 + p = WriteString(msg.URL, buf, p) + return buf[:p] } type CSSInsertRuleURLBased struct { - *meta - ID uint64 - Rule string - Index uint64 - BaseURL string + *meta + ID uint64 + Rule string + Index uint64 + BaseURL string } func (msg *CSSInsertRuleURLBased) Encode() []byte { - buf := make([]byte, 41+len(msg.Rule)+len(msg.BaseURL)) - buf[0] = 67 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteString(msg.Rule, buf, p) - p = WriteUint(msg.Index, buf, p) - p = WriteString(msg.BaseURL, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Rule)+len(msg.BaseURL)) + buf[0] = 67 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteString(msg.Rule, buf, p) + p = WriteUint(msg.Index, buf, p) + p = WriteString(msg.BaseURL, buf, p) + return buf[:p] } type MouseClick struct { - *meta - ID uint64 - HesitationTime uint64 - Label string - Selector string + *meta + ID uint64 + HesitationTime uint64 + Label string + Selector string } func (msg *MouseClick) Encode() []byte { - buf := make([]byte, 41+len(msg.Label)+len(msg.Selector)) - buf[0] = 69 - p := 1 - p = WriteUint(msg.ID, buf, p) - p = WriteUint(msg.HesitationTime, buf, p) - p = WriteString(msg.Label, buf, p) - p = WriteString(msg.Selector, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Label)+len(msg.Selector)) + buf[0] = 69 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = WriteUint(msg.HesitationTime, buf, p) + p = WriteString(msg.Label, buf, p) + p = WriteString(msg.Selector, buf, p) + return buf[:p] } type CreateIFrameDocument struct { - *meta - FrameID uint64 - ID uint64 + *meta + FrameID uint64 + ID uint64 } func (msg *CreateIFrameDocument) Encode() []byte { - buf := make([]byte, 21) - buf[0] = 70 - p := 1 - p = 
WriteUint(msg.FrameID, buf, p) - p = WriteUint(msg.ID, buf, p) - return buf[:p] + buf := make([]byte, 21) + buf[0] = 70 + p := 1 + p = WriteUint(msg.FrameID, buf, p) + p = WriteUint(msg.ID, buf, p) + return buf[:p] } type IOSBatchMeta struct { - *meta - Timestamp uint64 - Length uint64 - FirstIndex uint64 + *meta + Timestamp uint64 + Length uint64 + FirstIndex uint64 } func (msg *IOSBatchMeta) Encode() []byte { - buf := make([]byte, 31) - buf[0] = 107 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteUint(msg.FirstIndex, buf, p) - return buf[:p] + buf := make([]byte, 31) + buf[0] = 107 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteUint(msg.FirstIndex, buf, p) + return buf[:p] } type IOSSessionStart struct { - *meta - Timestamp uint64 - ProjectID uint64 - TrackerVersion string - RevID string - UserUUID string - UserOS string - UserOSVersion string - UserDevice string - UserDeviceType string - UserCountry string + *meta + Timestamp uint64 + ProjectID uint64 + TrackerVersion string + RevID string + UserUUID string + UserOS string + UserOSVersion string + UserDevice string + UserDeviceType string + UserCountry string } func (msg *IOSSessionStart) Encode() []byte { - buf := make([]byte, 101+len(msg.TrackerVersion)+len(msg.RevID)+len(msg.UserUUID)+len(msg.UserOS)+len(msg.UserOSVersion)+len(msg.UserDevice)+len(msg.UserDeviceType)+len(msg.UserCountry)) - buf[0] = 90 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.ProjectID, buf, p) - p = WriteString(msg.TrackerVersion, buf, p) - p = WriteString(msg.RevID, buf, p) - p = WriteString(msg.UserUUID, buf, p) - p = WriteString(msg.UserOS, buf, p) - p = WriteString(msg.UserOSVersion, buf, p) - p = WriteString(msg.UserDevice, buf, p) - p = WriteString(msg.UserDeviceType, buf, p) - p = WriteString(msg.UserCountry, buf, p) - return buf[:p] + buf := make([]byte, 
101+len(msg.TrackerVersion)+len(msg.RevID)+len(msg.UserUUID)+len(msg.UserOS)+len(msg.UserOSVersion)+len(msg.UserDevice)+len(msg.UserDeviceType)+len(msg.UserCountry)) + buf[0] = 90 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.ProjectID, buf, p) + p = WriteString(msg.TrackerVersion, buf, p) + p = WriteString(msg.RevID, buf, p) + p = WriteString(msg.UserUUID, buf, p) + p = WriteString(msg.UserOS, buf, p) + p = WriteString(msg.UserOSVersion, buf, p) + p = WriteString(msg.UserDevice, buf, p) + p = WriteString(msg.UserDeviceType, buf, p) + p = WriteString(msg.UserCountry, buf, p) + return buf[:p] } type IOSSessionEnd struct { - *meta - Timestamp uint64 + *meta + Timestamp uint64 } func (msg *IOSSessionEnd) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 91 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] + buf := make([]byte, 11) + buf[0] = 91 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + return buf[:p] } type IOSMetadata struct { - *meta - Timestamp uint64 - Length uint64 - Key string - Value string + *meta + Timestamp uint64 + Length uint64 + Key string + Value string } func (msg *IOSMetadata) Encode() []byte { - buf := make([]byte, 41+len(msg.Key)+len(msg.Value)) - buf[0] = 92 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Key, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Key)+len(msg.Value)) + buf[0] = 92 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Key, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type IOSCustomEvent struct { - *meta - Timestamp uint64 - Length uint64 - Name string - Payload string + *meta + Timestamp uint64 + Length uint64 + Name string + Payload string } func (msg *IOSCustomEvent) Encode() []byte { - buf := make([]byte, 41+len(msg.Name)+len(msg.Payload)) - buf[0] = 93 - p := 1 - p = WriteUint(msg.Timestamp, buf, 
p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Name)+len(msg.Payload)) + buf[0] = 93 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } type IOSUserID struct { - *meta - Timestamp uint64 - Length uint64 - Value string + *meta + Timestamp uint64 + Length uint64 + Value string } func (msg *IOSUserID) Encode() []byte { - buf := make([]byte, 31+len(msg.Value)) - buf[0] = 94 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Value)) + buf[0] = 94 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type IOSUserAnonymousID struct { - *meta - Timestamp uint64 - Length uint64 - Value string + *meta + Timestamp uint64 + Length uint64 + Value string } func (msg *IOSUserAnonymousID) Encode() []byte { - buf := make([]byte, 31+len(msg.Value)) - buf[0] = 95 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Value)) + buf[0] = 95 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Value, buf, p) + return buf[:p] } type IOSScreenChanges struct { - *meta - Timestamp uint64 - Length uint64 - X uint64 - Y uint64 - Width uint64 - Height uint64 + *meta + Timestamp uint64 + Length uint64 + X uint64 + Y uint64 + Width uint64 + Height uint64 } func (msg *IOSScreenChanges) Encode() []byte { - buf := make([]byte, 61) - buf[0] = 96 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteUint(msg.X, buf, p) - p = WriteUint(msg.Y, buf, p) - p 
= WriteUint(msg.Width, buf, p) - p = WriteUint(msg.Height, buf, p) - return buf[:p] + buf := make([]byte, 61) + buf[0] = 96 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteUint(msg.X, buf, p) + p = WriteUint(msg.Y, buf, p) + p = WriteUint(msg.Width, buf, p) + p = WriteUint(msg.Height, buf, p) + return buf[:p] } type IOSCrash struct { - *meta - Timestamp uint64 - Length uint64 - Name string - Reason string - Stacktrace string + *meta + Timestamp uint64 + Length uint64 + Name string + Reason string + Stacktrace string } func (msg *IOSCrash) Encode() []byte { - buf := make([]byte, 51+len(msg.Name)+len(msg.Reason)+len(msg.Stacktrace)) - buf[0] = 97 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Reason, buf, p) - p = WriteString(msg.Stacktrace, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Name)+len(msg.Reason)+len(msg.Stacktrace)) + buf[0] = 97 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteString(msg.Reason, buf, p) + p = WriteString(msg.Stacktrace, buf, p) + return buf[:p] } type IOSScreenEnter struct { - *meta - Timestamp uint64 - Length uint64 - Title string - ViewName string + *meta + Timestamp uint64 + Length uint64 + Title string + ViewName string } func (msg *IOSScreenEnter) Encode() []byte { - buf := make([]byte, 41+len(msg.Title)+len(msg.ViewName)) - buf[0] = 98 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Title, buf, p) - p = WriteString(msg.ViewName, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Title)+len(msg.ViewName)) + buf[0] = 98 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Title, buf, p) + p = WriteString(msg.ViewName, buf, p) + return buf[:p] } type IOSScreenLeave struct { - *meta - 
Timestamp uint64 - Length uint64 - Title string - ViewName string + *meta + Timestamp uint64 + Length uint64 + Title string + ViewName string } func (msg *IOSScreenLeave) Encode() []byte { - buf := make([]byte, 41+len(msg.Title)+len(msg.ViewName)) - buf[0] = 99 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Title, buf, p) - p = WriteString(msg.ViewName, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Title)+len(msg.ViewName)) + buf[0] = 99 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Title, buf, p) + p = WriteString(msg.ViewName, buf, p) + return buf[:p] } type IOSClickEvent struct { - *meta - Timestamp uint64 - Length uint64 - Label string - X uint64 - Y uint64 + *meta + Timestamp uint64 + Length uint64 + Label string + X uint64 + Y uint64 } func (msg *IOSClickEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Label)) - buf[0] = 100 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Label, buf, p) - p = WriteUint(msg.X, buf, p) - p = WriteUint(msg.Y, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Label)) + buf[0] = 100 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Label, buf, p) + p = WriteUint(msg.X, buf, p) + p = WriteUint(msg.Y, buf, p) + return buf[:p] } type IOSInputEvent struct { - *meta - Timestamp uint64 - Length uint64 - Value string - ValueMasked bool - Label string + *meta + Timestamp uint64 + Length uint64 + Value string + ValueMasked bool + Label string } func (msg *IOSInputEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Value)+len(msg.Label)) - buf[0] = 101 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Value, buf, p) - p = WriteBoolean(msg.ValueMasked, buf, p) - p = WriteString(msg.Label, buf, p) - return buf[:p] + buf := 
make([]byte, 51+len(msg.Value)+len(msg.Label)) + buf[0] = 101 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Value, buf, p) + p = WriteBoolean(msg.ValueMasked, buf, p) + p = WriteString(msg.Label, buf, p) + return buf[:p] } type IOSPerformanceEvent struct { - *meta - Timestamp uint64 - Length uint64 - Name string - Value uint64 + *meta + Timestamp uint64 + Length uint64 + Name string + Value uint64 } func (msg *IOSPerformanceEvent) Encode() []byte { - buf := make([]byte, 41+len(msg.Name)) - buf[0] = 102 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteUint(msg.Value, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Name)) + buf[0] = 102 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Name, buf, p) + p = WriteUint(msg.Value, buf, p) + return buf[:p] } type IOSLog struct { - *meta - Timestamp uint64 - Length uint64 - Severity string - Content string + *meta + Timestamp uint64 + Length uint64 + Severity string + Content string } func (msg *IOSLog) Encode() []byte { - buf := make([]byte, 41+len(msg.Severity)+len(msg.Content)) - buf[0] = 103 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Severity, buf, p) - p = WriteString(msg.Content, buf, p) - return buf[:p] + buf := make([]byte, 41+len(msg.Severity)+len(msg.Content)) + buf[0] = 103 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Severity, buf, p) + p = WriteString(msg.Content, buf, p) + return buf[:p] } type IOSInternalError struct { - *meta - Timestamp uint64 - Length uint64 - Content string + *meta + Timestamp uint64 + Length uint64 + Content string } func (msg *IOSInternalError) Encode() []byte { - buf := make([]byte, 31+len(msg.Content)) - buf[0] = 104 - p := 1 - p = WriteUint(msg.Timestamp, 
buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteString(msg.Content, buf, p) - return buf[:p] + buf := make([]byte, 31+len(msg.Content)) + buf[0] = 104 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteString(msg.Content, buf, p) + return buf[:p] } type IOSNetworkCall struct { - *meta - Timestamp uint64 - Length uint64 - Duration uint64 - Headers string - Body string - URL string - Success bool - Method string - Status uint64 + *meta + Timestamp uint64 + Length uint64 + Duration uint64 + Headers string + Body string + URL string + Success bool + Method string + Status uint64 } func (msg *IOSNetworkCall) Encode() []byte { - buf := make([]byte, 91+len(msg.Headers)+len(msg.Body)+len(msg.URL)+len(msg.Method)) - buf[0] = 105 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Length, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteString(msg.Headers, buf, p) - p = WriteString(msg.Body, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteBoolean(msg.Success, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteUint(msg.Status, buf, p) - return buf[:p] + buf := make([]byte, 91+len(msg.Headers)+len(msg.Body)+len(msg.URL)+len(msg.Method)) + buf[0] = 105 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Length, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteString(msg.Headers, buf, p) + p = WriteString(msg.Body, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteBoolean(msg.Success, buf, p) + p = WriteString(msg.Method, buf, p) + p = WriteUint(msg.Status, buf, p) + return buf[:p] } type IOSPerformanceAggregated struct { - *meta - TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 - MinMemory uint64 - AvgMemory uint64 - MaxMemory uint64 - MinBattery uint64 - AvgBattery uint64 - MaxBattery uint64 + *meta + TimestampStart uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS 
uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 + MinMemory uint64 + AvgMemory uint64 + MaxMemory uint64 + MinBattery uint64 + AvgBattery uint64 + MaxBattery uint64 } func (msg *IOSPerformanceAggregated) Encode() []byte { - buf := make([]byte, 141) - buf[0] = 110 - p := 1 - p = WriteUint(msg.TimestampStart, buf, p) - p = WriteUint(msg.TimestampEnd, buf, p) - p = WriteUint(msg.MinFPS, buf, p) - p = WriteUint(msg.AvgFPS, buf, p) - p = WriteUint(msg.MaxFPS, buf, p) - p = WriteUint(msg.MinCPU, buf, p) - p = WriteUint(msg.AvgCPU, buf, p) - p = WriteUint(msg.MaxCPU, buf, p) - p = WriteUint(msg.MinMemory, buf, p) - p = WriteUint(msg.AvgMemory, buf, p) - p = WriteUint(msg.MaxMemory, buf, p) - p = WriteUint(msg.MinBattery, buf, p) - p = WriteUint(msg.AvgBattery, buf, p) - p = WriteUint(msg.MaxBattery, buf, p) - return buf[:p] + buf := make([]byte, 141) + buf[0] = 110 + p := 1 + p = WriteUint(msg.TimestampStart, buf, p) + p = WriteUint(msg.TimestampEnd, buf, p) + p = WriteUint(msg.MinFPS, buf, p) + p = WriteUint(msg.AvgFPS, buf, p) + p = WriteUint(msg.MaxFPS, buf, p) + p = WriteUint(msg.MinCPU, buf, p) + p = WriteUint(msg.AvgCPU, buf, p) + p = WriteUint(msg.MaxCPU, buf, p) + p = WriteUint(msg.MinMemory, buf, p) + p = WriteUint(msg.AvgMemory, buf, p) + p = WriteUint(msg.MaxMemory, buf, p) + p = WriteUint(msg.MinBattery, buf, p) + p = WriteUint(msg.AvgBattery, buf, p) + p = WriteUint(msg.MaxBattery, buf, p) + return buf[:p] } type IOSIssueEvent struct { - *meta - Timestamp uint64 - Type string - ContextString string - Context string - Payload string + *meta + Timestamp uint64 + Type string + ContextString string + Context string + Payload string } func (msg *IOSIssueEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload)) - buf[0] = 111 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Type, buf, p) - p = WriteString(msg.ContextString, buf, p) - p = 
WriteString(msg.Context, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] + buf := make([]byte, 51+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload)) + buf[0] = 111 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteString(msg.Type, buf, p) + p = WriteString(msg.ContextString, buf, p) + p = WriteString(msg.Context, buf, p) + p = WriteString(msg.Payload, buf, p) + return buf[:p] } diff --git a/backend/pkg/messages/performance/performance.go b/backend/pkg/messages/performance/performance.go index 4cfb28045..27e28215e 100644 --- a/backend/pkg/messages/performance/performance.go +++ b/backend/pkg/messages/performance/performance.go @@ -4,7 +4,6 @@ import ( "math" ) - func TimeDiff(t1 uint64, t2 uint64) uint64 { if t1 < t2 { return 0 @@ -30,4 +29,4 @@ func CPURateFromTickRate(tickRate float64) uint64 { func CPURate(ticks int64, dt uint64) uint64 { return CPURateFromTickRate(TickRate(ticks, dt)) -} \ No newline at end of file +} diff --git a/backend/pkg/messages/primitives.go b/backend/pkg/messages/primitives.go index 70952eeab..8687ef413 100644 --- a/backend/pkg/messages/primitives.go +++ b/backend/pkg/messages/primitives.go @@ -1,9 +1,9 @@ package messages import ( + "encoding/json" "errors" "io" - "encoding/json" "log" ) @@ -37,7 +37,7 @@ func ReadData(reader io.Reader) ([]byte, error) { } return p, nil } - + func ReadUint(reader io.Reader) (uint64, error) { var x uint64 var s uint @@ -152,4 +152,4 @@ func WriteJson(v interface{}, buf []byte, p int) int { return WriteString("null", buf, p) } return WriteData(data, buf, p) -} \ No newline at end of file +} diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index 31512c9c8..60f9c17a8 100644 --- a/backend/pkg/messages/read-message.go +++ b/backend/pkg/messages/read-message.go @@ -2,1430 +2,1430 @@ package messages import ( - "fmt" - "io" + "fmt" + "io" ) func ReadMessage(reader io.Reader) (Message, error) { - t, err := ReadUint(reader) - if 
err != nil { - return nil, err - } - switch t { - - case 80: - msg := &BatchMeta{meta: &meta{TypeID: 80}} - if msg.PageNo, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, nil - - case 0: - msg := &Timestamp{meta: &meta{TypeID: 0}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 1: - msg := &SessionStart{meta: &meta{TypeID: 1}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ProjectID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TrackerVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.RevID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserUUID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserAgent, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOS, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOSVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserBrowser, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserBrowserVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDevice, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceType, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceMemorySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UserDeviceHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UserCountry, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 2: - msg := &SessionDisconnect{meta: &meta{TypeID: 2}} - if msg.Timestamp, 
err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 3: - msg := &SessionEnd{meta: &meta{TypeID: 3}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 4: - msg := &SetPageLocation{meta: &meta{TypeID: 4}} - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Referrer, err = ReadString(reader); err != nil { - return nil, err - } - if msg.NavigationStart, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 5: - msg := &SetViewportSize{meta: &meta{TypeID: 5}} - if msg.Width, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Height, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 6: - msg := &SetViewportScroll{meta: &meta{TypeID: 6}} - if msg.X, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, nil - - case 7: - msg := &CreateDocument{meta: &meta{TypeID: 7}} - - return msg, nil - - case 8: - msg := &CreateElementNode{meta: &meta{TypeID: 8}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.index, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Tag, err = ReadString(reader); err != nil { - return nil, err - } - if msg.SVG, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, nil - - case 9: - msg := &CreateTextNode{meta: &meta{TypeID: 9}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 10: - msg := &MoveNode{meta: &meta{TypeID: 10}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err 
= ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 11: - msg := &RemoveNode{meta: &meta{TypeID: 11}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 12: - msg := &SetNodeAttribute{meta: &meta{TypeID: 12}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 13: - msg := &RemoveNodeAttribute{meta: &meta{TypeID: 13}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 14: - msg := &SetNodeData{meta: &meta{TypeID: 14}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 15: - msg := &SetCSSData{meta: &meta{TypeID: 15}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 16: - msg := &SetNodeScroll{meta: &meta{TypeID: 16}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.X, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, nil - - case 17: - msg := &SetInputTarget{meta: &meta{TypeID: 17}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 18: - msg := &SetInputValue{meta: &meta{TypeID: 18}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil 
{ - return nil, err - } - if msg.Mask, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, nil - - case 19: - msg := &SetInputChecked{meta: &meta{TypeID: 19}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Checked, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, nil - - case 20: - msg := &MouseMove{meta: &meta{TypeID: 20}} - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 21: - msg := &MouseClickDepricated{meta: &meta{TypeID: 21}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 22: - msg := &ConsoleLog{meta: &meta{TypeID: 22}} - if msg.Level, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 23: - msg := &PageLoadTiming{meta: &meta{TypeID: 23}} - if msg.RequestStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 24: - msg := 
&PageRenderTiming{meta: &meta{TypeID: 24}} - if msg.SpeedIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 25: - msg := &JSException{meta: &meta{TypeID: 25}} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 26: - msg := &RawErrorEvent{meta: &meta{TypeID: 26}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Source, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 27: - msg := &RawCustomEvent{meta: &meta{TypeID: 27}} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 28: - msg := &UserID{meta: &meta{TypeID: 28}} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 29: - msg := &UserAnonymousID{meta: &meta{TypeID: 29}} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 30: - msg := &Metadata{meta: &meta{TypeID: 30}} - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 31: - msg := &PageEvent{meta: &meta{TypeID: 31}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } 
- if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Referrer, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Loaded, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.RequestStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.SpeedIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 32: - msg := &InputEvent{meta: &meta{TypeID: 32}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 33: - msg := &ClickEvent{meta: &meta{TypeID: 33}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = 
ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Selector, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 34: - msg := &ErrorEvent{meta: &meta{TypeID: 34}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Source, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 35: - msg := &ResourceEvent{meta: &meta{TypeID: 35}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TTFB, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HeaderSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Success, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 36: - msg := &CustomEvent{meta: &meta{TypeID: 36}} - if msg.MessageID, err = ReadUint(reader); err != nil 
{ - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 37: - msg := &CSSInsertRule{meta: &meta{TypeID: 37}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 38: - msg := &CSSDeleteRule{meta: &meta{TypeID: 38}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 39: - msg := &Fetch{meta: &meta{TypeID: 39}} - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Request, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 40: - msg := &Profiler{meta: &meta{TypeID: 40}} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Args, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Result, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 41: - msg := &OTable{meta: &meta{TypeID: 41}} - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return 
nil, err - } - return msg, nil - - case 42: - msg := &StateAction{meta: &meta{TypeID: 42}} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 43: - msg := &StateActionEvent{meta: &meta{TypeID: 43}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 44: - msg := &Redux{meta: &meta{TypeID: 44}} - if msg.Action, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 45: - msg := &Vuex{meta: &meta{TypeID: 45}} - if msg.Mutation, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 46: - msg := &MobX{meta: &meta{TypeID: 46}} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 47: - msg := &NgRx{meta: &meta{TypeID: 47}} - if msg.Action, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 48: - msg := &GraphQL{meta: &meta{TypeID: 48}} - if msg.OperationKind, err = ReadString(reader); err != nil { - return nil, err - } - if msg.OperationName, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Variables, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 49: - msg := 
&PerformanceTrack{meta: &meta{TypeID: 49}} - if msg.Frames, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Ticks, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.TotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 50: - msg := &GraphQLEvent{meta: &meta{TypeID: 50}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.OperationKind, err = ReadString(reader); err != nil { - return nil, err - } - if msg.OperationName, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Variables, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 51: - msg := &FetchEvent{meta: &meta{TypeID: 51}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Request, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 52: - msg := &DOMDrop{meta: &meta{TypeID: 52}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 53: - msg := &ResourceTiming{meta: &meta{TypeID: 53}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { 
- return nil, err - } - if msg.TTFB, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HeaderSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Initiator, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 54: - msg := &ConnectionInformation{meta: &meta{TypeID: 54}} - if msg.Downlink, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 55: - msg := &SetPageVisibility{meta: &meta{TypeID: 55}} - if msg.hidden, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, nil - - case 56: - msg := &PerformanceTrackAggr{meta: &meta{TypeID: 56}} - if msg.TimestampStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinUsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgUsedJSHeapSize, err 
= ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxUsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 59: - msg := &LongTask{meta: &meta{TypeID: 59}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ContainerType, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ContainerSrc, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContainerId, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContainerName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 60: - msg := &SetNodeAttributeURLBased{meta: &meta{TypeID: 60}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 61: - msg := &SetCSSDataURLBased{meta: &meta{TypeID: 61}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 62: - msg := &IssueEvent{meta: &meta{TypeID: 62}} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContextString, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = 
ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 63: - msg := &TechnicalInfo{meta: &meta{TypeID: 63}} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 64: - msg := &CustomIssue{meta: &meta{TypeID: 64}} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 65: - msg := &PageClose{meta: &meta{TypeID: 65}} - - return msg, nil - - case 66: - msg := &AssetCache{meta: &meta{TypeID: 66}} - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 67: - msg := &CSSInsertRuleURLBased{meta: &meta{TypeID: 67}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 69: - msg := &MouseClick{meta: &meta{TypeID: 69}} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Selector, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 70: - msg := &CreateIFrameDocument{meta: &meta{TypeID: 70}} - if msg.FrameID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 107: - msg := &IOSBatchMeta{meta: &meta{TypeID: 107}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err 
- } - if msg.FirstIndex, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 90: - msg := &IOSSessionStart{meta: &meta{TypeID: 90}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ProjectID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TrackerVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.RevID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserUUID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOS, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOSVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDevice, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceType, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserCountry, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 91: - msg := &IOSSessionEnd{meta: &meta{TypeID: 91}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 92: - msg := &IOSMetadata{meta: &meta{TypeID: 92}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 93: - msg := &IOSCustomEvent{meta: &meta{TypeID: 93}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 94: - msg := &IOSUserID{meta: &meta{TypeID: 94}} - if 
msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 95: - msg := &IOSUserAnonymousID{meta: &meta{TypeID: 95}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 96: - msg := &IOSScreenChanges{meta: &meta{TypeID: 96}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Width, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Height, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 97: - msg := &IOSCrash{meta: &meta{TypeID: 97}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Reason, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Stacktrace, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 98: - msg := &IOSScreenEnter{meta: &meta{TypeID: 98}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Title, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ViewName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 99: - msg := &IOSScreenLeave{meta: 
&meta{TypeID: 99}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Title, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ViewName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 100: - msg := &IOSClickEvent{meta: &meta{TypeID: 100}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 101: - msg := &IOSInputEvent{meta: &meta{TypeID: 101}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 102: - msg := &IOSPerformanceEvent{meta: &meta{TypeID: 102}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 103: - msg := &IOSLog{meta: &meta{TypeID: 103}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Severity, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Content, err = 
ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 104: - msg := &IOSInternalError{meta: &meta{TypeID: 104}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Content, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - case 105: - msg := &IOSNetworkCall{meta: &meta{TypeID: 105}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Headers, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Body, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Success, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 110: - msg := &IOSPerformanceAggregated{meta: &meta{TypeID: 110}} - if msg.TimestampStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinMemory, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgMemory, err = ReadUint(reader); err != 
nil { - return nil, err - } - if msg.MaxMemory, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinBattery, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgBattery, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxBattery, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, nil - - case 111: - msg := &IOSIssueEvent{meta: &meta{TypeID: 111}} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContextString, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, nil - - } - return nil, fmt.Errorf("Unknown message code: %v", t) + t, err := ReadUint(reader) + if err != nil { + return nil, err + } + switch t { + + case 80: + msg := &BatchMeta{meta: &meta{TypeID: 80}} + if msg.PageNo, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, nil + + case 0: + msg := &Timestamp{meta: &meta{TypeID: 0}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 1: + msg := &SessionStart{meta: &meta{TypeID: 1}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ProjectID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TrackerVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.RevID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserUUID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserAgent, err = ReadString(reader); err != nil { + return nil, err + } + if 
msg.UserOS, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOSVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserBrowser, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserBrowserVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDevice, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceType, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceMemorySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UserDeviceHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UserCountry, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 2: + msg := &SessionDisconnect{meta: &meta{TypeID: 2}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 3: + msg := &SessionEnd{meta: &meta{TypeID: 3}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 4: + msg := &SetPageLocation{meta: &meta{TypeID: 4}} + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Referrer, err = ReadString(reader); err != nil { + return nil, err + } + if msg.NavigationStart, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 5: + msg := &SetViewportSize{meta: &meta{TypeID: 5}} + if msg.Width, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Height, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 6: + msg := &SetViewportScroll{meta: &meta{TypeID: 6}} + if msg.X, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, nil + + case 7: + msg := &CreateDocument{meta: 
&meta{TypeID: 7}} + + return msg, nil + + case 8: + msg := &CreateElementNode{meta: &meta{TypeID: 8}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.index, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Tag, err = ReadString(reader); err != nil { + return nil, err + } + if msg.SVG, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, nil + + case 9: + msg := &CreateTextNode{meta: &meta{TypeID: 9}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 10: + msg := &MoveNode{meta: &meta{TypeID: 10}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 11: + msg := &RemoveNode{meta: &meta{TypeID: 11}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 12: + msg := &SetNodeAttribute{meta: &meta{TypeID: 12}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 13: + msg := &RemoveNodeAttribute{meta: &meta{TypeID: 13}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 14: + msg := &SetNodeData{meta: &meta{TypeID: 14}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } 
+ return msg, nil + + case 15: + msg := &SetCSSData{meta: &meta{TypeID: 15}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 16: + msg := &SetNodeScroll{meta: &meta{TypeID: 16}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.X, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, nil + + case 17: + msg := &SetInputTarget{meta: &meta{TypeID: 17}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 18: + msg := &SetInputValue{meta: &meta{TypeID: 18}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Mask, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, nil + + case 19: + msg := &SetInputChecked{meta: &meta{TypeID: 19}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Checked, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, nil + + case 20: + msg := &MouseMove{meta: &meta{TypeID: 20}} + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 21: + msg := &MouseClickDepricated{meta: &meta{TypeID: 21}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 22: + msg := &ConsoleLog{meta: &meta{TypeID: 22}} + if msg.Level, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = 
ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 23: + msg := &PageLoadTiming{meta: &meta{TypeID: 23}} + if msg.RequestStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstPaint, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 24: + msg := &PageRenderTiming{meta: &meta{TypeID: 24}} + if msg.SpeedIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.VisuallyComplete, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimeToInteractive, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 25: + msg := &JSException{meta: &meta{TypeID: 25}} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 26: + msg := &RawErrorEvent{meta: &meta{TypeID: 26}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Source, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != 
nil { + return nil, err + } + return msg, nil + + case 27: + msg := &RawCustomEvent{meta: &meta{TypeID: 27}} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 28: + msg := &UserID{meta: &meta{TypeID: 28}} + if msg.ID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 29: + msg := &UserAnonymousID{meta: &meta{TypeID: 29}} + if msg.ID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 30: + msg := &Metadata{meta: &meta{TypeID: 30}} + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 31: + msg := &PageEvent{meta: &meta{TypeID: 31}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Referrer, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Loaded, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.RequestStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstPaint, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstContentfulPaint, err = ReadUint(reader); 
err != nil { + return nil, err + } + if msg.SpeedIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.VisuallyComplete, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimeToInteractive, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 32: + msg := &InputEvent{meta: &meta{TypeID: 32}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ValueMasked, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 33: + msg := &ClickEvent{meta: &meta{TypeID: 33}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Selector, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 34: + msg := &ErrorEvent{meta: &meta{TypeID: 34}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Source, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 35: + msg := &ResourceEvent{meta: &meta{TypeID: 35}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { 
+ return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TTFB, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HeaderSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Success, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 36: + msg := &CustomEvent{meta: &meta{TypeID: 36}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 37: + msg := &CSSInsertRule{meta: &meta{TypeID: 37}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 38: + msg := &CSSDeleteRule{meta: &meta{TypeID: 38}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 39: + msg := &Fetch{meta: &meta{TypeID: 39}} + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Request, err = ReadString(reader); 
err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 40: + msg := &Profiler{meta: &meta{TypeID: 40}} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Args, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Result, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 41: + msg := &OTable{meta: &meta{TypeID: 41}} + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 42: + msg := &StateAction{meta: &meta{TypeID: 42}} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 43: + msg := &StateActionEvent{meta: &meta{TypeID: 43}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 44: + msg := &Redux{meta: &meta{TypeID: 44}} + if msg.Action, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 45: + msg := &Vuex{meta: &meta{TypeID: 45}} + if msg.Mutation, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 46: + msg := 
&MobX{meta: &meta{TypeID: 46}} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 47: + msg := &NgRx{meta: &meta{TypeID: 47}} + if msg.Action, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 48: + msg := &GraphQL{meta: &meta{TypeID: 48}} + if msg.OperationKind, err = ReadString(reader); err != nil { + return nil, err + } + if msg.OperationName, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Variables, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 49: + msg := &PerformanceTrack{meta: &meta{TypeID: 49}} + if msg.Frames, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Ticks, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.TotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 50: + msg := &GraphQLEvent{meta: &meta{TypeID: 50}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.OperationKind, err = ReadString(reader); err != nil { + return nil, err + } + if msg.OperationName, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Variables, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 51: + msg := &FetchEvent{meta: &meta{TypeID: 51}} + if msg.MessageID, err = ReadUint(reader); err != nil { + 
return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Request, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 52: + msg := &DOMDrop{meta: &meta{TypeID: 52}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 53: + msg := &ResourceTiming{meta: &meta{TypeID: 53}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TTFB, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HeaderSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Initiator, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 54: + msg := &ConnectionInformation{meta: &meta{TypeID: 54}} + if msg.Downlink, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 55: + msg := &SetPageVisibility{meta: &meta{TypeID: 55}} + if msg.hidden, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, nil + + case 56: + msg := &PerformanceTrackAggr{meta: &meta{TypeID: 56}} + if msg.TimestampStart, err = ReadUint(reader); err != nil { + return 
nil, err + } + if msg.TimestampEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 59: + msg := &LongTask{meta: &meta{TypeID: 59}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ContainerType, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ContainerSrc, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContainerId, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContainerName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 60: + msg := &SetNodeAttributeURLBased{meta: &meta{TypeID: 60}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = 
ReadString(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 61: + msg := &SetCSSDataURLBased{meta: &meta{TypeID: 61}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 62: + msg := &IssueEvent{meta: &meta{TypeID: 62}} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContextString, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 63: + msg := &TechnicalInfo{meta: &meta{TypeID: 63}} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 64: + msg := &CustomIssue{meta: &meta{TypeID: 64}} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 65: + msg := &PageClose{meta: &meta{TypeID: 65}} + + return msg, nil + + case 66: + msg := &AssetCache{meta: &meta{TypeID: 66}} + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 67: + msg := &CSSInsertRuleURLBased{meta: &meta{TypeID: 67}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, 
err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 69: + msg := &MouseClick{meta: &meta{TypeID: 69}} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Selector, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 70: + msg := &CreateIFrameDocument{meta: &meta{TypeID: 70}} + if msg.FrameID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 107: + msg := &IOSBatchMeta{meta: &meta{TypeID: 107}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstIndex, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 90: + msg := &IOSSessionStart{meta: &meta{TypeID: 90}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ProjectID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TrackerVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.RevID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserUUID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOS, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOSVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDevice, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceType, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserCountry, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 91: + msg := &IOSSessionEnd{meta: 
&meta{TypeID: 91}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 92: + msg := &IOSMetadata{meta: &meta{TypeID: 92}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 93: + msg := &IOSCustomEvent{meta: &meta{TypeID: 93}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 94: + msg := &IOSUserID{meta: &meta{TypeID: 94}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 95: + msg := &IOSUserAnonymousID{meta: &meta{TypeID: 95}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 96: + msg := &IOSScreenChanges{meta: &meta{TypeID: 96}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Width, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Height, err = ReadUint(reader); err 
!= nil { + return nil, err + } + return msg, nil + + case 97: + msg := &IOSCrash{meta: &meta{TypeID: 97}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Reason, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Stacktrace, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 98: + msg := &IOSScreenEnter{meta: &meta{TypeID: 98}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Title, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ViewName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 99: + msg := &IOSScreenLeave{meta: &meta{TypeID: 99}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Title, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ViewName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 100: + msg := &IOSClickEvent{meta: &meta{TypeID: 100}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 101: + msg := &IOSInputEvent{meta: &meta{TypeID: 101}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = 
ReadString(reader); err != nil { + return nil, err + } + if msg.ValueMasked, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 102: + msg := &IOSPerformanceEvent{meta: &meta{TypeID: 102}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 103: + msg := &IOSLog{meta: &meta{TypeID: 103}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Severity, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Content, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 104: + msg := &IOSInternalError{meta: &meta{TypeID: 104}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Content, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + case 105: + msg := &IOSNetworkCall{meta: &meta{TypeID: 105}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Headers, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Body, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Success, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { 
+ return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 110: + msg := &IOSPerformanceAggregated{meta: &meta{TypeID: 110}} + if msg.TimestampStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimestampEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinBattery, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgBattery, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxBattery, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, nil + + case 111: + msg := &IOSIssueEvent{meta: &meta{TypeID: 111}} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContextString, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, nil + + } + return nil, fmt.Errorf("Unknown message code: %v", t) } diff --git a/backend/pkg/redisstream/producer.go b/backend/pkg/redisstream/producer.go index e67200a4f..d5afc63b3 100644 --- 
a/backend/pkg/redisstream/producer.go +++ b/backend/pkg/redisstream/producer.go @@ -6,25 +6,24 @@ import ( "openreplay/backend/pkg/env" ) - type Producer struct { - redis *redis.Client - maxLenApprox int64 + redis *redis.Client + maxLenApprox int64 } func NewProducer() *Producer { return &Producer{ - redis: getRedisClient(), + redis: getRedisClient(), maxLenApprox: int64(env.Uint64("REDIS_STREAMS_MAX_LEN")), } } func (p *Producer) Produce(topic string, key uint64, value []byte) error { - args := &redis.XAddArgs{ + args := &redis.XAddArgs{ Stream: topic, Values: map[string]interface{}{ "sessionID": key, - "value": value, + "value": value, }, } args.MaxLenApprox = p.maxLenApprox @@ -35,7 +34,7 @@ func (p *Producer) Produce(topic string, key uint64, value []byte) error { } return nil } - + func (p *Producer) Close(_ int) { // noop } diff --git a/backend/pkg/redisstream/redis.go b/backend/pkg/redisstream/redis.go index dea4afe9b..7dba0b537 100644 --- a/backend/pkg/redisstream/redis.go +++ b/backend/pkg/redisstream/redis.go @@ -2,15 +2,13 @@ package redisstream import ( "log" - + "github.com/go-redis/redis" "openreplay/backend/pkg/env" ) - -var redisClient *redis.Client - +var redisClient *redis.Client func getRedisClient() *redis.Client { if redisClient != nil { @@ -23,4 +21,4 @@ func getRedisClient() *redis.Client { log.Fatalln(err) } return redisClient -} \ No newline at end of file +} diff --git a/backend/pkg/storage/s3.go b/backend/pkg/storage/s3.go index 0f55e3851..408dc1864 100644 --- a/backend/pkg/storage/s3.go +++ b/backend/pkg/storage/s3.go @@ -2,8 +2,8 @@ package storage import ( "io" - "strconv" "sort" + "strconv" _s3 "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3manager" @@ -12,18 +12,17 @@ import ( ) type S3 struct { - uploader *s3manager.Uploader - svc *_s3.S3 - bucket *string + uploader *s3manager.Uploader + svc *_s3.S3 + bucket *string } - func NewS3(region string, bucket string) *S3 { sess := 
env.AWSSessionOnRegion(region) return &S3{ uploader: s3manager.NewUploader(sess), - svc: _s3.New(sess), // AWS Docs: "These clients are safe to use concurrently." - bucket: &bucket, + svc: _s3.New(sess), // AWS Docs: "These clients are safe to use concurrently." + bucket: &bucket, } } @@ -35,14 +34,14 @@ func (s3 *S3) Upload(reader io.Reader, key string, contentType string, gzipped b contentEncoding = &gzipStr } _, err := s3.uploader.Upload(&s3manager.UploadInput{ - Body: reader, - Bucket: s3.bucket, - Key: &key, - ContentType: &contentType, - CacheControl: &cacheControl, + Body: reader, + Bucket: s3.bucket, + Key: &key, + ContentType: &contentType, + CacheControl: &cacheControl, ContentEncoding: contentEncoding, - }) - return err + }) + return err } func (s3 *S3) Get(key string) (io.ReadCloser, error) { @@ -67,8 +66,8 @@ func (s3 *S3) Exists(key string) bool { return false } - const MAX_RETURNING_COUNT = 40 + func (s3 *S3) GetFrequentlyUsedKeys(projectID uint64) ([]string, error) { prefix := strconv.FormatUint(projectID, 10) + "/" output, err := s3.svc.ListObjectsV2(&_s3.ListObjectsV2Input{ @@ -82,7 +81,7 @@ func (s3 *S3) GetFrequentlyUsedKeys(projectID uint64) ([]string, error) { list := output.Contents max := len(list) - if (max > MAX_RETURNING_COUNT) { + if max > MAX_RETURNING_COUNT { max = MAX_RETURNING_COUNT sort.Slice(list, func(i, j int) bool { return list[i].LastModified.After(*(list[j].LastModified)) @@ -91,8 +90,8 @@ func (s3 *S3) GetFrequentlyUsedKeys(projectID uint64) ([]string, error) { var keyList []string s := len(prefix) - for _, obj := range list[:max] { - keyList = append(keyList, (*obj.Key)[s:]) - } - return keyList, nil -} \ No newline at end of file + for _, obj := range list[:max] { + keyList = append(keyList, (*obj.Key)[s:]) + } + return keyList, nil +} diff --git a/backend/pkg/url/assets/css.go b/backend/pkg/url/assets/css.go index 3bd486bc7..dda8755d7 100644 --- a/backend/pkg/url/assets/css.go +++ b/backend/pkg/url/assets/css.go @@ -39,7 
+39,7 @@ func unquote(str string) (string, string) { } func ExtractURLsFromCSS(css string) []string { - indexes := cssUrlsIndex(css) + indexes := cssUrlsIndex(css) urls := make([]string, len(indexes)) for _, idx := range indexes { diff --git a/backend/pkg/url/method.go b/backend/pkg/url/method.go index e7dd9eb49..31e654fde 100644 --- a/backend/pkg/url/method.go +++ b/backend/pkg/url/method.go @@ -1,12 +1,12 @@ package url -var METHODS = []string{ "GET", "HEAD", "POST" , "PUT" , "DELETE" , "CONNECT" , "OPTIONS" , "TRACE" , "PATCH" } +var METHODS = []string{"GET", "HEAD", "POST", "PUT", "DELETE", "CONNECT", "OPTIONS", "TRACE", "PATCH"} func EnsureMethod(method string) string { for _, m := range METHODS { - if m == method { - return method - } + if m == method { + return method + } } return "" -} \ No newline at end of file +} diff --git a/backend/services/assets/jsexception.go b/backend/services/assets/jsexception.go index ce5852bd5..c0b26e0db 100644 --- a/backend/services/assets/jsexception.go +++ b/backend/services/assets/jsexception.go @@ -1,16 +1,14 @@ -package main +package main import ( "encoding/json" "strings" ) - type frame struct { FileName string `json:"fileName"` } - func extractJSExceptionSources(payload *string) ([]string, error) { var frameList []frame err := json.Unmarshal([]byte(*payload), &frameList) @@ -25,8 +23,8 @@ func extractJSExceptionSources(payload *string) ([]string, error) { fn := strings.Split(f.FileName, "?")[0] if strings.HasPrefix(fn, "http") && !presentedFileName[fn] { fileNamesList = append(fileNamesList, f.FileName) - presentedFileName[fn] = true + presentedFileName[fn] = true } } return fileNamesList, nil -} \ No newline at end of file +} diff --git a/backend/services/assets/main.go b/backend/services/assets/main.go index 664dc5b09..259918395 100644 --- a/backend/services/assets/main.go +++ b/backend/services/assets/main.go @@ -66,6 +66,7 @@ func main() { os.Exit(0) case err := <-cacher.Errors: log.Printf("Error while caching: %v", 
err) + // TODO: notify user case <-tick: cacher.UpdateTimeouts() default: diff --git a/backend/services/db/heuristics/anr.go b/backend/services/db/heuristics/anr.go index 266f882f9..0475b00be 100644 --- a/backend/services/db/heuristics/anr.go +++ b/backend/services/db/heuristics/anr.go @@ -1,23 +1,22 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" + . "openreplay/backend/pkg/messages" ) - const MIN_TIME_AFTER_LAST_HEARTBEAT = 60 * 1000 type anr struct { readyMessageStore - lastLabel string + lastLabel string lastHeartbeatTimestamp uint64 - lastHeartbeatIndex uint64 + lastHeartbeatIndex uint64 } func (h *anr) buildIf(timestamp uint64) { - if h.lastHeartbeatTimestamp != 0 && h.lastHeartbeatTimestamp + MIN_TIME_AFTER_LAST_HEARTBEAT <= timestamp { + if h.lastHeartbeatTimestamp != 0 && h.lastHeartbeatTimestamp+MIN_TIME_AFTER_LAST_HEARTBEAT <= timestamp { m := &IOSIssueEvent{ - Type: "anr", + Type: "anr", ContextString: h.lastLabel, //Context: "{}", //Payload: fmt.SPrint @@ -49,4 +48,4 @@ func (h *anr) HandleMessage(msg Message) { case *IOSSessionEnd: h.buildIf(m.Timestamp) } -} \ No newline at end of file +} diff --git a/backend/services/db/heuristics/clickrage.go b/backend/services/db/heuristics/clickrage.go index 4dc86ee65..9a1db92d5 100644 --- a/backend/services/db/heuristics/clickrage.go +++ b/backend/services/db/heuristics/clickrage.go @@ -1,26 +1,25 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" + . 
"openreplay/backend/pkg/messages" ) - const CLICK_TIME_DIFF = 200 const MIN_CLICKS_IN_A_ROW = 3 type clickrage struct { readyMessageStore - lastTimestamp uint64 - lastLabel string + lastTimestamp uint64 + lastLabel string firstInARawTimestamp uint64 - firstInARawSeqIndex uint64 - countsInARow int + firstInARawSeqIndex uint64 + countsInARow int } func (h *clickrage) build() { if h.countsInARow >= MIN_CLICKS_IN_A_ROW { m := &IOSIssueEvent{ - Type: "click_rage", + Type: "click_rage", ContextString: h.lastLabel, //Context: "{}", //Payload: fmt.SPrint @@ -39,7 +38,7 @@ func (h *clickrage) build() { func (h *clickrage) HandleMessage(msg Message) { switch m := msg.(type) { case *IOSClickEvent: - if h.lastTimestamp + CLICK_TIME_DIFF < m.Timestamp && h.lastLabel == m.Label { + if h.lastTimestamp+CLICK_TIME_DIFF < m.Timestamp && h.lastLabel == m.Label { h.lastTimestamp = m.Timestamp h.countsInARow += 1 return @@ -55,4 +54,4 @@ func (h *clickrage) HandleMessage(msg Message) { case *IOSSessionEnd: h.build() } -} \ No newline at end of file +} diff --git a/backend/services/db/heuristics/heuristics.go b/backend/services/db/heuristics/heuristics.go index 7832e0a82..677574951 100644 --- a/backend/services/db/heuristics/heuristics.go +++ b/backend/services/db/heuristics/heuristics.go @@ -1,8 +1,8 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" - . "openreplay/backend/pkg/db/types" + . "openreplay/backend/pkg/db/types" + . 
"openreplay/backend/pkg/messages" ) type MessageHandler interface { @@ -19,7 +19,6 @@ type Handler interface { type mainHandler map[uint64]*sessHandler - func NewHandler() mainHandler { return make(mainHandler) } @@ -43,8 +42,10 @@ func (m mainHandler) HandleMessage(session *Session, msg Message) { } func (m mainHandler) IterateSessionReadyMessages(sessionID uint64, iter func(msg Message)) { - s, ok := m[ sessionID ] - if !ok { return } + s, ok := m[sessionID] + if !ok { + return + } s.IterateReadyMessages(iter) if s.IsEnded() { delete(m, sessionID) @@ -61,5 +62,3 @@ func (m mainHandler) IterateReadyMessages(iter func(sessionID uint64, msg Messag } } } - - diff --git a/backend/services/db/heuristics/performance.go b/backend/services/db/heuristics/performance.go index 931d831e6..c7494a793 100644 --- a/backend/services/db/heuristics/performance.go +++ b/backend/services/db/heuristics/performance.go @@ -1,31 +1,30 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" + . "openreplay/backend/pkg/messages" ) - const AGGR_TIME = 15 * 60 * 1000 - type valueAggregator struct { - sum float64 + sum float64 count float64 } + func (va *valueAggregator) aggregate() uint64 { if va.count == 0 { return 0 } - return uint64(va.sum/va.count) + return uint64(va.sum / va.count) } type performanceAggregator struct { readyMessageStore - pa *IOSPerformanceAggregated - fps valueAggregator - cpu valueAggregator - memory valueAggregator - battery valueAggregator + pa *IOSPerformanceAggregated + fps valueAggregator + cpu valueAggregator + memory valueAggregator + battery valueAggregator } func (h *performanceAggregator) build(timestamp uint64) { @@ -56,7 +55,7 @@ func (h *performanceAggregator) HandleMessage(msg Message) { if h.pa.TimestampStart == 0 { h.pa.TimestampStart = m.Timestamp } - if h.pa.TimestampStart + AGGR_TIME <= m.Timestamp { + if h.pa.TimestampStart+AGGR_TIME <= m.Timestamp { h.build(m.Timestamp) } switch m.Name { @@ -96,8 +95,8 @@ func (h 
*performanceAggregator) HandleMessage(msg Message) { if m.Value > h.pa.MaxBattery { h.pa.MaxBattery = m.Value } - } + } case *IOSSessionEnd: h.build(m.Timestamp) } -} \ No newline at end of file +} diff --git a/backend/services/db/heuristics/readyMessageStore.go b/backend/services/db/heuristics/readyMessageStore.go index 9c619e20b..bbe77585d 100644 --- a/backend/services/db/heuristics/readyMessageStore.go +++ b/backend/services/db/heuristics/readyMessageStore.go @@ -1,10 +1,9 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" + . "openreplay/backend/pkg/messages" ) - type readyMessageStore struct { store []Message } @@ -18,4 +17,4 @@ func (s *readyMessageStore) IterateReadyMessages(cb func(msg Message)) { cb(msg) } s.store = nil -} \ No newline at end of file +} diff --git a/backend/services/db/heuristics/session.go b/backend/services/db/heuristics/session.go index d828ca478..3946bf918 100644 --- a/backend/services/db/heuristics/session.go +++ b/backend/services/db/heuristics/session.go @@ -1,18 +1,16 @@ package heuristics import ( - . "openreplay/backend/pkg/messages" - . "openreplay/backend/pkg/db/types" + . "openreplay/backend/pkg/db/types" + . "openreplay/backend/pkg/messages" ) - type sessHandler struct { - session *Session + session *Session handlers []Handler - ended bool + ended bool } - func newSessHandler(session *Session) *sessHandler { return &sessHandler{ session: session, @@ -44,4 +42,4 @@ func (s *sessHandler) IterateReadyMessages(cb func(msg Message)) { func (s *sessHandler) IsEnded() bool { return s.ended -} \ No newline at end of file +} diff --git a/backend/services/db/stats.go b/backend/services/db/stats.go index 81abf1b91..2c3a5da38 100644 --- a/backend/services/db/stats.go +++ b/backend/services/db/stats.go @@ -1,25 +1,23 @@ package main import ( - - . "openreplay/backend/pkg/messages" . "openreplay/backend/pkg/db/types" + . 
"openreplay/backend/pkg/messages" ) func initStats() { - // noop + // noop } - func insertStats(session *Session, msg Message) error { switch m := msg.(type) { - // Web - case *PerformanceTrackAggr: - return pg.InsertWebStatsPerformance(session.SessionID, m) - case *ResourceEvent: - return pg.InsertWebStatsResourceEvent(session.SessionID, m) - case *LongTask: - return pg.InsertWebStatsLongtask(session.SessionID, m) + // Web + case *PerformanceTrackAggr: + return pg.InsertWebStatsPerformance(session.SessionID, m) + case *ResourceEvent: + return pg.InsertWebStatsResourceEvent(session.SessionID, m) + case *LongTask: + return pg.InsertWebStatsLongtask(session.SessionID, m) // IOS // case *IOSPerformanceAggregated: diff --git a/backend/services/ender/builder/builderMap.go b/backend/services/ender/builder/builderMap.go index 6ab3c3ac7..3f3e4d6e3 100644 --- a/backend/services/ender/builder/builderMap.go +++ b/backend/services/ender/builder/builderMap.go @@ -6,7 +6,6 @@ import ( type builderMap map[uint64]*builder - func NewBuilderMap() builderMap { return make(builderMap) } @@ -28,8 +27,10 @@ func (m builderMap) HandleMessage(sessionID uint64, msg Message, messageID uint6 } func (m builderMap) IterateSessionReadyMessages(sessionID uint64, operatingTs int64, iter func(msg Message)) { - b, ok := m[ sessionID ] - if !ok { return } + b, ok := m[sessionID] + if !ok { + return + } sessionEnded := b.checkTimeouts(operatingTs) b.iterateReadyMessage(iter) if sessionEnded { @@ -48,5 +49,3 @@ func (m builderMap) IterateReadyMessages(operatingTs int64, iter func(sessionID } } } - - diff --git a/backend/services/ender/builder/clikRageDetector.go b/backend/services/ender/builder/clikRageDetector.go index 116d57071..f25efbcd9 100644 --- a/backend/services/ender/builder/clikRageDetector.go +++ b/backend/services/ender/builder/clikRageDetector.go @@ -1,34 +1,32 @@ package builder import ( - "encoding/json" + "encoding/json" . 
"openreplay/backend/pkg/messages" ) - const CLICK_TIME_DIFF = 300 const MIN_CLICKS_IN_A_ROW = 3 type clickRageDetector struct { - lastTimestamp uint64 - lastLabel string + lastTimestamp uint64 + lastLabel string firstInARawTimestamp uint64 firstInARawMessageId uint64 - countsInARow int + countsInARow int } - func (crd *clickRageDetector) Build() *IssueEvent { var i *IssueEvent if crd.countsInARow >= MIN_CLICKS_IN_A_ROW { - payload, _ := json.Marshal(struct{Count int }{crd.countsInARow,}) + payload, _ := json.Marshal(struct{ Count int }{crd.countsInARow}) i = &IssueEvent{ - Type: "click_rage", + Type: "click_rage", ContextString: crd.lastLabel, - Payload: string(payload), // TODO: json encoder - Timestamp: crd.firstInARawTimestamp, - MessageID: crd.firstInARawMessageId, + Payload: string(payload), // TODO: json encoder + Timestamp: crd.firstInARawTimestamp, + MessageID: crd.firstInARawMessageId, } } crd.lastTimestamp = 0 @@ -39,8 +37,8 @@ func (crd *clickRageDetector) Build() *IssueEvent { return i } -func (crd *clickRageDetector) HandleMouseClick(msg *MouseClick, messageID uint64, timestamp uint64) *IssueEvent { - if crd.lastTimestamp + CLICK_TIME_DIFF > timestamp && crd.lastLabel == msg.Label { +func (crd *clickRageDetector) HandleMouseClick(msg *MouseClick, messageID uint64, timestamp uint64) *IssueEvent { + if crd.lastTimestamp+CLICK_TIME_DIFF > timestamp && crd.lastLabel == msg.Label { crd.lastTimestamp = timestamp crd.countsInARow += 1 return nil @@ -54,4 +52,4 @@ func (crd *clickRageDetector) HandleMouseClick(msg *MouseClick, messageID uint6 crd.countsInARow = 1 } return i -} \ No newline at end of file +} diff --git a/backend/services/ender/builder/cpuIssueFinder.go b/backend/services/ender/builder/cpuIssueFinder.go index be02c280f..1af867ea3 100644 --- a/backend/services/ender/builder/cpuIssueFinder.go +++ b/backend/services/ender/builder/cpuIssueFinder.go @@ -3,20 +3,19 @@ package builder import ( "encoding/json" - 
"openreplay/backend/pkg/messages/performance" . "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/messages/performance" ) -const CPU_THRESHOLD = 70 // % out of 100 +const CPU_THRESHOLD = 70 // % out of 100 const CPU_MIN_DURATION_TRIGGER = 6 * 1000 - type cpuIssueFinder struct { startTimestamp uint64 startMessageID uint64 - lastTimestamp uint64 - maxRate uint64 - contextString string + lastTimestamp uint64 + maxRate uint64 + contextString string } func (f *cpuIssueFinder) Build() *IssueEvent { @@ -35,16 +34,16 @@ func (f *cpuIssueFinder) Build() *IssueEvent { return nil } - payload, _ := json.Marshal(struct{ + payload, _ := json.Marshal(struct { Duration uint64 - Rate uint64 - }{duration,maxRate}) + Rate uint64 + }{duration, maxRate}) return &IssueEvent{ - Type: "cpu", - Timestamp: timestamp, - MessageID: messageID, + Type: "cpu", + Timestamp: timestamp, + MessageID: messageID, ContextString: f.contextString, - Payload: string(payload), + Payload: string(payload), } } @@ -52,8 +51,6 @@ func (f *cpuIssueFinder) HandleSetPageLocation(msg *SetPageLocation) { f.contextString = msg.URL } - - func (f *cpuIssueFinder) HandlePerformanceTrack(msg *PerformanceTrack, messageID uint64, timestamp uint64) *IssueEvent { dt := performance.TimeDiff(timestamp, f.lastTimestamp) if dt == 0 { @@ -82,5 +79,3 @@ func (f *cpuIssueFinder) HandlePerformanceTrack(msg *PerformanceTrack, messageID return nil } - - diff --git a/backend/services/ender/builder/deadClickDetector.go b/backend/services/ender/builder/deadClickDetector.go index 725b025cb..de977b7bd 100644 --- a/backend/services/ender/builder/deadClickDetector.go +++ b/backend/services/ender/builder/deadClickDetector.go @@ -4,25 +4,23 @@ import ( . 
"openreplay/backend/pkg/messages" ) - const CLICK_RELATION_TIME = 1400 type deadClickDetector struct { - lastMouseClick *MouseClick - lastTimestamp uint64 - lastMessageID uint64 - inputIDSet map[uint64]bool + lastMouseClick *MouseClick + lastTimestamp uint64 + lastMessageID uint64 + inputIDSet map[uint64]bool } - func (d *deadClickDetector) HandleReaction(timestamp uint64) *IssueEvent { var i *IssueEvent - if d.lastMouseClick != nil && d.lastTimestamp + CLICK_RELATION_TIME < timestamp { + if d.lastMouseClick != nil && d.lastTimestamp+CLICK_RELATION_TIME < timestamp { i = &IssueEvent{ - Type: "dead_click", + Type: "dead_click", ContextString: d.lastMouseClick.Label, - Timestamp: d.lastTimestamp, - MessageID: d.lastMessageID, + Timestamp: d.lastTimestamp, + MessageID: d.lastMessageID, } } d.inputIDSet = nil @@ -53,8 +51,8 @@ func (d *deadClickDetector) HandleMessage(msg Message, messageID uint64, timesta d.lastMouseClick = m d.lastTimestamp = timestamp d.lastMessageID = messageID - case *SetNodeAttribute, - *RemoveNodeAttribute, + case *SetNodeAttribute, + *RemoveNodeAttribute, *CreateElementNode, *CreateTextNode, *MoveNode, @@ -66,5 +64,3 @@ func (d *deadClickDetector) HandleMessage(msg Message, messageID uint64, timesta } return i } - - diff --git a/backend/services/ender/builder/domDropDetector.go b/backend/services/ender/builder/domDropDetector.go index 3366a0163..3643038c1 100644 --- a/backend/services/ender/builder/domDropDetector.go +++ b/backend/services/ender/builder/domDropDetector.go @@ -4,14 +4,13 @@ import ( . "openreplay/backend/pkg/messages" ) - type domDropDetector struct { - removedCount int + removedCount int lastDropTimestamp uint64 } -const DROP_WINDOW = 200 //ms -const CRITICAL_COUNT = 1 // Our login page contains 20. But on crush it removes only roots (1-3 nodes). +const DROP_WINDOW = 200 //ms +const CRITICAL_COUNT = 1 // Our login page contains 20. But on crush it removes only roots (1-3 nodes). 
func (dd *domDropDetector) HandleNodeCreation() { dd.removedCount = 0 @@ -19,7 +18,7 @@ func (dd *domDropDetector) HandleNodeCreation() { } func (dd *domDropDetector) HandleNodeRemoval(ts uint64) { - if dd.lastDropTimestamp + DROP_WINDOW > ts { + if dd.lastDropTimestamp+DROP_WINDOW > ts { dd.removedCount += 1 } else { dd.removedCount = 1 @@ -27,7 +26,6 @@ func (dd *domDropDetector) HandleNodeRemoval(ts uint64) { dd.lastDropTimestamp = ts } - func (dd *domDropDetector) Build() *DOMDrop { var domDrop *DOMDrop if dd.removedCount >= CRITICAL_COUNT { @@ -39,4 +37,3 @@ func (dd *domDropDetector) Build() *DOMDrop { dd.lastDropTimestamp = 0 return domDrop } - diff --git a/backend/services/ender/builder/inputEventBuilder.go b/backend/services/ender/builder/inputEventBuilder.go index 98c7ebaf6..ce1b710ca 100644 --- a/backend/services/ender/builder/inputEventBuilder.go +++ b/backend/services/ender/builder/inputEventBuilder.go @@ -7,9 +7,9 @@ import ( type inputLabels map[uint64]string type inputEventBuilder struct { - inputEvent *InputEvent - inputLabels inputLabels - inputID uint64 + inputEvent *InputEvent + inputLabels inputLabels + inputID uint64 } func NewInputEventBuilder() *inputEventBuilder { @@ -18,7 +18,6 @@ func NewInputEventBuilder() *inputEventBuilder { return ieBuilder } - func (b *inputEventBuilder) ClearLabels() { b.inputLabels = make(inputLabels) } @@ -57,11 +56,11 @@ func (b *inputEventBuilder) HasInstance() bool { return b.inputEvent != nil } -func (b * inputEventBuilder) GetTimestamp() uint64 { +func (b *inputEventBuilder) GetTimestamp() uint64 { if b.inputEvent == nil { return 0 } - return b.inputEvent.Timestamp; + return b.inputEvent.Timestamp } func (b *inputEventBuilder) Build() *InputEvent { diff --git a/backend/services/ender/builder/memoryIssueFinder.go b/backend/services/ender/builder/memoryIssueFinder.go index a2702e505..0d6d71420 100644 --- a/backend/services/ender/builder/memoryIssueFinder.go +++ 
b/backend/services/ender/builder/memoryIssueFinder.go @@ -1,21 +1,21 @@ package builder import ( - "math" "encoding/json" - + "math" + . "openreplay/backend/pkg/messages" ) const MIN_COUNT = 3 -const MEM_RATE_THRESHOLD = 300 // % to average +const MEM_RATE_THRESHOLD = 300 // % to average type memoryIssueFinder struct { startMessageID uint64 startTimestamp uint64 rate int count float64 - sum float64 + sum float64 contextString string } @@ -23,13 +23,13 @@ func (f *memoryIssueFinder) Build() *IssueEvent { if f.startTimestamp == 0 { return nil } - payload, _ := json.Marshal(struct{Rate int }{f.rate - 100,}) + payload, _ := json.Marshal(struct{ Rate int }{f.rate - 100}) i := &IssueEvent{ - Type: "memory", - Timestamp: f.startTimestamp, - MessageID: f.startMessageID, + Type: "memory", + Timestamp: f.startTimestamp, + MessageID: f.startMessageID, ContextString: f.contextString, - Payload: string(payload), + Payload: string(payload), } f.startTimestamp = 0 f.startMessageID = 0 @@ -48,8 +48,8 @@ func (f *memoryIssueFinder) HandlePerformanceTrack(msg *PerformanceTrack, messag return nil } - average := f.sum/f.count - rate := int(math.Round(float64(msg.UsedJSHeapSize)/average * 100)) + average := f.sum / f.count + rate := int(math.Round(float64(msg.UsedJSHeapSize) / average * 100)) f.sum += float64(msg.UsedJSHeapSize) f.count++ @@ -68,5 +68,3 @@ func (f *memoryIssueFinder) HandlePerformanceTrack(msg *PerformanceTrack, messag return nil } - - diff --git a/backend/services/ender/builder/pageEventBuilder.go b/backend/services/ender/builder/pageEventBuilder.go index db602a996..2b0665894 100644 --- a/backend/services/ender/builder/pageEventBuilder.go +++ b/backend/services/ender/builder/pageEventBuilder.go @@ -5,8 +5,8 @@ import ( ) type pageEventBuilder struct { - pageEvent *PageEvent - firstTimingHandled bool + pageEvent *PageEvent + firstTimingHandled bool } func (b *pageEventBuilder) buildIfTimingsComplete() *PageEvent { @@ -28,7 +28,7 @@ func (b *pageEventBuilder) 
HandleSetPageLocation(msg *SetPageLocation, messageID } } -func (b * pageEventBuilder) HandlePageLoadTiming(msg *PageLoadTiming) *PageEvent { +func (b *pageEventBuilder) HandlePageLoadTiming(msg *PageLoadTiming) *PageEvent { if !b.HasInstance() { return nil } @@ -62,7 +62,7 @@ func (b * pageEventBuilder) HandlePageLoadTiming(msg *PageLoadTiming) *PageEvent return b.buildIfTimingsComplete() } -func (b * pageEventBuilder) HandlePageRenderTiming(msg *PageRenderTiming) *PageEvent { +func (b *pageEventBuilder) HandlePageRenderTiming(msg *PageRenderTiming) *PageEvent { if !b.HasInstance() { return nil } @@ -76,16 +76,16 @@ func (b *pageEventBuilder) HasInstance() bool { return b.pageEvent != nil } -func (b * pageEventBuilder) GetTimestamp() uint64 { +func (b *pageEventBuilder) GetTimestamp() uint64 { if b.pageEvent == nil { return 0 } - return b.pageEvent.Timestamp; + return b.pageEvent.Timestamp } -func (b * pageEventBuilder) Build() *PageEvent { +func (b *pageEventBuilder) Build() *PageEvent { pageEvent := b.pageEvent b.pageEvent = nil b.firstTimingHandled = false return pageEvent -} \ No newline at end of file +} diff --git a/backend/services/ender/builder/performanceTrackAggrBuilder.go b/backend/services/ender/builder/performanceTrackAggrBuilder.go index b24090ff9..70b751f55 100644 --- a/backend/services/ender/builder/performanceTrackAggrBuilder.go +++ b/backend/services/ender/builder/performanceTrackAggrBuilder.go @@ -3,22 +3,20 @@ package builder import ( "math" - "openreplay/backend/pkg/messages/performance" . 
"openreplay/backend/pkg/messages" + "openreplay/backend/pkg/messages/performance" ) - type performanceTrackAggrBuilder struct { - performanceTrackAggr *PerformanceTrackAggr - lastTimestamp uint64 - count float64 - sumFrameRate float64 - sumTickRate float64 - sumTotalJSHeapSize float64 - sumUsedJSHeapSize float64 + performanceTrackAggr *PerformanceTrackAggr + lastTimestamp uint64 + count float64 + sumFrameRate float64 + sumTickRate float64 + sumTotalJSHeapSize float64 + sumUsedJSHeapSize float64 } - func (b *performanceTrackAggrBuilder) start(timestamp uint64) { b.performanceTrackAggr = &PerformanceTrackAggr{ TimestampStart: timestamp, @@ -39,7 +37,7 @@ func (b *performanceTrackAggrBuilder) HandlePerformanceTrack(msg *PerformanceTra } frameRate := performance.FrameRate(msg.Frames, dt) - tickRate := performance.TickRate(msg.Ticks, dt) + tickRate := performance.TickRate(msg.Ticks, dt) fps := uint64(math.Round(frameRate)) cpu := performance.CPURateFromTickRate(tickRate) @@ -84,7 +82,7 @@ func (b *performanceTrackAggrBuilder) GetStartTimestamp() uint64 { if b.performanceTrackAggr == nil { return 0 } - return b.performanceTrackAggr.TimestampStart; + return b.performanceTrackAggr.TimestampStart } func (b *performanceTrackAggrBuilder) Build() *PerformanceTrackAggr { @@ -106,4 +104,3 @@ func (b *performanceTrackAggrBuilder) Build() *PerformanceTrackAggr { b.lastTimestamp = 0 return performanceTrackAggr } - diff --git a/backend/services/http/assets.go b/backend/services/http/assets.go index cc055087a..b6ac61186 100644 --- a/backend/services/http/assets.go +++ b/backend/services/http/assets.go @@ -1,8 +1,8 @@ package main import ( - "openreplay/backend/pkg/url/assets" "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/url/assets" ) func sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) { @@ -33,4 +33,4 @@ func handleCSS(sessionID uint64, baseURL string, css string) string { return rewriter.RewriteCSS(sessionID, baseURL, css) } return 
assets.ResolveCSS(baseURL, css) -} \ No newline at end of file +} diff --git a/backend/services/http/handlers-depricated.go b/backend/services/http/handlers-depricated.go index 85f0393b7..06ab7d0f9 100644 --- a/backend/services/http/handlers-depricated.go +++ b/backend/services/http/handlers-depricated.go @@ -1 +1 @@ -package main \ No newline at end of file +package main diff --git a/backend/services/http/ios-device.go b/backend/services/http/ios-device.go index bec1f3b36..6a09e5e07 100644 --- a/backend/services/http/ios-device.go +++ b/backend/services/http/ios-device.go @@ -1,138 +1,138 @@ package main import ( - "strings" + "strings" ) func MapIOSDevice(identifier string) string { - switch identifier { - case "iPod5,1": - return "iPod touch (5th generation)" - case "iPod7,1": - return "iPod touch (6th generation)" - case "iPod9,1": - return "iPod touch (7th generation)" - case "iPhone3,1", "iPhone3,2", "iPhone3,3": - return "iPhone 4" - case "iPhone4,1": - return "iPhone 4s" - case "iPhone5,1", "iPhone5,2": - return "iPhone 5" - case "iPhone5,3", "iPhone5,4": - return "iPhone 5c" - case "iPhone6,1", "iPhone6,2": - return "iPhone 5s" - case "iPhone7,2": - return "iPhone 6" - case "iPhone7,1": - return "iPhone 6 Plus" - case "iPhone8,1": - return "iPhone 6s" - case "iPhone8,2": - return "iPhone 6s Plus" - case "iPhone8,4": - return "iPhone SE" - case "iPhone9,1", "iPhone9,3": - return "iPhone 7" - case "iPhone9,2", "iPhone9,4": - return "iPhone 7 Plus" - case "iPhone10,1", "iPhone10,4": - return "iPhone 8" - case "iPhone10,2", "iPhone10,5": - return "iPhone 8 Plus" - case "iPhone10,3", "iPhone10,6": - return "iPhone X" - case "iPhone11,2": - return "iPhone XS" - case "iPhone11,4", "iPhone11,6": - return "iPhone XS Max" - case "iPhone11,8": - return "iPhone XR" - case "iPhone12,1": - return "iPhone 11" - case "iPhone12,3": - return "iPhone 11 Pro" - case "iPhone12,5": - return "iPhone 11 Pro Max" - case "iPhone12,8": - return "iPhone SE (2nd generation)" - case 
"iPhone13,1": - return "iPhone 12 mini" - case "iPhone13,2": - return "iPhone 12" - case "iPhone13,3": - return "iPhone 12 Pro" - case "iPhone13,4": - return "iPhone 12 Pro Max" - case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4": - return "iPad 2" - case "iPad3,1", "iPad3,2", "iPad3,3": - return "iPad (3rd generation)" - case "iPad3,4", "iPad3,5", "iPad3,6": - return "iPad (4th generation)" - case "iPad6,11", "iPad6,12": - return "iPad (5th generation)" - case "iPad7,5", "iPad7,6": - return "iPad (6th generation)" - case "iPad7,11", "iPad7,12": - return "iPad (7th generation)" - case "iPad11,6", "iPad11,7": - return "iPad (8th generation)" - case "iPad4,1", "iPad4,2", "iPad4,3": - return "iPad Air" - case "iPad5,3", "iPad5,4": - return "iPad Air 2" - case "iPad11,3", "iPad11,4": - return "iPad Air (3rd generation)" - case "iPad13,1", "iPad13,2": - return "iPad Air (4th generation)" - case "iPad2,5", "iPad2,6", "iPad2,7": - return "iPad mini" - case "iPad4,4", "iPad4,5", "iPad4,6": - return "iPad mini 2" - case "iPad4,7", "iPad4,8", "iPad4,9": - return "iPad mini 3" - case "iPad5,1", "iPad5,2": - return "iPad mini 4" - case "iPad11,1", "iPad11,2": - return "iPad mini (5th generation)" - case "iPad6,3", "iPad6,4": - return "iPad Pro (9.7-inch)" - case "iPad7,3", "iPad7,4": - return "iPad Pro (10.5-inch)" - case "iPad8,1", "iPad8,2", "iPad8,3", "iPad8,4": - return "iPad Pro (11-inch) (1st generation)" - case "iPad8,9", "iPad8,10": - return "iPad Pro (11-inch) (2nd generation)" - case "iPad6,7", "iPad6,8": - return "iPad Pro (12.9-inch) (1st generation)" - case "iPad7,1", "iPad7,2": - return "iPad Pro (12.9-inch) (2nd generation)" - case "iPad8,5", "iPad8,6", "iPad8,7", "iPad8,8": - return "iPad Pro (12.9-inch) (3rd generation)" - case "iPad8,11", "iPad8,12": - return "iPad Pro (12.9-inch) (4th generation)" - case "AppleTV5,3": - return "Apple TV" - case "AppleTV6,2": - return "Apple TV 4K" - case "AudioAccessory1,1": - return "HomePod" - case "AudioAccessory5,1": - 
return "HomePod mini" - case "i386", "x86_64": - return "Simulator" - default: - return identifier - } + switch identifier { + case "iPod5,1": + return "iPod touch (5th generation)" + case "iPod7,1": + return "iPod touch (6th generation)" + case "iPod9,1": + return "iPod touch (7th generation)" + case "iPhone3,1", "iPhone3,2", "iPhone3,3": + return "iPhone 4" + case "iPhone4,1": + return "iPhone 4s" + case "iPhone5,1", "iPhone5,2": + return "iPhone 5" + case "iPhone5,3", "iPhone5,4": + return "iPhone 5c" + case "iPhone6,1", "iPhone6,2": + return "iPhone 5s" + case "iPhone7,2": + return "iPhone 6" + case "iPhone7,1": + return "iPhone 6 Plus" + case "iPhone8,1": + return "iPhone 6s" + case "iPhone8,2": + return "iPhone 6s Plus" + case "iPhone8,4": + return "iPhone SE" + case "iPhone9,1", "iPhone9,3": + return "iPhone 7" + case "iPhone9,2", "iPhone9,4": + return "iPhone 7 Plus" + case "iPhone10,1", "iPhone10,4": + return "iPhone 8" + case "iPhone10,2", "iPhone10,5": + return "iPhone 8 Plus" + case "iPhone10,3", "iPhone10,6": + return "iPhone X" + case "iPhone11,2": + return "iPhone XS" + case "iPhone11,4", "iPhone11,6": + return "iPhone XS Max" + case "iPhone11,8": + return "iPhone XR" + case "iPhone12,1": + return "iPhone 11" + case "iPhone12,3": + return "iPhone 11 Pro" + case "iPhone12,5": + return "iPhone 11 Pro Max" + case "iPhone12,8": + return "iPhone SE (2nd generation)" + case "iPhone13,1": + return "iPhone 12 mini" + case "iPhone13,2": + return "iPhone 12" + case "iPhone13,3": + return "iPhone 12 Pro" + case "iPhone13,4": + return "iPhone 12 Pro Max" + case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4": + return "iPad 2" + case "iPad3,1", "iPad3,2", "iPad3,3": + return "iPad (3rd generation)" + case "iPad3,4", "iPad3,5", "iPad3,6": + return "iPad (4th generation)" + case "iPad6,11", "iPad6,12": + return "iPad (5th generation)" + case "iPad7,5", "iPad7,6": + return "iPad (6th generation)" + case "iPad7,11", "iPad7,12": + return "iPad (7th generation)" + case 
"iPad11,6", "iPad11,7": + return "iPad (8th generation)" + case "iPad4,1", "iPad4,2", "iPad4,3": + return "iPad Air" + case "iPad5,3", "iPad5,4": + return "iPad Air 2" + case "iPad11,3", "iPad11,4": + return "iPad Air (3rd generation)" + case "iPad13,1", "iPad13,2": + return "iPad Air (4th generation)" + case "iPad2,5", "iPad2,6", "iPad2,7": + return "iPad mini" + case "iPad4,4", "iPad4,5", "iPad4,6": + return "iPad mini 2" + case "iPad4,7", "iPad4,8", "iPad4,9": + return "iPad mini 3" + case "iPad5,1", "iPad5,2": + return "iPad mini 4" + case "iPad11,1", "iPad11,2": + return "iPad mini (5th generation)" + case "iPad6,3", "iPad6,4": + return "iPad Pro (9.7-inch)" + case "iPad7,3", "iPad7,4": + return "iPad Pro (10.5-inch)" + case "iPad8,1", "iPad8,2", "iPad8,3", "iPad8,4": + return "iPad Pro (11-inch) (1st generation)" + case "iPad8,9", "iPad8,10": + return "iPad Pro (11-inch) (2nd generation)" + case "iPad6,7", "iPad6,8": + return "iPad Pro (12.9-inch) (1st generation)" + case "iPad7,1", "iPad7,2": + return "iPad Pro (12.9-inch) (2nd generation)" + case "iPad8,5", "iPad8,6", "iPad8,7", "iPad8,8": + return "iPad Pro (12.9-inch) (3rd generation)" + case "iPad8,11", "iPad8,12": + return "iPad Pro (12.9-inch) (4th generation)" + case "AppleTV5,3": + return "Apple TV" + case "AppleTV6,2": + return "Apple TV 4K" + case "AudioAccessory1,1": + return "HomePod" + case "AudioAccessory5,1": + return "HomePod mini" + case "i386", "x86_64": + return "Simulator" + default: + return identifier + } } func GetIOSDeviceType(identifier string) string { - if strings.Contains(identifier, "iPhone") { - return "mobile" //"phone" - } - if strings.Contains(identifier, "iPad") { - return "tablet" - } - return "other" + if strings.Contains(identifier, "iPhone") { + return "mobile" //"phone" + } + if strings.Contains(identifier, "iPad") { + return "tablet" + } + return "other" } diff --git a/backend/services/http/uuid.go b/backend/services/http/uuid.go index 13f57bff0..87704d740 100644 --- 
a/backend/services/http/uuid.go +++ b/backend/services/http/uuid.go @@ -12,4 +12,4 @@ func getUUID(u *string) string { } } return uuid.New().String() -} \ No newline at end of file +} diff --git a/backend/services/integrations/clientManager/manager.go b/backend/services/integrations/clientManager/manager.go index 39cd8dd90..a671a6266 100644 --- a/backend/services/integrations/clientManager/manager.go +++ b/backend/services/integrations/clientManager/manager.go @@ -7,38 +7,36 @@ import ( "openreplay/backend/services/integrations/integration" ) - type manager struct { - clientMap integration.ClientMap - Events chan *integration.SessionErrorEvent - Errors chan error - RequestDataUpdates chan postgres.Integration // not pointer because it could change in other thread + clientMap integration.ClientMap + Events chan *integration.SessionErrorEvent + Errors chan error + RequestDataUpdates chan postgres.Integration // not pointer because it could change in other thread } - func NewManager() *manager { - return &manager { - clientMap: make(integration.ClientMap), + return &manager{ + clientMap: make(integration.ClientMap), RequestDataUpdates: make(chan postgres.Integration, 100), - Events: make(chan *integration.SessionErrorEvent, 100), - Errors: make(chan error, 100), + Events: make(chan *integration.SessionErrorEvent, 100), + Errors: make(chan error, 100), } } -func (m* manager) Update(i *postgres.Integration) error { +func (m *manager) Update(i *postgres.Integration) error { key := strconv.Itoa(int(i.ProjectID)) + i.Provider if i.Options == nil { delete(m.clientMap, key) return nil } - c, exists := m.clientMap[ key ] + c, exists := m.clientMap[key] if !exists { c, err := integration.NewClient(i, m.RequestDataUpdates, m.Events, m.Errors) if err != nil { return err } - m.clientMap[ key ] = c + m.clientMap[key] = c return nil } return c.Update(i) diff --git a/backend/services/integrations/integration/cloudwatch.go b/backend/services/integrations/integration/cloudwatch.go 
index fa2210138..9974f485b 100644 --- a/backend/services/integrations/integration/cloudwatch.go +++ b/backend/services/integrations/integration/cloudwatch.go @@ -2,43 +2,40 @@ package integration import ( "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/cloudwatchlogs" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/cloudwatchlogs" - "strings" - "regexp" "openreplay/backend/pkg/messages" + "regexp" + "strings" ) - var reIsException = regexp.MustCompile(`(?i)exception|error`) type cloudwatch struct { - AwsAccessKeyId string // `json:"aws_access_key_id"` - AwsSecretAccessKey string // `json:"aws_secret_access_key"` - LogGroupName string // `json:"log_group_name"` - Region string // `json:"region"` + AwsAccessKeyId string // `json:"aws_access_key_id"` + AwsSecretAccessKey string // `json:"aws_secret_access_key"` + LogGroupName string // `json:"log_group_name"` + Region string // `json:"region"` } - func (cw *cloudwatch) Request(c *client) error { - startTs := int64(c.getLastMessageTimestamp() + 1) // From next millisecond + startTs := int64(c.getLastMessageTimestamp() + 1) // From next millisecond //endTs := utils.CurrentTimestamp() sess, err := session.NewSession(aws.NewConfig(). - WithRegion(cw.Region). - WithCredentials( - credentials.NewStaticCredentials(cw.AwsAccessKeyId, cw.AwsSecretAccessKey, ""), - ), + WithRegion(cw.Region). + WithCredentials( + credentials.NewStaticCredentials(cw.AwsAccessKeyId, cw.AwsSecretAccessKey, ""), + ), ) if err != nil { return err } svc := cloudwatchlogs.New(sess) - filterOptions := new(cloudwatchlogs.FilterLogEventsInput). - SetStartTime(startTs). // Inclusively both startTime and endTime + SetStartTime(startTs). // Inclusively both startTime and endTime // SetEndTime(endTs). // Default nil? // SetLimit(10000). // Default 10000 SetLogGroupName(cw.LogGroupName). 
@@ -56,7 +53,7 @@ func (cw *cloudwatch) Request(c *client) error { } if !reIsException.MatchString(*e.Message) { // too weak condition ? continue - } + } token, err := GetToken(*e.Message) if err != nil { c.errChan <- err @@ -72,18 +69,18 @@ func (cw *cloudwatch) Request(c *client) error { //SessionID: sessionID, Token: token, RawErrorEvent: &messages.RawErrorEvent{ - Source: "cloudwatch", - Timestamp: timestamp, // e.IngestionTime ?? - Name: name, - Payload: strings.ReplaceAll(e.String(), "\n", ""), + Source: "cloudwatch", + Timestamp: timestamp, // e.IngestionTime ?? + Name: name, + Payload: strings.ReplaceAll(e.String(), "\n", ""), }, } } if output.NextToken == nil { - break; + break } filterOptions.NextToken = output.NextToken } return nil -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/elasticsearch.go b/backend/services/integrations/integration/elasticsearch.go index dd6f5d5f9..6b8181073 100644 --- a/backend/services/integrations/integration/elasticsearch.go +++ b/backend/services/integrations/integration/elasticsearch.go @@ -53,14 +53,14 @@ func (es *elasticsearch) Request(c *client) error { "query": map[string]interface{}{ "bool": map[string]interface{}{ "filter": []map[string]interface{}{ - map[string]interface{}{ + { "match": map[string]interface{}{ "message": map[string]interface{}{ "query": "openReplaySessionToken=", // asayer_session_id= }, }, }, - map[string]interface{}{ + { "range": map[string]interface{}{ "utc_time": map[string]interface{}{ "gte": strconv.FormatUint(gteTs, 10), @@ -68,7 +68,7 @@ func (es *elasticsearch) Request(c *client) error { }, }, }, - map[string]interface{}{ + { "term": map[string]interface{}{ "tags": "error", }, diff --git a/backend/services/integrations/integration/rollbar.go b/backend/services/integrations/integration/rollbar.go index 369ee31f9..53a5c6d5b 100644 --- a/backend/services/integrations/integration/rollbar.go +++ b/backend/services/integrations/integration/rollbar.go @@ 
-1,15 +1,15 @@ package integration import ( - "net/http" "encoding/json" + "errors" "fmt" - "time" - "strings" - "strconv" "io" - "io/ioutil" - "errors" + "io/ioutil" + "net/http" + "strconv" + "strings" + "time" "openreplay/backend/pkg/messages" ) @@ -17,42 +17,42 @@ import ( // Old name: asayerSessionId // QUERY: what can be modified? -const RB_QUERY = - "SELECT item.id, item.title,body.message.openReplaySessionToken,item.level,"+ - " item.counter,item.environment,body.crash_report.raw,body.message.body,timestamp"+ - " FROM item_occurrence"+ - " WHERE body.message.openReplaySessionToken != null"+ - " AND timestamp>= %v"+ - " AND item.level>30"+ - " ORDER BY timestamp"+ +const RB_QUERY = "SELECT item.id, item.title,body.message.openReplaySessionToken,item.level," + + " item.counter,item.environment,body.crash_report.raw,body.message.body,timestamp" + + " FROM item_occurrence" + + " WHERE body.message.openReplaySessionToken != null" + + " AND timestamp>= %v" + + " AND item.level>30" + + " ORDER BY timestamp" + " LIMIT 1000" + // ASC by default // \n\t symbols can spoil the request body, so it wouldn't work (OR probably it happend because of job hashing) /* - - `read` Access Token required - - timstamp in seconds + - `read` Access Token required + - timstamp in seconds */ type rollbar struct { - AccessToken string // `json:"access_token"` + AccessToken string // `json:"access_token"` } type rollbarJobResponce struct { - Err int + Err int Message string - Result struct { + Result struct { Id int } } type rollbarJobStatusResponce struct { - Err int + Err int Result struct { Status string Result struct { - Rows [][] json.Number - Columns[] string + Rows [][]json.Number + Columns []string } } } @@ -65,7 +65,7 @@ type rollbarEvent map[string]string */ func (rb *rollbar) Request(c *client) error { fromTs := c.getLastMessageTimestamp() + 1000 // From next second - c.setLastMessageTimestamp(fromTs) // anti-job-hashing + c.setLastMessageTimestamp(fromTs) // anti-job-hashing 
fromTsSec := fromTs / 1e3 query := fmt.Sprintf(RB_QUERY, fromTsSec) jsonBody := fmt.Sprintf(`{ @@ -111,7 +111,7 @@ func (rb *rollbar) Request(c *client) error { tick := time.Tick(5 * time.Second) for { - <- tick + <-tick resp, err = http.DefaultClient.Do(req) if err != nil { return err // continue + timeout/maxAttempts @@ -131,14 +131,14 @@ func (rb *rollbar) Request(c *client) error { e := make(rollbarEvent) for i, col := range jobStatus.Result.Result.Columns { //if len(row) <= i { error } - e[ col ] = row[ i ].String() // here I make them all string. That's not good + e[col] = row[i].String() // here I make them all string. That's not good } // sessionID, err := strconv.ParseUint(e[ "body.message.asayerSessionId" ], 10, 64) // if err != nil { // c.errChan <- err // continue // } - if e[ "body.message.openReplaySessionToken" ] == "" { + if e["body.message.openReplaySessionToken"] == "" { c.errChan <- errors.New("Token is empty!") continue } @@ -147,7 +147,7 @@ func (rb *rollbar) Request(c *client) error { c.errChan <- err continue } - timestampSec, err := strconv.ParseUint(e[ "timestamp" ], 10, 64) + timestampSec, err := strconv.ParseUint(e["timestamp"], 10, 64) if err != nil { c.errChan <- err continue @@ -155,22 +155,22 @@ func (rb *rollbar) Request(c *client) error { timestamp := timestampSec * 1000 c.setLastMessageTimestamp(timestamp) c.evChan <- &SessionErrorEvent{ - Token: e[ "body.message.openReplaySessionToken" ], + Token: e["body.message.openReplaySessionToken"], RawErrorEvent: &messages.RawErrorEvent{ - Source: "rollbar", + Source: "rollbar", Timestamp: timestamp, - Name: e[ "item.title" ], - Payload: string(payload), + Name: e["item.title"], + Payload: string(payload), }, } } break } - if jobStatus.Result.Status != "new" && + if jobStatus.Result.Status != "new" && jobStatus.Result.Status != "running" { // error break } } return nil -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/utils.go 
b/backend/services/integrations/integration/utils.go index 396a177bd..36a473c02 100644 --- a/backend/services/integrations/integration/utils.go +++ b/backend/services/integrations/integration/utils.go @@ -1,34 +1,37 @@ package integration import ( + "fmt" "regexp" "strconv" "strings" - "fmt" ) var reSessionID = regexp.MustCompile(`(?i)asayer_session_id=([0-9]+)`) -func GetAsayerSessionId(s string) (uint64, error) { + +func GetAsayerSessionId(s string) (uint64, error) { matches := reSessionID.FindStringSubmatch(s) if len(matches) < 2 { return 0, fmt.Errorf("'asayer_session_id' not found in '%v' ", s) } - return strconv.ParseUint(matches[ 1 ], 10, 64) + return strconv.ParseUint(matches[1], 10, 64) } func GetLinkFromAngularBrackets(s string) string { beg := strings.Index(s, "<") + 1 end := strings.Index(s, ">") - if end < 0 { return "" } + if end < 0 { + return "" + } return strings.TrimSpace(s[beg:end]) } - var reToken = regexp.MustCompile(`(?i)openReplaySessionToken=([0-9a-zA-Z\.]+)`) -func GetToken(s string) (string, error) { + +func GetToken(s string) (string, error) { matches := reToken.FindStringSubmatch(s) if len(matches) < 2 { return "", fmt.Errorf("'openReplaySessionToken' not found in '%v' ", s) } - return matches[ 1 ], nil -} \ No newline at end of file + return matches[1], nil +} diff --git a/backend/services/storage/gzip.go b/backend/services/storage/gzip.go index d574ec4ae..f3e96394a 100644 --- a/backend/services/storage/gzip.go +++ b/backend/services/storage/gzip.go @@ -1,19 +1,18 @@ package main import ( - "io" gzip "github.com/klauspost/pgzip" + "io" ) - func gzipFile(file io.ReadSeeker) io.Reader { reader, writer := io.Pipe() - go func() { - gw, _ := gzip.NewWriterLevel(writer, gzip.BestSpeed) - io.Copy(gw, file) + go func() { + gw, _ := gzip.NewWriterLevel(writer, gzip.BestSpeed) + io.Copy(gw, file) - gw.Close() - writer.Close() - }() - return reader -} \ No newline at end of file + gw.Close() + writer.Close() + }() + return reader +} From 
fbb039f0c7936664493cb2c9e73603c0f58ed0b0 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Apr 2022 17:02:13 +0200 Subject: [PATCH 010/221] fix(backend):pprof launch addr: use port only --- backend/pkg/pprof/pprof.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/pkg/pprof/pprof.go b/backend/pkg/pprof/pprof.go index a05080178..8ea1c1b5f 100644 --- a/backend/pkg/pprof/pprof.go +++ b/backend/pkg/pprof/pprof.go @@ -8,6 +8,6 @@ import ( func StartProfilingServer() { go func() { - log.Println(http.ListenAndServe("localhost:6060", nil)) + log.Println(http.ListenAndServe(":6060", nil)) }() } From d69934167651dbac4ba27661b4ddf4a7dbb6f2c0 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Apr 2022 17:02:53 +0200 Subject: [PATCH 011/221] fix(backend): Dockerfile.bundle fix --- backend/Dockerfile.bundle | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/backend/Dockerfile.bundle b/backend/Dockerfile.bundle index efbcb2684..79ef57db5 100644 --- a/backend/Dockerfile.bundle +++ b/backend/Dockerfile.bundle @@ -1,4 +1,4 @@ -FROM golang:1.13-alpine3.10 AS prepare +FROM golang:1.18-alpine3.15 AS prepare RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash @@ -13,7 +13,7 @@ FROM prepare AS build COPY pkg pkg COPY services services -RUN for name in alerts assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/services/$name; done +RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/services/$name; done FROM alpine @@ -26,8 +26,9 @@ ENV TZ=UTC \ MAXMINDDB_FILE=/root/geoip.mmdb \ UAPARSER_FILE=/root/regexes.yaml \ HTTP_PORT=80 \ - BEACON_SIZE_LIMIT=1000000 \ + BEACON_SIZE_LIMIT=7000000 \ KAFKA_USE_SSL=true \ + KAFKA_MAX_POLL_INTERVAL_MS=400000 \ REDIS_STREAMS_MAX_LEN=3000 \ TOPIC_RAW_WEB=raw \ TOPIC_RAW_IOS=raw-ios 
\ @@ -42,10 +43,10 @@ ENV TZ=UTC \ AWS_REGION_WEB=eu-central-1 \ AWS_REGION_IOS=eu-west-1 \ AWS_REGION_ASSETS=eu-central-1 \ - CACHE_ASSETS=false \ + CACHE_ASSETS=true \ ASSETS_SIZE_LIMIT=6291456 \ - FS_CLEAN_HRS=12 - + FS_CLEAN_HRS=12 \ + LOG_QUEUE_STATS_INTERVAL_SEC=60 RUN mkdir $FS_DIR #VOLUME [ $FS_DIR ] # Uncomment in case of using Bind mount. From 9856e36f44b0e8c4a0bed33d90fd1a8a495059e2 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Thu, 28 Apr 2022 17:55:56 +0200 Subject: [PATCH 012/221] fix(backend): fixed possible panic in the defer --- backend/services/storage/main.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/backend/services/storage/main.go b/backend/services/storage/main.go index 9579fbe4f..21054a750 100644 --- a/backend/services/storage/main.go +++ b/backend/services/storage/main.go @@ -16,6 +16,8 @@ import ( "openreplay/backend/pkg/storage" ) +const RetryTimeout = 2 * time.Minute + func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) @@ -28,17 +30,18 @@ func main() { if retryCount <= 0 { return } + file, err := os.Open(FS_DIR + "/" + key) - defer file.Close() if err != nil { log.Printf("File error: %v; Will retry %v more time(s)\n", err, retryCount) - time.AfterFunc(2*time.Minute, func() { + time.AfterFunc(RetryTimeout, func() { uploadKey(key, retryCount-1) }) - } else { - if err := storage.Upload(gzipFile(file), key, "application/octet-stream", true); err != nil { - log.Fatalf("Storage upload error: %v\n", err) - } + } + defer file.Close() + + if err := storage.Upload(gzipFile(file), key, "application/octet-stream", true); err != nil { + log.Fatalf("Storage upload error: %v\n", err) } } From 0bbf8012f144a123579f244ed289fec5b1ceb627 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Thu, 28 Apr 2022 18:02:56 +0200 Subject: [PATCH 013/221] fix(backend): added missed return in error case --- backend/services/storage/main.go | 1 + 1 file changed, 1 insertion(+) diff --git 
a/backend/services/storage/main.go b/backend/services/storage/main.go index 21054a750..3e391426a 100644 --- a/backend/services/storage/main.go +++ b/backend/services/storage/main.go @@ -37,6 +37,7 @@ func main() { time.AfterFunc(RetryTimeout, func() { uploadKey(key, retryCount-1) }) + return } defer file.Close() From 1e5deed0d503a56754cf88d17aeaba906f33ea90 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Apr 2022 19:14:23 +0200 Subject: [PATCH 014/221] feat(backend/storage):split files into 2 --- backend/services/storage/main.go | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/backend/services/storage/main.go b/backend/services/storage/main.go index 3e391426a..47691ad36 100644 --- a/backend/services/storage/main.go +++ b/backend/services/storage/main.go @@ -6,6 +6,10 @@ import ( "strconv" "time" + "bytes" + "io" + "ioutill" + "os/signal" "syscall" @@ -18,6 +22,8 @@ import ( const RetryTimeout = 2 * time.Minute +const SESSION_FILE_SPLIT_SIZE = 200000 // ~200 kB + func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) @@ -41,8 +47,27 @@ func main() { } defer file.Close() - if err := storage.Upload(gzipFile(file), key, "application/octet-stream", true); err != nil { - log.Fatalf("Storage upload error: %v\n", err) + fileR2 := new(bytes.Buffer) + fileR1 := io.TeeReader(file, fileR2) + + startBytes := make([]byte, SESSION_FILE_SPLIT_SIZE) + nRead, err := fileR1.Read(startBytes) + if err != nil { + log.Printf("File read error: %f", err) + return + } + startReader = bytes.NewBuffer(startBytes) + if err := storage.Upload(gzipFile(startReader), key+"-s", "application/octet-stream", true); err != nil { + log.Fatalf("Storage: start upload failed. %v\n", err) + } + if nRead == SESSION_FILE_SPLIT_SIZE { + if err := storage.Upload(gzipFile(fileR1), key+"-e", "application/octet-stream", true); err != nil { + log.Fatalf("Storage: end upload failed. 
%v\n", err) + } + } + + if err := storage.Upload(gzipFile(fileR2), key, "application/octet-stream", true); err != nil { + log.Fatalf("Storage: upload failed. %v\n", err) } } From 6412c2a862c7939098d344bb122097353b3c539f Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Apr 2022 19:21:45 +0200 Subject: [PATCH 015/221] fix(backend/storage): codefix --- backend/services/storage/gzip.go | 2 +- backend/services/storage/main.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/services/storage/gzip.go b/backend/services/storage/gzip.go index f3e96394a..0e662efaa 100644 --- a/backend/services/storage/gzip.go +++ b/backend/services/storage/gzip.go @@ -5,7 +5,7 @@ import ( "io" ) -func gzipFile(file io.ReadSeeker) io.Reader { +func gzipFile(file io.Reader) io.Reader { reader, writer := io.Pipe() go func() { gw, _ := gzip.NewWriterLevel(writer, gzip.BestSpeed) diff --git a/backend/services/storage/main.go b/backend/services/storage/main.go index 47691ad36..9afc7d819 100644 --- a/backend/services/storage/main.go +++ b/backend/services/storage/main.go @@ -8,7 +8,7 @@ import ( "bytes" "io" - "ioutill" + //"io/ioutil" "os/signal" "syscall" @@ -56,7 +56,7 @@ func main() { log.Printf("File read error: %f", err) return } - startReader = bytes.NewBuffer(startBytes) + startReader := bytes.NewBuffer(startBytes) if err := storage.Upload(gzipFile(startReader), key+"-s", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. 
%v\n", err) } From 31f9e49673c159c0b0a5b0fca178a3c8881bd59d Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Fri, 29 Apr 2022 11:58:07 +0200 Subject: [PATCH 016/221] chore(vagrant): Adding development readme Signed-off-by: rjshrjndrn --- api/development.md | 43 ++++++++++++++++++++++++ backend/development.md | 21 ++++++++++++ frontend/development.md | 23 +++++++++++++ scripts/vagrant/README.md | 69 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 156 insertions(+) create mode 100644 api/development.md create mode 100644 backend/development.md create mode 100644 frontend/development.md create mode 100644 scripts/vagrant/README.md diff --git a/api/development.md b/api/development.md new file mode 100644 index 000000000..116518604 --- /dev/null +++ b/api/development.md @@ -0,0 +1,43 @@ +### Prerequisites + +- [Vagrant](../scripts/vagrant/README.md) +- Python 3.9 +- Pipenv + +### Development environment + +```bash +**FOSS:** +cd openreplay/api +# Make your own copy of .env file and edit it as you want +cp .env.dev .env + +# Create a .venv folder to contain all you dependencies +mkdir .venv + +# Installing dependencies (pipenv will detect the .venv folder and use it as a target) +pipenv install -r requirements.txt [--skip-lock] + +# Create a .venv folder to contain all you dependencies +mkdir .venv + +# Installing dependencies (pipenv will detect the .venv folder and use it as a target) +pipenv install -r requirements.txt [--skip-lock] + +# These commands must bu used everytime you make changes to FOSS. 
+# To clean the unused files before getting new ones +bash clean.sh +# To copy commun files from FOSS +bash prepare-dev.sh +``` + +### Building and deploying locally + +```bash +cd openreplay-contributions +vagrant ssh +cd openreplay-dev/openreplay/scripts/helmcharts +# For complete list of options +# bash local_deploy.sh help +bash local_deploy.sh api +``` diff --git a/backend/development.md b/backend/development.md new file mode 100644 index 000000000..0e7295c8b --- /dev/null +++ b/backend/development.md @@ -0,0 +1,21 @@ +### Prerequisites + +- [Vagrant](../scripts/vagrant/README.md) + +### Development environment + +```bash +docker build -f Dockerfile.bundle . + +``` + +### Building and deploying locally + +```bash +cd openreplay-contributions +vagrant ssh +cd openreplay-dev/openreplay/scripts/helmcharts +# For complete list of options +# bash local_deploy.sh help +bash local_deploy.sh +``` diff --git a/frontend/development.md b/frontend/development.md new file mode 100644 index 000000000..904c85f46 --- /dev/null +++ b/frontend/development.md @@ -0,0 +1,23 @@ +### Prerequisites + +- [Vagrant](../scripts/vagrant/README.md) +- Node Version 17 +- npm + +### Development environment + +```bash +cd openreplay/frontend +# Change endpoints to local openreplay installation +sed -i 's#PRODUCTION: true#PRODUCTION: false#g' env.js +sed -i "s#API_EDP: .*#API_EDP: 'http://openreplay.local/api',#g" env.js +sed -i "s#ASSETS_HOST: .*#ASSETS_HOST: 'http://openreplay.local/assets',#g" env.js + +# Installing dependencies +npm install + +# Generating assets +npm run gen:css-types +npm run gen:icons +npm run gen:colors +``` diff --git a/scripts/vagrant/README.md b/scripts/vagrant/README.md new file mode 100644 index 000000000..ffe132c73 --- /dev/null +++ b/scripts/vagrant/README.md @@ -0,0 +1,69 @@ + + +### Installation + +- Vagrant: [https://www.vagrantup.com/downloads](https://www.vagrantup.com/downloads) +- VirtualBox: 
[https://www.virtualbox.org/wiki/Downloads](https://www.virtualbox.org/wiki/Downloads) + +### Configuration + +```bash +mkdir openreplay-contributions +cd openreplay-contributions +git clone https://github.com/openreplay/openreplay -b dev +cp -rf openreplay/scripts/vagrant/ . +vagrant up +``` + +### To access OpenReplay instance + +```bash +Add ip address from about output to your local resolver + +## Mac/Linux + +Copy paste the command from the vagrant output + +## Windows + +Use the following instructions if you’re running Windows 10 or Windows 8: + Press the Windows key. + Type Notepad in the search field. + In the search results, right-click Notepad and select Run as administrator. + From Notepad, open the following file: + c:\Windows\System32\Drivers\etc\hosts + add the below line in the hosts file + openreplay.local + Select File > Save to save your changes. + +**Open browser** +http://openreplay.local +``` + +### To start developing + +- [Frontend](../../frontend/development.md) +- [API](../../api/development.md) +- [Backend](../../backend/development.md) + +### Notes + +It’ll be a good practice to take a snapshot once the initial setup is complete, so that if something is not working as expected, you can always fall back to a stable known version. 
+```bash +cd openreplay-dev +vagrant snapshot save +# For example +vagrant snapshot save openreplay-160-base +``` + +```bash +# To restore the snapshot +cd openreplay-dev +vagrant snapshot restore openreplay-160-base +``` + + From 63e897594f5205242905fce7e11844392a5d8221 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 13:19:11 +0200 Subject: [PATCH 017/221] feat(db): EE fixed widget-size for upgrade --- ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql index d7eeff911..bb0d7b7c0 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql @@ -305,7 +305,7 @@ VALUES ('Captured sessions', 'overview', '{ "position": 0 }', true, true, true, 'missing_resources', 'predefined', 'table'), ('Slowest Resources', 'resources', '{ - "col": 2, + "col": 4, "row": 2, "position": 0 }', true, true, true, 'slowest_resources', 'predefined', 'table'), From fff8f75fd0831dc47d6df4769c1a7b515a49efb8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 14:06:00 +0200 Subject: [PATCH 018/221] feat(api): changed Dockerfile --- api/Dockerfile | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index f3b5e85f5..000576611 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,20 +1,7 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env ENV APP_NAME chalice -# Installing Nodejs -RUN apt update && apt install -y curl && \ - curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ - apt install -y nodejs && \ - apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* -RUN cd sourcemap-reader && \ - npm install - # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -22,5 +9,23 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* \ + +COPY requirements.txt requirements.txt +RUN pip install -r requirements.txt +WORKDIR /work_tmp +COPY sourcemap-reader/*.json . +RUN npm install + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/. 
+ ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh From 84a43bcd8b4d9c22b9144c10843b9415c4796245 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 14:16:36 +0200 Subject: [PATCH 019/221] feat(api): fixed description default value --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 105ead87e..091eae0c3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,7 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) - description: str = Field(default=None) + description: str = Field(default='') is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) From 423f416015a4854d4c6c54acef5fd9322216c7d0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 16:08:38 +0200 Subject: [PATCH 020/221] feat(api): fixed description optional value --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 091eae0c3..1d92f5fce 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,7 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) - description: str = Field(default='') + description: Optional[str] = Field(default='') is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) From 144e58adefe27372c4df4c3538cd0b44c2cc91ce Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:00:40 +0200 Subject: [PATCH 021/221] feat(api): updated dependencies --- api/requirements.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index 198b535dd..d615851d1 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,15 +1,15 @@ 
-requests==2.26.0 -urllib3==1.26.6 -boto3==1.16.1 -pyjwt==1.7.1 -psycopg2-binary==2.8.6 +requests==2.27.1 +urllib3==1.26.9 +boto3==1.22.6 +pyjwt==2.3.0 +psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -fastapi==0.75.0 -uvicorn[standard]==0.17.5 +fastapi==0.75.2 +uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 -apscheduler==3.8.1 \ No newline at end of file +apscheduler==3.9.1 \ No newline at end of file From f90a25c75a3505821e86b3c0a09b097ca4ddf76f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:10:48 +0200 Subject: [PATCH 022/221] feat(api): EE updated dependencies --- ee/api/requirements.txt | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 5909d31c1..f14d6022d 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,16 +1,16 @@ -requests==2.26.0 -urllib3==1.26.6 -boto3==1.16.1 -pyjwt==1.7.1 -psycopg2-binary==2.8.6 +requests==2.27.1 +urllib3==1.26.9 +boto3==1.22.6 +pyjwt==2.3.0 +psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -clickhouse-driver==0.2.2 +clickhouse-driver==0.2.3 python3-saml==1.12.0 -fastapi==0.75.0 +fastapi==0.75.2 python-multipart==0.0.5 -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 -apscheduler==3.8.1 \ No newline at end of file +apscheduler==3.9.1 \ No newline at end of file From 36b466665c30ba0e51bd094c5e438afb2c9b5139 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:14:25 +0200 Subject: [PATCH 023/221] feat(api): changed replay file URL --- api/chalicelib/core/sessions_mobs.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 8f61d436b..ccbda20bb 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -5,14 +5,23 @@ from 
chalicelib.utils.s3 import client def get_web(sessionId): - return client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': config("sessions_bucket"), - 'Key': str(sessionId) - }, - ExpiresIn=100000 - ) + return [ + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + }, + ExpiresIn=100000 + ), + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + "e" + }, + ExpiresIn=100000 + )] def get_ios(sessionId): From 47be240dfb34d20458fdfc80856c28510205deb9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:32:17 +0200 Subject: [PATCH 024/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/Dockerfile b/api/Dockerfile index 000576611..20d9f649a 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -17,9 +17,9 @@ RUN apt update && apt install -y curl && \ apt remove --purge -y curl && \ rm -rf /var/lib/apt/lists/* \ +WORKDIR /work_tmp COPY requirements.txt requirements.txt RUN pip install -r requirements.txt -WORKDIR /work_tmp COPY sourcemap-reader/*.json . RUN npm install From 8d5cf84d9069269db2b7e4f9d2f261eba2d68b92 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:36:52 +0200 Subject: [PATCH 025/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/Dockerfile b/api/Dockerfile index 20d9f649a..ae2ded77c 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -20,7 +20,7 @@ RUN apt update && apt install -y curl && \ WORKDIR /work_tmp COPY requirements.txt requirements.txt RUN pip install -r requirements.txt -COPY sourcemap-reader/*.json . 
+COPY sourcemap-reader/*.json ./ RUN npm install WORKDIR /work From 42f3b6d0186efc2f39870d687a0b84f474aca41a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:50:09 +0200 Subject: [PATCH 026/221] feat(api): changed Dockerfile --- api/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index ae2ded77c..682286786 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -18,10 +18,10 @@ RUN apt update && apt install -y curl && \ rm -rf /var/lib/apt/lists/* \ WORKDIR /work_tmp -COPY requirements.txt requirements.txt -RUN pip install -r requirements.txt -COPY sourcemap-reader/*.json ./ -RUN npm install +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt +COPY sourcemap-reader/*.json /work_tmp/ +RUN cd /work_tmp && npm install WORKDIR /work COPY . . From 172508dcf3b50c7bd127e71cccd3cc6a24abbe99 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 18:21:47 +0200 Subject: [PATCH 027/221] feat(DB): changed sessions_metadata sort expression --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 72 ++++++++++++++++++- .../clickhouse/create/sessions_metadata.sql | 2 +- 2 files changed, 72 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 412f3ae2a..a8f90613d 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -1 +1,71 @@ -ALTER TABLE sessions DROP COLUMN pages_count; \ No newline at end of file +ALTER TABLE sessions + DROP COLUMN pages_count; + +CREATE TABLE default.sessions_metadata_temp +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device 
Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 
'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime) + TTL datetime + INTERVAL 1 MONTH; + +INSERT INTO default.sessions_metadata_temp(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, + user_browser, user_browser_version, user_device, user_device_type, + user_country, + datetime, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, + metadata_4, + metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.sessions_metadata; + +DROP TABLE default.sessions_metadata; +RENAME TABLE default.sessions_metadata_temp TO default.sessions_metadata; \ No newline at end of file diff 
--git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index ddf8aed01..f6b77930e 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -27,5 +27,5 @@ CREATE TABLE IF NOT EXISTS sessions_metadata metadata_10 Nullable(String) ) ENGINE = MergeTree PARTITION BY toDate(datetime) - ORDER BY (session_id) + ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file From acaef59590ff794962768385f896814e0053d0f2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 20:37:37 +0200 Subject: [PATCH 028/221] feat(DB): traces/trails index feat(api): get all traces/trails --- ee/api/chalicelib/core/traces.py | 26 ++++++++++++++++++- ee/api/routers/ee.py | 11 +++++++- ee/api/schemas_ee.py | 10 +++++++ .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 2 ++ .../db/init_dbs/postgresql/init_schema.sql | 1 + 5 files changed, 48 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index fd0ae6c2b..d77b0f580 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -9,7 +9,8 @@ from pydantic import BaseModel, Field from starlette.background import BackgroundTask import app as main_app -from chalicelib.utils import pg_client +import schemas_ee +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from schemas import CurrentContext @@ -151,6 +152,29 @@ async def process_traces_queue(): await write_traces_batch(traces) +def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + """SELECT COUNT(*) AS count, + COALESCE(JSONB_AGG(full_traces) + FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions + FROM (SELECT *, 
ROW_NUMBER() OVER (ORDER BY created_at) AS rn + FROM traces + WHERE tenant_id=%(tenant_id)s + AND created_at>=%(startDate)s + AND created_at<=%(endDate)s + ORDER BY created_at) AS full_traces;""", + {"tenant_id": tenant_id, + "startDate": data.startDate, + "endDate": data.endDate, + "p_start": (data.page - 1) * data.limit, + "p_end": data.page * data.limit}) + ) + rows = cur.fetchall() + return helper.list_to_camel_case(rows) + + cron_jobs = [ {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60), "misfire_grace_time": 20} diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index 1a9589eaa..f63d0dd3a 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,6 +1,7 @@ -from chalicelib.core import roles +from chalicelib.core import roles, traces from chalicelib.core import unlock from chalicelib.utils import assist_helper +from chalicelib.utils.TimeUTC import TimeUTC unlock.check() @@ -58,3 +59,11 @@ def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_contex @app.get('/assist/credentials', tags=["assist"]) def get_assist_credentials(): return {"data": assist_helper.get_full_config()} + + +@app.post('/trails', tags=["traces", "trails"]) +def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': traces.get_all(tenant_id=context.tenant_id, data=data) + } diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 59a58f94b..06ae8f2ba 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -3,6 +3,7 @@ from typing import Optional, List from pydantic import BaseModel, Field import schemas +from chalicelib.utils.TimeUTC import TimeUTC class RolePayloadSchema(BaseModel): @@ -22,3 +23,12 @@ class CreateMemberSchema(schemas.CreateMemberSchema): class EditMemberSchema(schemas.EditMemberSchema): roleId: int = Field(...) 
+ + +class TrailSearchPayloadSchema(schemas._PaginatedSchema): + startDate: int = Field(default=TimeUTC.now(-7)) + endDate: int = Field(default=TimeUTC.now(1)) + user_id: Optional[int] = Field(default=None) + + class Config: + alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index e94ccc4e1..00d871cac 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -9,4 +9,6 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 7d6bdece7..d78a99c27 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -785,6 +785,7 @@ $$ ); CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id); CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id); + CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); From 21d8d28a791d53641d3ddf9b0c12e57b98c8911f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 20:42:08 +0200 Subject: [PATCH 029/221] feat(api): return createdAt with the list of users --- api/chalicelib/core/users.py | 2 ++ ee/api/chalicelib/core/users.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index ceada34f8..0ef2f2088 100644 --- 
a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -377,6 +377,7 @@ def get_members(tenant_id): users.email, users.role, users.name, + users.created_at, basic_authentication.generated_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, @@ -393,6 +394,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index b70f6a269..d34e2f5f9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -414,6 +414,7 @@ def get_members(tenant_id): users.email, users.role, users.name, + users.created_at, basic_authentication.generated_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, @@ -435,6 +436,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: From ef0edebb3d2caf360c6d4264d74f5b357b0ab78c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 11:56:03 +0200 Subject: [PATCH 030/221] feat(DB): traces/trails index feat(api): get all possible traces/trails actions feat(api): search traces/trails by actions feat(api): search traces/trails by user --- ee/api/chalicelib/core/traces.py | 26 +++++++++++++++---- ee/api/routers/ee.py | 5 ++++ ee/api/schemas_ee.py | 1 + .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 1 + 5 files changed, 29 insertions(+), 5 deletions(-) diff --git 
a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index d77b0f580..64c1c6df1 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -154,27 +154,43 @@ async def process_traces_queue(): def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): with pg_client.PostgresClient() as cur: + conditions = ["tenant_id=%(tenant_id)s", "created_at>=%(startDate)s", "created_at<=%(endDate)s"] + if data.user_id is not None: + conditions.append("user_id=%(user_id)s") + if data.action is not None: + conditions.append("action=%(action)s") cur.execute( cur.mogrify( - """SELECT COUNT(*) AS count, + f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(full_traces) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY created_at) AS rn FROM traces - WHERE tenant_id=%(tenant_id)s - AND created_at>=%(startDate)s - AND created_at<=%(endDate)s + WHERE {" AND ".join(conditions)} ORDER BY created_at) AS full_traces;""", {"tenant_id": tenant_id, "startDate": data.startDate, "endDate": data.endDate, "p_start": (data.page - 1) * data.limit, - "p_end": data.page * data.limit}) + "p_end": data.page * data.limit, + **data.dict()}) ) rows = cur.fetchall() return helper.list_to_camel_case(rows) +def get_available_actions(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify( + f"""SELECT DISTINCT action + FROM traces + WHERE tenant_id=%(tenant_id)s + ORDER BY 1""", + {"tenant_id": tenant_id})) + rows = cur.fetchall() + return [r["action"] for r in rows] + + cron_jobs = [ {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60), "misfire_grace_time": 20} diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index f63d0dd3a..9a79551b7 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -67,3 +67,8 @@ def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), return { 'data': 
traces.get_all(tenant_id=context.tenant_id, data=data) } + + +@app.post('/trails/actions', tags=["traces", "trails"]) +def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)): + return {'data': traces.get_available_actions(tenant_id=context.tenant_id)} diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 06ae8f2ba..50eb3d03f 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -29,6 +29,7 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): startDate: int = Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) + action: Optional[str] = Field(default=None) class Config: alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 00d871cac..b28f28b62 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -11,4 +11,5 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); +CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index d78a99c27..a59e25e54 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -786,6 +786,7 @@ $$ CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id); CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id); CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); + CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); CREATE TYPE metric_view_type AS ENUM 
('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); From ac9c10393f0f0c40270285bb7b3c206a95eb5f2d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:07:03 +0200 Subject: [PATCH 031/221] feat(api): fixed return createdAt with the list of users --- api/chalicelib/core/users.py | 2 +- ee/api/chalicelib/core/users.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 0ef2f2088..40cc0f7db 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -394,7 +394,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: - r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index d34e2f5f9..cf2a808e7 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -436,7 +436,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: - r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: From 50b476316a3c87623205fbbeb01ff3eec3b37287 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:11:38 +0200 Subject: [PATCH 032/221] feat(api): changed root path --- ee/api/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/app.py b/ee/api/app.py index 0041ec12e..505f1393c 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -16,7 +16,7 @@ from routers.crons import core_crons from routers.crons import core_dynamic_crons from routers.subs 
import dashboard, insights, metrics, v1_api_ee -app = FastAPI() +app = FastAPI(root_path="/api") @app.middleware('http') From 0c84c89b4f8afe8e0994d1761ebc0a9ae0854bc2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:16:07 +0200 Subject: [PATCH 033/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- ee/api/Dockerfile | 33 +++++++++++++++++++-------------- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index 682286786..cc8f36ece 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -15,7 +15,7 @@ RUN apt update && apt install -y curl && \ curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ apt install -y nodejs && \ apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* \ + rm -rf /var/lib/apt/lists/* WORKDIR /work_tmp COPY requirements.txt /work_tmp/requirements.txt diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index aee6aecb2..c99e576e4 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,21 +1,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env ENV APP_NAME chalice -# Installing Nodejs -RUN apt update && apt install -y curl && \ - curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ - apt install -y nodejs && \ - apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* && \ - cd sourcemap-reader && \ - npm install - +RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -23,5 +10,23 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* + +WORKDIR /work_tmp +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt +COPY sourcemap-reader/*.json /work_tmp/ +RUN cd /work_tmp && npm install + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/. 
+ ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh From ec445f88c7bd6216ed5330972a9e5ca16c40f31c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 15:09:50 +0200 Subject: [PATCH 034/221] feat(api): EE updated authorizer --- ee/api/chalicelib/core/authorizers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py index 149d570ab..5adf3e61a 100644 --- a/ee/api/chalicelib/core/authorizers.py +++ b/ee/api/chalicelib/core/authorizers.py @@ -52,7 +52,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None): key=config("jwt_secret"), algorithm=config("jwt_algorithm") ) - return token.decode("utf-8") + return token def api_key_authorizer(token): From d8078c220dcdf0831ce0807f2b16940cb039596a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 17:27:43 +0200 Subject: [PATCH 035/221] feat(api): search user trails by username feat(db): index to search user trails by username --- ee/api/chalicelib/core/traces.py | 34 ++++++++++++------- ee/api/schemas_ee.py | 4 ++- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 1 + 4 files changed, 26 insertions(+), 14 deletions(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 64c1c6df1..5fbfafc0c 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -9,6 +9,7 @@ from pydantic import BaseModel, Field from starlette.background import BackgroundTask import app as main_app +import schemas import schemas_ee from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -154,29 +155,36 @@ async def process_traces_queue(): def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): with pg_client.PostgresClient() as cur: - conditions = ["tenant_id=%(tenant_id)s", "created_at>=%(startDate)s", "created_at<=%(endDate)s"] + conditions = 
["traces.tenant_id=%(tenant_id)s", + "traces.created_at>=%(startDate)s", + "traces.created_at<=%(endDate)s"] + params = {"tenant_id": tenant_id, + "startDate": data.startDate, + "endDate": data.endDate, + "p_start": (data.page - 1) * data.limit, + "p_end": data.page * data.limit, + **data.dict()} if data.user_id is not None: conditions.append("user_id=%(user_id)s") if data.action is not None: conditions.append("action=%(action)s") + if data.query is not None and len(data.query) > 0: + conditions.append("users.name ILIKE %(query)s") + params["query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) cur.execute( cur.mogrify( f"""SELECT COUNT(*) AS count, - COALESCE(JSONB_AGG(full_traces) + COALESCE(JSONB_AGG(full_traces ORDER BY rn) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY created_at) AS rn - FROM traces + FROM (SELECT traces.*,users.email,users.name AS username, + ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn + FROM traces LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY created_at) AS full_traces;""", - {"tenant_id": tenant_id, - "startDate": data.startDate, - "endDate": data.endDate, - "p_start": (data.page - 1) * data.limit, - "p_end": data.page * data.limit, - **data.dict()}) + ORDER BY traces.created_at {data.order}) AS full_traces;""", params) ) - rows = cur.fetchall() - return helper.list_to_camel_case(rows) + rows = cur.fetchone() + return helper.dict_to_camel_case(rows) def get_available_actions(tenant_id): diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 50eb3d03f..9d1440b44 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -1,4 +1,4 @@ -from typing import Optional, List +from typing import Optional, List, Literal from pydantic import BaseModel, Field @@ -29,7 +29,9 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): startDate: int = 
Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) + query: Optional[str] = Field(default=None) action: Optional[str] = Field(default=None) + order: Literal["asc", "desc"] = Field(default="desc") class Config: alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index b28f28b62..46fd953bf 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -12,4 +12,5 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); +CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a59e25e54..c6d05dd34 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -257,6 +257,7 @@ $$ internal_id text NULL DEFAULT NULL ); CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; + CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); CREATE TABLE IF NOT EXISTS basic_authentication From 516e5b04468474b549b7f18814a64d72db94645d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 17:43:55 +0200 Subject: [PATCH 036/221] feat(api): changed search user trails by username --- ee/api/chalicelib/core/traces.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 5fbfafc0c..35339a133 100644 --- a/ee/api/chalicelib/core/traces.py +++ 
b/ee/api/chalicelib/core/traces.py @@ -170,8 +170,9 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): conditions.append("action=%(action)s") if data.query is not None and len(data.query) > 0: conditions.append("users.name ILIKE %(query)s") + conditions.append("users.tenant_id = %(tenant_id)s") params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator._contains) cur.execute( cur.mogrify( f"""SELECT COUNT(*) AS count, From 202bf73456a1a6efcd9e427fbf787e941ada958c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 18:30:59 +0200 Subject: [PATCH 037/221] feat(api): vault support --- ee/api/.gitignore | 1 - .../core/sessions_favorite_viewed.py | 74 +++++++++++++++++++ ee/api/chalicelib/utils/s3_extra.py | 30 ++++++++ 3 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 ee/api/chalicelib/core/sessions_favorite_viewed.py create mode 100644 ee/api/chalicelib/utils/s3_extra.py diff --git a/ee/api/.gitignore b/ee/api/.gitignore index c5a8d9ce4..fb839d5e6 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -207,7 +207,6 @@ Pipfile /chalicelib/core/mobile.py /chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py -/chalicelib/core/sessions_favorite_viewed.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py /chalicelib/core/significance.py diff --git a/ee/api/chalicelib/core/sessions_favorite_viewed.py b/ee/api/chalicelib/core/sessions_favorite_viewed.py new file mode 100644 index 000000000..bef7787d1 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_favorite_viewed.py @@ -0,0 +1,74 @@ +from chalicelib.core import sessions +from chalicelib.utils import pg_client, s3_extra +from decouple import config + + +def add_favorite_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + INSERT INTO public.user_favorite_sessions + (user_id, 
session_id) + VALUES + (%(userId)s,%(sessionId)s);""", + {"userId": user_id, "sessionId": session_id}) + ) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, + include_fav_viewed=True) + + +def remove_favorite_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + DELETE FROM public.user_favorite_sessions + WHERE + user_id = %(userId)s + AND session_id = %(sessionId)s;""", + {"userId": user_id, "sessionId": session_id}) + ) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, + include_fav_viewed=True) + + +def add_viewed_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + INSERT INTO public.user_viewed_sessions + (user_id, session_id) + VALUES + (%(userId)s,%(sessionId)s) + ON CONFLICT DO NOTHING;""", + {"userId": user_id, "sessionId": session_id}) + ) + + +def favorite_session(project_id, user_id, session_id): + if favorite_session_exists(user_id=user_id, session_id=session_id): + s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_D_VALUE', default='default')) + s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_D_VALUE', default='default')) + return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_L_VALUE', default='vault')) + s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_L_VALUE', default='vault')) + return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + + +def view_session(project_id, user_id, session_id): + return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id) + + +def favorite_session_exists(user_id, session_id): + with 
pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + """SELECT + session_id + FROM public.user_favorite_sessions + WHERE + user_id = %(userId)s + AND session_id = %(sessionId)s""", + {"userId": user_id, "sessionId": session_id}) + ) + r = cur.fetchone() + return r is not None diff --git a/ee/api/chalicelib/utils/s3_extra.py b/ee/api/chalicelib/utils/s3_extra.py new file mode 100644 index 000000000..bd74d8277 --- /dev/null +++ b/ee/api/chalicelib/utils/s3_extra.py @@ -0,0 +1,30 @@ +from chalicelib.utils.s3 import client +from decouple import config + +def tag_file( session_id, tag_key='retention', tag_value='vault'): + return client.put_object_tagging( + Bucket=config("sessions_bucket"), + Key=session_id, + # VersionId='string', + # ContentMD5='string', + # ChecksumAlgorithm='CRC32'|'CRC32C'|'SHA1'|'SHA256', + Tagging={ + 'TagSet': [ + { + 'Key': tag_key, + 'Value': tag_value + }, + ] + }, + # ExpectedBucketOwner='string', + # RequestPayer='requester' + ) + + # generate_presigned_url( + # 'put_object', + # Params={ + # 'Bucket': bucket, + # 'Key': key + # }, + # ExpiresIn=expires_in + # ) From 7625eb9f8c9be16ff17496f5871d687273de5462 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 18:36:46 +0200 Subject: [PATCH 038/221] feat(alerts): changed Dockerfile.alerts --- api/Dockerfile.alerts | 15 +++++++++------ ee/api/Dockerfile.alerts | 14 +++++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index 76e8c262a..c7e8c7a37 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -1,13 +1,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh -ENV pg_minconn 2 ENV APP_NAME alerts - +ENV pg_minconn 2 # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -15,5 +10,13 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh + ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh \ No newline at end of file diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index 6aec0f98b..2864848e9 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -2,12 +2,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* -WORKDIR /work -COPY . . -RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh -ENV pg_minconn 2 ENV APP_NAME alerts +ENV pg_minconn 2 # Add Tini # Startup daemon @@ -16,5 +12,13 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt + +WORKDIR /work +COPY . . 
+RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh + ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh \ No newline at end of file From d3be02fd9d265687066832333fad1cf38005de53 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 9 May 2022 15:30:28 +0200 Subject: [PATCH 039/221] feat(api): user trail limit changed --- ee/api/schemas_ee.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 9d1440b44..794dfdd64 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -26,6 +26,7 @@ class EditMemberSchema(schemas.EditMemberSchema): class TrailSearchPayloadSchema(schemas._PaginatedSchema): + limit: int = Field(default=200, gt=0) startDate: int = Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) From efec096ffefa8d94426afc41a8842716c9dbd5b8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 10 May 2022 17:13:19 +0200 Subject: [PATCH 040/221] feat(api): fixed sourcemaps reader endpoint --- api/.env.default | 2 +- ee/api/.env.default | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/.env.default b/api/.env.default index 7dd248bec..30ff0b02d 100644 --- a/api/.env.default +++ b/api/.env.default @@ -44,6 +44,6 @@ sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/ +sourcemaps_reader=http://127.0.0.1:9000/sourcemaps stage=default-foss version_number=1.4.0 \ No newline at end of file diff --git a/ee/api/.env.default b/ee/api/.env.default index 094579f1b..8215908b2 100644 --- a/ee/api/.env.default +++ b/ee/api/.env.default @@ -53,6 +53,6 @@ sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/ +sourcemaps_reader=http://127.0.0.1:9000/sourcemaps stage=default-ee version_number=1.0.0 From 
ac4e32aba3646a0abdb10fea896d2e1b1a48194c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 12 May 2022 16:24:58 +0200 Subject: [PATCH 041/221] feat(DB): changed partition expression --- ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql | 2 +- .../helm/db/init_dbs/clickhouse/create/sessions_metadata.sql | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql index 7781d2328..b9322a403 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql @@ -16,6 +16,6 @@ CREATE TABLE IF NOT EXISTS clicks label String, hesitation_time Nullable(UInt32) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql index eed67c990..fb4b2c881 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql @@ -17,6 +17,6 @@ CREATE TABLE IF NOT EXISTS customs payload Nullable(String), level Enum8('info'=0, 'error'=1) DEFAULT 'info' ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL 
datetime + INTERVAL 1 MONTH; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql index 4560f6500..98052071a 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql @@ -18,6 +18,6 @@ CREATE TABLE IF NOT EXISTS errors message String, error_id String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql index 523d2d468..83b475d0f 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql @@ -15,6 +15,6 @@ CREATE TABLE IF NOT EXISTS inputs datetime DateTime, label String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql index 9770fb380..90a90a104 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql @@ -20,7 +20,7 @@ CREATE TABLE IF NOT EXISTS longtasks container_name String, container_src String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql index 71d9503cf..3902abd33 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql @@ -35,6 +35,6 @@ CREATE TABLE IF NOT 
EXISTS pages dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), load_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(load_event_end, load_event_start), minus(load_event_end, load_event_start), Null) ) ENGINE = MergeTree -PARTITION BY toDate(datetime) +PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql index fa64967f4..650895662 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql @@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS performance avg_used_js_heap_size UInt64, max_used_js_heap_size UInt64 ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql index cc2c7cd6d..bfd4f0ea1 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql @@ -27,6 +27,6 @@ CREATE TABLE IF NOT EXISTS resources method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), status Nullable(UInt16) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql index 712cbd6d4..59df20242 100644 --- 
a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql @@ -20,6 +20,6 @@ CREATE TABLE IF NOT EXISTS sessions utm_medium Nullable(String), utm_campaign Nullable(String) ) ENGINE = ReplacingMergeTree(duration) - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index f6b77930e..2884b4515 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS sessions_metadata metadata_9 Nullable(String), metadata_10 Nullable(String) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file From 839f4c092766c98e7f8c39e4f2b21141e0fda236 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 13 May 2022 15:49:17 +0200 Subject: [PATCH 042/221] feat(api): fixed CH client format --- ee/api/chalicelib/utils/ch_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index aa45699f7..a51230a19 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -26,7 +26,7 @@ class ClickHouseClient: return self.__client def format(self, query, params): - return self.__client.substitute_params(query, params) + return self.__client.substitute_params(query, params, self.__client.connection.context) def __exit__(self, *args): pass From c84d39d38ec6157d293fab2d6b0329f3e50469e6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 13 May 2022 19:15:31 +0200 Subject: [PATCH 043/221] 
feat(api): upgraded python base image feat(alerts): upgraded python base image --- api/Dockerfile | 2 +- api/Dockerfile.alerts | 2 +- api/Dockerfile.bundle | 2 +- ee/api/Dockerfile | 2 +- ee/api/Dockerfile.alerts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index cc8f36ece..4465b0432 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME chalice diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index c7e8c7a37..7d8dd8200 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME alerts diff --git a/api/Dockerfile.bundle b/api/Dockerfile.bundle index e5ccd23f6..2f58635f2 100644 --- a/api/Dockerfile.bundle +++ b/api/Dockerfile.bundle @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" WORKDIR /work COPY . . 
diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index c99e576e4..b5dffb40d 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME chalice diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index 2864848e9..ae8d308c8 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* From f054b130bfe71eb351f64d4f60edf218ee9a6419 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 16 May 2022 18:24:16 +0200 Subject: [PATCH 044/221] feat(DB): changed metrics category from Overview to Monitoring Essentials --- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 113 ++++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 40 +++---- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 112 +++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 40 +++---- 4 files changed, 265 insertions(+), 40 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 46fd953bf..d8624d06d 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -13,4 +13,117 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'Monitoring Essentials', '{ + "col": 
1, + "row": 1, + "position": 0 +}', true, true, true, 'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. of Visited Pages', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'Monitoring Essentials', '{ + "col": 
1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index c6d05dd34..95f247af1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1269,102 +1269,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, 
is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'overview', '{ +VALUES ('Captured sessions', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'overview', '{ + ('Request Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'overview', '{ + ('Page Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'overview', '{ + ('Image Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'overview', '{ + ('DOM Content Load Start', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'overview', '{ + ('First Meaningful paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'overview', '{ + ('No. 
of Visited Pages', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'overview', '{ + ('Session Duration', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'overview', '{ + ('DOM Build Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'overview', '{ + ('Pages Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'overview', '{ + ('Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'overview', '{ + ('First Paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'overview', '{ + ('DOM Content Loaded', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'overview', '{ + ('Time Till First byte', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'overview', '{ + ('Time To Interactive', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'overview', '{ + ('Captured requests', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'overview', '{ + ('Time To Render', 'Monitoring Essentials', 
'{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'overview', '{ + ('Memory Consumption', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'overview', '{ + ('CPU Load', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'overview', '{ + ('Frame rate', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index c61efae19..6a4e151e9 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -9,4 +9,116 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 +}', true, true, true, 'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 
'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. of Visited Pages', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 
'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a4b41fefe..c3ee2fdb1 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1060,102 +1060,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'overview', '{ +VALUES ('Captured sessions', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'overview', '{ + ('Request Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'overview', '{ + ('Page Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'overview', '{ + ('Image Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 
'predefined', 'overview'), - ('DOM Content Load Start', 'overview', '{ + ('DOM Content Load Start', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'overview', '{ + ('First Meaningful paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'overview', '{ + ('No. of Visited Pages', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'overview', '{ + ('Session Duration', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'overview', '{ + ('DOM Build Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'overview', '{ + ('Pages Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'overview', '{ + ('Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'overview', '{ + ('First Paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'overview', '{ + ('DOM Content Loaded', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'overview', '{ + ('Time Till First byte', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 
'predefined', 'overview'), - ('Time To Interactive', 'overview', '{ + ('Time To Interactive', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'overview', '{ + ('Captured requests', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'overview', '{ + ('Time To Render', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'overview', '{ + ('Memory Consumption', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'overview', '{ + ('CPU Load', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'overview', '{ + ('Frame rate', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 From 2544a3e166bd1d7ac5502605607e5d8a8c2a0283 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 19:08:08 +0200 Subject: [PATCH 045/221] feat(api): centralized 'order' feat(api): transform 'order' casing --- api/chalicelib/core/alerts_processor.py | 4 ++-- api/chalicelib/core/errors.py | 2 +- api/chalicelib/core/sessions.py | 6 +++--- api/schemas.py | 12 +++++++++++- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index 56fde11da..ece75bfe5 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -99,10 +99,10 @@ def Build(a): j_s = True if a["seriesId"] is not None: a["filter"]["sort"] = "session_id" - a["filter"]["order"] = "DESC" + a["filter"]["order"] = schemas.SortOrderType.desc a["filter"]["startDate"] = -1 
a["filter"]["endDate"] = TimeUTC.now() - full_args, query_part= sessions.search_query_parts( + full_args, query_part = sessions.search_query_parts( data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index a7f863e79..983d091f8 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -463,7 +463,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): sort = __get_sort_key('datetime') if data.sort is not None: sort = __get_sort_key(data.sort) - order = "DESC" + order = schemas.SortOrderType.desc if data.order is not None: order = data.order extra_join = "" diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index adc549d1e..e717f1d07 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -201,12 +201,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = "DESC" + data.order = schemas.SortOrderType.desc else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" + g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" else: sort = 'start_ts' @@ -230,7 +230,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e full_args) else: if data.order is None: - data.order = "DESC" + data.order = schemas.SortOrderType.desc sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/api/schemas.py 
b/api/schemas.py index 1d92f5fce..ae3720624 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -618,17 +618,27 @@ class _PaginatedSchema(BaseModel): page: int = Field(default=1, gt=0) +class SortOrderType(str, Enum): + asc = "ASC" + desc = "DESC" + + class SessionsSearchPayloadSchema(_PaginatedSchema): events: List[_SessionSearchEventSchema] = Field([]) filters: List[SessionSearchFilterSchema] = Field([]) startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: Literal["asc", "desc"] = Field(default="desc") + order: Literal[SortOrderType] = Field(default=SortOrderType.desc) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) bookmarked: bool = Field(default=False) + @root_validator(pre=True) + def transform_order(cls, values): + if values.get("order") is not None: + values["order"] = values["order"].upper() + class Config: alias_generator = attribute_to_camel_case From 4a55d93f5277b9fc43ff032d7b6e61c5ee6d902f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 19:43:18 +0200 Subject: [PATCH 046/221] feat(api): changed SearchSession payload schema --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index ae3720624..54a7bf9d9 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -629,7 +629,7 @@ class SessionsSearchPayloadSchema(_PaginatedSchema): startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: Literal[SortOrderType] = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.desc) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) bookmarked: bool = Field(default=False) From 6df7bbe7d148a9826d86fe716d1c4226c23fcb2a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 20:02:09 +0200 Subject: 
[PATCH 047/221] feat(api): fixed changed SearchSession payload schema --- api/schemas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/schemas.py b/api/schemas.py index 54a7bf9d9..ff42fd7d3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -638,6 +638,7 @@ class SessionsSearchPayloadSchema(_PaginatedSchema): def transform_order(cls, values): if values.get("order") is not None: values["order"] = values["order"].upper() + return values class Config: alias_generator = attribute_to_camel_case From b5540998d9131ee76f779276da446588e5e52897 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 May 2022 11:20:25 +0200 Subject: [PATCH 048/221] feat(api): metrics changed web vitals description feat(db): changed metric's monitoring essentials category to web vitals --- api/chalicelib/core/dashboards.py | 5 ++- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/init_schema.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/init_schema.sql | 40 +++++++++---------- 5 files changed, 83 insertions(+), 82 deletions(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index bce5d3ad0..25dbdada3 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -6,8 +6,9 @@ from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC +# category name should be lower cased CATEGORY_DESCRIPTION = { - 'overview': 'High-level metrics and web vitals.', + 'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.', 'custom': 'Previously created custom metrics by me and my team.', 'errors': 'Keep a closer eye on errors and track their type, origin and domain.', 'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU 
usage and more.', @@ -33,7 +34,7 @@ def get_templates(project_id, user_id): cur.execute(pg_query) rows = cur.fetchall() for r in rows: - r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "") + r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "") for w in r["widgets"]: w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index d8624d06d..325d419ba 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -16,102 +16,102 @@ CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring 
Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 
'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 95f247af1..ec29b1dfc 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1269,102 +1269,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 
1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web 
vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 6a4e151e9..4f1c7c28f 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -11,102 +11,102 @@ ALTER TABLE IF EXISTS dashboards INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, 
true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', 
true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index c3ee2fdb1..91a590688 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1060,102 +1060,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) 
-VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. 
of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web 
vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 From b1aae16f60ce2042ff25de8e7f5dd5bcae9ed74c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 31 May 2022 10:14:55 +0100 Subject: [PATCH 049/221] feat(api): refactored user-auth --- api/auth/auth_jwt.py | 18 ++++++++++-------- api/chalicelib/core/users.py | 14 +++++++------- ee/api/chalicelib/core/users.py | 14 +++++++------- 3 files changed, 24 insertions(+), 22 deletions(-) diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index 1ac8d5d79..4eff80789 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -19,10 +19,14 @@ class JWTAuth(HTTPBearer): if not credentials.scheme == "Bearer": raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + auth_exists = jwt_payload is not None \ + and users.auth_exists(user_id=jwt_payload.get("userId", -1), + tenant_id=jwt_payload.get("tenantId", -1), + jwt_iat=jwt_payload.get("iat", 100), + jwt_aud=jwt_payload.get("aud", "")) if jwt_payload is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ - or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): + or not auth_exists: print("JWTAuth: Token issue") if jwt_payload is not None: print(jwt_payload) @@ 
-34,21 +38,19 @@ class JWTAuth(HTTPBearer): print("JWTAuth: iat is None") if jwt_payload is not None and jwt_payload.get("aud") is None: print("JWTAuth: aud is None") - if jwt_payload is not None and \ - not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): + if jwt_payload is not None and not auth_exists: print("JWTAuth: not users.auth_exists") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") - user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"]) + user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) if user is None: print("JWTAuth: User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") jwt_payload["authorizer_identity"] = "jwt" print(jwt_payload) request.state.authorizer_identity = "jwt" - request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"], - user_id=jwt_payload["userId"], + request.state.currentContext = CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), + user_id=jwt_payload.get("userId", -1), email=user["email"]) return request.state.currentContext diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 40cc0f7db..3a4067f68 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -564,13 +564,13 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): {"userId": user_id}) ) r = cur.fetchone() - return r is not None \ - and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + return r is not None \ + and r.get("jwt_iat") is not None \ + and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 
1 \ + or (jwt_aud.startswith("plugin") \ + and (r["changed_at"] is None \ + or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) + ) def authenticate(email, password, for_change_password=False, for_plugin=False): diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index cf2a808e7..5d28dc395 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -613,13 +613,13 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): {"userId": user_id, "tenant_id": tenant_id}) ) r = cur.fetchone() - return r is not None \ - and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + return r is not None \ + and r.get("jwt_iat") is not None \ + and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ + or (jwt_aud.startswith("plugin") \ + and (r["changed_at"] is None \ + or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) + ) def change_jwt_iat(user_id): From 95088518aa9e685924d87085637071cbef9f4794 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 31 May 2022 13:46:13 +0100 Subject: [PATCH 050/221] feat(api): clean script --- ee/api/clean.sh | 84 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100755 ee/api/clean.sh diff --git a/ee/api/clean.sh b/ee/api/clean.sh new file mode 100755 index 000000000..861d1d9f1 --- /dev/null +++ b/ee/api/clean.sh @@ -0,0 +1,84 @@ +#!/bin/bash + +rm -rf ./chalicelib/core/alerts.py +rm -rf ./chalicelib/core/alerts_processor.py +rm -rf ./chalicelib/core/announcements.py +rm -rf ./chalicelib/core/collaboration_slack.py +rm -rf ./chalicelib/core/errors_favorite_viewed.py +rm -rf ./chalicelib/core/events.py +rm -rf ./chalicelib/core/events_ios.py +rm -rf 
./chalicelib/core/dashboards.py +rm -rf ./chalicelib/core/funnels.py +rm -rf ./chalicelib/core/integration_base.py +rm -rf ./chalicelib/core/integration_base_issue.py +rm -rf ./chalicelib/core/integration_github.py +rm -rf ./chalicelib/core/integration_github_issue.py +rm -rf ./chalicelib/core/integration_jira_cloud.py +rm -rf ./chalicelib/core/integration_jira_cloud_issue.py +rm -rf ./chalicelib/core/integrations_manager.py +rm -rf ./chalicelib/core/issues.py +rm -rf ./chalicelib/core/jobs.py +rm -rf ./chalicelib/core/log_tool_bugsnag.py +rm -rf ./chalicelib/core/log_tool_cloudwatch.py +rm -rf ./chalicelib/core/log_tool_datadog.py +rm -rf ./chalicelib/core/log_tool_elasticsearch.py +rm -rf ./chalicelib/core/log_tool_newrelic.py +rm -rf ./chalicelib/core/log_tool_rollbar.py +rm -rf ./chalicelib/core/log_tool_sentry.py +rm -rf ./chalicelib/core/log_tool_stackdriver.py +rm -rf ./chalicelib/core/log_tool_sumologic.py +rm -rf ./chalicelib/core/metadata.py +rm -rf ./chalicelib/core/mobile.py +rm -rf ./chalicelib/core/sessions.py +rm -rf ./chalicelib/core/sessions_assignments.py +rm -rf ./chalicelib/core/sessions_metas.py +rm -rf ./chalicelib/core/sessions_mobs.py +rm -rf ./chalicelib/core/significance.py +rm -rf ./chalicelib/core/slack.py +rm -rf ./chalicelib/core/socket_ios.py +rm -rf ./chalicelib/core/sourcemaps.py +rm -rf ./chalicelib/core/sourcemaps_parser.py +rm -rf ./chalicelib/core/weekly_report.py +rm -rf ./chalicelib/saml +rm -rf ./chalicelib/utils/html/ +rm -rf ./chalicelib/utils/__init__.py +rm -rf ./chalicelib/utils/args_transformer.py +rm -rf ./chalicelib/utils/captcha.py +rm -rf ./chalicelib/utils/dev.py +rm -rf ./chalicelib/utils/email_handler.py +rm -rf ./chalicelib/utils/email_helper.py +rm -rf ./chalicelib/utils/event_filter_definition.py +rm -rf ./chalicelib/utils/github_client_v3.py +rm -rf ./chalicelib/utils/helper.py +rm -rf ./chalicelib/utils/jira_client.py +rm -rf ./chalicelib/utils/metrics_helper.py +rm -rf ./chalicelib/utils/pg_client.py +rm 
-rf ./chalicelib/utils/s3.py +rm -rf ./chalicelib/utils/smtp.py +rm -rf ./chalicelib/utils/strings.py +rm -rf ./chalicelib/utils/TimeUTC.py +rm -rf ./routers/app/__init__.py +rm -rf ./routers/crons/__init__.py +rm -rf ./routers/subs/__init__.py +rm -rf ./routers/__init__.py +rm -rf ./chalicelib/core/assist.py +rm -rf ./auth/auth_apikey.py +rm -rf ./auth/auth_jwt.py +rm -rf ./build.sh +rm -rf ./routers/core.py +rm -rf ./routers/crons/core_crons.py +rm -rf ./routers/subs/dashboard.py +rm -rf ./db_changes.sql +rm -rf ./Dockerfile.bundle +rm -rf ./entrypoint.bundle.sh +rm -rf ./entrypoint.sh +rm -rf ./chalicelib/core/heatmaps.py +rm -rf ./routers/subs/insights.py +rm -rf ./schemas.py +rm -rf ./routers/subs/v1_api.py +rm -rf ./routers/subs/metrics.py +rm -rf ./chalicelib/core/custom_metrics.py +rm -rf ./chalicelib/core/performance_event.py +rm -rf ./chalicelib/core/saved_search.py +rm -rf ./app_alerts.py +rm -rf ./build_alerts.sh From caaf7793e34569baad46b6e0e7ddc9bf2340efbc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 1 Jun 2022 19:51:42 +0100 Subject: [PATCH 051/221] feat(db): EE CH new structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 236 +++++++++++++++--- 1 file changed, 195 insertions(+), 41 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index a8f90613d..385908163 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -1,7 +1,81 @@ ALTER TABLE sessions DROP COLUMN pages_count; -CREATE TABLE default.sessions_metadata_temp + +CREATE TABLE IF NOT EXISTS events_s +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level 
Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time 
Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, session_id); + +CREATE TABLE IF NOT EXISTS sessions_s ( session_id UInt64, project_id UInt32, @@ -16,8 +90,66 @@ CREATE TABLE default.sessions_metadata_temp user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 
'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 
'CW'=120, 'BQ'=121, 'SS'=122), datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + +-- CREATE TABLE IF NOT EXISTS sessions_meta +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- tracker_version String, +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os String, +-- user_os_version Nullable(String), +-- user_browser String, +-- user_browser_version Nullable(String), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 
'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- duration UInt32, +-- pages_count UInt16, +-- events_count UInt16, +-- errors_count UInt16, +-- utm_source Nullable(String), +-- utm_medium Nullable(String), +-- utm_campaign Nullable(String), +-- user_id Nullable(String), +-- metadata_1 Nullable(String), +-- metadata_2 Nullable(String), +-- metadata_3 Nullable(String), +-- metadata_4 Nullable(String), +-- metadata_5 Nullable(String), +-- metadata_6 Nullable(String), +-- metadata_7 Nullable(String), +-- metadata_8 Nullable(String), +-- metadata_9 Nullable(String), +-- metadata_10 Nullable(String), +-- _timestamp DateTime DEFAULT now() +-- ) ENGINE = ReplacingMergeTree(_timestamp) +-- PARTITION BY 
toYYYYMMDD(datetime) +-- ORDER BY (project_id, datetime, session_id) +-- TTL datetime + INTERVAL 1 MONTH +-- SETTINGS index_granularity = 512; + +CREATE TABLE IF NOT EXISTS metadata_s +( + session_id UInt64, + project_id UInt32, + datetime DateTime, user_id Nullable(String), - user_anonymous_id Nullable(String), metadata_1 Nullable(String), metadata_2 Nullable(String), metadata_3 Nullable(String), @@ -27,45 +159,67 @@ CREATE TABLE default.sessions_metadata_temp metadata_7 Nullable(String), metadata_8 Nullable(String), metadata_9 Nullable(String), - metadata_10 Nullable(String) -) ENGINE = MergeTree + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toDate(datetime) - ORDER BY (project_id, datetime) - TTL datetime + INTERVAL 1 MONTH; + ORDER BY (project_id, datetime, session_id); -INSERT INTO default.sessions_metadata_temp(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, - user_os_version, - user_browser, user_browser_version, user_device, user_device_type, - user_country, - datetime, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, - metadata_4, - metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) -SELECT session_id, - project_id, - tracker_version, - rev_id, - user_uuid, - user_os, - user_os_version, - user_browser, - user_browser_version, - user_device, - user_device_type, - user_country, - datetime, - user_id, - user_anonymous_id, - metadata_1, - metadata_2, - metadata_3, - metadata_4, - metadata_5, - metadata_6, - metadata_7, - metadata_8, - metadata_9, - metadata_10 -FROM default.sessions_metadata; +CREATE TABLE IF NOT EXISTS autocomplete +( + project_id UInt32 NOT NULL, + type LowCardinality(String) NOT NULL, + value String NOT NULL, + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id, type) + TTL _timestamp + INTERVAL 1 MONTH; -DROP TABLE 
default.sessions_metadata; -RENAME TABLE default.sessions_metadata_temp TO default.sessions_metadata; \ No newline at end of file +CREATE MATERIALIZED VIEW sessions_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT * +FROM massive_split.sessions_s +WHERE datetime >= now() - INTERVAL 1 DAY + AND isNotNull(duration) + AND duration > 0; + +CREATE MATERIALIZED VIEW events_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT *, now() AS _timestamp +FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 1 DAY; + +CREATE MATERIALIZED VIEW sessions_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT * +FROM massive_split.sessions_s +WHERE datetime >= now() - INTERVAL 7 DAY + AND isNotNull(duration) + AND duration > 0; + +CREATE MATERIALIZED VIEW events_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT *, now() AS _timestamp +FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file From e7e0296b6bc953c86ffc57bd2ce7d9bca43e40bc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Jun 2022 12:37:52 +0100 Subject: [PATCH 052/221] feat(db): EE CH new structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 385908163..0339fd4b8 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ 
b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -222,4 +222,26 @@ CREATE MATERIALIZED VIEW events_l7d_mv AS SELECT *, now() AS _timestamp FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 7 DAY; + +CREATE MATERIALIZED VIEW metadata_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT * +FROM massive_split.metadata_s +WHERE datetime >= now() - INTERVAL 1 DAY; + +CREATE MATERIALIZED VIEW metadata_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT * +FROM massive_split.metadata_s WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file From 31a577b6ccb2edb96d903e3d03677fa90a319e7f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Jun 2022 16:56:37 +0100 Subject: [PATCH 053/221] feat(db): EE CH new structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 223 +- .../db/init_dbs/clickhouse/1.6.1/fill.sql | 2878 +++++++++++++++++ .../db/init_dbs/clickhouse/1.6.1/queries.sql | 983 ++++++ 3 files changed, 3978 insertions(+), 106 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 0339fd4b8..f6ba9d751 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -2,6 +2,25 @@ ALTER TABLE sessions DROP COLUMN pages_count; +CREATE TABLE projects_metadata +( + project_id UInt32, + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 
Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id) + SETTINGS index_granularity = 512; + CREATE TABLE IF NOT EXISTS events_s ( session_id UInt64, @@ -12,7 +31,7 @@ CREATE TABLE IF NOT EXISTS events_s hesitation_time Nullable(UInt32), name Nullable(String), payload Nullable(String), - level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), message Nullable(String), error_id Nullable(String), @@ -70,85 +89,35 @@ CREATE TABLE IF NOT EXISTS events_s compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), success Nullable(UInt8), method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), - status Nullable(UInt16) + status Nullable(UInt16), + _timestamp DateTime DEFAULT now() ) ENGINE = MergeTree PARTITION BY toYYYYMM(datetime) - ORDER BY (project_id, datetime, event_type, session_id); + ORDER BY (project_id, datetime, event_type, session_id) + TTL datetime + INTERVAL 1 MONTH; -CREATE TABLE IF NOT EXISTS sessions_s +CREATE TABLE IF NOT EXISTS sessions ( - session_id UInt64, - project_id UInt32, - tracker_version String, - rev_id Nullable(String), - user_uuid UUID, - user_os String, - user_os_version Nullable(String), - user_browser String, - user_browser_version Nullable(String), + session_id UInt64, + project_id UInt32, + tracker_version LowCardinality(String), + rev_id LowCardinality(Nullable(String)), + user_uuid UUID, + 
user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), user_device Nullable(String), user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 
'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), - datetime DateTime, - duration UInt32, - pages_count UInt16, - events_count UInt16, - errors_count UInt16, + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, utm_source Nullable(String), utm_medium Nullable(String), utm_campaign Nullable(String), - _timestamp DateTime DEFAULT now() -) ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMMDD(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 MONTH - SETTINGS index_granularity = 512; - --- CREATE TABLE IF NOT EXISTS sessions_meta --- ( --- session_id UInt64, --- project_id UInt32, --- tracker_version String, --- rev_id Nullable(String), --- user_uuid UUID, --- user_os String, --- user_os_version Nullable(String), --- user_browser String, --- user_browser_version Nullable(String), --- user_device Nullable(String), --- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), --- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 
'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 
'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), --- datetime DateTime, --- duration UInt32, --- pages_count UInt16, --- events_count UInt16, --- errors_count UInt16, --- utm_source Nullable(String), --- utm_medium Nullable(String), --- utm_campaign Nullable(String), --- user_id Nullable(String), --- metadata_1 Nullable(String), --- metadata_2 Nullable(String), --- metadata_3 Nullable(String), --- metadata_4 Nullable(String), --- metadata_5 Nullable(String), --- metadata_6 Nullable(String), --- metadata_7 Nullable(String), --- metadata_8 Nullable(String), --- metadata_9 Nullable(String), --- metadata_10 Nullable(String), --- _timestamp DateTime DEFAULT now() --- ) ENGINE = ReplacingMergeTree(_timestamp) --- PARTITION BY toYYYYMMDD(datetime) --- ORDER BY (project_id, datetime, session_id) --- TTL datetime + INTERVAL 1 MONTH --- SETTINGS index_granularity = 512; - -CREATE TABLE IF NOT EXISTS metadata_s -( - session_id UInt64, - project_id UInt32, - datetime DateTime, user_id Nullable(String), metadata_1 Nullable(String), metadata_2 Nullable(String), @@ -160,10 +129,12 @@ CREATE TABLE IF NOT EXISTS metadata_s metadata_8 Nullable(String), metadata_9 Nullable(String), metadata_10 Nullable(String), - _timestamp DateTime DEFAULT now() + _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toDate(datetime) - ORDER BY (project_id, datetime, session_id); + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; CREATE TABLE IF NOT EXISTS autocomplete ( @@ -176,35 +147,13 @@ CREATE TABLE IF NOT EXISTS autocomplete ORDER BY (project_id, type) TTL _timestamp + INTERVAL 1 MONTH; -CREATE MATERIALIZED VIEW sessions_l24h_mv - ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMMDD(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY - POPULATE -AS 
-SELECT * -FROM massive_split.sessions_s -WHERE datetime >= now() - INTERVAL 1 DAY - AND isNotNull(duration) - AND duration > 0; - -CREATE MATERIALIZED VIEW events_l24h_mv - ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMM(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY - POPULATE -AS -SELECT *, now() AS _timestamp -FROM massive_split.events_s -WHERE datetime >= now() - INTERVAL 1 DAY; CREATE MATERIALIZED VIEW sessions_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 7 DAY + SETTINGS index_granularity = 512 POPULATE AS SELECT * @@ -220,28 +169,90 @@ CREATE MATERIALIZED VIEW events_l7d_mv TTL datetime + INTERVAL 7 DAY POPULATE AS -SELECT *, now() AS _timestamp +SELECT * FROM massive_split.events_s WHERE datetime >= now() - INTERVAL 7 DAY; -CREATE MATERIALIZED VIEW metadata_l24h_mv + +CREATE MATERIALIZED VIEW sessions_info_l1m_mv ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMMDD(datetime) + PARTITION BY toYYYYMM(datetime) ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512 POPULATE AS -SELECT * -FROM massive_split.metadata_s -WHERE datetime >= now() - INTERVAL 1 DAY; +SELECT project_id, + session_id, + datetime, + now() AS _timestamp, + toJSONString(map('project_id', toString(project_id), + 'session_id', toString(session_id), + 'user_uuid', toString(user_uuid), + 'user_id', user_id, + 'user_os', user_os, + 'user_browser', user_browser, + 'user_device', user_device, + --'user_device_type', user_device_type, +--'user_country', user_country, + 'start_ts', toString(datetime), + 'duration', toString(duration), + 'events_count', toString(events_count), + 'pages_count', toString(pages_count), + 'errors_count', toString(errors_count), + -- 'user_anonymous_id', user_anonymous_id, +-- 'platform', platform, +-- 
'issue_score', issue_score, +-- issue_types, +-- favorite, +-- viewed, + 'metadata', CAST((arrayFilter(x->isNotNull(x), + arrayMap( + x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[1]), + NULL), + [ + [projects_meta.metadata_1,sessions.metadata_1], + [projects_meta.metadata_2,sessions.metadata_2], + [projects_meta.metadata_3,sessions.metadata_3], + [projects_meta.metadata_4,sessions.metadata_4], + [projects_meta.metadata_5,sessions.metadata_5], + [projects_meta.metadata_6,sessions.metadata_6], + [projects_meta.metadata_7,sessions.metadata_7], + [projects_meta.metadata_8,sessions.metadata_8], + [projects_meta.metadata_9,sessions.metadata_9], + [projects_meta.metadata_10,sessions.metadata_10] + ])), + arrayFilter(x->isNotNull(x), + arrayMap( + x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[2]), + NULL), + [ + [projects_meta.metadata_1,sessions.metadata_1], + [projects_meta.metadata_2,sessions.metadata_2], + [projects_meta.metadata_3,sessions.metadata_3], + [projects_meta.metadata_4,sessions.metadata_4], + [projects_meta.metadata_5,sessions.metadata_5], + [projects_meta.metadata_6,sessions.metadata_6], + [projects_meta.metadata_7,sessions.metadata_7], + [projects_meta.metadata_8,sessions.metadata_8], + [projects_meta.metadata_9,sessions.metadata_9], + [projects_meta.metadata_10,sessions.metadata_10] + ]))), 'Map(String,String)') + )) AS info +FROM massive_split.sessions + INNER JOIN projects_metadata USING (project_id) +WHERE datetime >= now() - INTERVAL 1 MONTH + AND isNotNull(duration) + AND duration > 0; -CREATE MATERIALIZED VIEW metadata_l7d_mv +CREATE MATERIALIZED VIEW sessions_info_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 7 DAY + SETTINGS index_granularity = 512 POPULATE AS SELECT * -FROM massive_split.metadata_s -WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file +FROM sessions_info_l1m_mv +WHERE datetime >= now() - 
INTERVAL 7 DAY; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql new file mode 100644 index 000000000..e22b73848 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql @@ -0,0 +1,2878 @@ +-- CREATE TABLE IF NOT EXISTS single_t.events +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), +-- tracker_version LowCardinality(String), +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os LowCardinality(String), +-- user_os_version LowCardinality(Nullable(String)), +-- user_browser LowCardinality(String), +-- user_browser_version LowCardinality(Nullable(String)), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 
'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- label Nullable(String), +-- hesitation_time Nullable(UInt32), +-- name Nullable(String), +-- payload Nullable(String), +-- level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), +-- source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), +-- message Nullable(String), +-- error_id Nullable(String), +-- duration Nullable(UInt16), +-- context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 
'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), +-- container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), +-- container_id Nullable(String), +-- container_name Nullable(String), +-- container_src Nullable(String), +-- url Nullable(String), +-- url_host Nullable(String) MATERIALIZED lower(domain(url)), +-- url_path Nullable(String) MATERIALIZED lower(pathFull(url)), +-- request_start Nullable(UInt16), +-- response_start Nullable(UInt16), +-- response_end Nullable(UInt16), +-- dom_content_loaded_event_start Nullable(UInt16), +-- dom_content_loaded_event_end Nullable(UInt16), +-- load_event_start Nullable(UInt16), +-- load_event_end Nullable(UInt16), +-- first_paint Nullable(UInt16), +-- first_contentful_paint Nullable(UInt16), +-- speed_index Nullable(UInt16), +-- visually_complete Nullable(UInt16), +-- time_to_interactive Nullable(UInt16), +-- ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), +-- minus(response_start, request_start), Null), +-- ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), +-- minus(response_end, request_start), Null), +-- response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), +-- minus(response_end, response_start), Null), +-- dom_building_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_start, response_end), +-- minus(dom_content_loaded_event_start, response_end), Null), +-- dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), +-- minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), +-- load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), +-- minus(load_event_end, load_event_start), Null), +-- min_fps Nullable(UInt8), +-- avg_fps Nullable(UInt8), +-- 
max_fps Nullable(UInt8), +-- min_cpu Nullable(UInt8), +-- avg_cpu Nullable(UInt8), +-- max_cpu Nullable(UInt8), +-- min_total_js_heap_size Nullable(UInt64), +-- avg_total_js_heap_size Nullable(UInt64), +-- max_total_js_heap_size Nullable(UInt64), +-- min_used_js_heap_size Nullable(UInt64), +-- avg_used_js_heap_size Nullable(UInt64), +-- max_used_js_heap_size Nullable(UInt64), +-- type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), +-- header_size Nullable(UInt16), +-- encoded_body_size Nullable(UInt32), +-- decoded_body_size Nullable(UInt32), +-- compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), +-- success Nullable(UInt8), +-- method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), +-- status Nullable(UInt16) +-- ) ENGINE = MergeTree +-- PARTITION BY toDate(datetime) +-- ORDER BY (project_id, datetime); +-- -- TTL datetime + INTERVAL 1 MONTH; +-- DROP TABLE single_t.events; +-- +-- INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, label, hesitation_time, name, payload, level, source, message, +-- error_id, duration, context, container_type, container_id, container_name, container_src, +-- url, request_start, response_start, response_end, dom_content_loaded_event_start, +-- dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, +-- first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, +-- avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, +-- max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, +-- type, header_size, encoded_body_size, decoded_body_size, success, method, status) +-- SELECT 
session_id, +-- project_id, +-- event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time, +-- name, +-- payload, +-- level, +-- source, +-- message, +-- error_id, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive, +-- min_fps, +-- avg_fps, +-- max_fps, +-- min_cpu, +-- avg_cpu, +-- max_cpu, +-- min_total_js_heap_size, +-- avg_total_js_heap_size, +-- max_total_js_heap_size, +-- min_used_js_heap_size, +-- avg_used_js_heap_size, +-- max_used_js_heap_size, +-- type, +-- header_size, +-- encoded_body_size, +-- decoded_body_size, +-- success, +-- method, +-- status +-- FROM ( +-- SELECT session_id, +-- project_id, +-- 'CLICK' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, 
+-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM clicks +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'ERROR' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- message, +-- error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- 
null AS success, +-- null AS method, +-- null AS status +-- FROM errors +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'INPUT' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM inputs +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'LONGTASK' event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, 
+-- null AS error_id, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM longtasks +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'PAGE' event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS 
min_cpu,
+--        null AS avg_cpu,
+--        null AS max_cpu,
+--        null AS min_total_js_heap_size,
+--        null AS avg_total_js_heap_size,
+--        null AS max_total_js_heap_size,
+--        null AS min_used_js_heap_size,
+--        null AS avg_used_js_heap_size,
+--        null AS max_used_js_heap_size,
+--        null AS type,
+--        null AS header_size,
+--        null AS encoded_body_size,
+--        null AS decoded_body_size,
+--        null AS success,
+--        null AS method,
+--        null AS status
+--  FROM pages
+--  UNION ALL
+--  SELECT session_id,
+--        project_id,
+--        'PERFORMANCE' AS event_type,
+--        tracker_version,
+--        rev_id,
+--        user_uuid,
+--        user_os,
+--        user_os_version,
+--        user_browser,
+--        user_browser_version,
+--        user_device,
+--        user_device_type,
+--        user_country,
+--        datetime,
+--        null AS label,
+--        null AS hesitation_time,
+--        null AS name,
+--        null AS payload,
+--        null AS level,
+--        null AS source,
+--        null AS message,
+--        null AS error_id,
+--        null AS duration,
+--        null AS context,
+--        null AS container_type,
+--        null AS container_id,
+--        null AS container_name,
+--        null AS container_src,
+--        null AS url,
+--        null AS request_start,
+--        null AS response_start,
+--        null AS response_end,
+--        null AS dom_content_loaded_event_start,
+--        null AS dom_content_loaded_event_end,
+--        null AS load_event_start,
+--        null AS load_event_end,
+--        null AS first_paint,
+--        null AS first_contentful_paint,
+--        null AS speed_index,
+--        null AS visually_complete,
+--        null AS time_to_interactive,
+--        min_fps,
+--        avg_fps,
+--        max_fps,
+--        min_cpu,
+--        avg_cpu,
+--        max_cpu,
+--        min_total_js_heap_size,
+--        avg_total_js_heap_size,
+--        max_total_js_heap_size,
+--        min_used_js_heap_size,
+--        avg_used_js_heap_size,
+--        max_used_js_heap_size,
+--        null AS type,
+--        null AS header_size,
+--        null AS encoded_body_size,
+--        null AS decoded_body_size,
+--        null AS success,
+--        null AS method,
+--        null AS status
+--  FROM performance
+--  UNION ALL
+--  SELECT session_id,
+--        project_id,
+--        'RESOURCE' AS event_type,
+--        tracker_version,
+--        rev_id,
+--        user_uuid,
+--        user_os,
+--        user_os_version,
+--        user_browser,
+--        user_browser_version,
+--        user_device,
+--        user_device_type,
+--        user_country,
+--        datetime,
+--        null AS label,
+--        null AS hesitation_time,
+--        null AS name,
+--        null AS payload,
+--        null AS level,
+--        null AS source,
+--        null AS message,
+--        null AS error_id,
+--        duration,
+--        null AS context,
+--        null AS container_type,
+--        null AS container_id,
+--        null AS container_name,
+--        null AS container_src,
+--        url,
+--        null AS request_start,
+--        null AS response_start,
+--        null AS response_end,
+--        null AS dom_content_loaded_event_start,
+--        null AS dom_content_loaded_event_end,
+--        null AS load_event_start,
+--        null AS load_event_end,
+--        null AS first_paint,
+--        null AS first_contentful_paint,
+--        null AS speed_index,
+--        null AS visually_complete,
+--        null AS time_to_interactive,
+--        null AS min_fps,
+--        null AS avg_fps,
+--        null AS max_fps,
+--        null AS min_cpu,
+--        null AS avg_cpu,
+--        null AS max_cpu,
+--        null AS min_total_js_heap_size,
+--        null AS avg_total_js_heap_size,
+--        null AS max_total_js_heap_size,
+--        null AS min_used_js_heap_size,
+--        null AS avg_used_js_heap_size,
+--        null AS max_used_js_heap_size,
+--        type,
+--        header_size,
+--        encoded_body_size,
+--        decoded_body_size,
+--        success,
+--        method,
+--        status
+--  FROM resources);
+--
+--
+-- INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os,
+--                           user_os_version, user_browser, user_browser_version, user_device, user_device_type,
+--                           user_country, datetime, label, hesitation_time)
+-- SELECT session_id,
+--        project_id,
+--        'CLICK' AS event_type,
+--        tracker_version,
+--        rev_id,
+--        user_uuid,
+--        user_os,
+--        user_os_version,
+--        user_browser,
+--        user_browser_version,
+--        user_device,
+--        
user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time +-- FROM clicks; +-- +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, +-- user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, +-- source, name, message, error_id) +-- +-- SELECT 'ERROR' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- source, +-- name, +-- message, +-- error_id +-- FROM errors; +-- +-- +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, label) +-- +-- SELECT 'INPUT' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label +-- FROM inputs; +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, duration, context, container_type, container_id, container_name, +-- container_src) +-- SELECT 'LONGTASK' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src +-- FROM longtasks; +-- +-- +-- INSERT INTO eng_t.events4(event_type, 
session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, url, request_start, response_start, response_end, +-- dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, +-- load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, +-- time_to_interactive) +-- SELECT 'PAGE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive +-- FROM pages; +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, +-- min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, +-- min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size) +-- SELECT 'PERFORMANCE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- min_fps, +-- avg_fps, +-- max_fps, +-- min_cpu, +-- avg_cpu, +-- max_cpu, +-- min_total_js_heap_size, +-- avg_total_js_heap_size, +-- max_total_js_heap_size, +-- min_used_js_heap_size, +-- avg_used_js_heap_size, +-- max_used_js_heap_size +-- FROM performance; +-- +-- INSERT INTO 
eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, url, type, duration, header_size, encoded_body_size, +-- decoded_body_size, success, method, status) +-- SELECT 'RESOURCE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- url, +-- type, +-- duration, +-- header_size, +-- encoded_body_size, +-- decoded_body_size, +-- success, +-- method, +-- status +-- FROM resources; +-- +-- +-- SELECT table, formatReadableSize(size) as size, rows, days, formatReadableSize(avgDaySize) as avgDaySize +-- FROM ( +-- SELECT table, +-- sum(bytes) AS size, +-- sum(rows) AS rows, +-- min(min_date) AS min_date, +-- max(max_date) AS max_date, +-- (max_date - min_date) AS days, +-- size / (max_date - min_date) AS avgDaySize +-- FROM system.parts +-- WHERE active +-- GROUP BY table +-- ORDER BY rows DESC +-- ); +-- +-- SELECT database, +-- table, +-- formatReadableSize(sum(bytes)) as size, +-- min(min_date) as min_date, +-- max(max_date) as max_date +-- FROM system.parts +-- WHERE active +-- GROUP BY database, table; +-- +-- SELECT count(*) +-- FROM single_t.events; +-- -- 449 484 932 +-- -- 449 484 932 +-- +-- SELECT (SELECT count(*) FROM clicks) + (SELECT count(*) FROM inputs) + (SELECT count(*) FROM longtasks) + +-- (SELECT count(*) FROM errors) + (SELECT count(*) FROM pages) + (SELECT count(*) FROM resources) + +-- (SELECT count(*) FROM performance) AS totl; +-- +-- +-- +-- CREATE TABLE IF NOT EXISTS single_t.events3 +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), +-- tracker_version 
LowCardinality(String), +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os LowCardinality(String), +-- user_os_version LowCardinality(Nullable(String)), +-- user_browser LowCardinality(String), +-- user_browser_version LowCardinality(Nullable(String)), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 
'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- label Nullable(String), +-- hesitation_time Nullable(UInt32), +-- name Nullable(String), +-- payload Nullable(String), +-- level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), +-- source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), +-- message Nullable(String), +-- error_id Nullable(String), +-- duration Nullable(UInt16), +-- context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), +-- container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), +-- container_id Nullable(String), +-- container_name Nullable(String), +-- container_src Nullable(String), +-- url Nullable(String), +-- url_host Nullable(String) MATERIALIZED lower(domain(url)), +-- url_path Nullable(String) MATERIALIZED lower(pathFull(url)), +-- request_start Nullable(UInt16), +-- response_start Nullable(UInt16), +-- response_end 
Nullable(UInt16), +-- dom_content_loaded_event_start Nullable(UInt16), +-- dom_content_loaded_event_end Nullable(UInt16), +-- load_event_start Nullable(UInt16), +-- load_event_end Nullable(UInt16), +-- first_paint Nullable(UInt16), +-- first_contentful_paint Nullable(UInt16), +-- speed_index Nullable(UInt16), +-- visually_complete Nullable(UInt16), +-- time_to_interactive Nullable(UInt16), +-- ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), +-- minus(response_start, request_start), Null), +-- ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), +-- minus(response_end, request_start), Null), +-- response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), +-- minus(response_end, response_start), Null), +-- dom_building_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_start, response_end), +-- minus(dom_content_loaded_event_start, response_end), Null), +-- dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), +-- minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), +-- load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), +-- minus(load_event_end, load_event_start), Null), +-- min_fps Nullable(UInt8), +-- avg_fps Nullable(UInt8), +-- max_fps Nullable(UInt8), +-- min_cpu Nullable(UInt8), +-- avg_cpu Nullable(UInt8), +-- max_cpu Nullable(UInt8), +-- min_total_js_heap_size Nullable(UInt64), +-- avg_total_js_heap_size Nullable(UInt64), +-- max_total_js_heap_size Nullable(UInt64), +-- min_used_js_heap_size Nullable(UInt64), +-- avg_used_js_heap_size Nullable(UInt64), +-- max_used_js_heap_size Nullable(UInt64), +-- type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), +-- header_size Nullable(UInt16), +-- encoded_body_size Nullable(UInt32), 
+-- decoded_body_size Nullable(UInt32), +-- compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), +-- success Nullable(UInt8), +-- method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), +-- status Nullable(UInt16) +-- ) ENGINE = MergeTree +-- PARTITION BY toDate(datetime) +-- ORDER BY (project_id, datetime,event_type); +-- +-- -- INSERT INTO eng_t.events42(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, label, hesitation_time, name, payload, level, source, message, error_id, duration, context, container_type, container_id, container_name, container_src, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, decoded_body_size, success, method, status) +-- -- SELECT session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, label, hesitation_time, name, payload, level, source, message, error_id, duration, context, container_type, container_id, container_name, container_src, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, 
min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, decoded_body_size, success, method, status FROM single_t.events; + +CREATE TABLE IF NOT EXISTS single_t.events3 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 
'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + 
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size 
Nullable(UInt64),
+    max_total_js_heap_size         Nullable(UInt64),
+    min_used_js_heap_size          Nullable(UInt64),
+    avg_used_js_heap_size          Nullable(UInt64),
+    max_used_js_heap_size          Nullable(UInt64),
+    type                           Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)),
+    header_size                    Nullable(UInt16),
+    encoded_body_size              Nullable(UInt32),
+    decoded_body_size              Nullable(UInt32),
+    compression_ratio              Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
+    success                        Nullable(BOOLEAN),
+    method                         Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
+    status                         Nullable(UInt16)
+) ENGINE = MergeTree
+      PARTITION BY toDate(datetime)
+      ORDER BY (project_id, datetime, event_type);
+
+-- Reset the target table ONCE, before any of the per-event-type loads below.
+-- (The original script ran TRUNCATE between the ERROR and INPUT loads, which
+-- silently discarded the CLICK and ERROR rows that had just been inserted.)
+TRUNCATE TABLE eng_t.events4;
+
+-- Load CLICK events; only identity, label and hesitation_time columns apply.
+INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os,
+                          user_os_version, user_browser, user_browser_version, user_device, user_device_type,
+                          user_country, datetime, label, hesitation_time)
+SELECT session_id,
+       project_id,
+       'CLICK' AS event_type,
+       tracker_version,
+       rev_id,
+       user_uuid,
+       user_os,
+       user_os_version,
+       user_browser,
+       user_browser_version,
+       user_device,
+       user_device_type,
+       user_country,
+       datetime,
+       label,
+       hesitation_time
+FROM clicks
+-- was "= 1": every other event-type load below keeps the even half of the
+-- sessions, so CLICK must use the same parity or the sample is inconsistent
+WHERE mod(session_id, 2) = 0;
+
+-- Load ERROR events for the same half of the sessions.
+INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os,
+                          user_os_version,
+                          user_browser, user_browser_version, user_device, user_device_type, user_country, datetime,
+                          source, name, message, error_id)
+
+SELECT 'ERROR' AS event_type,
+       session_id,
+       project_id,
+       tracker_version,
+       rev_id,
+       user_uuid,
+       user_os,
+       user_os_version,
+       user_browser,
+       user_browser_version,
+       user_device,
+       user_device_type,
+       user_country,
+       datetime,
+       source,
+       name,
+       message,
+       error_id
+FROM errors
+WHERE mod(session_id, 2) = 0;
+INSERT INTO eng_t.events4(event_type, session_id,
project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label) + +SELECT 'INPUT' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label +FROM inputs +WHERE mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src) +SELECT 'LONGTASK' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, + container_src +FROM longtasks +WHERE mod(session_id, 2) = 0; + + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive) +SELECT 'PAGE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + 
load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive +FROM pages +WHERE mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size) +SELECT 'PERFORMANCE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size +FROM performance +WHERE mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status) +SELECT 'RESOURCE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status +FROM resources +WHERE type != 'fetch' + AND mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, 
user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status) +SELECT 'REQUEST' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status +FROM resources +WHERE type = 'fetch' + AND mod(session_id, 2) = 0; + +CREATE TABLE IF NOT EXISTS eng_t.events4 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 
'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 
'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, 
dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(BOOLEAN), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = Join(ALL, INNER, session_id); + +TRUNCATE TABLE eng_t.events4; + + +-- merge metadata with events +CREATE DATABASE full_meerge; + +CREATE TABLE IF NOT EXISTS massive.events6 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 
'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 
'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED 
if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + 
metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime); +-- TTL datetime + INTERVAL 1 MONTH; +INSERT INTO massive.events6(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10) +SELECT session_id + 6651141467121565 * 3 AS session_id, + project_id, + 'CLICK' AS event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.clicks + LEFT JOIN default.sessions_metadata USING (session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, + datetime, source, name, message, error_id, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) + +SELECT 'ERROR' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + source, + name, + message, + error_id, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + 
metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.errors + LEFT JOIN default.sessions_metadata USING (session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) + +SELECT 'INPUT' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.inputs + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'LONGTASK' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, + container_src, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + 
metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.longtasks + LEFT JOIN default.sessions_metadata USING (session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'PAGE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.pages + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, user_id, + 
user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'PERFORMANCE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.performance + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'RESOURCE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.resources 
+ LEFT JOIN default.sessions_metadata USING (session_id) +WHERE type != 'fetch'; + +INSERT INTO massive2.events7(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'REQUEST' AS event_type, + session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.resources + LEFT JOIN default.sessions_metadata USING (session_id) +WHERE type = 'fetch' + AND mod(project_id, 2) = 0; + +-- -- TO GENERATE RANDOM USER IDS +-- INSERT INTO sessions_metadata(session_id, user_id, datetime, project_id, user_device_type) +-- SELECT session_id, +-- arrayElement( +-- array('Ze2wc7lvYi', 'NYd7m0Ytg8', 'qgNpvEkXap', 'wvWqM4Ow2G', 'n5Y6DK7ZdP', 'uW4SEYjXxI', 't4EfJiNxk9', +-- 'qWQ8WuIRLS', 'fnRWCwkFyB', '8wf298MFWR', 'G3A3DL0Fdd', 'cQcZHNNiAJ', 'MKcW2adQ38', 'OBzk9EFxVe', +-- '8SBiqoFail', '3Wh9Ur0eOr', 'z6KuuxiPXX', '7j4HaReEsF', 'Ros0kDOVeV', 'PvHi3cBkgV', 'HLjUo6oBlJ', +-- '4Tmi34faA0', 'O9ZATbPjaB', '7ATvuWQCIH', 'kXW4LHnW5X', 'HIHc9TTyTc', 'i5p9jRe7I0', '7dRnUEFoZO', +-- 'u3PDLkI5uG', 'HTYjxmDJCG', '6hKHjcKniO', 'qmPNUWgDIx', 'RfoN9oeYZD', 'HHXpBaYm3k', 'VdpZDfnL9J', +-- 
'Qfwa1dPrrF', 'cgdD2GfFVT', 'iRvT6l7qj3', 'QokprB2GMV', 'umqISqbncX', '7bvRdQ4al3', 'VGKZAUIRjy', +-- 'SNTEGLKbCD', 'zfUaVSD8Jn', 'De7zUojKNt', 'lXiotVRkil', 'bQaDX5kESw', 'tngESCaH6I', 'uucUZvTpPd', +-- 'BFJpni8D3I'), mod(session_id, 50)) AS user_id, +-- datetime, +-- project_id, +-- user_device_type +-- FROM sessions +-- WHERE project_id = 2460; + +INSERT INTO massive2.sessions2(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, user_os_version, + user_browser, user_browser_version, user_device, user_device_type, user_country, + datetime, + duration, events_count, errors_count, utm_source, utm_medium, utm_campaign) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign +FROM default.sessions; + + +CREATE DATABASE massive2; +CREATE TABLE IF NOT EXISTS massive2.events7 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 
'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 
'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + 
dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime, event_type); + + + +INSERT INTO massive2.events7(session_id, 
project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 43 +FROM massive.events6 +WHERE event_type = 'REQUEST' + AND mod(project_id, 2) = 0; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + 
metadata_8, + metadata_9, + metadata_10, + 42 +FROM massive.events6 +WHERE event_type = 'REQUEST' + AND mod(project_id, 2) = 1; + +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 41 +FROM massive.events6 +WHERE event_type = 'RESOURCE' + AND mod(project_id, 2) = 0; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + 
decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 40 +FROM massive.events6 +WHERE event_type = 'RESOURCE' + AND mod(project_id, 2) = 1; + +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'CLICK'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, + metadata_7, metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + 
user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'PERFORMANCE'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, + metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, + riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'PAGE'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, 
user_device_type, + user_country, datetime, label, user_id, user_anonymous_id, metadata_1, metadata_2, + metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, + metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'INPUT'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, source, name, message, error_id, user_id, user_anonymous_id, + metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, + metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + source, + name, + message, + error_id, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'ERROR'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, 
metadata_4, + metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, + container_src, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'LONGTASK'; + +ALTER TABLE massive2.events7 + ADD COLUMN riteration UInt8 DEFAULT 0; +ALTER TABLE massive2.sessions2 + ADD COLUMN riteration UInt8 DEFAULT 0; + + + +INSERT INTO massive2.sessions2(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, user_os_version, + user_browser, user_browser_version, user_device, user_device_type, user_country, + datetime, + duration, events_count, errors_count, utm_source, utm_medium, utm_campaign, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + 4 +FROM massive.sessions; + +SELECT COUNT(*) +FROM massive2.events7; + +CREATE DATABASE massive30; +CREATE TABLE IF NOT EXISTS massive30.events30 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser 
LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 
'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + 
first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 
8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type); + +ALTER TABLE massive30.events30 + ADD COLUMN riteration UInt8; + +INSERT INTO massive30.events30(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, name, payload, level, source, message, + error_id, duration, context, container_type, container_id, container_name, container_src, + url, request_start, response_start, response_end, dom_content_loaded_event_start, + dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, + first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, + avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, + avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, + avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + name, + payload, + level, + source, + message, + error_id, + duration, + context, + 
container_type, + container_id, + container_name, + container_src, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + type, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 9 AS riteration +FROM massive2.events7 +WHERE mod(project_id, 10) = 9; +-- ORDER BY datetime LIMIT 500000; + +DROP TABLE massive30.events30; + + +DESCRIBE TABLE massive2.events7; + + +-- ----------------------------------------------------- +CREATE DATABASE massive_split; +CREATE TABLE IF NOT EXISTS massive_split.events_s +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 
'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size 
Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, session_id); + +CREATE TABLE IF NOT EXISTS massive_split.sessions_s +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 
'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER 
BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + + +CREATE TABLE IF NOT EXISTS massive_split.metadata_s +( + session_id UInt64, + project_id UInt32, + datetime DateTime, + user_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime, session_id); + +INSERT INTO massive_split.sessions_s(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, pages_count, events_count, errors_count, + utm_source, utm_medium, utm_campaign, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + mod(rand(), 100) AS pages_count, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + 4 AS riteration +FROM default.sessions; + +ALTER TABLE massive_split.sessions_s + ADD COLUMN riteration UInt8; + +INSERT INTO massive_split.metadata_s(session_id, project_id, datetime, user_id, metadata_1, metadata_2, metadata_3, + metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, + metadata_10) +SELECT session_id, + project_id, + datetime, + user_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM massive2.events7 AS s +LIMIT 1 BY session_id; + 
+INSERT INTO massive_split.events_s(session_id, project_id, event_type, datetime, label, hesitation_time, name, payload, + level, source, message, error_id, duration, context, container_type, container_id, + container_name, container_src, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, + header_size, encoded_body_size, decoded_body_size, success, method, status) +SELECT session_id, + project_id, + event_type, + datetime, + label, + hesitation_time, + name, + payload, + level, + source, + message, + error_id, + duration, + context, + container_type, + container_id, + container_name, + container_src, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + type, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status +FROM massive2.events7; + +SELECT COUNT(*) +FROM massive_split.sessions_s; +SELECT COUNT(*) +FROM massive_split.metadata_s; +SELECT COUNT(*) +FROM massive_split.events_s; +SELECT COUNT(*) +FROM massive2.events7; +-- SELECT COUNT(*) FROM massive2.sessions2; + + +CREATE TABLE IF NOT EXISTS massive_split.sessions_meta +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + 
user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 
'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + user_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + +INSERT INTO massive_split.sessions_meta(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, + user_device_type, user_country, datetime, duration, pages_count, events_count, + errors_count, utm_source, utm_medium, utm_campaign, user_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, + metadata_8, metadata_9, metadata_10) +SELECT session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, 
+ user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + pages_count, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + user_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM massive_split.sessions_s AS s + LEFT JOIN massive_split.metadata_s AS m ON (s.project_id = m.project_id AND s.session_id = m.session_id); diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql new file mode 100644 index 000000000..556209c79 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql @@ -0,0 +1,983 @@ +-- Q1 +SELECT session_id +-- FROM massive2.events7 +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + + +-- Q1.1 +SELECT session_id +FROM massive2.events7 +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.2 +SELECT session_id +FROM +-- massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 
00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.2.1 +SELECT session_id +FROM +-- massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) +events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) +WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.3 +SELECT session_id +FROM +-- massive_split.events_s +-- events_l7d_mv +events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND 
datetime <= '2022-05-02 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.4 +SELECT session_id +FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.5 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 
+LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive2.events7 +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM massive2.events7 + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING (session_id) + FROM events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + 
event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.3 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 
'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.5 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 
00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.1 +SELECT session_id +FROM massive2.events7 +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.2 +SELECT session_id +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING (session_id) +FROM events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.2.1 +SELECT session_id +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) +FROM events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) +WHERE events_s.project_id = 2460 + 
AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST') + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.3 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.4 +SELECT session_id +FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- 
AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.5 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= 
'2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM massive2.events7 + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING (session_id) + FROM events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM 
events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST') + AND metadata_s.user_id = 'uucUZvTpPd' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' + -- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.3 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM (SELECT DISTINCT session_id +-- FROM 
massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.5 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM events_l24h_mv +-- FROM events_l7d_mv +-- FROM massive_split.events_s + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM massive2.events7 AS events + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime 
<= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.1 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM massive2.events7 AS events + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.2 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.3 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS 
events +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.4 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.4-A +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM 
massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.5 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s AS events USING (session_id) +-- INNER JOIN events_l7d_mv AS events USING (session_id) + INNER JOIN events_l24h_mv AS events USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.6 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM 
massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.6-A +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 
00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QM4: +SELECT timestamp, + groupArray([toString(t.type), toString(t.count)]) AS types +FROM (SELECT toUnixTimestamp(toStartOfInterval(events7.datetime, INTERVAL 37565 second)) * 1000 AS timestamp, + events7.type, + COUNT(events7.session_id) AS count +-- FROM massive_split.events_s AS events7 +-- FROM events_l7d_mv AS events7 + FROM events_l24h_mv AS events7 + WHERE events7.project_id = toUInt32(2460) + AND toStartOfInterval(events7.datetime, INTERVAL 37565 second) >= '2022-04-02 00:00:00' + AND events7.datetime <= '2022-04-03 00:00:00' +-- AND events7.datetime <= '2022-04-10 00:00:00' +-- AND events7.datetime < '2022-05-02 00:00:00' + AND events7.event_type = 'RESOURCE' + GROUP BY timestamp, events7.type + ORDER BY timestamp) AS t +GROUP BY timestamp + SETTINGS + max_threads = 4; From cbe78cc58eef477b7d5ffabf30400c1c195ebf49 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 6 Jun 2022 19:33:26 +0200 Subject: [PATCH 054/221] feat(db): removed user's appearance feat(db): removed generated_password feat(api): merged account&client feat(api): cleaned account response feat(api): removed user's appearance feat(api): removed generated_password feat(api): limits endpoint feat(api): notifications/count endpoint --- api/chalicelib/core/license.py | 15 +---- api/chalicelib/core/notifications.py | 18 ++++++ 
api/chalicelib/core/signup.py | 4 +- api/chalicelib/core/users.py | 55 +++++----------- api/routers/core.py | 12 ++-- api/routers/core_dynamic.py | 42 ++++++------ api/schemas.py | 5 -- ee/api/chalicelib/core/license.py | 16 +---- ee/api/chalicelib/core/notifications.py | 21 ++++++ ee/api/chalicelib/core/signup.py | 4 +- ee/api/chalicelib/core/users.py | 56 ++++------------ ee/api/routers/core_dynamic.py | 43 ++++++------- .../{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} | 33 +++++++--- .../db/init_dbs/postgresql/init_schema.sql | 64 +------------------ .../{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} | 8 ++- .../db/init_dbs/postgresql/init_schema.sql | 64 +------------------ 16 files changed, 153 insertions(+), 307 deletions(-) rename ee/scripts/helm/db/init_dbs/postgresql/{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} (88%) rename scripts/helm/db/init_dbs/postgresql/{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} (97%) diff --git a/api/chalicelib/core/license.py b/api/chalicelib/core/license.py index ab704778a..4a562ea7b 100644 --- a/api/chalicelib/core/license.py +++ b/api/chalicelib/core/license.py @@ -3,19 +3,10 @@ from chalicelib.utils import pg_client def get_status(tenant_id=None): with pg_client.PostgresClient() as cur: - cur.execute("SELECT * FROM public.tenants;") + # cur.execute("SELECT * FROM public.tenants;") + cur.execute("SELECT edition FROM public.tenants;") r = cur.fetchone() return { "hasActivePlan": True, - "current": { - "edition": r.get("edition", "").upper(), - "versionNumber": r.get("version_number", ""), - "license": "", - "expirationDate": -1 - }, - "count": { - "teamMember": r.get("t_users"), - "projects": r.get("t_projects"), - "capturedSessions": r.get("t_sessions") - } + "edition": r.get("edition", "").upper() } diff --git a/api/chalicelib/core/notifications.py b/api/chalicelib/core/notifications.py index 0d9b5be20..ce3c4d61a 100644 --- a/api/chalicelib/core/notifications.py +++ b/api/chalicelib/core/notifications.py @@ -25,6 +25,24 @@ def get_all(tenant_id, user_id): 
return rows +def get_all_count(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + SELECT COUNT(notifications.*) AS count + FROM public.notifications + LEFT JOIN (SELECT notification_id + FROM public.user_viewed_notifications + WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) + WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL + ORDER BY created_at DESC + LIMIT 100;""", + {"user_id": user_id}) + ) + row = cur.fetchone() + return row + + def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: return False diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index ab23eef68..4d320e0be 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -77,8 +77,8 @@ def create_step1(data: schemas.UserSignupSchema): RETURNING user_id,email,role,name ), au AS (INSERT - INTO public.basic_authentication (user_id, password, generated_password) - VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE) + INTO public.basic_authentication (user_id, password) + VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12))) ) INSERT INTO public.projects (name, active) VALUES (%(projectName)s, TRUE) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 3a4067f68..408fb03c1 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -21,10 +21,10 @@ def create_new_member(email, invitation_token, admin, name, owner=False): query = cur.mogrify(f"""\ WITH u AS (INSERT INTO public.users (email, role, name, data) VALUES (%(email)s, %(role)s, %(name)s, %(data)s) - RETURNING user_id,email,role,name,appearance + RETURNING user_id,email,role,name ), - au 
AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) - VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) + au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at) + VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now())) RETURNING invitation_token ) SELECT u.user_id, @@ -32,7 +32,6 @@ def create_new_member(email, invitation_token, admin, name, owner=False): u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -61,7 +60,6 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False): email, role, name, - TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""", @@ -73,8 +71,7 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False): result = cur.fetchone() query = cur.mogrify("""\ UPDATE public.basic_authentication - SET generated_password = TRUE, - invitation_token = %(invitation_token)s, + SET invitation_token = %(invitation_token)s, invited_at = timezone('utc'::text, now()), change_pwd_expire_at = NULL, change_pwd_token = NULL @@ -132,11 +129,7 @@ def update(tenant_id, user_id, changes): else: sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s") else: - if key == "appearance": - sub_query_users.append(f"appearance = %(appearance)s::jsonb") - changes["appearance"] = json.dumps(changes[key]) - else: - sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s") + sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s") with pg_client.PostgresClient() as cur: if 
len(sub_query_users) > 0: @@ -151,11 +144,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""", {"user_id": user_id, **changes}) ) if len(sub_query_bauth) > 0: @@ -170,11 +161,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""", {"user_id": user_id, **changes}) ) @@ -244,15 +233,13 @@ def get(user_id, tenant_id): cur.execute( cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, email, role, - name, - basic_authentication.generated_password, + name, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - appearance, api_key FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE @@ -262,7 +249,7 @@ def get(user_id, tenant_id): {"userId": user_id}) ) r = cur.fetchone() - return helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + return helper.dict_to_camel_case(r) def generate_new_api_key(user_id): @@ -282,7 +269,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance"] 
+ ALLOW_EDIT = ["name", "email", "admin"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -315,11 +302,6 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): return {"data": user} -def edit_appearance(user_id, tenant_id, changes): - updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) - return {"data": updated_user} - - def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -329,8 +311,7 @@ def get_by_email_only(email): 1 AS tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -353,8 +334,7 @@ def get_by_email_reset(email, reset_token): 1 AS tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -377,8 +357,7 @@ def get_members(tenant_id): users.email, users.role, users.name, - users.created_at, - basic_authentication.generated_password, + users.created_at, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -581,11 +560,9 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): 1 AS tenant_id, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE 
END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) @@ -599,7 +576,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if r is not None: if for_change_password: return True - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) query = cur.mogrify( f"""UPDATE public.users SET jwt_iat = timezone('utc'::text, now()) diff --git a/api/routers/core.py b/api/routers/core.py index 813577b88..3008e94b7 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -966,6 +966,11 @@ def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)} +@app.get('/notifications/count', tags=['notifications']) +def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)} + + @app.get('/notifications/{notificationId}/view', tags=['notifications']) def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)} @@ -1075,13 +1080,6 @@ def edit_account(data: schemas.EditUserSchema = Body(...), editor_id=context.user_id) -@app.post('/account/appearance', tags=["account"]) -@app.put('/account/appearance', tags=["account"]) -def edit_account_appearance(data: schemas.EditUserAppearanceSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - 
return users.edit_appearance(tenant_id=context.tenant_id, user_id=context.user_id, changes=data.dict()) - - @app.post('/account/password', tags=["account"]) @app.put('/account/password', tags=["account"]) def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index e7e87e76c..7791c5677 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -67,17 +67,17 @@ def login(data: schemas.UserLoginSchema = Body(...)): @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) + t = tenants.get_by_tenant_id(context.tenant_id) + if t is not None: + t.pop("createdAt") + t["tenantName"] = t.pop("name") return { 'data': { **r, - "limits": { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) - }, + **t, **license.get_status(context.tenant_id), "smtp": helper.has_smtp(), - "iceServers": assist.get_ice_servers() + # "iceServers": assist.get_ice_servers() } } @@ -199,29 +199,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = m_key=key, project_id=projectId)} -@app.get('/plans', tags=["plan"]) -def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": license.get_status(context.tenant_id) - } - - @public_app.get('/general_stats', tags=["private"], include_in_schema=False) def get_general_stats(): return {"data": {"sessions:": sessions.count_all()}} -@app.get('/client', tags=['projects']) -def get_client(context: schemas.CurrentContext = Depends(OR_context)): - r = tenants.get_by_tenant_id(context.tenant_id) - if r is not None: - r.pop("createdAt") - return { - 'data': r - } - - @app.get('/projects', tags=['projects']) def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True)} + + +@app.get('/limits', tags=['accounts']) +def get_limits(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': { + "limits": { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) + }, + } + } diff --git a/api/schemas.py b/api/schemas.py index ff42fd7d3..fc61999ad 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -38,15 +38,10 @@ class EditUserSchema(BaseModel): name: Optional[str] = Field(None) email: Optional[EmailStr] = Field(None) admin: Optional[bool] = Field(False) - appearance: Optional[dict] = Field({}) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) -class EditUserAppearanceSchema(BaseModel): - appearance: dict = Field(...) - - class ForgetPasswordPayloadSchema(_Grecaptcha): email: EmailStr = Field(...) diff --git a/ee/api/chalicelib/core/license.py b/ee/api/chalicelib/core/license.py index 2423567de..c1c9823d7 100644 --- a/ee/api/chalicelib/core/license.py +++ b/ee/api/chalicelib/core/license.py @@ -7,21 +7,11 @@ from chalicelib.utils import pg_client def get_status(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) + # cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) + cur.mogrify("SELECT edition FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) r = cur.fetchone() license = unlock.get_license() return { "hasActivePlan": unlock.is_valid(), - "current": { - "edition": r.get("edition", "").lower(), - "versionNumber": r.get("version_number", ""), - "license": license[0:2] + "*" * (len(license) - 4) + license[-2:], - "expirationDate": unlock.get_expiration_date(), - "teamMember": config("numberOfSeats", cast=int, 
default=0) - }, - "count": { - "teamMember": r.get("t_users"), - "projects": r.get("t_projects"), - "capturedSessions": r.get("t_sessions") - } + "edition": r.get("edition", "").lower(), } diff --git a/ee/api/chalicelib/core/notifications.py b/ee/api/chalicelib/core/notifications.py index 41c26b74c..5ba58f242 100644 --- a/ee/api/chalicelib/core/notifications.py +++ b/ee/api/chalicelib/core/notifications.py @@ -26,6 +26,27 @@ def get_all(tenant_id, user_id): return rows +def get_all_count(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + SELECT COUNT(notifications.*) + FROM public.notifications + LEFT JOIN (SELECT notification_id + FROM public.user_viewed_notifications + WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) + WHERE (notifications.tenant_id =%(tenant_id)s + OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL + ORDER BY created_at DESC + LIMIT 100;""", + {"tenant_id": tenant_id, "user_id": user_id}) + ) + rows = helper.list_to_camel_case(cur.fetchall()) + for r in rows: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + return rows + + def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: return False diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 4014f5e92..0415efc79 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -80,8 +80,8 @@ def create_step1(data: schemas.UserSignupSchema): RETURNING user_id,email,role,name,role_id ), au AS ( - INSERT INTO public.basic_authentication (user_id, password, generated_password) - VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE) + INSERT INTO public.basic_authentication (user_id, password) + VALUES ((SELECT 
user_id FROM u), crypt(%(password)s, gen_salt('bf', 12))) ) INSERT INTO public.projects (tenant_id, name, active) VALUES ((SELECT t.tenant_id FROM t), %(projectName)s, TRUE) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 5d28dc395..1f97fbd09 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -25,10 +25,10 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))) - RETURNING tenant_id,user_id,email,role,name,appearance, role_id + RETURNING tenant_id,user_id,email,role,name, role_id ), - au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) - VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) + au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at) + VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now())) RETURNING invitation_token ) SELECT u.user_id AS id, @@ -36,7 +36,6 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -74,7 +73,6 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own email, role, name, - TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ 
-88,8 +86,7 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own result = cur.fetchone() query = cur.mogrify("""\ UPDATE public.basic_authentication - SET generated_password = TRUE, - invitation_token = %(invitation_token)s, + SET invitation_token = %(invitation_token)s, invited_at = timezone('utc'::text, now()), change_pwd_expire_at = NULL, change_pwd_token = NULL @@ -147,10 +144,7 @@ def update(tenant_id, user_id, changes): else: sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s") else: - if key == "appearance": - sub_query_users.append(f"appearance = %(appearance)s::jsonb") - changes["appearance"] = json.dumps(changes[key]) - elif helper.key_to_snake_case(key) == "role_id": + if helper.key_to_snake_case(key) == "role_id": sub_query_users.append("""role_id=(SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))""") @@ -171,11 +165,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -192,11 +184,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.role_id;""", 
{"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -272,12 +262,10 @@ def get(user_id, tenant_id): users.user_id AS id, email, role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - appearance, api_key, origin, role_id, @@ -296,7 +284,7 @@ def get(user_id, tenant_id): {"userId": user_id, "tenant_id": tenant_id}) ) r = cur.fetchone() - return helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + return helper.dict_to_camel_case(r) def generate_new_api_key(user_id): @@ -316,7 +304,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"] + ALLOW_EDIT = ["name", "email", "admin", "roleId"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -349,11 +337,6 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): return {"data": user} -def edit_appearance(user_id, tenant_id, changes): - updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) - return {"data": updated_user} - - def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -363,8 +346,7 @@ def get_by_email_only(email): users.tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -389,8 +371,7 @@ def get_by_email_reset(email, reset_token): users.tenant_id, users.email, users.role, - users.name, - 
basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -414,8 +395,7 @@ def get_members(tenant_id): users.email, users.role, users.name, - users.created_at, - basic_authentication.generated_password, + users.created_at, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -642,11 +622,9 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): users.tenant_id, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.origin, users.role_id, roles.name AS role_name, @@ -678,7 +656,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if r is not None: if for_change_password: return True - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) jwt_iat = change_jwt_iat(r['id']) return { "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], @@ -698,11 +676,9 @@ def authenticate_sso(email, internal_id, exp=None): users.tenant_id, users.role, users.name, - False AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, origin, role_id FROM public.users AS users @@ -713,7 +689,7 @@ def authenticate_sso(email, internal_id, exp=None): r = cur.fetchone() if r 
is not None: - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) return authorizers.generate_jwt(r['id'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", @@ -740,11 +716,9 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id= u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - u.appearance, origin FROM u;""", {"tenant_id": tenant_id, "email": email, "internal_id": internal_id, @@ -774,7 +748,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in created_at= default, api_key= default, jwt_iat= NULL, - appearance= default, weekly_report= default WHERE user_id = %(user_id)s RETURNING * @@ -782,7 +755,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in au AS ( UPDATE public.basic_authentication SET password= default, - generated_password= default, invitation_token= default, invited_at= default, change_pwd_token= default, @@ -795,11 +767,9 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - u.appearance, origin FROM u;""", {"tenant_id": tenant_id, "email": email, "internal_id": internal_id, diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 31ed1d099..6d7cf8e73 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -72,18 +72,18 @@ def login(data: schemas.UserLoginSchema = Body(...)): @app.get('/account', 
tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) + t = tenants.get_by_tenant_id(context.tenant_id) + if t is not None: + t.pop("createdAt") + t["tenantName"] = t.pop("name") return { 'data': { **r, - "limits": { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) - }, + **t, **license.get_status(context.tenant_id), "smtp": helper.has_smtp(), "saml2": SAML2_helper.is_saml2_available(), - "iceServers": assist.get_ice_servers() + # "iceServers": assist.get_ice_servers() } } @@ -209,30 +209,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = m_key=key, project_id=projectId)} -@app.get('/plans', tags=["plan"]) -def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": license.get_status(context.tenant_id) - } - - @public_app.get('/general_stats', tags=["private"], include_in_schema=False) def get_general_stats(): return {"data": {"sessions:": sessions.count_all()}} -@app.get('/client', tags=['projects']) -def get_client(context: schemas.CurrentContext = Depends(OR_context)): - r = tenants.get_by_tenant_id(context.tenant_id) - if r is not None: - r.pop("createdAt") - - return { - 'data': r - } - - @app.get('/projects', tags=['projects']) def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True, user_id=context.user_id)} + + +@app.get('/limits', tags=['accounts']) +def get_limits(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': { + "limits": { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) + } + } + } diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql 
b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql similarity index 88% rename from ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql rename to ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 325d419ba..1ab026565 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,18 +1,25 @@ BEGIN; -CREATE OR REPLACE FUNCTION openreplay_version() +CREATE OR REPLACE +FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1-ee' -$$ LANGUAGE sql IMMUTABLE; +SELECT 'v1.6.1-ee' $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards - ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + ADD COLUMN IF NOT +EXISTS description text NOT NULL DEFAULT ''; -CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); -CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); -CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); +CREATE +INDEX IF NOT +EXISTS traces_created_at_idx ON traces (created_at); +CREATE +INDEX IF NOT +EXISTS traces_action_idx ON traces (action); +CREATE +INDEX IF NOT +EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) @@ -115,9 +122,9 @@ VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 - }', true, true, true, 'avg_fps', 'predefined', 'overview') -ON CONFLICT (predefined_key) DO UPDATE - SET name=excluded.name, + }', true, true, true, 'avg_fps', 'predefined', 'overview') ON CONFLICT (predefined_key) DO +UPDATE +SET name =excluded.name, category=excluded.category, default_config=excluded.default_config, is_predefined=excluded.is_predefined, @@ -126,4 +133,10 @@ ON CONFLICT (predefined_key) DO UPDATE metric_type=excluded.metric_type, view_type=excluded.view_type; +ALTER TABLE users + DROP COLUMN appearance; + +ALTER 
TABLE basic_authentication + DROP COLUMN generated_password; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index ec29b1dfc..efb4bb1d1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.0-ee' +SELECT 'v1.7.0-ee' $$ LANGUAGE sql IMMUTABLE; @@ -187,67 +187,6 @@ $$ name text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": 
true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, api_key text UNIQUE default generate_api_key(20) not null, jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT'{}'::jsonb, @@ -264,7 +203,6 @@ $$ ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, invitation_token text NULL DEFAULT NULL, invited_at timestamp without time zone NULL DEFAULT NULL, change_pwd_token text NULL DEFAULT NULL, diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql similarity index 97% rename from scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql rename to scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 4f1c7c28f..233f4fc5e 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -2,7 +2,7 @@ BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1' +SELECT 'v1.7.0' $$ LANGUAGE sql IMMUTABLE; @@ -121,4 +121,10 @@ ON CONFLICT (predefined_key) DO UPDATE metric_type=excluded.metric_type, view_type=excluded.view_type; +ALTER TABLE users + DROP COLUMN appearance; + +ALTER TABLE basic_authentication + DROP COLUMN generated_password; + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 91a590688..519997f72 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -6,7 +6,7 
@@ CREATE SCHEMA IF NOT EXISTS events; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.0' +SELECT 'v1.7.0' $$ LANGUAGE sql IMMUTABLE; -- --- accounts.sql --- @@ -142,67 +142,6 @@ $$ name text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": 
true - }'::jsonb, api_key text UNIQUE default generate_api_key(20) not null, jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT '{}'::jsonb, @@ -213,7 +152,6 @@ $$ ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, invitation_token text NULL DEFAULT NULL, invited_at timestamp without time zone NULL DEFAULT NULL, change_pwd_token text NULL DEFAULT NULL, From 3a70c8bef6d01dff1694e39a0e066aecf450c680 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:12:08 +0200 Subject: [PATCH 055/221] feat(api): fixed edition feat(api): fixed expiration date feat(api): fixed change name feat(api): fixed change role feat(api): fixed has password feat(api): refactored edit user feat(api): refactored edit member --- api/chalicelib/core/license.py | 9 +-- api/chalicelib/core/signup.py | 4 +- api/chalicelib/core/telemetry.py | 5 +- api/chalicelib/core/tenants.py | 6 +- api/chalicelib/core/users.py | 49 +++++++++-------- api/chalicelib/utils/helper.py | 4 -- api/routers/core.py | 2 +- api/routers/core_dynamic.py | 4 +- api/schemas.py | 9 ++- ee/api/chalicelib/core/license.py | 13 ++--- ee/api/chalicelib/core/signup.py | 4 +- ee/api/chalicelib/core/telemetry.py | 9 +-- ee/api/chalicelib/core/tenants.py | 8 +-- ee/api/chalicelib/core/users.py | 55 +++++++++++-------- ee/api/routers/core_dynamic.py | 2 +- ee/api/schemas_ee.py | 11 +++- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 51 ++++++++++------- .../db/init_dbs/postgresql/init_schema.sql | 3 +- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 26 ++++++--- .../db/init_dbs/postgresql/init_schema.sql | 3 +- 20 files changed, 149 insertions(+), 128 deletions(-) diff --git a/api/chalicelib/core/license.py b/api/chalicelib/core/license.py index 4a562ea7b..469753878 100644 --- a/api/chalicelib/core/license.py +++ b/api/chalicelib/core/license.py @@ -1,12 +1,9 @@ -from chalicelib.utils 
import pg_client +EDITION = 'foss' def get_status(tenant_id=None): - with pg_client.PostgresClient() as cur: - # cur.execute("SELECT * FROM public.tenants;") - cur.execute("SELECT edition FROM public.tenants;") - r = cur.fetchone() return { "hasActivePlan": True, - "edition": r.get("edition", "").upper() + "edition": EDITION, + "expirationDate": -1 } diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index 4d320e0be..146da7305 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -67,8 +67,8 @@ def create_step1(data: schemas.UserSignupSchema): } query = f"""\ WITH t AS ( - INSERT INTO public.tenants (name, version_number, edition) - VALUES (%(organizationName)s, (SELECT openreplay_version()), 'fos') + INSERT INTO public.tenants (name, version_number) + VALUES (%(organizationName)s, (SELECT openreplay_version())) RETURNING api_key ), u AS ( diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index fa27fbe1c..e12200809 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -1,10 +1,11 @@ from chalicelib.utils import pg_client import requests +from chalicelib.core import license -def process_data(data, edition='fos'): +def process_data(data): return { - 'edition': edition, + 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], 'user_id': data["user_id"], diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index db154525c..e5b8cc63c 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -1,7 +1,7 @@ import schemas from chalicelib.utils import pg_client from chalicelib.utils import helper -from chalicelib.core import users +from chalicelib.core import users, license def get_by_tenant_id(tenant_id): @@ -13,7 +13,7 @@ def get_by_tenant_id(tenant_id): name, api_key, created_at, - edition, + '{license.EDITION}' AS edition, version_number, opt_out FROM public.tenants @@ 
-67,7 +67,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: - return {"error": "unauthorized"} + return {"errors": ["unauthorized, needs admin or owner"]} if data.name is None and data.opt_out is None: return {"errors": ["please provide 'name' of 'optOut' attribute for update"]} changes = {} diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 408fb03c1..0e9852e2d 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -4,6 +4,7 @@ import secrets from decouple import config from fastapi import BackgroundTasks +import schemas from chalicelib.core import authorizers, metadata, projects from chalicelib.core import tenants, assist from chalicelib.utils import dev, email_helper @@ -240,7 +241,8 @@ def get(user_id, tenant_id): (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - api_key + api_key, + TRUE AS has_password FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.user_id = %(userId)s @@ -268,37 +270,36 @@ def generate_new_api_key(user_id): return helper.dict_to_camel_case(r) -def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin"] +def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id): user = get(user_id=user_id_to_update, tenant_id=tenant_id) - if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return {"errors": ["unauthorized"]} + _changes = {} if editor_id == 
user_id_to_update: - if user["superAdmin"]: - changes.pop("admin") - elif user["admin"] != changes["admin"]: - return {"errors": ["cannot change your own role"]} + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} - keys = list(changes.keys()) - for k in keys: - if k not in ALLOW_EDIT or changes[k] is None: - changes.pop(k) - keys = list(changes.keys()) + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email - if len(keys) > 0: - if "email" in keys and changes["email"] != user["email"]: - if email_exists(changes["email"]): - return {"errors": ["email already exists."]} - if get_deleted_user_by_email(changes["email"]) is not None: - return {"errors": ["email previously deleted."]} - if "admin" in keys: - changes["role"] = "admin" if changes.pop("admin") else "member" - if len(changes.keys()) > 0: - updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes) + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name - return {"data": updated_user} + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if len(_changes.keys()) > 0: + updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": updated_user} return {"data": user} diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 042b2a94b..2716cf111 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -365,10 +365,6 @@ def has_smtp(): return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0 -def get_edition(): - return "ee" if "ee" in 
config("ENTERPRISE_BUILD", default="").lower() else "foss" - - def old_search_payload_to_flat(values): # in case the old search body was passed if values.get("events") is not None: diff --git a/api/routers/core.py b/api/routers/core.py index 3008e94b7..5265287e6 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1076,7 +1076,7 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context @app.put('/account', tags=["account"]) def edit_account(data: schemas.EditUserSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 7791c5677..918d81541 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -24,7 +24,7 @@ def get_all_signup(): return {"data": {"tenants": tenants.tenants_exists(), "sso": None, "ssoProvider": None, - "edition": helper.get_edition()}} + "edition": license.EDITION}} @public_app.post('/login', tags=["authentication"]) @@ -181,7 +181,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/api/schemas.py b/api/schemas.py index fc61999ad..cb83789cd 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -12,7 +12,7 @@ def attribute_to_camel_case(snake_str): def transform_email(email: str) -> str: - return email.lower() if isinstance(email, str) else email 
+ return email.lower().strip() if isinstance(email, str) else email class _Grecaptcha(BaseModel): @@ -37,7 +37,7 @@ class UserSignupSchema(UserLoginSchema): class EditUserSchema(BaseModel): name: Optional[str] = Field(None) email: Optional[EmailStr] = Field(None) - admin: Optional[bool] = Field(False) + admin: Optional[bool] = Field(None) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) @@ -127,13 +127,11 @@ class CreateMemberSchema(BaseModel): _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) -class EditMemberSchema(BaseModel): +class EditMemberSchema(EditUserSchema): name: str = Field(...) email: EmailStr = Field(...) admin: bool = Field(False) - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - class EditPasswordByInvitationSchema(BaseModel): invitation: str = Field(...) @@ -796,6 +794,7 @@ class MetricTableViewType(str, Enum): class MetricType(str, Enum): timeseries = "timeseries" table = "table" + predefined = "predefined" class TableMetricOfType(str, Enum): diff --git a/ee/api/chalicelib/core/license.py b/ee/api/chalicelib/core/license.py index c1c9823d7..c067d4758 100644 --- a/ee/api/chalicelib/core/license.py +++ b/ee/api/chalicelib/core/license.py @@ -1,17 +1,12 @@ -from decouple import config - from chalicelib.core import unlock -from chalicelib.utils import pg_client + +EDITION = 'ee' def get_status(tenant_id): - with pg_client.PostgresClient() as cur: - cur.execute( - # cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) - cur.mogrify("SELECT edition FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) - r = cur.fetchone() license = unlock.get_license() return { "hasActivePlan": unlock.is_valid(), - "edition": r.get("edition", "").lower(), + "edition": EDITION, + "expirationDate": unlock.get_expiration_date() } diff --git a/ee/api/chalicelib/core/signup.py 
b/ee/api/chalicelib/core/signup.py index 0415efc79..605520df4 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -64,8 +64,8 @@ def create_step1(data: schemas.UserSignupSchema): "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})} query = """\ WITH t AS ( - INSERT INTO public.tenants (name, version_number, edition) - VALUES (%(companyName)s, (SELECT openreplay_version()), 'ee') + INSERT INTO public.tenants (name, version_number) + VALUES (%(companyName)s, (SELECT openreplay_version())) RETURNING tenant_id, api_key ), r AS ( diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index 9c82290fb..51fd55787 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -1,10 +1,11 @@ from chalicelib.utils import pg_client +from chalicelib.core import license import requests -def process_data(data, edition='fos'): +def process_data(data): return { - 'edition': edition, + 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], 'user_id': data["user_id"], @@ -56,7 +57,7 @@ def compute(): ) data = cur.fetchall() requests.post('https://api.openreplay.com/os/telemetry', - json={"stats": [process_data(d, edition='ee') for d in data]}) + json={"stats": [process_data(d) for d in data]}) def new_client(tenant_id): @@ -67,4 +68,4 @@ def new_client(tenant_id): FROM public.tenants WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id})) data = cur.fetchone() - requests.post('https://api.openreplay.com/os/signup', json=process_data(data, edition='ee')) \ No newline at end of file + requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index 45491f654..cecb8a9cf 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -1,4 +1,4 @@ -from chalicelib.core import users +from chalicelib.core import 
users, license from chalicelib.utils import helper from chalicelib.utils import pg_client @@ -12,7 +12,7 @@ def get_by_tenant_key(tenant_key): t.name, t.api_key, t.created_at, - t.edition, + '{license.EDITION}' AS edition, t.version_number, t.opt_out FROM public.tenants AS t @@ -32,7 +32,7 @@ def get_by_tenant_id(tenant_id): t.name, t.api_key, t.created_at, - t.edition, + '{license.EDITION}' AS edition, t.version_number, t.opt_out, t.user_id AS tenant_key @@ -90,7 +90,7 @@ def update(tenant_id, user_id, data): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: - return {"error": "unauthorized"} + return {"errors": ["unauthorized, needs admin or owner"]} if "name" not in data and "optOut" not in data: return {"errors": ["please provide 'name' of 'optOut' attribute for update"]} changes = {} diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 1f97fbd09..91c2384c4 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -4,6 +4,8 @@ import secrets from decouple import config from fastapi import BackgroundTasks +import schemas +import schemas_ee from chalicelib.core import authorizers, metadata, projects, roles from chalicelib.core import tenants, assist from chalicelib.utils import dev, SAML2_helper @@ -303,37 +305,44 @@ def generate_new_api_key(user_id): return helper.dict_to_camel_case(r) -def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "roleId"] +def edit(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id): user = get(user_id=user_id_to_update, tenant_id=tenant_id) - if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return 
{"errors": ["unauthorized"]} + _changes = {} if editor_id == user_id_to_update: - if user["superAdmin"]: - changes.pop("admin") - elif user["admin"] != changes["admin"]: - return {"errors": ["cannot change your own role"]} + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + if changes.roleId is not None: + if user["superAdmin"]: + changes.roleId = None + elif changes.roleId != user["roleId"]: + return {"errors": ["cannot change your own role"]} - keys = list(changes.keys()) - for k in keys: - if k not in ALLOW_EDIT or changes[k] is None: - changes.pop(k) - keys = list(changes.keys()) + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email - if len(keys) > 0: - if "email" in keys and changes["email"] != user["email"]: - if email_exists(changes["email"]): - return {"errors": ["email already exists."]} - if get_deleted_user_by_email(changes["email"]) is not None: - return {"errors": ["email previously deleted."]} - if "admin" in keys: - changes["role"] = "admin" if changes.pop("admin") else "member" - if len(changes.keys()) > 0: - updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes) + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name - return {"data": updated_user} + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if changes.roleId is not None: + _changes["roleId"] = changes.roleId + + if len(_changes.keys()) > 0: + updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": updated_user} return {"data": user} diff --git a/ee/api/routers/core_dynamic.py 
b/ee/api/routers/core_dynamic.py index 6d7cf8e73..667d42c79 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -187,7 +187,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 794dfdd64..0375521ad 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -1,6 +1,6 @@ from typing import Optional, List, Literal -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, EmailStr import schemas from chalicelib.utils.TimeUTC import TimeUTC @@ -21,7 +21,14 @@ class CreateMemberSchema(schemas.CreateMemberSchema): roleId: Optional[int] = Field(None) -class EditMemberSchema(schemas.EditMemberSchema): +class EditUserSchema(schemas.EditUserSchema): + roleId: Optional[int] = Field(None) + + +class EditMemberSchema(EditUserSchema): + name: str = Field(...) + email: EmailStr = Field(...) + admin: bool = Field(False) roleId: int = Field(...) 
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 1ab026565..01153848f 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,25 +1,42 @@ BEGIN; CREATE OR REPLACE -FUNCTION openreplay_version() + FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1-ee' $$ LANGUAGE sql IMMUTABLE; +SELECT 'v1.6.1-ee' +$$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT -EXISTS description text NOT NULL DEFAULT ''; + EXISTS description text NOT NULL DEFAULT ''; CREATE -INDEX IF NOT -EXISTS traces_created_at_idx ON traces (created_at); + INDEX IF NOT + EXISTS traces_created_at_idx ON traces (created_at); CREATE -INDEX IF NOT -EXISTS traces_action_idx ON traces (action); + INDEX IF NOT + EXISTS traces_action_idx ON traces (action); CREATE -INDEX IF NOT -EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + INDEX IF NOT + EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + + + +ALTER TABLE users + DROP COLUMN IF EXISTS appearance; + +ALTER TABLE basic_authentication + DROP COLUMN IF EXISTS generated_password; + +ALTER TABLE tenants + DROP COLUMN IF EXISTS edition; + +ALTER TABLE dashboards + ALTER COLUMN user_id DROP NOT NULL; + +COMMIT; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) @@ -122,21 +139,13 @@ VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 - }', true, true, true, 'avg_fps', 'predefined', 'overview') ON CONFLICT (predefined_key) DO -UPDATE -SET name =excluded.name, + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name =excluded.name, category=excluded.category, default_config=excluded.default_config, is_predefined=excluded.is_predefined, is_template=excluded.is_template, 
is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; - -ALTER TABLE users - DROP COLUMN appearance; - -ALTER TABLE basic_authentication - DROP COLUMN generated_password; - -COMMIT; \ No newline at end of file + view_type=excluded.view_type; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index efb4bb1d1..5bf02f4e1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -147,7 +147,6 @@ $$ api_key text UNIQUE default generate_api_key(20) not null, created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - edition varchar(3) NOT NULL, version_number text NOT NULL, license text NULL, opt_out bool NOT NULL DEFAULT FALSE, @@ -777,7 +776,7 @@ $$ ( dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + user_id integer REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 233f4fc5e..3f5552640 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -9,6 +9,22 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + + +ALTER TABLE users + DROP COLUMN IF EXISTS appearance; + +ALTER TABLE basic_authentication + DROP COLUMN IF EXISTS generated_password; + +ALTER TABLE tenants + DROP COLUMN IF EXISTS edition; + +ALTER TABLE dashboards + ALTER COLUMN user_id 
DROP NOT NULL; + +COMMIT; + INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) VALUES ('Captured sessions', 'web vitals', '{ @@ -119,12 +135,4 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; - -ALTER TABLE users - DROP COLUMN appearance; - -ALTER TABLE basic_authentication - DROP COLUMN generated_password; - -COMMIT; \ No newline at end of file + view_type=excluded.view_type; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 519997f72..6cbd17dc8 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -121,7 +121,6 @@ $$ name text NOT NULL, api_key text NOT NULL DEFAULT generate_api_key(20), created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - edition varchar(3) NOT NULL, version_number text NOT NULL, license text NULL, opt_out bool NOT NULL DEFAULT FALSE, @@ -928,7 +927,7 @@ $$ ( dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + user_id integer REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, From 932c18f65a9e246174a429d960272923e02965cc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:18:22 +0200 Subject: [PATCH 056/221] feat(api): fixed notifications count query --- api/chalicelib/core/notifications.py | 4 +--- ee/api/chalicelib/core/notifications.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/notifications.py 
b/api/chalicelib/core/notifications.py index ce3c4d61a..c3eadcccd 100644 --- a/api/chalicelib/core/notifications.py +++ b/api/chalicelib/core/notifications.py @@ -34,9 +34,7 @@ def get_all_count(tenant_id, user_id): LEFT JOIN (SELECT notification_id FROM public.user_viewed_notifications WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) - WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL - ORDER BY created_at DESC - LIMIT 100;""", + WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""", {"user_id": user_id}) ) row = cur.fetchone() diff --git a/ee/api/chalicelib/core/notifications.py b/ee/api/chalicelib/core/notifications.py index 5ba58f242..0069063c7 100644 --- a/ee/api/chalicelib/core/notifications.py +++ b/ee/api/chalicelib/core/notifications.py @@ -36,9 +36,7 @@ def get_all_count(tenant_id, user_id): FROM public.user_viewed_notifications WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) WHERE (notifications.tenant_id =%(tenant_id)s - OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL - ORDER BY created_at DESC - LIMIT 100;""", + OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""", {"tenant_id": tenant_id, "user_id": user_id}) ) rows = helper.list_to_camel_case(cur.fetchall()) From 734d1333a956ae4e445a258e76a756d53fe3df35 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:34:52 +0200 Subject: [PATCH 057/221] feat(api): EE fixed edition --- ee/api/routers/core_dynamic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 667d42c79..196764ad9 100644 --- a/ee/api/routers/core_dynamic.py +++ 
b/ee/api/routers/core_dynamic.py @@ -24,7 +24,7 @@ def get_all_signup(): return {"data": {"tenants": tenants.tenants_exists(), "sso": SAML2_helper.is_saml2_available(), "ssoProvider": SAML2_helper.get_saml2_provider(), - "edition": helper.get_edition()}} + "edition": license.EDITION}} @public_app.post('/login', tags=["authentication"]) From 1445c7273728d185c5da6ab4a5aca361cf562c04 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 19:17:55 +0200 Subject: [PATCH 058/221] feat(api): funnel widget --- api/chalicelib/core/custom_metrics.py | 7 ++++++- api/chalicelib/core/funnels.py | 16 ++++++++++++++++ api/schemas.py | 1 + 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 3e7fc100a..f26fdb6a9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -2,7 +2,7 @@ import json from typing import Union import schemas -from chalicelib.core import sessions +from chalicelib.core import sessions, funnels from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -43,6 +43,11 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): + if data.metric_type == schemas.MetricType.funnel: + if len(data.series) == 0: + return {} + return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) + series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: return series_charts diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 16e95989d..e0eb99dc3 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -251,6 +251,22 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: 
schemas.Fu "totalDropDueToIssues": total_drop_due_to_issues}} +# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): +def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema): + data.events = filter_stages(__parse_events(data.events)) + data.events = __fix_stages(data.events) + if len(data.events) == 0: + return {"stages": [], "totalDropDueToIssues": 0} + insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) + insights = helper.list_to_camel_case(insights) + if len(insights) > 0: + if total_drop_due_to_issues > insights[0]["sessionsCount"]: + total_drop_due_to_issues = insights[0]["sessionsCount"] + insights[-1]["dropDueToIssues"] = total_drop_due_to_issues + return {"stages": insights, + "totalDropDueToIssues": total_drop_due_to_issues} + + def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: diff --git a/api/schemas.py b/api/schemas.py index cb83789cd..0902fb269 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -795,6 +795,7 @@ class MetricType(str, Enum): timeseries = "timeseries" table = "table" predefined = "predefined" + funnel = "funnel" class TableMetricOfType(str, Enum): From b2a778a0d74353f4232447e37216eb495653984e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 20:10:40 +0200 Subject: [PATCH 059/221] feat(api): funnel widget issues --- api/chalicelib/core/custom_metrics.py | 19 +++++++++++++ api/chalicelib/core/funnels.py | 13 +++++++++ api/routers/subs/metrics.py | 28 +++++++++++++++++-- api/schemas.py | 8 ++++++ .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 12 ++++---- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 ++ .../db/init_dbs/postgresql/init_schema.sql | 12 ++++---- 8 files changed, 80 
insertions(+), 15 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index f26fdb6a9..0e9061a11 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -110,6 +110,25 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi return results +def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + results.append({"seriesId": s.series_id, "seriesName": s.name, + **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}) + + return results + + def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): results = [] if data.series is None: diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index e0eb99dc3..22774fb16 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -296,6 +296,19 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe last_stage=len(data.events)))} +# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): +def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema): + data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) + if len(data.events) < 0: + return {"issues": []} + + return { + "issues": helper.dict_to_camel_case( + significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, + 
last_stage=len(data.events)))} + + def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): with pg_client.PostgresClient() as cur: cur.execute( diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index a33b75d0b..57e3b28f7 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -1,7 +1,7 @@ from fastapi import Body, Depends import schemas -from chalicelib.core import dashboards, custom_metrics +from chalicelib.core import dashboards, custom_metrics, funnels from or_dependencies import OR_context from routers.base import get_routers @@ -107,13 +107,24 @@ def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchem @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) -def try_custom_metric_sessions(projectId: int, - data: schemas.CustomMetricSessionsPayloadSchema = Body(...), +def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) return {"data": data} +@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) +def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + if len(data.series) == 0: + return {"data": []} + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp + data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter) + return {"data": data} + + @app.post('/{projectId}/metrics', tags=["dashboard"]) @app.put('/{projectId}/metrics', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics', 
tags=["customMetrics"]) @@ -149,6 +160,17 @@ def get_custom_metric_sessions(projectId: int, metric_id: int, return {"data": data} +@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) +def get_custom_metric__funnel_issues(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...), diff --git a/api/schemas.py b/api/schemas.py index 0902fb269..c1979811e 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -872,6 +872,14 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema): series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1) + @root_validator(pre=True) + def transform_series(cls, values): + if values.get("series") is not None and len(values["series"]) > 1 and values.get( + "metric_type") == MetricType.funnel.value: + values["series"] = [values["series"][0]] + + return values + class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema): series_id: Optional[int] = Field(None) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 01153848f..1fb572626 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -37,6 +37,7 @@ ALTER TABLE 
dashboards ALTER COLUMN user_id DROP NOT NULL; COMMIT; +ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 5bf02f4e1..d044f1636 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -201,11 +201,11 @@ $$ CREATE TABLE IF NOT EXISTS basic_authentication ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + password text DEFAULT NULL, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, changed_at timestamp, UNIQUE (user_id) ); @@ -726,7 +726,7 @@ $$ CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); - CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); + CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined','funnel'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); CREATE TABLE IF NOT EXISTS metrics ( diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 3f5552640..b0275a3a8 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -25,6 
+25,8 @@ ALTER TABLE dashboards COMMIT; +ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; + INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) VALUES ('Captured sessions', 'web vitals', '{ diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 6cbd17dc8..f870b7824 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -150,11 +150,11 @@ $$ CREATE TABLE basic_authentication ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + password text DEFAULT NULL, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, changed_at timestamp, UNIQUE (user_id) ); @@ -877,7 +877,7 @@ $$ CREATE INDEX jobs_start_at_idx ON jobs (start_at); CREATE INDEX jobs_project_id_idx ON jobs (project_id); - CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); + CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined', 'funnel'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); CREATE TABLE metrics ( From 6816dedaff4adebd1adfd998502f21cf881208fa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 8 Jun 2022 17:21:13 +0200 Subject: [PATCH 060/221] feat(api): errors widget --- api/chalicelib/core/custom_metrics.py | 28 +++++++++++++++++++++++++-- api/chalicelib/core/errors.py | 15 ++++++-------- 
api/routers/core.py | 2 +- api/routers/subs/metrics.py | 23 +++++++++++++++++----- api/schemas.py | 10 +++++++--- ee/api/chalicelib/core/errors.py | 21 +++++++++----------- 6 files changed, 67 insertions(+), 32 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 0e9061a11..7c04b6c6b 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -2,7 +2,7 @@ import json from typing import Union import schemas -from chalicelib.core import sessions, funnels +from chalicelib.core import sessions, funnels, errors from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -42,11 +42,16 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): return results -def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): +def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): if data.metric_type == schemas.MetricType.funnel: if len(data.series) == 0: return {} return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) + elif data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.issues \ + and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ + and data.metric_format == schemas.MetricFormatType.errors_list: + return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: @@ -129,6 +134,25 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric return results +def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, 
flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + results.append({"seriesId": s.series_id, "seriesName": s.name, + **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}) + + return results + + def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): results = [] if data.series is None: diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 983d091f8..2026f9232 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -425,10 +425,9 @@ def __get_sort_key(key): def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): - empty_response = {"data": { - 'total': 0, - 'errors': [] - }} + empty_response = {'total': 0, + 'errors': [] + } platform = None for f in data.filters: @@ -544,7 +543,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): rows = cur.fetchall() total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: - return {"data": {"count": total}} + return {"count": total} if total == 0: rows = [] @@ -592,10 +591,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): and (r["message"].lower() != "script error." 
or len(r["stack"][0]["absPath"]) > 0))] offset -= len(rows) return { - "data": { - 'total': total - offset, - 'errors': helper.list_to_camel_case(rows) - } + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) } diff --git a/api/routers/core.py b/api/routers/core.py index 5265287e6..2a38d0a75 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -903,7 +903,7 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), @app.post('/{projectId}/errors/search', tags=['errors']) def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return errors.search(data, projectId, user_id=context.user_id) + return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index 57e3b28f7..e00d2d4f7 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -102,7 +102,7 @@ def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_c @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.merged_live(project_id=projectId, data=data)} + return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)} @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) @@ -162,10 +162,23 @@ def get_custom_metric_sessions(projectId: int, metric_id: int, @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) -def get_custom_metric__funnel_issues(projectId: int, metric_id: int, - data: schemas.CustomMetricSessionsPayloadSchema = Body(...), - context: schemas.CurrentContext = 
Depends(OR_context)): - data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) +def get_custom_metric_funnel_issues(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) +def get_custom_metric_errors_list(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} diff --git a/api/schemas.py b/api/schemas.py index c1979811e..d1b84e915 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -479,6 +479,11 @@ class IssueType(str, Enum): js_exception = 'js_exception' +class MetricFormatType(str, Enum): + session_count = 'sessionCount' + errors_list = 'errors' + + class __MixedSearchFilter(BaseModel): is_event: bool = Field(...) @@ -761,8 +766,7 @@ class MobileSignPayloadSchema(BaseModel): keys: List[str] = Field(...) 
-class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema): - # class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema): +class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema, SearchErrorsSchema): startDate: Optional[int] = Field(None) endDate: Optional[int] = Field(None) sort: Optional[str] = Field(None) @@ -836,7 +840,7 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): metric_type: MetricType = Field(MetricType.timeseries) metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id) metric_value: List[IssueType] = Field([]) - metric_format: Optional[str] = Field(None) + metric_format: Optional[MetricFormatType] = Field(None) # metricFraction: float = Field(None, gt=0, lt=1) # This is used to handle wrong values sent by the UI diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index ecf1aeda2..9477f8ec7 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -83,7 +83,7 @@ def __rearrange_chart_details(start_at, end_at, density, chart): for i in range(len(chart)): chart[i] = {"timestamp": chart[i][0], "count": chart[i][1]} chart = metrics.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density, - neutral={"count": 0}) + neutral={"count": 0}) return chart @@ -466,10 +466,9 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): - empty_response = {"data": { - 'total': 0, - 'errors': [] - }} + empty_response = {'total': 0, + 'errors': [] + } platform = None for f in data.filters: @@ -585,7 +584,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): rows = cur.fetchall() total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: - return {"data": {"count": total}} + return {"count": total} if total == 0: rows = [] @@ -633,10 +632,8 @@ def 
search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] offset -= len(rows) return { - "data": { - 'total': total - offset, - 'errors': helper.list_to_camel_case(rows) - } + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) } @@ -790,8 +787,8 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo for i in range(len(r["chart"])): r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]} r["chart"] = metrics.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, - end_time=data.endDate, - density=data.density, neutral={"count": 0}) + end_time=data.endDate, + density=data.density, neutral={"count": 0}) offset = len(rows) rows = [r for r in rows if r["stack"] is None or (len(r["stack"]) == 0 or len(r["stack"]) > 1 From adb8e2c404b5987a5141c97b7be43fb7bf38aeb7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 8 Jun 2022 19:03:06 +0200 Subject: [PATCH 061/221] feat(api): errors widget chart feat(api): funnels widget chart --- api/chalicelib/core/custom_metrics.py | 61 +++++++++++++++++++-------- api/chalicelib/core/events.py | 10 ++++- api/chalicelib/core/sessions.py | 4 +- 3 files changed, 54 insertions(+), 21 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 7c04b6c6b..5f7e12656 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -42,16 +42,34 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): return results +def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema): + return data.metric_type == schemas.MetricType.funnel + + +def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): + if len(data.series) == 0: + return {} + return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, 
data=data.series[0].filter) + + +def __is_errors_list(data): + return data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.issues \ + and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ + and data.metric_format == schemas.MetricFormatType.errors_list + + +def __get_errors_list(project_id, user_id, data): + if len(data.series) == 0: + return [] + return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) + + def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): - if data.metric_type == schemas.MetricType.funnel: - if len(data.series) == 0: - return {} - return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) - elif data.metric_type == schemas.MetricType.table \ - and data.metric_of == schemas.TableMetricOfType.issues \ - and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ - and data.metric_format == schemas.MetricFormatType.errors_list: - return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) + if __is_funnel_chart(data): + return __get_funnel_chart(project_id=project_id, data=data) + elif __is_errors_list(data): + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: @@ -85,15 +103,22 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa if metric is None: return None metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) - series_charts = __try_live(project_id=project_id, data=metric) - if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: - return series_charts - results = [{}] * 
len(series_charts[0]) - for i in range(len(results)): - for j, series_chart in enumerate(series_charts): - results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], - metric.series[j].name: series_chart[i]["count"]} - return results + + return merged_live(project_id=project_id, data=metric, user_id=user_id) + # if __is_funnel_chart(metric): + # return __get_funnel_chart(project_id=project_id, data=metric) + # elif __is_errors_list(metric): + # return __get_errors_list(project_id=project_id, user_id=user_id, data=metric) + # + # series_charts = __try_live(project_id=project_id, data=metric) + # if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: + # return series_charts + # results = [{}] * len(series_charts[0]) + # for i in range(len(results)): + # for j, series_chart in enumerate(series_charts): + # results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], + # metric.series[j].name: series_chart[i]["count"]} + # return results def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 272b86002..d07cf1042 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -435,7 +435,15 @@ def __get_autocomplete_table(value, project_id): query = cur.mogrify(" UNION ".join(sub_queries) + ";", {"project_id": project_id, "value": helper.string_to_sql_like(value), "svalue": helper.string_to_sql_like("^" + value)}) - cur.execute(query) + try: + cur.execute(query) + except Exception as err: + print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") + print(query.decode('UTF-8')) + print("--------- VALUE -----------") + print(value) + print("--------------------") + raise err results = helper.list_to_camel_case(cur.fetchall()) return results diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 
e717f1d07..7543f9c9d 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -254,9 +254,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e cur.execute(main_query) except Exception as err: print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") - print(main_query) + print(main_query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(data.dict()) + print(data.json()) print("--------------------") raise err if errors_only: From 7b1e854c5373643f4175092c88ec0f3a0ee5316c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:13:05 +0200 Subject: [PATCH 062/221] feat(api): table of sessions widget --- api/chalicelib/core/custom_metrics.py | 26 ++++++++++++++++++++++++-- api/schemas.py | 1 + 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 5f7e12656..d6ebebc76 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -48,7 +48,10 @@ def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema): def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): if len(data.series) == 0: - return {} + return { + "stages": [], + "totalDropDueToIssues": 0 + } return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) @@ -61,15 +64,34 @@ def __is_errors_list(data): def __get_errors_list(project_id, user_id, data): if len(data.series) == 0: - return [] + return { + "total": 0, + "errors": [] + } return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) +def __is_sessions_list(data): + return data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.sessions + + +def __get_sessions_list(project_id, user_id, data): + if len(data.series) == 0: + return { + "total": 0, + "sessions": [] + } + return 
sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) + + def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): if __is_funnel_chart(data): return __get_funnel_chart(project_id=project_id, data=data) elif __is_errors_list(data): return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + elif __is_sessions_list(data): + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: diff --git a/api/schemas.py b/api/schemas.py index d1b84e915..ab063a9b9 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -810,6 +810,7 @@ class TableMetricOfType(str, Enum): user_id = FilterType.user_id.value issues = FilterType.issue.value visited_url = EventType.location.value + sessions = "SESSIONS" class TimeseriesMetricOfType(str, Enum): From 9411f0f576f268463709ff93f8cdd3a3d273441b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:42:52 +0200 Subject: [PATCH 063/221] feat(api): changed slowest_domains response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index 05c5233f8..fd5809d06 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1721,7 +1721,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT resources.url_host AS domain, - AVG(resources.duration) AS avg + AVG(resources.duration) AS value FROM events.resources INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY resources.url_host @@ -1740,7 +1740,7 @@ def get_slowest_domains(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), avg = cur.fetchone()["avg"] else: avg = 0 - return {"avg": avg, "partition": rows} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 2d6aa7201..5c4db36f1 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1661,7 +1661,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT resources.url_host AS domain, - COALESCE(avgOrNull(resources.duration),0) AS avg + COALESCE(avgOrNull(resources.duration),0) AS value FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY resources.url_host @@ -1675,7 +1675,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, "partition": rows} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), From 89b3d84230f4f4108d0e32048ca84c30896b3e9f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:53:55 +0200 Subject: [PATCH 064/221] feat(api): changed speed_location response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index fd5809d06..cf9fc6a1c 100644 --- a/api/chalicelib/core/metrics.py +++ 
b/api/chalicelib/core/metrics.py @@ -1069,7 +1069,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- pg_sub_query.append("pages.speed_index>0") with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS avg + pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS value FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY sessions.user_country @@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- avg = cur.fetchone()["avg"] else: avg = 0 - return {"avg": avg, "chart": helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond} def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 5c4db36f1..9beb13cc1 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1046,7 +1046,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS avg + ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS value FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY pages.user_country @@ -1059,7 +1059,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, "chart": 
helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond} def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), From 6e9e5dceb77566d6f66964b44a9f89c8bb8e2706 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:54:25 +0200 Subject: [PATCH 065/221] feat(api): changed speed_location response --- api/chalicelib/core/metrics.py | 2 +- ee/api/chalicelib/core/metrics.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index cf9fc6a1c..abed83cbb 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1073,7 +1073,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY sessions.user_country - ORDER BY avg,sessions.user_country;""" + ORDER BY value, sessions.user_country;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 9beb13cc1..9bbfe13ef 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1050,7 +1050,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY pages.user_country - ORDER BY avg,pages.user_country;""" + ORDER BY value ,pages.user_country;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} From c81ce9bf7d3b2165839b127d6ca4b08f50f41f72 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 
14:09:13 +0200 Subject: [PATCH 066/221] feat(api): changed crashes response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index abed83cbb..87b870bbe 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1500,7 +1500,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), pg_sub_query_chart.append("m_issues.type = 'crash'") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COUNT(sessions) AS count + COUNT(sessions) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT sessions.session_id @@ -1558,7 +1558,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), versions.append({v["version"]: v["count"] / (r["total"] / 100)}) r["versions"] = versions - return {"chart": rows, "browsers": browsers} + return {"chart": rows, "browsers": browsers,"unit": schemas.TemplatePredefinedUnits.count} def __get_neutral(rows, add_All_if_empty=True): diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 9bbfe13ef..640394b3c 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1460,7 +1460,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(sessions.session_id) AS count + COUNT(sessions.session_id) AS value FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1514,8 +1514,9 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), 
result = {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"count": 0}), - "browsers": browsers} + neutral={"value": 0}), + "browsers": browsers, + "unit": schemas.TemplatePredefinedUnits.count} return result From 6e5bdae7da6dbae1db08f5bad71316e2570790c3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 14:12:21 +0200 Subject: [PATCH 067/221] feat(api): changed pages_response_time_distribution response --- api/chalicelib/core/metrics.py | 7 ++++--- ee/api/chalicelib/core/metrics.py | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index 87b870bbe..c78200363 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1171,7 +1171,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( else: quantiles = [0 for i in range(len(quantiles_keys))] result = { - "avg": avg, + "value": avg, "total": sum(r["count"] for r in rows), "chart": [], "percentiles": [{ @@ -1179,7 +1179,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( "responseTime": int(quantiles[i]) } for i, v in enumerate(quantiles_keys) ], - "extremeValues": [{"count": 0}] + "extremeValues": [{"count": 0}], + "unit": schemas.TemplatePredefinedUnits.millisecond } rows = helper.list_to_camel_case(rows) _99 = result["percentiles"][-1]["responseTime"] @@ -1558,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), versions.append({v["version"]: v["count"] / (r["total"] / 100)}) r["versions"] = versions - return {"chart": rows, "browsers": browsers,"unit": schemas.TemplatePredefinedUnits.count} + return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count} def __get_neutral(rows, add_All_if_empty=True): diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 
640394b3c..a86af9315 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1133,7 +1133,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)}) result = { - "avg": avg, + "value": avg, "total": sum(r["count"] for r in rows), "chart": [], "percentiles": [{ @@ -1142,7 +1142,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( quantiles[0]["values"][i] if quantiles[0]["values"][i] is not None and not math.isnan( quantiles[0]["values"][i]) else 0)} for i, v in enumerate(quantiles_keys) ], - "extremeValues": [{"count": 0}] + "extremeValues": [{"count": 0}], + "unit": schemas.TemplatePredefinedUnits.millisecond } if len(rows) > 0: rows = helper.list_to_camel_case(rows) From 656e13f6e56b1162116436ecad3d899d0e3e7eac Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 17:23:17 +0200 Subject: [PATCH 068/221] feat(api): changes feat(db): changes --- api/chalicelib/core/telemetry.py | 8 +++++--- ee/api/chalicelib/core/telemetry.py | 10 ++++++---- .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 14 ++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 2 +- .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 14 ++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 2 +- 6 files changed, 41 insertions(+), 9 deletions(-) diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index e12200809..8098c9cd7 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -8,7 +8,8 @@ def process_data(data): 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], - 'user_id': data["user_id"], + 'user_id': data["tenant_key"], + 'tenant_key': data["tenant_key"], 'owner_email': None if data["opt_out"] else data["email"], 'organization_name': None if data["opt_out"] else 
data["name"], 'users_count': data["t_users"], @@ -28,7 +29,7 @@ def compute(): t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0), t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0), t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0) - RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);""" ) data = cur.fetchone() @@ -40,6 +41,7 @@ def new_client(): cur.execute( f"""SELECT *, (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email - FROM public.tenants;""") + FROM public.tenants + LIMIT 1;""") data = cur.fetchone() requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index 51fd55787..a002f8501 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -8,7 +8,8 @@ def process_data(data): 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], - 'user_id': data["user_id"], + 'user_id': data["tenant_key"], + 'tenant_key': data["tenant_key"], 'owner_email': None if data["opt_out"] else data["email"], 'organization_name': None if data["opt_out"] else data["name"], 'users_count': data["t_users"], @@ -51,7 +52,7 @@ def compute(): FROM public.tenants ) AS all_tenants WHERE tenants.tenant_id = all_tenants.tenant_id - RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, (SELECT openreplay_version()) AS version_number, (SELECT email FROM public.users WHERE role = 'owner' AND users.tenant_id=tenants.tenant_id LIMIT 1);""" ) @@ -64,8 +65,9 @@ def new_client(tenant_id): with 
pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""SELECT *, - (SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s) AS email + (SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s AND role='owner' LIMIT 1) AS email FROM public.tenants - WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id})) + WHERE tenant_id=%(tenant_id)s + LIMIT 1;""", {"tenant_id": tenant_id})) data = cur.fetchone() requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 1fb572626..6569ef682 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -36,6 +36,20 @@ ALTER TABLE tenants ALTER TABLE dashboards ALTER COLUMN user_id DROP NOT NULL; +DO +$$ + BEGIN + IF EXISTS(SELECT * + FROM information_schema.columns + WHERE table_name = 'tenants' + and column_name = 'user_id') + THEN + ALTER TABLE tenants + RENAME COLUMN user_id TO tenant_key; + END IF; + END +$$; + COMMIT; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index d044f1636..50cd912fa 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -142,7 +142,7 @@ $$ CREATE TABLE IF NOT EXISTS tenants ( tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id text NOT NULL DEFAULT generate_api_key(20), + tenant_key text NOT NULL DEFAULT generate_api_key(20), name text NOT NULL, api_key text UNIQUE default generate_api_key(20) not null, created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index b0275a3a8..8f07edf0f 
100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -23,6 +23,20 @@ ALTER TABLE tenants ALTER TABLE dashboards ALTER COLUMN user_id DROP NOT NULL; +DO +$$ + BEGIN + IF EXISTS(SELECT * + FROM information_schema.columns + WHERE table_name = 'tenants' + and column_name = 'user_id') + THEN + ALTER TABLE tenants + RENAME COLUMN user_id TO tenant_key; + END IF; + END +$$; + COMMIT; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index f870b7824..b26483e9b 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -117,7 +117,7 @@ $$ CREATE TABLE tenants ( tenant_id integer NOT NULL DEFAULT 1, - user_id text NOT NULL DEFAULT generate_api_key(20), + tenant_key text NOT NULL DEFAULT generate_api_key(20), name text NOT NULL, api_key text NOT NULL DEFAULT generate_api_key(20), created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), From b16b3e3b8749fc21951dad5045e2a1682c814d09 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 17:37:49 +0200 Subject: [PATCH 069/221] feat(api): changes --- ee/api/chalicelib/core/tenants.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index cecb8a9cf..71119fd13 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -16,9 +16,9 @@ def get_by_tenant_key(tenant_key): t.version_number, t.opt_out FROM public.tenants AS t - WHERE t.user_id = %(user_id)s AND t.deleted_at ISNULL + WHERE t.tenant_key = %(tenant_key)s AND t.deleted_at ISNULL LIMIT 1;""", - {"user_id": tenant_key}) + {"tenant_key": tenant_key}) ) return helper.dict_to_camel_case(cur.fetchone()) @@ -35,7 +35,7 @@ def 
get_by_tenant_id(tenant_id): '{license.EDITION}' AS edition, t.version_number, t.opt_out, - t.user_id AS tenant_key + t.tenant_key FROM public.tenants AS t WHERE t.tenant_id = %(tenantId)s AND t.deleted_at ISNULL LIMIT 1;""", From e1b233bac8ffad8d3a27c0dba49c7bae7a04398e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 11:35:25 +0200 Subject: [PATCH 070/221] feat(api): changed connection pool configuration feat(alerts): changed connection pool configuration --- api/.env.default | 3 ++- api/Dockerfile.alerts | 1 + api/chalicelib/utils/pg_client.py | 4 +++- ee/api/.env.default | 3 ++- ee/api/Dockerfile.alerts | 1 + 5 files changed, 9 insertions(+), 3 deletions(-) diff --git a/api/.env.default b/api/.env.default index 30ff0b02d..aa14fc993 100644 --- a/api/.env.default +++ b/api/.env.default @@ -36,7 +36,8 @@ pg_password=asayerPostgres pg_port=5432 pg_user=postgres pg_timeout=30 -pg_minconn=45 +pg_minconn=20 +pg_maxconn=50 PG_RETRY_MAX=50 PG_RETRY_INTERVAL=2 put_S3_TTL=20 diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index 7d8dd8200..65668f79b 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -3,6 +3,7 @@ LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME alerts ENV pg_minconn 2 +ENV pg_maxconn 10 # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 3d60dda5c..1c4625873 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -52,7 +52,9 @@ def make_pool(): except (Exception, psycopg2.DatabaseError) as error: print("Error while closing all connexions to PostgreSQL", error) try: - postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), 100, **PG_CONFIG) + postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), + config("pg_maxconn", cast=int, default=80), + **PG_CONFIG) if (postgreSQL_pool):
print("Connection pool created successfully") except (Exception, psycopg2.DatabaseError) as error: diff --git a/ee/api/.env.default b/ee/api/.env.default index 8215908b2..7687566d7 100644 --- a/ee/api/.env.default +++ b/ee/api/.env.default @@ -45,7 +45,8 @@ pg_password=asayerPostgres pg_port=5432 pg_user=postgres pg_timeout=30 -pg_minconn=45 +pg_minconn=20 +pg_maxconn=50 PG_RETRY_MAX=50 PG_RETRY_INTERVAL=2 put_S3_TTL=20 diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index ae8d308c8..1deff0a57 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -4,6 +4,7 @@ LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* ENV APP_NAME alerts ENV pg_minconn 2 +ENV pg_maxconn 10 # Add Tini # Startup daemon From 4305e037450e5dcabc163d5a4579923b490d82b3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 11:53:47 +0200 Subject: [PATCH 071/221] feat(api): ignore weekly report if SMTP not configured --- api/chalicelib/core/weekly_report.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 3d857ccc0..bebdd9f6e 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -29,6 +29,9 @@ def edit_config(user_id, weekly_report): def cron(): + if not helper.has_smtp(): + print("!!! 
No SMTP configuration found, ignoring weekly report") + return with pg_client.PostgresClient(long_query=True) as cur: params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), "1_week_ago": TimeUTC.midnight(delta_days=-7), From 5592e13d9b4bb22cadc4ac3f342a931ce205c55f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 12:31:29 +0200 Subject: [PATCH 072/221] feat(api): fixed weekly report feat(api): optimised weekly report --- api/chalicelib/core/weekly_report.py | 13 +++++++------ ee/api/.gitignore | 1 - ee/api/clean.sh | 1 - .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 1 + .../helm/db/init_dbs/postgresql/init_schema.sql | 1 + scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- scripts/helm/db/init_dbs/postgresql/init_schema.sql | 2 ++ 7 files changed, 12 insertions(+), 9 deletions(-) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index bebdd9f6e..88e785b94 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -33,7 +33,8 @@ def cron(): print("!!! 
No SMTP configuration found, ignoring weekly report") return with pg_client.PostgresClient(long_query=True) as cur: - params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), + params = {"tomorrow": TimeUTC.midnight(delta_days=1), + "3_days_ago": TimeUTC.midnight(delta_days=-3), "1_week_ago": TimeUTC.midnight(delta_days=-7), "2_week_ago": TimeUTC.midnight(delta_days=-14), "5_week_ago": TimeUTC.midnight(delta_days=-35)} @@ -46,18 +47,18 @@ def cron(): COALESCE(week_0_issues.count, 0) AS this_week_issues_count, COALESCE(week_1_issues.count, 0) AS past_week_issues_count, COALESCE(month_1_issues.count, 0) AS past_month_issues_count - FROM public.projects + FROM (SELECT project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects INNER JOIN LATERAL ( SELECT sessions.project_id FROM public.sessions WHERE sessions.project_id = projects.project_id AND start_ts >= %(3_days_ago)s + AND start_ts < %(tomorrow)s LIMIT 1) AS recently_active USING (project_id) INNER JOIN LATERAL ( SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails FROM public.users - WHERE users.tenant_id = projects.tenant_id - AND users.deleted_at ISNULL + WHERE users.deleted_at ISNULL AND users.weekly_report ) AS users ON (TRUE) LEFT JOIN LATERAL ( @@ -66,6 +67,7 @@ def cron(): INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count @@ -82,8 +84,7 @@ def cron(): WHERE sessions.project_id = projects.project_id AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT - ) AS month_1_issues ON (TRUE) - WHERE projects.deleted_at ISNULL;"""), params) + ) AS month_1_issues ON (TRUE);"""), 
params) projects_data = cur.fetchall() emails_to_send = [] for p in projects_data: diff --git a/ee/api/.gitignore b/ee/api/.gitignore index fb839d5e6..a0bd649f3 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -214,7 +214,6 @@ Pipfile /chalicelib/core/socket_ios.py /chalicelib/core/sourcemaps.py /chalicelib/core/sourcemaps_parser.py -/chalicelib/core/weekly_report.py /chalicelib/saml /chalicelib/utils/html/ /chalicelib/utils/__init__.py diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 861d1d9f1..549228366 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -38,7 +38,6 @@ rm -rf ./chalicelib/core/slack.py rm -rf ./chalicelib/core/socket_ios.py rm -rf ./chalicelib/core/sourcemaps.py rm -rf ./chalicelib/core/sourcemaps_parser.py -rm -rf ./chalicelib/core/weekly_report.py rm -rf ./chalicelib/saml rm -rf ./chalicelib/utils/html/ rm -rf ./chalicelib/utils/__init__.py diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 6569ef682..7b5169c3c 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -51,6 +51,7 @@ $$ $$; COMMIT; +CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 50cd912fa..76ed78d87 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -258,6 +258,7 @@ $$ CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key); + CREATE INDEX IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects 
(project_id) WHERE deleted_at IS NULL; DROP TRIGGER IF EXISTS on_insert_or_update ON projects; CREATE TRIGGER on_insert_or_update AFTER INSERT OR UPDATE diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 8f07edf0f..00bf4ec1d 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -38,7 +38,7 @@ $$ $$; COMMIT; - +CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index b26483e9b..f06ff4f9a 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -201,6 +201,8 @@ $$ ); CREATE INDEX projects_project_key_idx ON public.projects (project_key); + CREATE INDEX projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; + CREATE TRIGGER on_insert_or_update AFTER INSERT OR UPDATE ON projects From 0886e3856a717cf18c514521cc4af61a47897df5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 12:33:36 +0200 Subject: [PATCH 073/221] feat(api): EE changed weekly report feat(api): changed login response --- api/routers/core_dynamic.py | 2 - ee/api/chalicelib/core/weekly_report.py | 245 ++++++++++++++++++++++++ ee/api/routers/core_dynamic.py | 2 - 3 files changed, 245 insertions(+), 4 deletions(-) create mode 100644 ee/api/chalicelib/core/weekly_report.py diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 918d81541..a407e2833 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -52,9 +52,7 
@@ def login(data: schemas.UserLoginSchema = Body(...)): c = tenants.get_by_tenant_id(tenant_id) c.pop("createdAt") c["smtp"] = helper.has_smtp() - c["iceServers"] = assist.get_ice_servers() r["smtp"] = c["smtp"] - r["iceServers"] = c["iceServers"] return { 'jwt': r.pop('jwt'), 'data': { diff --git a/ee/api/chalicelib/core/weekly_report.py b/ee/api/chalicelib/core/weekly_report.py new file mode 100644 index 000000000..e652010d4 --- /dev/null +++ b/ee/api/chalicelib/core/weekly_report.py @@ -0,0 +1,245 @@ +from chalicelib.utils import pg_client, helper, email_helper +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.helper import get_issue_title + +LOWEST_BAR_VALUE = 3 + + +def get_config(user_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""\ + SELECT users.weekly_report + FROM public.users + WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s + LIMIT 1;""", {"user_id": user_id})) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit_config(user_id, weekly_report): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""\ + UPDATE public.users + SET weekly_report= %(weekly_report)s + WHERE users.deleted_at ISNULL + AND users.user_id=%(user_id)s + RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report})) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def cron(): + if not helper.has_smtp(): + print("!!! 
No SMTP configuration found, ignoring weekly report") + return + with pg_client.PostgresClient(long_query=True) as cur: + params = {"tomorrow": TimeUTC.midnight(delta_days=1), + "3_days_ago": TimeUTC.midnight(delta_days=-3), + "1_week_ago": TimeUTC.midnight(delta_days=-7), + "2_week_ago": TimeUTC.midnight(delta_days=-14), + "5_week_ago": TimeUTC.midnight(delta_days=-35)} + cur.execute(cur.mogrify("""\ + SELECT project_id, + name AS project_name, + users.emails AS emails, + TO_CHAR(DATE_TRUNC('day', now()) - INTERVAL '1 week', 'Mon. DDth, YYYY') AS period_start, + TO_CHAR(DATE_TRUNC('day', now()), 'Mon. DDth, YYYY') AS period_end, + COALESCE(week_0_issues.count, 0) AS this_week_issues_count, + COALESCE(week_1_issues.count, 0) AS past_week_issues_count, + COALESCE(month_1_issues.count, 0) AS past_month_issues_count + FROM (SELECT tenant_id, project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects + INNER JOIN LATERAL ( + SELECT sessions.project_id + FROM public.sessions + WHERE sessions.project_id = projects.project_id + AND start_ts >= %(3_days_ago)s + AND start_ts < %(tomorrow)s + LIMIT 1) AS recently_active USING (project_id) + INNER JOIN LATERAL ( + SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails + FROM public.users + WHERE users.tenant_id = projects.tenant_id + AND users.deleted_at ISNULL + AND users.weekly_report + ) AS users ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp < %(tomorrow)s + ) AS week_0_issues ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL 
'1 week') * 1000)::BIGINT + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + ) AS week_1_issues ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + ) AS month_1_issues ON (TRUE);"""), params) + projects_data = cur.fetchall() + emails_to_send = [] + for p in projects_data: + params["project_id"] = p["project_id"] + print(f"checking {p['project_name']} : {p['project_id']}") + if len(p["emails"]) == 0 \ + or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0: + print('ignore') + continue + print("valid") + p["past_week_issues_evolution"] = helper.__decimal_limit( + helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1) + p["past_month_issues_evolution"] = helper.__decimal_limit( + helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1) + cur.execute(cur.mogrify(""" + SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short, + TO_CHAR(timestamp_i, 'Mon. 
DD, YYYY') AS day_long, + ( + SELECT COUNT(*) + FROM events_common.issues INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT + AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT + ) AS issues_count + FROM generate_series( + DATE_TRUNC('day', now()) - INTERVAL '7 days', + DATE_TRUNC('day', now()) - INTERVAL '1 day', + '1 day'::INTERVAL + ) AS timestamp_i + ORDER BY timestamp_i;""", params)) + days_partition = cur.fetchall() + max_days_partition = max(x['issues_count'] for x in days_partition) + for d in days_partition: + if max_days_partition <= 0: + d["value"] = LOWEST_BAR_VALUE + else: + d["value"] = d["issues_count"] * 100 / max_days_partition + d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE + cur.execute(cur.mogrify("""\ + SELECT type, COUNT(*) AS count + FROM events_common.issues INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT + GROUP BY type + ORDER BY count DESC, type + LIMIT 4;""", params)) + issues_by_type = cur.fetchall() + max_issues_by_type = sum(i["count"] for i in issues_by_type) + for i in issues_by_type: + i["type"] = get_issue_title(i["type"]) + if max_issues_by_type <= 0: + i["value"] = LOWEST_BAR_VALUE + else: + i["value"] = i["count"] * 100 / max_issues_by_type + cur.execute(cur.mogrify("""\ + SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short, + TO_CHAR(timestamp_i, 'Mon. 
DD, YYYY') AS day_long, + COALESCE((SELECT JSONB_AGG(sub) + FROM ( + SELECT type, COUNT(*) AS count + FROM events_common.issues + INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT + AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT + GROUP BY type + ORDER BY count + ) AS sub), '[]'::JSONB) AS partition + FROM generate_series( + DATE_TRUNC('day', now()) - INTERVAL '7 days', + DATE_TRUNC('day', now()) - INTERVAL '1 day', + '1 day'::INTERVAL + ) AS timestamp_i + GROUP BY timestamp_i + ORDER BY timestamp_i;""", params)) + issues_breakdown_by_day = cur.fetchall() + for i in issues_breakdown_by_day: + i["sum"] = sum(x["count"] for x in i["partition"]) + for j in i["partition"]: + j["type"] = get_issue_title(j["type"]) + max_days_partition = max(i["sum"] for i in issues_breakdown_by_day) + for i in issues_breakdown_by_day: + for j in i["partition"]: + if max_days_partition <= 0: + j["value"] = LOWEST_BAR_VALUE + else: + j["value"] = j["count"] * 100 / max_days_partition + j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE + cur.execute(cur.mogrify(""" + SELECT type, + COUNT(*) AS issue_count, + COUNT(DISTINCT session_id) AS sessions_count, + (SELECT COUNT(DISTINCT sessions.session_id) + FROM public.sessions + INNER JOIN events_common.issues AS sci USING (session_id) + INNER JOIN public.issues AS si USING (issue_id) + WHERE si.project_id = %(project_id)s + AND sessions.project_id = %(project_id)s + AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 weeks') * 1000)::BIGINT + AND si.type = mi.type + AND sessions.duration IS NOT NULL + ) AS last_week_sessions_count, + (SELECT COUNT(DISTINCT sci.session_id) + FROM public.sessions + INNER JOIN events_common.issues AS sci USING 
(session_id) + INNER JOIN public.issues AS si USING (issue_id) + WHERE si.project_id = %(project_id)s + AND sessions.project_id = %(project_id)s + AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 weeks') * 1000)::BIGINT + AND si.type = mi.type + AND sessions.duration IS NOT NULL + ) AS last_month_sessions_count + FROM events_common.issues + INNER JOIN public.issues AS mi USING (issue_id) + INNER JOIN public.sessions USING (session_id) + WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + GROUP BY type + ORDER BY issue_count DESC;""", params)) + issues_breakdown_list = cur.fetchall() + if len(issues_breakdown_list) > 4: + others = {"type": "Others", + "sessions_count": sum(i["sessions_count"] for i in issues_breakdown_list[4:]), + "issue_count": sum(i["issue_count"] for i in issues_breakdown_list[4:]), + "last_week_sessions_count": sum( + i["last_week_sessions_count"] for i in issues_breakdown_list[4:]), + "last_month_sessions_count": sum( + i["last_month_sessions_count"] for i in issues_breakdown_list[4:])} + issues_breakdown_list = issues_breakdown_list[:4] + issues_breakdown_list.append(others) + for i in issues_breakdown_list: + i["type"] = get_issue_title(i["type"]) + i["last_week_sessions_evolution"] = helper.__decimal_limit( + helper.__progress(i["sessions_count"], i["last_week_sessions_count"]), 1) + i["last_month_sessions_evolution"] = helper.__decimal_limit( + helper.__progress(i["sessions_count"], i["last_month_sessions_count"]), 1) + i["sessions_count"] = f'{i["sessions_count"]:,}' + keep_types = [i["type"] for i in issues_breakdown_list] + for i in issues_breakdown_by_day: + keep = [] + for j in i["partition"]: + if j["type"] in keep_types: + 
keep.append(j) + i["partition"] = keep + emails_to_send.append({"email": p.pop("emails"), + "data": { + **p, + "days_partition": days_partition, + "issues_by_type": issues_by_type, + "issues_breakdown_by_day": issues_breakdown_by_day, + "issues_breakdown_list": issues_breakdown_list + }}) + print(f">>> Sending weekly report to {len(emails_to_send)} email-group") + for e in emails_to_send: + email_helper.weekly_report2(recipients=e["email"], data=e["data"]) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 196764ad9..89f6a9bc9 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -57,9 +57,7 @@ def login(data: schemas.UserLoginSchema = Body(...)): c = tenants.get_by_tenant_id(tenant_id) c.pop("createdAt") c["smtp"] = helper.has_smtp() - c["iceServers"] = assist.get_ice_servers() r["smtp"] = c["smtp"] - r["iceServers"] = c["iceServers"] return { 'jwt': r.pop('jwt'), 'data': { From 3217a55bca70858febee7585e1dde00ecb86f56c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:29:54 +0200 Subject: [PATCH 074/221] feat(api): changed login response --- api/chalicelib/core/users.py | 2 +- api/routers/core.py | 33 +++++++++++++++++++++-- api/routers/core_dynamic.py | 39 +--------------------------- ee/api/chalicelib/core/users.py | 2 +- ee/api/routers/core_dynamic.py | 46 +++------------------------------ 5 files changed, 37 insertions(+), 85 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 0e9852e2d..082e9aca9 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -557,7 +557,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, 1 AS tenant_id, users.role, users.name, diff --git a/api/routers/core.py b/api/routers/core.py index 2a38d0a75..c997229ba 100644 --- a/api/routers/core.py 
+++ b/api/routers/core.py @@ -1,7 +1,8 @@ from typing import Union from decouple import config -from fastapi import Depends, Body, BackgroundTasks +from fastapi import Depends, Body, BackgroundTasks, HTTPException +from starlette import status import schemas from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \ @@ -13,7 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, users, \ custom_metrics, saved_search from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import email_helper +from chalicelib.utils import email_helper, helper, captcha from chalicelib.utils.TimeUTC import TimeUTC from or_dependencies import OR_context from routers.base import get_routers @@ -21,6 +22,34 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() +@public_app.post('/login', tags=["authentication"]) +def login(data: schemas.UserLoginSchema = Body(...)): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid captcha." + ) + + r = users.authenticate(data.email, data.password, for_plugin=False) + if r is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="You’ve entered invalid Email or Password." 
+ ) + if "errors" in r: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=r["errors"][0] + ) + r["smtp"] = helper.has_smtp() + return { + 'jwt': r.pop('jwt'), + 'data': { + "user": r + } + } + + @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index a407e2833..06cd2937a 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -1,17 +1,15 @@ from typing import Optional from decouple import config -from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from fastapi import Body, Depends, BackgroundTasks from starlette.responses import RedirectResponse import schemas -from chalicelib.core import assist from chalicelib.core import integrations_manager from chalicelib.core import sessions from chalicelib.core import tenants, users, metadata, projects, license from chalicelib.core import webhook from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha from chalicelib.utils import helper from or_dependencies import OR_context from routers.base import get_routers @@ -27,41 +25,6 @@ def get_all_signup(): "edition": license.EDITION}} -@public_app.post('/login', tags=["authentication"]) -def login(data: schemas.UserLoginSchema = Body(...)): - if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid captcha." - ) - - r = users.authenticate(data.email, data.password, for_plugin=False) - if r is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="You’ve entered invalid Email or Password." 
- ) - - tenant_id = r.pop("tenantId") - - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["smtp"] = helper.has_smtp() - r["smtp"] = c["smtp"] - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 91c2384c4..6a51a1d80 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -627,7 +627,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.tenant_id, users.role, users.name, diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 89f6a9bc9..73e597b52 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -1,17 +1,17 @@ from typing import Optional from decouple import config -from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from fastapi import Body, Depends, BackgroundTasks from starlette.responses import RedirectResponse import schemas import schemas_ee from chalicelib.core import integrations_manager from chalicelib.core import sessions -from chalicelib.core import tenants, users, metadata, projects, license, assist +from chalicelib.core import tenants, users, metadata, projects, license from chalicelib.core import webhook from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha, SAML2_helper +from chalicelib.utils import SAML2_helper from chalicelib.utils import helper from or_dependencies import OR_context from routers.base import get_routers 
@@ -27,46 +27,6 @@ def get_all_signup(): "edition": license.EDITION}} -@public_app.post('/login', tags=["authentication"]) -def login(data: schemas.UserLoginSchema = Body(...)): - if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid captcha." - ) - - r = users.authenticate(data.email, data.password, for_plugin=False) - if r is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="You’ve entered invalid Email or Password." - ) - if "errors" in r: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail=r["errors"][0] - ) - - tenant_id = r.pop("tenantId") - - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["smtp"] = helper.has_smtp() - r["smtp"] = c["smtp"] - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) From 8d4d61103abd046f281e487da40e7c02ffdd52ee Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:44:05 +0200 Subject: [PATCH 075/221] feat(api): fixed login response --- api/chalicelib/core/users.py | 2 +- ee/api/chalicelib/core/users.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 082e9aca9..8eb08bd35 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -586,7 +586,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): {"user_id": r["id"]}) cur.execute(query) return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], + "jwt": authorizers.generate_jwt(r['userId'], 
r['tenantId'], TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"), "email": email, diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 6a51a1d80..49081b12a 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -666,7 +666,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if for_change_password: return True r = helper.dict_to_camel_case(r) - jwt_iat = change_jwt_iat(r['id']) + jwt_iat = change_jwt_iat(r['userId']) return { "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], TimeUTC.datetime_to_timestamp(jwt_iat), @@ -681,7 +681,7 @@ def authenticate_sso(email, internal_id, exp=None): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.tenant_id, users.role, users.name, @@ -699,7 +699,7 @@ def authenticate_sso(email, internal_id, exp=None): if r is not None: r = helper.dict_to_camel_case(r) - jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) return authorizers.generate_jwt(r['id'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", exp=(exp + jwt_iat // 1000) if exp is not None else None) From e796e6c795b7be93b53a6b7082427b6d64975978 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:49:24 +0200 Subject: [PATCH 076/221] feat(api): fixed login response --- ee/api/chalicelib/core/users.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 49081b12a..e5d5d17c9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -668,7 +668,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): r = helper.dict_to_camel_case(r) jwt_iat = 
change_jwt_iat(r['userId']) return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], + "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], TimeUTC.datetime_to_timestamp(jwt_iat), aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"), "email": email, @@ -700,7 +700,7 @@ def authenticate_sso(email, internal_id, exp=None): if r is not None: r = helper.dict_to_camel_case(r) jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) - return authorizers.generate_jwt(r['id'], r['tenantId'], + return authorizers.generate_jwt(r['userId'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", exp=(exp + jwt_iat // 1000) if exp is not None else None) return None From dc02594da813f9b263d37bb45b7eaac7c7755c43 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 16:31:08 +0200 Subject: [PATCH 077/221] feat(api): optimised weekly report --- api/chalicelib/core/weekly_report.py | 10 +++++----- ee/api/chalicelib/core/weekly_report.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 88e785b94..952bf584b 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -66,7 +66,7 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= %(1_week_ago)s AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( @@ -74,16 +74,16 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM 
DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(2_week_ago)s ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(5_week_ago)s ) AS month_1_issues ON (TRUE);"""), params) projects_data = cur.fetchall() emails_to_send = [] diff --git a/ee/api/chalicelib/core/weekly_report.py b/ee/api/chalicelib/core/weekly_report.py index e652010d4..90256d795 100644 --- a/ee/api/chalicelib/core/weekly_report.py +++ b/ee/api/chalicelib/core/weekly_report.py @@ -67,7 +67,7 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= %(1_week_ago)s AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( @@ -75,16 +75,16 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(2_week_ago)s ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND 
issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(5_week_ago)s ) AS month_1_issues ON (TRUE);"""), params) projects_data = cur.fetchall() emails_to_send = [] From 41b96321febea9fc8261a8ad231daef1f26194a3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:19:51 +0200 Subject: [PATCH 078/221] feat(api): custom metrics config --- api/chalicelib/core/custom_metrics.py | 6 ++++-- api/schemas.py | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index d6ebebc76..a2794f22e 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -228,9 +228,11 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} query = cur.mogrify(f"""\ WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, - view_type, metric_type, metric_of, metric_value, metric_format) + view_type, metric_type, metric_of, metric_value, + metric_format, default_config) VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, - %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, %(metric_format)s) + %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, + %(metric_format)s, %(default_config)s) RETURNING *) INSERT INTO metric_series(metric_id, index, name, filter) diff --git a/api/schemas.py b/api/schemas.py index ab063a9b9..5c1a33927 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -874,8 +874,15 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): alias_generator = attribute_to_camel_case +class CustomMetricsConfigSchema(BaseModel): + col: Optional[int] = 
Field(default=2) + row: Optional[int] = Field(default=2) + position: Optional[int] = Field(default=0) + + class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema): series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1) + config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema()) @root_validator(pre=True) def transform_series(cls, values): From 36e5ba6389369ae7ae465c492283becf6ebe6a62 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:36:22 +0200 Subject: [PATCH 079/221] feat(api): limited long task DB --- api/chalicelib/core/sessions.py | 6 +++--- api/chalicelib/utils/pg_client.py | 11 ++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 7543f9c9d..5f9a6d3ab 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1199,7 +1199,7 @@ def get_session_ids_by_user_ids(project_id, user_ids): def delete_sessions_by_session_ids(session_ids): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: query = cur.mogrify( """\ DELETE FROM public.sessions @@ -1213,7 +1213,7 @@ def delete_sessions_by_session_ids(session_ids): def delete_sessions_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: query = cur.mogrify( """\ DELETE FROM public.sessions @@ -1227,6 +1227,6 @@ def delete_sessions_by_user_ids(project_id, user_ids): def count_all(): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") return row.get("count", 0) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 1c4625873..2abc9f6c7 100644 --- a/api/chalicelib/utils/pg_client.py +++ 
b/api/chalicelib/utils/pg_client.py @@ -76,12 +76,17 @@ class PostgresClient: cursor = None long_query = False - def __init__(self, long_query=False): + def __init__(self, long_query=False, unlimited_query=False): self.long_query = long_query - if long_query: + if unlimited_query: + long_config = dict(_PG_CONFIG) + long_config["application_name"] += "-UNLIMITED" + self.connection = psycopg2.connect(**long_config) + elif long_query: long_config = dict(_PG_CONFIG) long_config["application_name"] += "-LONG" - self.connection = psycopg2.connect(**_PG_CONFIG) + long_config["options"] = f"-c statement_timeout={config('pg_long_timeout', cast=int, default=5*60) * 1000}" + self.connection = psycopg2.connect(**long_config) else: self.connection = postgreSQL_pool.getconn() From 974f78b84ac989b0ceab0ab48ceca002bbc9b296 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:51:47 +0200 Subject: [PATCH 080/221] feat(api): custom metrics config --- api/chalicelib/core/custom_metrics.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index a2794f22e..c3bdb134d 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -225,7 +225,9 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa _data[f"filter_{i}"] = s.filter.json() series_len = len(data.series) data.series = None - params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} + params = {"user_id": user_id, "project_id": project_id, + "default_config": json.dumps(data.config.dict()), + **data.dict(), **_data} query = cur.mogrify(f"""\ WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, view_type, metric_type, metric_of, metric_value, From 2b85ad3dfc16cbd33e49e9ab8ae4d03957ef0f4a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 13:19:24 +0200 Subject: [PATCH 081/221] feat(api): 
optimised get session details --- api/chalicelib/core/events.py | 7 +++---- api/chalicelib/core/resources.py | 13 ++++++++++--- api/chalicelib/core/sessions.py | 6 ++++-- ee/api/chalicelib/core/resources.py | 12 +++++++++--- 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index d07cf1042..8f978c40a 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -472,14 +472,13 @@ def search(text, event_type, project_id, source, key): return {"data": rows} -def get_errors_by_session_id(session_id): +def get_errors_by_session_id(session_id, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT er.*,ur.*, er.timestamp - s.start_ts AS time FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) - WHERE - er.session_id = %(session_id)s - ORDER BY timestamp;""", {"session_id": session_id})) + WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s + ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) errors = cur.fetchall() for e in errors: e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) diff --git a/api/chalicelib/core/resources.py b/api/chalicelib/core/resources.py index d85e56b6f..1e2f4718e 100644 --- a/api/chalicelib/core/resources.py +++ b/api/chalicelib/core/resources.py @@ -1,8 +1,10 @@ from chalicelib.utils import helper, pg_client +from decouple import config -def get_by_session_id(session_id, project_id): +def get_by_session_id(session_id, project_id, start_ts, duration): with pg_client.PostgresClient() as cur: + delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000 ch_query = """\ SELECT timestamp AS datetime, @@ -16,8 +18,13 @@ def get_by_session_id(session_id, project_id): success, COALESCE(status, CASE WHEN success THEN 200 END) AS status FROM events.resources 
INNER JOIN sessions USING (session_id) - WHERE session_id = %(session_id)s AND project_id= %(project_id)s;""" - params = {"session_id": session_id, "project_id": project_id} + WHERE session_id = %(session_id)s + AND project_id= %(project_id)s + AND sessions.start_ts=%(start_ts)s + AND resources.timestamp>=%(res_start_ts)s + AND resources.timestamp>=%(res_end_ts)s;""" + params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, + "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, } cur.execute(cur.mogrify(ch_query, params)) rows = cur.fetchall() return helper.list_to_camel_case(rows) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 5f9a6d3ab..9894d24c4 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -85,7 +85,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ else: data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] # to keep only the first stack data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if @@ -94,7 +94,9 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["start_ts"], + duration=data["duration"]) data['metadata'] = 
__group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id) diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py index 4e4f1c4e8..7666e66cf 100644 --- a/ee/api/chalicelib/core/resources.py +++ b/ee/api/chalicelib/core/resources.py @@ -1,16 +1,22 @@ from chalicelib.utils import helper from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC +from decouple import config -def get_by_session_id(session_id, project_id): +def get_by_session_id(session_id, project_id, start_ts, duration): with ch_client.ClickHouseClient() as ch: + delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000 ch_query = """\ SELECT datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status FROM resources - WHERE session_id = toUInt64(%(session_id)s) AND project_id=%(project_id)s;""" - params = {"session_id": session_id, "project_id": project_id} + WHERE session_id = toUInt64(%(session_id)s) + AND project_id=%(project_id)s + AND datetime >= toDateTime(%(res_start_ts)s / 1000) + AND datetime <= toDateTime(%(res_end_ts)s / 1000);""" + params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, + "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, } rows = ch.execute(query=ch_query, params=params) results = [] for r in rows: From d4c7fdcc5fa7f63114c24f6bab2e3599618507f9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 15:24:21 +0200 Subject: [PATCH 082/221] feat(api): get sessions details fix --- api/chalicelib/core/sessions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 9894d24c4..ae18ac888 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -95,7 
+95,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["start_ts"], + start_ts=data["startTs"], duration=data["duration"]) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) From 85c27ff0f5e7b536475e3ef1f5aecddbbb1a3e01 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 15:59:54 +0200 Subject: [PATCH 083/221] feat(api): fixed login --- api/chalicelib/core/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 8eb08bd35..794121064 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -583,7 +583,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): SET jwt_iat = timezone('utc'::text, now()) WHERE user_id = %(user_id)s RETURNING jwt_iat;""", - {"user_id": r["id"]}) + {"user_id": r["userId"]}) cur.execute(query) return { "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], From c856b2168d632adec7e7c314b0afadc5c7998448 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 16:07:56 +0200 Subject: [PATCH 084/221] feat(api): fixed custom metrics timestamp issue --- api/chalicelib/core/custom_metrics.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index c3bdb134d..f7de1a7c9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -52,6 +52,8 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): "stages": [], "totalDropDueToIssues": 0 } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return 
funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) @@ -68,6 +70,8 @@ def __get_errors_list(project_id, user_id, data): "total": 0, "errors": [] } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) @@ -78,10 +82,13 @@ def __is_sessions_list(data): def __get_sessions_list(project_id, user_id, data): if len(data.series) == 0: + print("empty series") return { "total": 0, "sessions": [] } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) From 2a12ed7337fcf11466266cefea1790b8b8c4c318 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 18:24:03 +0200 Subject: [PATCH 085/221] feat(api): optimised get issues for get session-details --- api/chalicelib/core/events.py | 6 +++--- api/chalicelib/core/issues.py | 8 +++++--- api/chalicelib/core/sessions.py | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 8f978c40a..dd9562de1 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -28,8 +28,8 @@ def __merge_cells(rows, start, count, replacement): return rows -def __get_grouped_clickrage(rows, session_id): - click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage") +def __get_grouped_clickrage(rows, session_id, project_id): + click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) if len(click_rage_issues) == 0: return rows @@ -63,7 +63,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False): ) rows = cur.fetchall() if group_clickrage: - rows = __get_grouped_clickrage(rows=rows, session_id=session_id) + 
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) cur.execute(cur.mogrify(""" SELECT diff --git a/api/chalicelib/core/issues.py b/api/chalicelib/core/issues.py index e1aa54712..e4ac11745 100644 --- a/api/chalicelib/core/issues.py +++ b/api/chalicelib/core/issues.py @@ -44,16 +44,18 @@ def get(project_id, issue_id): return helper.dict_to_camel_case(data) -def get_by_session_id(session_id, issue_type=None): +def get_by_session_id(session_id, project_id, issue_type=None): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ SELECT * FROM events_common.issues INNER JOIN public.issues USING (issue_id) - WHERE session_id = %(session_id)s {"AND type = %(type)s" if issue_type is not None else ""} + WHERE session_id = %(session_id)s + AND project_id= %(project_id)s + {"AND type = %(type)s" if issue_type is not None else ""} ORDER BY timestamp;""", - {"session_id": session_id, "type": issue_type}) + {"session_id": session_id, "project_id": project_id, "type": issue_type}) ) return helper.list_to_camel_case(cur.fetchall()) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index ae18ac888..b3cd81c1d 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -99,7 +99,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ duration=data["duration"]) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id) + data['issues'] = issues.get_by_session_id(session_id=session_id,project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) From 50dce0ee9fc1df1465a0f11a65bb163c29a24152 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:20:16 +0200 Subject: [PATCH 086/221] feat(api): custom metrics fixed templates response --- 
api/chalicelib/core/dashboards.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 25dbdada3..bdd0518e0 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -38,6 +38,9 @@ def get_templates(project_id, user_id): for w in r["widgets"]: w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) + for s in w["series"]: + s["filter"] = helper.old_search_payload_to_flat(s["filter"]) + return helper.list_to_camel_case(rows) From 09711d45210c9e49168c314eb4de72adb0485e26 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:26:00 +0200 Subject: [PATCH 087/221] feat(api): metrics table of errors --- api/chalicelib/core/custom_metrics.py | 4 +--- api/schemas.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index f7de1a7c9..2967b7fec 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -59,9 +59,7 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): def __is_errors_list(data): return data.metric_type == schemas.MetricType.table \ - and data.metric_of == schemas.TableMetricOfType.issues \ - and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ - and data.metric_format == schemas.MetricFormatType.errors_list + and data.metric_of == schemas.TableMetricOfType.errors def __get_errors_list(project_id, user_id, data): diff --git a/api/schemas.py b/api/schemas.py index 5c1a33927..715bf0f84 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -481,7 +481,6 @@ class IssueType(str, Enum): class MetricFormatType(str, Enum): session_count = 'sessionCount' - errors_list = 'errors' class __MixedSearchFilter(BaseModel): @@ -811,6 +810,7 @@ class TableMetricOfType(str, Enum): 
issues = FilterType.issue.value visited_url = EventType.location.value sessions = "SESSIONS" + errors = IssueType.js_exception.value class TimeseriesMetricOfType(str, Enum): From 13d71ce388403c04f5f6b5c0c38d129b61a2bd47 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:56:27 +0200 Subject: [PATCH 088/221] feat(api):metrics get sessions related to issue --- api/chalicelib/core/custom_metrics.py | 31 +++++++++++++++++++++++++++ api/routers/core.py | 2 +- api/routers/subs/metrics.py | 12 +++++++++++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 2967b7fec..b92934912 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -500,3 +500,34 @@ def change_state(project_id, metric_id, user_id, status): {"metric_id": metric_id, "status": status, "user_id": user_id}) ) return get(metric_id=metric_id, project_id=project_id, user_id=user_id) + + +def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, + data: schemas.CustomMetricSessionsPayloadSchema + # , range_value=None, start_date=None, end_date=None + ): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + issues = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) + issues = issues.get("significant", []) + issues.get("insignificant", []) + issue = None + for i in issues: + if i.get("issueId", "") == issue_id: + issue = i + break + results.append({"seriesId": s.series_id, "seriesName": s.name, + "sessions": 
sessions.search2_pg(user_id=user_id, project_id=project_id, + issue=issue, data=s.filter) + if issue is not None else {"total": 0, "sessions": []}, + "issue": issue}) + return results diff --git a/api/routers/core.py b/api/routers/core.py index c997229ba..3f3d91e80 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -772,7 +772,7 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. @app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) -def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, +def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) if issue is None: diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index e00d2d4f7..c68eec3e9 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -172,6 +172,18 @@ def get_custom_metric_funnel_issues(projectId: int, metric_id: int, return {"data": data} +@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) +def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id, + metric_id=metric_id, issue_id=issueId, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) def get_custom_metric_errors_list(projectId: int, metric_id: int, 
From d478436d9b09846ef14f96bc5aa1bb5764a553fc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 14:56:46 +0200 Subject: [PATCH 089/221] feat(api): metric-funnel changed response --- api/chalicelib/core/custom_metrics.py | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index b92934912..5a7fdcea6 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -174,16 +174,13 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page - results.append({"seriesId": s.series_id, "seriesName": s.name, - **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}) - - return results + return {"seriesId": s.series_id, "seriesName": s.name, + **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)} def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): @@ -193,16 +190,13 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSe metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page - results.append({"seriesId": s.series_id, "seriesName": s.name, - **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results + return {"seriesId": s.series_id, "seriesName": s.name, + **errors.search(data=s.filter, 
project_id=project_id, user_id=user_id)} def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): @@ -512,7 +506,6 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp @@ -525,9 +518,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, if i.get("issueId", "") == issue_id: issue = i break - results.append({"seriesId": s.series_id, "seriesName": s.name, - "sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, - issue=issue, data=s.filter) - if issue is not None else {"total": 0, "sessions": []}, - "issue": issue}) - return results + return {"seriesId": s.series_id, "seriesName": s.name, + "sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, + issue=issue, data=s.filter) + if issue is not None else {"total": 0, "sessions": []}, + "issue": issue} From b8eac83662b7f687973b9856d909fc4e52dd8f3e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 15:07:39 +0200 Subject: [PATCH 090/221] feat(api): requirements upgrade --- api/requirements.txt | 6 +++--- ee/api/requirements.txt | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index d615851d1..f08b6db46 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,14 +1,14 @@ requests==2.27.1 urllib3==1.26.9 -boto3==1.22.6 -pyjwt==2.3.0 +boto3==1.24.8 +pyjwt==2.4.0 psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -fastapi==0.75.2 +fastapi==0.78.0 uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index f14d6022d..e96ed6ae5 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt 
@@ -1,14 +1,14 @@ requests==2.27.1 urllib3==1.26.9 -boto3==1.22.6 -pyjwt==2.3.0 +boto3==1.24.8 +pyjwt==2.4.0 psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 clickhouse-driver==0.2.3 python3-saml==1.12.0 -fastapi==0.75.2 +fastapi==0.78.0 python-multipart==0.0.5 uvicorn[standard]==0.17.6 python-decouple==3.6 From 40836092fa2584eba1bc1db015df51752517d97d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 17:19:58 +0200 Subject: [PATCH 091/221] feat(assist): FOSS assist search --- utilities/server.js | 2 ++ utilities/servers/websocket.js | 53 +++++++++++++++++++++++----------- utilities/utils/helper.js | 30 +++++++++++++++++-- 3 files changed, 66 insertions(+), 19 deletions(-) diff --git a/utilities/server.js b/utilities/server.js index b0eadcccd..ad03aafab 100644 --- a/utilities/server.js +++ b/utilities/server.js @@ -7,6 +7,8 @@ const HOST = '0.0.0.0'; const PORT = 9001; const wsapp = express(); +wsapp.use(express.json()); +wsapp.use(express.urlencoded({extended: true})); wsapp.use(request_logger("[wsapp]")); wsapp.use(`/assist/${process.env.S3_KEY}`, socket.wsRouter); diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index d2399477e..799304f20 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,12 +28,26 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractUserIdFromRequest = function (req) { +const extractFiltersFromRequest = function (req) { + let filters = {}; if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; + 
filters.userID = [req.query.userId]; } - return undefined; + filters = {...filters, ...req.body}; + let _filters = {} + for (let k of Object.keys(filters)) { + if (filters[k] !== undefined && filters[k] !== null) { + _filters[k] = filters[k]; + if (!Array.isArray(_filters[k])) { + _filters[k] = [_filters[k]]; + } + for (let i = 0; i < _filters[k].length; i++) { + _filters[k][i] = String(_filters[k][i]); + } + } + } + return Object.keys(_filters).length > 0 ? _filters : undefined; } const extractProjectKeyFromRequest = function (req) { @@ -57,18 +71,18 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); - + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -80,21 +94,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); 
let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -106,10 +122,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -119,8 +136,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -133,11 +150,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); 
+wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -147,8 +165,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -161,6 +179,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 98322c417..070463e00 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -24,7 +24,33 @@ const request_logger = (identity) => { next(); } }; - +const isValidSession = function (sessionInfo, filters) { + let foundAll = true; + for (const [key, values] of Object.entries(filters)) { + let found = false; + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } + } + } + 
foundAll &&= found; + if (!found) { + break; + } + } + return foundAll; +} +const hasFilters = function (filters) { + return filters !== undefined && Object.keys(filters).length > 0; +} module.exports = { - extractPeerId, request_logger + extractPeerId, request_logger, isValidSession, hasFilters }; \ No newline at end of file From d4d029c5252435a15f487a11d2f0f5e7871ea64f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 18:01:34 +0200 Subject: [PATCH 092/221] feat(assist): assist upgrade uWebSockets feat(assist): assist upgrade SocketIo --- ee/utilities/package-lock.json | 97 ++++++++++++++++------------------ ee/utilities/package.json | 4 +- utilities/package-lock.json | 93 +++++++++++++------------------- utilities/package.json | 2 +- 4 files changed, 85 insertions(+), 111 deletions(-) diff --git a/ee/utilities/package-lock.json b/ee/utilities/package-lock.json index 98ef3f745..19699560a 100644 --- a/ee/utilities/package-lock.json +++ b/ee/utilities/package-lock.json @@ -13,9 +13,9 @@ "@socket.io/redis-adapter": "^7.1.0", "express": "^4.17.1", "redis": "^4.0.3", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2", - "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" + "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0" } }, "node_modules/@maxmind/geoip2-node": { @@ -83,14 +83,6 @@ "@node-redis/client": "^1.0.0" } }, - "node_modules/@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/@socket.io/redis-adapter": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz", @@ -121,9 +113,9 @@ "integrity": 
"sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "node_modules/@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -332,9 +324,9 @@ } }, "node_modules/engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -352,12 +344,9 @@ } }, "node_modules/engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "dependencies": { - "@socket.io/base64-arraybuffer": "~1.0.2" - }, + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==", "engines": { "node": ">=10.0.0" } @@ -667,7 +656,7 @@ "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "integrity": 
"sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } @@ -869,15 +858,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, "engines": { @@ -902,6 +891,11 @@ "node": ">=10.0.0" } }, + "node_modules/socket.io/node_modules/socket.io-adapter": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" + }, "node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -1092,11 +1086,6 @@ "integrity": "sha512-HGQ8YooJ8Mx7l28tD7XjtB3ImLEjlUxG1wC1PAjxu6hPJqjPshUZxAICzDqDjtIbhDTf48WXXUcx8TQJB1XTKA==", "requires": {} }, - "@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==" - }, "@socket.io/redis-adapter": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz", @@ 
-1124,9 +1113,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "accepts": { "version": "1.3.8", @@ -1281,9 +1270,9 @@ "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "requires": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -1298,12 +1287,9 @@ } }, "engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "requires": { - "@socket.io/base64-arraybuffer": "~1.0.2" - } + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==" }, "escape-html": { "version": "1.0.3", @@ -1546,7 +1532,7 @@ "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + 
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "on-finished": { "version": "2.3.0", @@ -1696,16 +1682,23 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "requires": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" + }, + "dependencies": { + "socket.io-adapter": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" + } } }, "socket.io-adapter": { @@ -1774,7 +1767,7 @@ }, "uWebSockets.js": { "version": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#a58e810e47a23696410f6073c8c905dc38f75da5", - "from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.6.0" + "from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.10.0" }, "vary": { "version": "1.1.2", diff --git a/ee/utilities/package.json b/ee/utilities/package.json index 99c2666da..bd35ec6a6 100644 --- a/ee/utilities/package.json +++ b/ee/utilities/package.json @@ -22,8 +22,8 @@ "@socket.io/redis-adapter": "^7.1.0", "express": "^4.17.1", "redis": "^4.0.3", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2", - "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" + 
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0" } } diff --git a/utilities/package-lock.json b/utilities/package-lock.json index d4ef1c007..e8d8d3129 100644 --- a/utilities/package-lock.json +++ b/utilities/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "express": "^4.17.1", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2" } }, @@ -26,14 +26,6 @@ "maxmind": "^4.2.0" } }, - "node_modules/@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", @@ -50,9 +42,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "node_modules/@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -232,9 +224,9 @@ } }, "node_modules/engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": 
"sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -252,12 +244,9 @@ } }, "node_modules/engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "dependencies": { - "@socket.io/base64-arraybuffer": "~1.0.2" - }, + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==", "engines": { "node": ">=10.0.0" } @@ -549,7 +538,7 @@ "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } @@ -706,15 +695,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, "engines": { @@ -722,9 +711,9 @@ } }, 
"node_modules/socket.io-adapter": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz", - "integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" }, "node_modules/socket.io-parser": { "version": "4.0.4", @@ -916,11 +905,6 @@ "maxmind": "^4.2.0" } }, - "@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==" - }, "@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", @@ -937,9 +921,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "accepts": { "version": "1.3.8", @@ -1074,9 +1058,9 @@ "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": 
"https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "requires": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -1106,12 +1090,9 @@ } }, "engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "requires": { - "@socket.io/base64-arraybuffer": "~1.0.2" - } + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==" }, "escape-html": { "version": "1.0.3", @@ -1314,7 +1295,7 @@ "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "on-finished": { "version": "2.3.0", @@ -1423,15 +1404,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "requires": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, 
"dependencies": { @@ -1451,9 +1432,9 @@ } }, "socket.io-adapter": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz", - "integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" }, "socket.io-parser": { "version": "4.0.4", diff --git a/utilities/package.json b/utilities/package.json index 73b92d0f2..cb6fb2b65 100644 --- a/utilities/package.json +++ b/utilities/package.json @@ -20,7 +20,7 @@ "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "express": "^4.17.1", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2" } } From ded2d980fe0a6505bba7b5b4ae98f9514da1dd36 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 18:01:52 +0200 Subject: [PATCH 093/221] feat(assist): assist refactored --- utilities/servers/websocket.js | 18 +++--------------- utilities/utils/helper.js | 17 ++++++++++++++++- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 799304f20..8ef276939 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession, objectToObjectOfArrays} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -34,20 +34,8 @@ const extractFiltersFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); 
filters.userID = [req.query.userId]; } - filters = {...filters, ...req.body}; - let _filters = {} - for (let k of Object.keys(filters)) { - if (filters[k] !== undefined && filters[k] !== null) { - _filters[k] = filters[k]; - if (!Array.isArray(_filters[k])) { - _filters[k] = [_filters[k]]; - } - for (let i = 0; i < _filters[k].length; i++) { - _filters[k][i] = String(_filters[k][i]); - } - } - } - return Object.keys(_filters).length > 0 ? _filters : undefined; + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? filters : undefined; } const extractProjectKeyFromRequest = function (req) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 070463e00..a874efa65 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -51,6 +51,21 @@ const isValidSession = function (sessionInfo, filters) { const hasFilters = function (filters) { return filters !== undefined && Object.keys(filters).length > 0; } +const objectToObjectOfArrays = function (obj) { + let _obj = {} + for (let k of Object.keys(obj)) { + if (obj[k] !== undefined && obj[k] !== null) { + _obj[k] = obj[k]; + if (!Array.isArray(_obj[k])) { + _obj[k] = [_obj[k]]; + } + for (let i = 0; i < _obj[k].length; i++) { + _obj[k][i] = String(_obj[k][i]); + } + } + } + return _obj; +} module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters + extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays }; \ No newline at end of file From 1462f909258dd5bc19b8cb699bc0cc452c702bec Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 19:37:04 +0200 Subject: [PATCH 094/221] feat(assist): EE assist search --- ee/utilities/.gitignore | 5 ++- ee/utilities/server.js | 7 ++- ee/utilities/servers/websocket-cluster.js | 46 +++++++++----------- ee/utilities/servers/websocket.js | 47 +++++++++----------- ee/utilities/utils/helper-ee.js | 53 +++++++++++++++++++++++ 5 files changed, 102 
insertions(+), 56 deletions(-) create mode 100644 ee/utilities/utils/helper-ee.js diff --git a/ee/utilities/.gitignore b/ee/utilities/.gitignore index 0aaf625c9..f54e439ba 100644 --- a/ee/utilities/.gitignore +++ b/ee/utilities/.gitignore @@ -10,6 +10,7 @@ build.sh servers/peerjs-server.js servers/sourcemaps-handler.js servers/sourcemaps-server.js -#servers/websocket.js -/utils /Dockerfile +/utils/geoIP.js +/utils/HeapSnapshot.js +/utils/helper.js diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 429b37c25..fc319d79c 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -16,8 +16,9 @@ const PREFIX = process.env.prefix || `/assist` if (process.env.uws !== "true") { let wsapp = express(); + wsapp.use(express.json()); + wsapp.use(express.urlencoded({extended: true})); wsapp.use(request_logger("[wsapp]")); - wsapp.use(request_logger("[app]")); wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => { res.statusCode = 200; res.end("ok!"); @@ -73,10 +74,14 @@ if (process.env.uws !== "true") { } } uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js 
b/ee/utilities/servers/websocket-cluster.js index 0b8a56699..4b3cb0a42 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,7 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractFiltersFromRequest} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); @@ -59,19 +60,6 @@ const uniqueSessions = function (data) { return resArr; } -const extractUserIdFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getQuery("userId")) { - debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - return req.getQuery("userId"); - } - } else if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; - } - return undefined; -} - const extractProjectKeyFromRequest = function (req) { if (process.env.uws === "true") { if (req.getParameter(0)) { @@ -103,7 +91,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); @@ -111,10 +99,11 @@ const socketsList = async function (req, res) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && 
item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -126,21 +115,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -152,10 +143,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let 
liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -165,8 +157,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -180,11 +172,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -194,8 +187,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -209,6 +202,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, 
socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 51fa4cc41..63f38b94e 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,7 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractFiltersFromRequest} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -42,19 +43,6 @@ const createSocketIOServer = function (server, prefix) { } } -const extractUserIdFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getQuery("userId")) { - debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - return req.getQuery("userId"); - } - } else if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; - } - return undefined; -} - const extractProjectKeyFromRequest = function (req) { if (process.env.uws === "true") { if (req.getParameter(0)) { @@ -86,18 +74,18 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); - + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of 
connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -109,21 +97,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -135,10 +125,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = 
extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -148,8 +139,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -162,11 +153,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -176,8 +168,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -190,6 +182,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } 
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js new file mode 100644 index 000000000..522158a01 --- /dev/null +++ b/ee/utilities/utils/helper-ee.js @@ -0,0 +1,53 @@ +const {objectToObjectOfArrays} = require('./helper'); +const getBodyFromUWSResponse = async function (res) { + return new Promise(((resolve, reject) => { + let buffer; + res.onData((ab, isLast) => { + let chunk = Buffer.from(ab); + if (buffer) { + buffer = Buffer.concat([buffer, chunk]); + } else { + buffer = Buffer.concat([chunk]); + } + if (isLast) { + let json; + try { + json = JSON.parse(buffer); + } catch (e) { + console.error(e); + /* res.close calls onAborted */ + try { + res.close(); + } catch (e2) { + console.error(e2); + } + json = {}; + } + resolve(json); + } + }); + })); +} +const extractFiltersFromRequest = async function (req, res) { + let filters = {}; + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + filters.userID = [req.getQuery("userId")]; + } + + let body = await getBodyFromUWSResponse(res); + filters = {...filters, ...body}; + } else { + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + filters.userID = [req.query.userId]; + } + filters = {...filters, ...req.body}; + } + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? 
filters : undefined; +} +module.exports = { + extractFiltersFromRequest +}; \ No newline at end of file From 971dbd40a4c2a7364a33e45fc478aada6b99375c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 19:42:16 +0200 Subject: [PATCH 095/221] feat(assist): assist refactored --- ee/utilities/clean.sh | 8 ++++++++ ee/utilities/prepare-dev.sh | 2 ++ ee/utilities/utils/helper-ee.js | 10 +++------- utilities/servers/websocket.js | 12 +----------- utilities/utils/helper.js | 11 ++++++++++- 5 files changed, 24 insertions(+), 19 deletions(-) create mode 100755 ee/utilities/clean.sh create mode 100755 ee/utilities/prepare-dev.sh diff --git a/ee/utilities/clean.sh b/ee/utilities/clean.sh new file mode 100755 index 000000000..3e8ec080b --- /dev/null +++ b/ee/utilities/clean.sh @@ -0,0 +1,8 @@ +rm -rf ./utils/geoIP.js +rm -rf ./utils/HeapSnapshot.js +rm -rf ./utils/helper.js + +rm -rf servers/peerjs-server.js +rm -rf servers/sourcemaps-handler.js +rm -rf servers/sourcemaps-server.js +rm -rf build.sh \ No newline at end of file diff --git a/ee/utilities/prepare-dev.sh b/ee/utilities/prepare-dev.sh new file mode 100755 index 000000000..2daecbfc1 --- /dev/null +++ b/ee/utilities/prepare-dev.sh @@ -0,0 +1,2 @@ +#!/bin/bash +rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./ \ No newline at end of file diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 522158a01..7853d67ad 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -1,4 +1,4 @@ -const {objectToObjectOfArrays} = require('./helper'); +const helper = require('./helper'); const getBodyFromUWSResponse = async function (res) { return new Promise(((resolve, reject) => { let buffer; @@ -39,13 +39,9 @@ const extractFiltersFromRequest = async function (req, res) { let body = await getBodyFromUWSResponse(res); filters = {...filters, ...body}; } else { - if (req.query.userId) { - debug && 
console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; - } - filters = {...filters, ...req.body}; + return helper.extractFiltersFromRequest(req); } - filters = objectToObjectOfArrays({...filters, ...req.body}); + filters = helper.objectToObjectOfArrays({...filters, ...req.body}); return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 8ef276939..5658bbd57 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, objectToObjectOfArrays} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession, extractFiltersFromRequest} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,16 +28,6 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractFiltersFromRequest = function (req) { - let filters = {}; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; - } - filters = objectToObjectOfArrays({...filters, ...req.body}); - return Object.keys(filters).length > 0 ? 
filters : undefined; -} - const extractProjectKeyFromRequest = function (req) { if (req.params.projectKey) { debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index a874efa65..54fbfd8ef 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -66,6 +66,15 @@ const objectToObjectOfArrays = function (obj) { } return _obj; } +const extractFiltersFromRequest = function (req) { + let filters = {}; + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + filters.userID = [req.query.userId]; + } + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? filters : undefined; +} module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays + extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays, extractFiltersFromRequest }; \ No newline at end of file From a2ec909acebd5c3804c9a042ba193129d4cb2c61 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 20:09:36 +0200 Subject: [PATCH 096/221] feat(api): search live sessions --- api/chalicelib/core/assist.py | 35 ++++++++++++++++++++++++++--------- api/routers/core.py | 13 ++++++++++--- api/schemas.py | 16 ++++++++++++++++ 3 files changed, 52 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index b2926fd0c..e656c0728 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,6 +1,7 @@ import requests from decouple import config +import schemas from chalicelib.core import projects SESSION_PROJECTION_COLS = """s.project_id, @@ -19,14 +20,29 @@ SESSION_PROJECTION_COLS = """s.project_id, """ -def get_live_sessions_ws(project_id, user_id=None): +def get_live_sessions_ws_user_id(project_id, user_id): + data = { + "filter": {"userId": user_id} + } + return 
__get_live_sessions_ws(project_id=project_id, data=data) + + +def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema): + data = { + "filter": {}, + "pagination": {"limit": body.limit, "page": body.page}, + "sort": {"key": body.sort, "order": body.order} + } + for f in body.filters: + data["filter"][f.type] = f.value + return __get_live_sessions_ws(project_id=project_id, data=data) + + +def __get_live_sessions_ws(project_id, data): project_key = projects.get_project_key(project_id) - params = {} - if user_id and len(user_id) > 0: - params["userId"] = user_id try: - connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params, - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.post(config("assist") % config("S3_KEY") + f"/{project_key}", json=data, + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! issue with the peer-server") print(connected_peers.text) @@ -53,7 +69,7 @@ def get_live_sessions_ws(project_id, user_id=None): def get_live_session_by_id(project_id, session_id): - all_live = get_live_sessions_ws(project_id) + all_live = __get_live_sessions_ws(project_id, data={"filter": {"sessionId": session_id}}) for l in all_live: if str(l.get("sessionID")) == str(session_id): return l @@ -64,8 +80,9 @@ def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) try: - connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}", - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.post(config("assistList") % config("S3_KEY") + f"/{project_key}", + json={"filter": {"sessionId": session_id}}, + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) diff --git a/api/routers/core.py b/api/routers/core.py index 3f3d91e80..7ad57334e 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1,4 +1,4 @@ -from typing import Union +from typing import Union, Optional from decouple import config from fastapi import Depends, Body, BackgroundTasks, HTTPException @@ -773,7 +773,7 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. @app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) if issue is None: return {"errors": ["issue not found"]} @@ -859,7 +859,14 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/assist/sessions', tags=["assist"]) def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId, user_id=userId) + data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId) + return {'data': data} + + +@app.post('/{projectId}/assist/sessions', tags=["assist"]) +def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions_ws(projectId, body=data) return {'data': data} diff --git a/api/schemas.py b/api/schemas.py index 715bf0f84..3fb9a6805 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1008,3 +1008,19 @@ class CustomMetricAndTemplate(BaseModel): class Config: alias_generator = attribute_to_camel_case + + +class LiveSessionsSearchPayloadSchema(_PaginatedSchema): + filters: List[SessionSearchFilterSchema] = Field([]) + sort: str = 
Field(default="startTs") + order: SortOrderType = Field(default=SortOrderType.desc) + group_by_user: bool = Field(default=False) + + @root_validator(pre=True) + def transform_order(cls, values): + if values.get("order") is not None: + values["order"] = values["order"].upper() + return values + + class Config: + alias_generator = attribute_to_camel_case \ No newline at end of file From b85f2abfd5784e9bf89d19594f7cded8955413fc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 20:12:03 +0200 Subject: [PATCH 097/221] feat(assist): assist changed search payload --- ee/utilities/utils/helper-ee.js | 4 ++-- utilities/utils/helper.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 7853d67ad..18fca5fe4 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -37,11 +37,11 @@ const extractFiltersFromRequest = async function (req, res) { } let body = await getBodyFromUWSResponse(res); - filters = {...filters, ...body}; + filters = {...filters, ...(body.filter || {})}; } else { return helper.extractFiltersFromRequest(req); } - filters = helper.objectToObjectOfArrays({...filters, ...req.body}); + filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 54fbfd8ef..531cf9f64 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -72,7 +72,7 @@ const extractFiltersFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.userID = [req.query.userId]; } - filters = objectToObjectOfArrays({...filters, ...req.body}); + filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { From d6070d18297caff89b33d13b66bdd029e98f5a74 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 15:05:41 +0200 Subject: [PATCH 098/221] feat(api): optimized live session check feat(assist): optimized live session check feat(assist): sort feat(assist): pagination --- api/chalicelib/core/assist.py | 5 +-- ee/utilities/server.js | 2 + ee/utilities/servers/websocket-cluster.js | 36 +++++++-------- ee/utilities/servers/websocket.js | 38 +++++++--------- ee/utilities/utils/helper-ee.js | 24 ++++++++++ utilities/servers/websocket.js | 36 ++++++++------- utilities/utils/helper.js | 55 ++++++++++++++++++++--- 7 files changed, 127 insertions(+), 69 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index e656c0728..f647e95f1 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -80,9 +80,8 @@ def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) try: - connected_peers = requests.post(config("assistList") % config("S3_KEY") + f"/{project_key}", - json={"filter": {"sessionId": session_id}}, - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index fc319d79c..327a664a0 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -77,11 +77,13 @@ if (process.env.uws !== "true") { uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 4b3cb0a42..57ba2ab6c 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,8 +1,12 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); -const {extractFiltersFromRequest} = require('../utils/helper-ee'); +const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + 
extractFiltersFromRequest +} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); @@ -60,20 +64,6 @@ const uniqueSessions = function (data) { return resArr; } -const extractProjectKeyFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getParameter(0)) { - debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); - return req.getParameter(0); - } - } else if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - - const getAvailableRooms = async function () { return io.of('/').adapter.allRooms(); } @@ -120,12 +110,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -144,6 +135,7 @@ const socketsListByProject = async function (req, res) { } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -169,7 +161,7 @@ const socketsLive = async function 
(req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -177,12 +169,13 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -199,10 +192,11 @@ const socketsLiveByProject = async function (req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []); } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 63f38b94e..8c34bd91a 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,8 +1,12 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = 
require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); -const {extractFiltersFromRequest} = require('../utils/helper-ee'); +const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + extractFiltersFromRequest +} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -43,20 +47,6 @@ const createSocketIOServer = function (server, prefix) { } } -const extractProjectKeyFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getParameter(0)) { - debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); - return req.getParameter(0); - } - } else if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - - const getAvailableRooms = async function () { return io.sockets.adapter.rooms.keys(); } @@ -102,12 +92,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -122,10 +113,11 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + 
respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -150,7 +142,7 @@ const socketsLive = async function (req, res) { } } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -158,12 +150,13 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -179,10 +172,11 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await 
io.in(peerId).fetchSockets(); diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 18fca5fe4..2ea57a421 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -28,6 +28,28 @@ const getBodyFromUWSResponse = async function (res) { }); })); } +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else { + return helper.extractProjectKeyFromRequest(req); + } + return undefined; +} +const extractSessionIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(1)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(1)}`); + return req.getParameter(1); + } + } else { + return helper.extractSessionIdFromRequest(req); + } + return undefined; +} const extractFiltersFromRequest = async function (req, res) { let filters = {}; if (process.env.uws === "true") { @@ -45,5 +67,7 @@ const extractFiltersFromRequest = async function (req, res) { return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, extractFiltersFromRequest }; \ No newline at end of file diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 5658bbd57..27e8fba4a 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,15 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, extractFiltersFromRequest} = require('../utils/helper'); +const { + extractPeerId, + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + hasFilters, + isValidSession, + extractPayloadFromRequest, + sortPaginate +} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,14 +36,6 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractProjectKeyFromRequest = function (req) { - if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - const getAvailableRooms = async function () { return io.sockets.adapter.rooms.keys(); @@ -49,7 +49,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -60,7 +60,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && 
isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -77,12 +77,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let filters = extractFiltersFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -100,11 +101,12 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -125,7 +127,7 @@ const socketsLive = async function (req, res) { } } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -133,7 +135,7 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let 
_projectKey = extractProjectKeyFromRequest(req); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -154,7 +156,7 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 531cf9f64..f47a7f540 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -24,6 +24,20 @@ const request_logger = (identity) => { next(); } }; +const extractProjectKeyFromRequest = function (req) { + if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} +const extractSessionIdFromRequest = function (req) { + if (req.params.sessionId) { + debug && console.log(`[WS]where sessionId=${req.params.sessionId}`); + return req.params.sessionId; + } + return undefined; +} const isValidSession = function (sessionInfo, filters) { let foundAll = true; for (const [key, values] of Object.entries(filters)) { @@ -49,7 +63,7 @@ const isValidSession = function (sessionInfo, filters) { return foundAll; } const hasFilters = function (filters) { - return filters !== undefined && Object.keys(filters).length > 0; + return filters && filters.filter && Object.keys(filters.filter).length > 0; } const objectToObjectOfArrays = function (obj) { let _obj = {} @@ -66,15 +80,44 @@ const objectToObjectOfArrays = function (obj) { } return _obj; } -const extractFiltersFromRequest = function (req) { - let filters = {}; +const extractPayloadFromRequest = function (req) { + let filters = { + "filter": {}, + "sort": {"key": undefined, "order": 
false}, + "pagination": {"limit": undefined, "page": undefined} + }; if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; + filters.filter.userID = [req.query.userId]; } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); - return Object.keys(filters).length > 0 ? filters : undefined; + return filters; +} +const sortPaginate = function (list, filters) { + list.sort((a, b) => { + let aV = (a[filters.sort.key] || a["timestamp"]); + let bV = (b[filters.sort.key] || b["timestamp"]); + return aV > bV ? 1 : aV < bV ? -1 : 0; + }) + + if (filters.sort.order) { + list.reverse(); + } + + if (filters.pagination.page && filters.pagination.limit) { + return list.slice((filters.pagination.page - 1) * filters.pagination.limit, + filters.pagination.page * filters.pagination.limit); + } + return list; } module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays, extractFiltersFromRequest + extractPeerId, + request_logger, + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + isValidSession, + hasFilters, + objectToObjectOfArrays, + extractPayloadFromRequest, + sortPaginate }; \ No newline at end of file From 8510949d29e96d76883d5b7ab09f6f16006d16d3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 16:03:37 +0200 Subject: [PATCH 099/221] feat(assist): sessions search handle nested objects --- utilities/utils/helper.js | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index f47a7f540..10c03c830 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -43,15 +43,22 @@ const isValidSession = function (sessionInfo, filters) { for (const [key, values] of Object.entries(filters)) { let found = false; for (const [skey, svalue] of Object.entries(sessionInfo)) { - if (skey.toLowerCase() === 
key.toLowerCase()) { - for (let v of values) { - if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (svalue !== undefined && svalue !== null) { + if (svalue.constructor === Object) { + if (isValidSession(svalue, {key: values})) { found = true; break; } - } - if (found) { - break; + } else if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } } } } @@ -82,10 +89,19 @@ const objectToObjectOfArrays = function (obj) { } const extractPayloadFromRequest = function (req) { let filters = { + "query": {}, "filter": {}, "sort": {"key": undefined, "order": false}, "pagination": {"limit": undefined, "page": undefined} }; + if (req.query.q) { + debug && console.log(`[WS]where q=${req.query.q}`); + filters.query.value = [req.query.q]; + } + if (req.query.key) { + debug && console.log(`[WS]where key=${req.query.key}`); + filters.query.key = [req.query.key]; + } if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; From 38be0856225ce223765af0b507fc39bcf1417db8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 17:15:02 +0200 Subject: [PATCH 100/221] feat(assist): autocomplete --- ee/utilities/server.js | 1 + ee/utilities/servers/websocket-cluster.js | 33 +++++++++++++++++---- ee/utilities/servers/websocket.js | 36 +++++++++++++++++++---- ee/utilities/utils/helper-ee.js | 4 +-- utilities/servers/websocket.js | 23 +++++++++++++++ utilities/utils/helper.js | 18 ++++++++++++ 6 files changed, 102 insertions(+), 13 deletions(-) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 327a664a0..480a2b27e 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -84,6 +84,7 @@ if (process.env.uws !== "true") { uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, 
uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 57ba2ab6c..6dd69a4bc 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -5,7 +5,7 @@ const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../ut const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); @@ -81,7 +81,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); @@ -111,7 +111,7 @@ const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -139,7 +139,7 @@ wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && 
console.log("[WS]looking for all available LIVE sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -170,7 +170,7 @@ const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -198,6 +198,29 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { diff --git 
a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 8c34bd91a..dac389fa8 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -5,7 +5,7 @@ const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../ut const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -64,7 +64,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -93,7 +93,7 @@ const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -121,7 +121,7 @@ wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -151,7 +151,7 @@ const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = 
extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -178,6 +178,29 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -361,6 +384,7 @@ module.exports = { socketsList, socketsListByProject, socketsLive, - socketsLiveByProject + socketsLiveByProject, + autocomplete } }; \ No newline at end of file diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 2ea57a421..b29fbffef 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -50,7 +50,7 @@ const extractSessionIdFromRequest = function (req) { } return undefined; } -const extractFiltersFromRequest = 
async function (req, res) { +const extractPayloadFromRequest = async function (req, res) { let filters = {}; if (process.env.uws === "true") { if (req.getQuery("userId")) { @@ -69,5 +69,5 @@ const extractFiltersFromRequest = async function (req, res) { module.exports = { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest }; \ No newline at end of file diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 27e8fba4a..3587d9ad8 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -161,6 +161,29 @@ const socketsLiveByProject = async function (req, res) { wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 10c03c830..ff7d560f7 100644 --- a/utilities/utils/helper.js +++ 
b/utilities/utils/helper.js @@ -69,6 +69,23 @@ const isValidSession = function (sessionInfo, filters) { } return foundAll; } +const getValidAttributes = function (sessionInfo, query) { + let matches = []; + let deduplicate = []; + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (svalue !== undefined && svalue !== null) { + if (svalue.constructor === Object) { + matches = [...matches, ...getValidAttributes(svalue, query)] + } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) + && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 + && deduplicate.indexOf(skey + '_' + svalue) < 0) { + matches.push({"type": skey, "value": svalue}); + deduplicate.push(skey + '_' + svalue); + } + } + } + return matches; +} const hasFilters = function (filters) { return filters && filters.filter && Object.keys(filters.filter).length > 0; } @@ -129,6 +146,7 @@ const sortPaginate = function (list, filters) { module.exports = { extractPeerId, request_logger, + getValidAttributes, extractProjectKeyFromRequest, extractSessionIdFromRequest, isValidSession, From c53ecbef000d3eda02d2df44ee1eab4fdf8cc55a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 17:22:43 +0200 Subject: [PATCH 101/221] feat(api): assist autocomplete --- api/chalicelib/core/assist.py | 28 ++++++++++++++++++++++++++++ api/routers/core.py | 6 ++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index f647e95f1..1804da669 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -102,6 +102,34 @@ def is_live(project_id, session_id, project_key=None): return str(session_id) in connected_peers +def autocomplete(project_id, q: str, key: str = None): + project_key = projects.get_project_key(project_id) + params = {"q": q} + if key: + params["key"] = key + try: + results = requests.get(config("assistList") % config("S3_KEY") + 
f"/{project_key}/autocomplete", + params=params, timeout=config("assistTimeout", cast=int, default=5)) + if results.status_code != 200: + print("!! issue with the peer-server") + print(results.text) + return {"errors": [f"Something went wrong wile calling assist:{results.text}"]} + results = results.json().get("data", []) + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + return {"errors": ["Assist request timeout"]} + except Exception as e: + print("issue getting Assist response") + print(str(e)) + print("expected JSON, received:") + try: + print(results.text) + except: + print("couldn't get response") + return {"errors": ["Something went wrong wile calling assist"]} + return results + + def get_ice_servers(): return config("iceServers") if config("iceServers", default=None) is not None \ and len(config("iceServers")) > 0 else None diff --git a/api/routers/core.py b/api/routers/core.py index 7ad57334e..2ac949057 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -136,10 +136,12 @@ def events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType, schemas.PerformanceEventType, schemas.FetchFilterType, schemas.GraphqlFilterType] = None, - key: str = None, - source: str = None, context: schemas.CurrentContext = Depends(OR_context)): + key: str = None, source: str = None, live: bool = False, + context: schemas.CurrentContext = Depends(OR_context)): if len(q) == 0: return {"data": []} + if live: + return assist.autocomplete(project_id=projectId, q=q, key=key) if type in [schemas.FetchFilterType._url]: type = schemas.EventType.request elif type in [schemas.GraphqlFilterType._name]: From cf80c46cd96c4afb08f92c2a8192d4a77acbc493 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 18:45:31 +0200 Subject: [PATCH 102/221] feat(assist): payload extraction debug --- ee/utilities/utils/helper-ee.js | 2 ++ utilities/utils/helper.js | 2 ++ 2 files changed, 4 insertions(+) diff --git 
a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index b29fbffef..273212954 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -64,6 +64,8 @@ const extractPayloadFromRequest = async function (req, res) { return helper.extractFiltersFromRequest(req); } filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); + debug && console.log("payload/filters:") + debug && console.log(filters) return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index ff7d560f7..ae26b228b 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -124,6 +124,8 @@ const extractPayloadFromRequest = function (req) { filters.filter.userID = [req.query.userId]; } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); + debug && console.log("payload/filters:") + debug && console.log(filters) return filters; } const sortPaginate = function (list, filters) { From dd2c51e3b638e3e9aad73a682ff3f111b3c1e94c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 19:04:43 +0200 Subject: [PATCH 103/221] feat(assist): changed debug --- ee/utilities/utils/helper-ee.js | 2 +- utilities/utils/helper.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 273212954..6ae4039bb 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -65,7 +65,7 @@ const extractPayloadFromRequest = async function (req, res) { } filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); debug && console.log("payload/filters:") - debug && console.log(filters) + debug && console.log(JSON.stringify(filters)) return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index ae26b228b..2e4a327a2 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -125,7 +125,7 @@ const extractPayloadFromRequest = function (req) { } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); debug && console.log("payload/filters:") - debug && console.log(filters) + debug && console.log(JSON.stringify(filters)) return filters; } const sortPaginate = function (list, filters) { From 31a53edd5ad4add997f26a0af5466cd9c0b3a94c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 19:25:50 +0200 Subject: [PATCH 104/221] feat(api): changed assist search payload --- api/schemas.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/api/schemas.py b/api/schemas.py index 3fb9a6805..77a5db26d 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1010,11 +1010,34 @@ class CustomMetricAndTemplate(BaseModel): alias_generator = attribute_to_camel_case +class LiveFilterType(str, Enum): + user_os = FilterType.user_os.value + user_browser = FilterType.user_browser.value + user_device = FilterType.user_device.value + user_country = FilterType.user_country.value + user_id = FilterType.user_id.value + user_anonymous_id = FilterType.user_anonymous_id.value + rev_id = FilterType.rev_id.value + page_title = "pageTitle" + # + # platform = "PLATFORM" + # metadata = "METADATA" + # issue = "ISSUE" + # events_count = "EVENTS_COUNT" + # utm_source = "UTM_SOURCE" + # utm_medium = "UTM_MEDIUM" + # utm_campaign = "UTM_CAMPAIGN" + + +class LiveSessionSearchFilterSchema(BaseModel): + value: Union[List[str], str] = Field(...) + type: LiveFilterType = Field(...) 
+ + class LiveSessionsSearchPayloadSchema(_PaginatedSchema): - filters: List[SessionSearchFilterSchema] = Field([]) - sort: str = Field(default="startTs") + filters: List[LiveSessionSearchFilterSchema] = Field([]) + sort: str = Field(default="timestamp") order: SortOrderType = Field(default=SortOrderType.desc) - group_by_user: bool = Field(default=False) @root_validator(pre=True) def transform_order(cls, values): @@ -1023,4 +1046,4 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema): return values class Config: - alias_generator = attribute_to_camel_case \ No newline at end of file + alias_generator = attribute_to_camel_case From 2dbdfade108939dac56c606fdf182ecf0822b599 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 20:24:32 +0200 Subject: [PATCH 105/221] feat(assist): fixed multiple values filter support for search --- utilities/utils/helper.js | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 2e4a327a2..6e4e1cb5b 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -91,14 +91,16 @@ const hasFilters = function (filters) { } const objectToObjectOfArrays = function (obj) { let _obj = {} - for (let k of Object.keys(obj)) { - if (obj[k] !== undefined && obj[k] !== null) { - _obj[k] = obj[k]; - if (!Array.isArray(_obj[k])) { - _obj[k] = [_obj[k]]; - } - for (let i = 0; i < _obj[k].length; i++) { - _obj[k][i] = String(_obj[k][i]); + if (obj) { + for (let k of Object.keys(obj)) { + if (obj[k] !== undefined && obj[k] !== null) { + _obj[k] = obj[k]; + if (!Array.isArray(_obj[k])) { + _obj[k] = [_obj[k]]; + } + for (let i = 0; i < _obj[k].length; i++) { + _obj[k][i] = String(_obj[k][i]); + } } } } @@ -123,7 +125,8 @@ const extractPayloadFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; } - filters = objectToObjectOfArrays({...filters, 
...(req.body.filter || {})}); + filters.filters = objectToObjectOfArrays(filters.filter); + filters = {...filters, ...(req.body.filter || {})}; debug && console.log("payload/filters:") debug && console.log(JSON.stringify(filters)) return filters; From c6b719b9fa89016e3bd2c9c08878d925de5e0437 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 21:56:59 +0200 Subject: [PATCH 106/221] feat(assist): full search feat(api): live sessions full search --- api/chalicelib/core/assist.py | 5 +++- api/schemas.py | 27 ++++++++++------- ee/utilities/servers/websocket-cluster.js | 8 ++--- ee/utilities/servers/websocket.js | 8 ++--- ee/utilities/utils/helper-ee.js | 34 +++++++++++++++++---- utilities/servers/websocket.js | 6 ++-- utilities/utils/helper.js | 36 +++++++++++++++-------- 7 files changed, 84 insertions(+), 40 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 1804da669..5ff067229 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -34,7 +34,10 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche "sort": {"key": body.sort, "order": body.order} } for f in body.filters: - data["filter"][f.type] = f.value + if f.type == schemas.LiveFilterType.metadata: + data["filter"][f.source] = f.value + else: + data["filter"][f.type.value] = f.value return __get_live_sessions_ws(project_id=project_id, data=data) diff --git a/api/schemas.py b/api/schemas.py index 77a5db26d..45a7bc3d8 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1018,25 +1018,32 @@ class LiveFilterType(str, Enum): user_id = FilterType.user_id.value user_anonymous_id = FilterType.user_anonymous_id.value rev_id = FilterType.rev_id.value - page_title = "pageTitle" - # - # platform = "PLATFORM" - # metadata = "METADATA" - # issue = "ISSUE" - # events_count = "EVENTS_COUNT" - # utm_source = "UTM_SOURCE" - # utm_medium = "UTM_MEDIUM" - # utm_campaign = "UTM_CAMPAIGN" + page_title = "PAGETITLE" + 
session_id = "SESSIONID" + metadata = "METADATA" + user_UUID = "USERUUID" + tracker_version = "TRACKERVERSION" + user_browser_version = "USERBROWSERVERSION" + user_device_type = "USERDEVICETYPE", + timestamp = "TIMESTAMP" class LiveSessionSearchFilterSchema(BaseModel): value: Union[List[str], str] = Field(...) type: LiveFilterType = Field(...) + source: Optional[str] = Field(None) + + @root_validator + def validator(cls, values): + if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value: + assert values.get("source") is not None, "source should not be null for METADATA type" + assert len(values.get("source")) > 0, "source should not be empty for METADATA type" + return values class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: str = Field(default="timestamp") + sort: LiveFilterType = Field(default=LiveFilterType.timestamp) order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 6dd69a4bc..f414939fe 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -93,7 +93,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -122,7 +122,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && 
isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -150,7 +150,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -181,7 +181,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index dac389fa8..686b62293 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -75,7 +75,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -104,7 +104,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of 
connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -132,7 +132,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -162,7 +162,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 6ae4039bb..41fe456cb 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -1,4 +1,5 @@ const helper = require('./helper'); +let debug = process.env.debug === "1" || false; const getBodyFromUWSResponse = async function (res) { return new Promise(((resolve, reject) => { let buffer; @@ -51,21 +52,42 @@ const extractSessionIdFromRequest = function (req) { return undefined; } const extractPayloadFromRequest = async function (req, res) { - let filters = {}; + let filters = { + "query": {}, + "filter": {} + }; if (process.env.uws === "true") { + if (req.getQuery("q")) 
{ + debug && console.log(`[WS]where q=${req.getQuery("q")}`); + filters.query.value = [req.getQuery("q")]; + } + if (req.getQuery("key")) { + debug && console.log(`[WS]where key=${req.getQuery("key")}`); + filters.query.key = [req.getQuery("key")]; + } if (req.getQuery("userId")) { debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); filters.userID = [req.getQuery("userId")]; } let body = await getBodyFromUWSResponse(res); - filters = {...filters, ...(body.filter || {})}; + filters = { + ...filters, + "sort": { + "key": body.sort && body.sort.key ? body.sort.key : undefined, + "order": body.sort && body.sort.order === "DESC" + }, + "pagination": { + "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, + "page": body.pagination && body.pagination.page ? body.pagination.page : undefined + } + } + filters.filter = {...filters.filter, ...(body.filter || {})}; } else { - return helper.extractFiltersFromRequest(req); + return helper.extractPayloadFromRequest(req); } - filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); - debug && console.log("payload/filters:") - debug && console.log(JSON.stringify(filters)) + filters.filter = helper.objectToObjectOfArrays(filters.filter); + debug && console.log("payload/filters:" + JSON.stringify(filters)) return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 3587d9ad8..59d221042 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -89,7 +89,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -117,7 +117,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -146,7 +146,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 6e4e1cb5b..de002f89e 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -44,8 +44,8 @@ const isValidSession = function (sessionInfo, filters) { let found = false; for (const [skey, svalue] of 
Object.entries(sessionInfo)) { if (svalue !== undefined && svalue !== null) { - if (svalue.constructor === Object) { - if (isValidSession(svalue, {key: values})) { + if (typeof (svalue) === "object") { + if (isValidSession(svalue, {[key]: values})) { found = true; break; } @@ -74,7 +74,7 @@ const getValidAttributes = function (sessionInfo, query) { let deduplicate = []; for (const [skey, svalue] of Object.entries(sessionInfo)) { if (svalue !== undefined && svalue !== null) { - if (svalue.constructor === Object) { + if (typeof (svalue) === "object") { matches = [...matches, ...getValidAttributes(svalue, query)] } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 @@ -110,8 +110,14 @@ const extractPayloadFromRequest = function (req) { let filters = { "query": {}, "filter": {}, - "sort": {"key": undefined, "order": false}, - "pagination": {"limit": undefined, "page": undefined} + "sort": { + "key": req.body.sort && req.body.sort.key ? req.body.sort.key : undefined, + "order": req.body.sort && req.body.sort.order === "DESC" + }, + "pagination": { + "limit": req.body.pagination && req.body.pagination.limit ? req.body.pagination.limit : undefined, + "page": req.body.pagination && req.body.pagination.page ? 
req.body.pagination.page : undefined + } }; if (req.query.q) { debug && console.log(`[WS]where q=${req.query.q}`); @@ -125,17 +131,23 @@ const extractPayloadFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; } - filters.filters = objectToObjectOfArrays(filters.filter); - filters = {...filters, ...(req.body.filter || {})}; - debug && console.log("payload/filters:") - debug && console.log(JSON.stringify(filters)) + filters.filter = objectToObjectOfArrays(filters.filter); + filters.filter = {...filters.filter, ...(req.body.filter || {})}; + debug && console.log("payload/filters:" + JSON.stringify(filters)) return filters; } const sortPaginate = function (list, filters) { + let skey = "timestamp"; + if (list.length > 0 && filters.sort.key) { + for (let key of Object.keys(list[0])) { + if (key.toLowerCase() == filters.sort.key.toLowerCase()) { + skey = key; + break; + } + } + } list.sort((a, b) => { - let aV = (a[filters.sort.key] || a["timestamp"]); - let bV = (b[filters.sort.key] || b["timestamp"]); - return aV > bV ? 1 : aV < bV ? -1 : 0; + return a[skey] > b[skey] ? 1 : a[skey] < b[skey] ? 
-1 : 0; }) if (filters.sort.order) { From c254aab413c6ceb59e54411b2aea08b146ab2930 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 22:44:41 +0200 Subject: [PATCH 107/221] feat(assist): full autocomplete feat(assist): solved endpoints conflicts feat(api): live sessions full autocomplete --- api/chalicelib/core/assist.py | 2 +- api/or_dependencies.py | 4 ++- ee/utilities/server.js | 3 +- ee/utilities/servers/websocket-cluster.js | 44 ++++++++++++++--------- ee/utilities/servers/websocket.js | 43 +++++++++++++--------- ee/utilities/utils/helper-ee.js | 41 ++++++++++----------- utilities/servers/websocket.js | 38 ++++++++++++-------- utilities/utils/helper.js | 22 +++++++++--- 8 files changed, 122 insertions(+), 75 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 5ff067229..977d98826 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -130,7 +130,7 @@ def autocomplete(project_id, q: str, key: str = None): except: print("couldn't get response") return {"errors": ["Something went wrong wile calling assist"]} - return results + return {"data": results} def get_ice_servers(): diff --git a/api/or_dependencies.py b/api/or_dependencies.py index 7eee72c49..824670687 100644 --- a/api/or_dependencies.py +++ b/api/or_dependencies.py @@ -33,7 +33,9 @@ class ORRoute(APIRoute): if isinstance(response, JSONResponse): response: JSONResponse = response body = json.loads(response.body.decode('utf8')) - if response.status_code == 200 and body is not None and body.get("errors") is not None: + if response.status_code == 200 \ + and body is not None and isinstance(body, dict) \ + and body.get("errors") is not None: if "not found" in body["errors"][0]: response.status_code = status.HTTP_404_NOT_FOUND else: diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 480a2b27e..93d6d2a2e 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -75,16 +75,17 @@ if 
(process.env.uws !== "true") { } uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); - uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index f414939fe..2062e5794 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,7 +1,14 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, sortPaginate} = 
require('../utils/helper'); +const { + extractPeerId, + hasFilters, + isValidSession, + sortPaginate, + getValidAttributes, + uniqueAutocomplete +} = require('../utils/helper'); const { extractProjectKeyFromRequest, extractSessionIdFromRequest, @@ -104,8 +111,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -133,9 +138,6 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -163,8 +165,6 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -194,14 +194,11 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let _projectKey = extractProjectKeyFromRequest(req); - let filters = extractPayloadFromRequest(req); + let filters = await 
extractPayloadFromRequest(req); let results = []; if (filters.query && Object.keys(filters.query).length > 0) { let rooms = await getAvailableRooms(); @@ -217,9 +214,8 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -292,6 +288,21 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + + module.exports = { wsRouter, start: (server, prefix) => { @@ -420,6 +431,7 @@ module.exports = { socketsList, socketsListByProject, socketsLive, - socketsLiveByProject + socketsLiveByProject, + autocomplete } }; \ No newline at end of file diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 686b62293..02267fb66 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,11 +1,18 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractPeerId, + hasFilters, + 
isValidSession, + sortPaginate, + getValidAttributes, + uniqueAutocomplete +} = require('../utils/helper'); const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractPayloadFromRequest + extractPayloadFromRequest, } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -86,8 +93,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -115,9 +120,6 @@ const socketsListByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -144,8 +146,6 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -174,14 +174,11 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let 
_projectKey = extractProjectKeyFromRequest(req); - let filters = extractPayloadFromRequest(req); + let filters = await extractPayloadFromRequest(req); let results = []; if (filters.query && Object.keys(filters.query).length > 0) { let rooms = await getAvailableRooms(); @@ -197,9 +194,8 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -270,6 +266,21 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + + module.exports = { wsRouter, start: (server, prefix) => { diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 41fe456cb..dc821b94a 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -17,11 +17,11 @@ const getBodyFromUWSResponse = async function (res) { } catch (e) { console.error(e); /* res.close calls onAborted */ - try { - res.close(); - } catch (e2) { - console.error(e2); - } + // try { + // res.close(); + // } catch (e2) { + // console.error(e2); + // } json = {}; } resolve(json); @@ -59,30 +59,31 @@ const 
extractPayloadFromRequest = async function (req, res) { if (process.env.uws === "true") { if (req.getQuery("q")) { debug && console.log(`[WS]where q=${req.getQuery("q")}`); - filters.query.value = [req.getQuery("q")]; + filters.query.value = req.getQuery("q"); } if (req.getQuery("key")) { debug && console.log(`[WS]where key=${req.getQuery("key")}`); - filters.query.key = [req.getQuery("key")]; + filters.query.key = req.getQuery("key"); } if (req.getQuery("userId")) { debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - filters.userID = [req.getQuery("userId")]; + filters.filter.userID = [req.getQuery("userId")]; } - - let body = await getBodyFromUWSResponse(res); - filters = { - ...filters, - "sort": { - "key": body.sort && body.sort.key ? body.sort.key : undefined, - "order": body.sort && body.sort.order === "DESC" - }, - "pagination": { - "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, - "page": body.pagination && body.pagination.page ? body.pagination.page : undefined + if (!filters.query.value) { + let body = await getBodyFromUWSResponse(res); + filters = { + ...filters, + "sort": { + "key": body.sort && body.sort.key ? body.sort.key : undefined, + "order": body.sort && body.sort.order === "DESC" + }, + "pagination": { + "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, + "page": body.pagination && body.pagination.page ? 
body.pagination.page : undefined + } } + filters.filter = {...filters.filter, ...(body.filter || {})}; } - filters.filter = {...filters.filter, ...(body.filter || {})}; } else { return helper.extractPayloadFromRequest(req); } diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 59d221042..4feac9f3f 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -8,7 +8,9 @@ const { hasFilters, isValidSession, extractPayloadFromRequest, - sortPaginate + sortPaginate, + getValidAttributes, + uniqueAutocomplete } = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -71,8 +73,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -100,9 +100,6 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -129,18 +126,17 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let 
peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -158,11 +154,9 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let _projectKey = extractProjectKeyFromRequest(req); let filters = extractPayloadFromRequest(req); let results = []; @@ -180,9 +174,9 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -253,6 +247,20 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); 
+wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + module.exports = { wsRouter, start: (server, prefix) => { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index de002f89e..32232d36d 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -51,7 +51,7 @@ const isValidSession = function (sessionInfo, filters) { } } else if (skey.toLowerCase() === key.toLowerCase()) { for (let v of values) { - if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { found = true; break; } @@ -77,7 +77,7 @@ const getValidAttributes = function (sessionInfo, query) { if (typeof (svalue) === "object") { matches = [...matches, ...getValidAttributes(svalue, query)] } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) - && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 + && String(svalue).toLowerCase().indexOf(query.value.toLowerCase()) >= 0 && deduplicate.indexOf(skey + '_' + svalue) < 0) { matches.push({"type": skey, "value": svalue}); deduplicate.push(skey + '_' + svalue); @@ -121,11 +121,11 @@ const extractPayloadFromRequest = function (req) { }; if (req.query.q) { debug && console.log(`[WS]where q=${req.query.q}`); - filters.query.value = [req.query.q]; + filters.query.value = req.query.q; } if (req.query.key) { debug && console.log(`[WS]where key=${req.query.key}`); - filters.query.key = [req.query.key]; + filters.query.key = req.query.key; } if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); @@ -160,6 +160,17 @@ const sortPaginate = function (list, filters) { } return list; } +const uniqueAutocomplete = function (list) { + let _list = []; + let deduplicate = []; + for (let e of list) { + if (deduplicate.indexOf(e.type + "_" + e.value) < 0) { + _list.push(e); + deduplicate.push(e.type + "_" + e.value) + } 
+ } + return _list; +} module.exports = { extractPeerId, request_logger, @@ -170,5 +181,6 @@ module.exports = { hasFilters, objectToObjectOfArrays, extractPayloadFromRequest, - sortPaginate + sortPaginate, + uniqueAutocomplete }; \ No newline at end of file From fe6a50dc2cda40089be1774fd5c3ee758a097c4b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 11:53:49 +0200 Subject: [PATCH 108/221] feat(assist): changed pagination response feat(assist): allow nested-key sort feat(api): support new live sessions pagination response --- api/chalicelib/core/assist.py | 7 ++++--- utilities/utils/helper.js | 33 ++++++++++++++++++++++----------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 977d98826..5cc2d70db 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -63,11 +63,12 @@ def __get_live_sessions_ws(project_id, data): except: print("couldn't get response") live_peers = [] - - for s in live_peers: + _live_peers = live_peers + if "sessions" in live_peers: + _live_peers = live_peers["sessions"] + for s in _live_peers: s["live"] = True s["projectId"] = project_id - live_peers = sorted(live_peers, key=lambda l: l.get("timestamp", 0), reverse=True) return live_peers diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 32232d36d..854f491a9 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -136,29 +136,40 @@ const extractPayloadFromRequest = function (req) { debug && console.log("payload/filters:" + JSON.stringify(filters)) return filters; } -const sortPaginate = function (list, filters) { - let skey = "timestamp"; - if (list.length > 0 && filters.sort.key) { - for (let key of Object.keys(list[0])) { - if (key.toLowerCase() == filters.sort.key.toLowerCase()) { - skey = key; - break; +const getValue = function (obj, key) { + if (obj !== undefined && obj !== null) { + let val; + for (let k of 
Object.keys(obj)) { + if (typeof (obj[k]) === "object") { + val = getValue(obj[k], key); + } else if (k.toLowerCase() === key.toLowerCase()) { + val = obj[k]; + } + + if (val !== undefined) { + return val; } } } + return undefined; +} +const sortPaginate = function (list, filters) { + const total = list.length; list.sort((a, b) => { - return a[skey] > b[skey] ? 1 : a[skey] < b[skey] ? -1 : 0; - }) + const vA = getValue(a, filters.sort.key || "timestamp"); + const vB = getValue(b, filters.sort.key || "timestamp"); + return vA > vB ? 1 : vA < vB ? -1 : 0; + }); if (filters.sort.order) { list.reverse(); } if (filters.pagination.page && filters.pagination.limit) { - return list.slice((filters.pagination.page - 1) * filters.pagination.limit, + list = list.slice((filters.pagination.page - 1) * filters.pagination.limit, filters.pagination.page * filters.pagination.limit); } - return list; + return {"total": total, "sessions": list}; } const uniqueAutocomplete = function (list) { let _list = []; From 96bf84b5676e002419b9fdffa4418484b787b9a4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 12:27:51 +0200 Subject: [PATCH 109/221] feat(api): support nested-key-sort for live sessions --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 45a7bc3d8..2fdcae4c3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1043,7 +1043,7 @@ class LiveSessionSearchFilterSchema(BaseModel): class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: LiveFilterType = Field(default=LiveFilterType.timestamp) + sort: Union[LiveFilterType, str] = Field(default=LiveFilterType.timestamp) order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) From a3aa176e67f6d2da5723d63c28f7111561486c4a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 14:02:20 +0200 Subject: [PATCH 110/221] feat(assist): support 
null&empty values for search feat(assist): changed single-session search feat(api): support null&empty values for live sessions search feat(api): support key-mapping for different names feat(api): support platform live-sessions search --- api/chalicelib/core/assist.py | 43 ++++++++++++++++------- api/schemas.py | 19 ++++++++-- ee/utilities/servers/websocket-cluster.js | 10 ++++-- ee/utilities/servers/websocket.js | 10 ++++-- utilities/servers/websocket.js | 10 ++++-- utilities/utils/helper.js | 28 ++++++++------- 6 files changed, 85 insertions(+), 35 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 5cc2d70db..bfacd9295 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -22,7 +22,7 @@ SESSION_PROJECTION_COLS = """s.project_id, def get_live_sessions_ws_user_id(project_id, user_id): data = { - "filter": {"userId": user_id} + "filter": {"userId": user_id} if user_id else {} } return __get_live_sessions_ws(project_id=project_id, data=data) @@ -73,18 +73,9 @@ def __get_live_sessions_ws(project_id, data): def get_live_session_by_id(project_id, session_id): - all_live = __get_live_sessions_ws(project_id, data={"filter": {"sessionId": session_id}}) - for l in all_live: - if str(l.get("sessionID")) == str(session_id): - return l - return None - - -def is_live(project_id, session_id, project_key=None): - if project_key is None: - project_key = projects.get_project_key(project_id) + project_key = projects.get_project_key(project_id) try: - connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}", timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") @@ -103,7 +94,33 @@ def is_live(project_id, session_id, project_key=None): except: print("couldn't get response") return False - return str(session_id) in connected_peers + return connected_peers + + +def is_live(project_id, session_id, project_key=None): + if project_key is None: + project_key = projects.get_project_key(project_id) + try: + connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + timeout=config("assistTimeout", cast=int, default=5)) + if connected_peers.status_code != 200: + print("!! issue with the peer-server") + print(connected_peers.text) + return False + connected_peers = connected_peers.json().get("data") + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + return False + except Exception as e: + print("issue getting Assist response") + print(str(e)) + print("expected JSON, received:") + try: + print(connected_peers.text) + except: + print("couldn't get response") + return False + return str(session_id) == connected_peers def autocomplete(project_id, q: str, key: str = None): diff --git a/api/schemas.py b/api/schemas.py index 2fdcae4c3..c2d2c5497 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1018,6 +1018,7 @@ class LiveFilterType(str, Enum): user_id = FilterType.user_id.value user_anonymous_id = FilterType.user_anonymous_id.value rev_id = FilterType.rev_id.value + platform = FilterType.platform.value page_title = "PAGETITLE" session_id = "SESSIONID" metadata = "METADATA" @@ -1025,7 +1026,6 @@ class LiveFilterType(str, Enum): tracker_version = "TRACKERVERSION" user_browser_version = "USERBROWSERVERSION" user_device_type = "USERDEVICETYPE", - timestamp = "TIMESTAMP" class LiveSessionSearchFilterSchema(BaseModel): @@ -1043,13 +1043,26 @@ class LiveSessionSearchFilterSchema(BaseModel): class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: Union[LiveFilterType, str] 
= Field(default=LiveFilterType.timestamp) + sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP") order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) - def transform_order(cls, values): + def transform(cls, values): if values.get("order") is not None: values["order"] = values["order"].upper() + if values.get("filters") is not None: + i = 0 + while i < len(values["filters"]): + if values["filters"][i]["values"] is None or len(values["filters"][i]["values"]) == 0: + del values["filters"][i] + else: + i += 1 + for i in values["filters"]: + if i.get("type") == LiveFilterType.platform.value: + i["type"] = LiveFilterType.user_device_type.value + if values.get("sort") is not None: + if values["sort"].lower() == "startts": + values["sort"] = "TIMESTAMP" return values class Config: diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 2062e5794..95cb13740 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -136,7 +136,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? liveSessions[_projectKey] + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -192,7 +195,10 @@ const socketsLiveByProject = async function (req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []); } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? 
liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 02267fb66..d2db03e61 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -118,7 +118,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -172,7 +175,10 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 4feac9f3f..1e676a02c 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -98,7 +98,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? liveSessions[_projectKey] + : liveSessions[_projectKey].length > 0 ? 
liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -152,7 +155,10 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 854f491a9..c976d1b5c 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -42,22 +42,24 @@ const isValidSession = function (sessionInfo, filters) { let foundAll = true; for (const [key, values] of Object.entries(filters)) { let found = false; - for (const [skey, svalue] of Object.entries(sessionInfo)) { - if (svalue !== undefined && svalue !== null) { - if (typeof (svalue) === "object") { - if (isValidSession(svalue, {[key]: values})) { - found = true; - break; - } - } else if (skey.toLowerCase() === key.toLowerCase()) { - for (let v of values) { - if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (values !== undefined && values !== null) { + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (svalue !== undefined && svalue !== null) { + if (typeof (svalue) === "object") { + if (isValidSession(svalue, {[key]: values})) { found = true; break; } - } - if (found) { - break; + } else if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } } } } From 33a3890562c8ce22c929e43b808339ed3e576742 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 16:34:02 +0200 Subject: [PATCH 111/221] feat(api): fixed typo --- api/schemas.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index c2d2c5497..c65236cd3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1053,7 +1053,7 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema): if values.get("filters") is not None: i = 0 while i < len(values["filters"]): - if values["filters"][i]["values"] is None or len(values["filters"][i]["values"]) == 0: + if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0: del values["filters"][i] else: i += 1 From 734320cfe559b96c01a082c12dee1b0d93f1e73f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 17:49:57 +0200 Subject: [PATCH 112/221] feat(api): custom metrics errors pagination feat(api): custom metrics sessions pagination --- api/chalicelib/core/custom_metrics.py | 4 ++++ api/schemas.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 5a7fdcea6..5f6f1ac94 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -70,6 +70,8 @@ def __get_errors_list(project_id, user_id, data): } data.series[0].filter.startDate = data.startTimestamp data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.page = data.page + data.series[0].filter.limit = data.limit return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) @@ -87,6 +89,8 @@ def __get_sessions_list(project_id, user_id, data): } data.series[0].filter.startDate = data.startTimestamp data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.page = data.page + data.series[0].filter.limit = data.limit return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) diff --git a/api/schemas.py b/api/schemas.py index c65236cd3..bacceea78 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -826,7 +826,7 @@ class 
CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema): alias_generator = attribute_to_camel_case -class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema): +class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema): density: int = Field(7) class Config: From 621b4aae7caf9240cb344bab8ab3d61d7c504a8e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 19:12:06 +0200 Subject: [PATCH 113/221] feat(db): migrate old funnels to new metric-funnels --- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 27 ++++++++++++++++++- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 27 ++++++++++++++++++- 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 7b5169c3c..9c7e75b95 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -164,4 +164,29 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; \ No newline at end of file + view_type=excluded.view_type; + +BEGIN; +DO +$$ + BEGIN + IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND + EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + THEN + ALTER TABLE IF EXISTS metrics + ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; + WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter) + SELECT project_id, user_id, name, 'funnel', is_public, filter + FROM funnels + WHERE deleted_at ISNULL + RETURNING metric_id,_funnel_filter) + INSERT + INTO metric_series(metric_id, name, filter, index) + SELECT metric_id, 'Series 1', _funnel_filter, 0 + FROM f_t_m; + ALTER TABLE IF EXISTS metrics + DROP COLUMN IF EXISTS _funnel_filter; + END IF; + END +$$; +COMMIT; \ No 
newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 00bf4ec1d..bec9bdff3 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -151,4 +151,29 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; \ No newline at end of file + view_type=excluded.view_type; + +BEGIN; +DO +$$ + BEGIN + IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND + EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + THEN + ALTER TABLE IF EXISTS metrics + ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; + WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter) + SELECT project_id, user_id, name, 'funnel', is_public, filter + FROM funnels + WHERE deleted_at ISNULL + RETURNING metric_id,_funnel_filter) + INSERT + INTO metric_series(metric_id, name, filter, index) + SELECT metric_id, 'Series 1', _funnel_filter, 0 + FROM f_t_m; + ALTER TABLE IF EXISTS metrics + DROP COLUMN IF EXISTS _funnel_filter; + END IF; + END +$$; +COMMIT; \ No newline at end of file From f9695198f2b475eeb1d4c52077461dab1fcebb11 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 19:18:52 +0200 Subject: [PATCH 114/221] feat(db): migrate to v1.7.0: fixed cross-database references issue --- ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 9c7e75b95..ee01e24e0 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -171,7 
+171,7 @@ DO $$ BEGIN IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND - EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL)) THEN ALTER TABLE IF EXISTS metrics ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index bec9bdff3..2ed45dea7 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -158,7 +158,7 @@ DO $$ BEGIN IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND - EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL)) THEN ALTER TABLE IF EXISTS metrics ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; From e322e9c3d0a415148ef97a88774c202de9824bc0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 26 Apr 2022 18:10:25 +0200 Subject: [PATCH 115/221] feat(api): round time metrics --- api/chalicelib/core/metrics.py | 34 ++++++++++--------- api/chalicelib/utils/helper.py | 20 +++++++++++- api/schemas.py | 1 + ee/api/chalicelib/core/metrics.py | 54 +++++++++++++++++-------------- 4 files changed, 67 insertions(+), 42 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index fb8241440..05c5233f8 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -967,7 +967,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -1126,7 +1126,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - 
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} + result = {"value": avg, "chart": rows} + helper.__time_value(result) + return result def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1348,7 +1350,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2241,7 +2243,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2300,7 +2302,7 @@ def __get_application_activity_avg_page_load_time(cur, project_id, startTimestam cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2316,7 +2318,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2369,7 +2371,7 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes "endTimestamp": endTimestamp, **__get_constraint_values(args)})) row = cur.fetchone() - row["unit"] = 
schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return row @@ -2385,7 +2387,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2442,7 +2444,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2512,7 +2514,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2645,7 +2647,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2731,7 +2733,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + 
helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2772,7 +2774,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2816,7 +2818,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2857,7 +2859,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) @@ -2899,7 +2901,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() row["chart"] = helper.list_to_camel_case(rows) - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(row) return helper.dict_to_camel_case(row) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 8cfab8a3f..042b2a94b 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -1,12 +1,13 @@ +import math import random import re import string from typing import Union -import math import requests import schemas +from chalicelib.utils.TimeUTC import TimeUTC local_prefix = 'local-' from decouple import config @@ -384,3 +385,20 @@ def custom_alert_to_front(values): if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom: 
values["query"]["left"] = values["seriesId"] return values + + +def __time_value(row): + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + factor = 1 + if row["value"] > TimeUTC.MS_MINUTE: + row["value"] = row["value"] / TimeUTC.MS_MINUTE + row["unit"] = schemas.TemplatePredefinedUnits.minute + factor = TimeUTC.MS_MINUTE + elif row["value"] > 1 * 1000: + row["value"] = row["value"] / 1000 + row["unit"] = schemas.TemplatePredefinedUnits.second + factor = 1000 + + if "chart" in row and factor > 1: + for r in row["chart"]: + r["value"] /= factor diff --git a/api/schemas.py b/api/schemas.py index f1daef481..bb697d03f 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -966,6 +966,7 @@ class TemplatePredefinedKeys(str, Enum): class TemplatePredefinedUnits(str, Enum): millisecond = "ms" + second = "s" minute = "min" memory = "mb" frame = "f/s" diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 668ce4760..2d6aa7201 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -943,11 +943,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, - "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, - density=density, neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + + results = {"value": avg, + "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0})} + helper.__time_value(results) + return results def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1088,11 +1090,12 @@ def get_pages_response_time(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1 FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, - "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, - density=density, neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + results = {"value": avg, + "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0})} + helper.__time_value(results) + return results def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1288,10 +1291,11 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, density=density, - neutral={"value": 0}), - "unit": schemas.TemplatePredefinedUnits.millisecond} + results = {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, density=density, + neutral={"value": 0})} + helper.__time_value(results) + return results def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -2102,7 +2106,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - 
results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2179,7 +2183,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2255,7 +2259,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args) previous = helper.dict_to_camel_case(row) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2334,7 +2338,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2395,7 +2399,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2529,7 +2533,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], 
new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2608,7 +2612,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del end_time=endTimestamp, density=density, neutral={"value": 0}) results["chart"] = rows - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2684,7 +2688,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2726,7 +2730,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no end_time=endTimestamp, density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return results @@ -2768,7 +2772,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de end_time=endTimestamp, density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) @@ -2810,5 +2814,5 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n end_time=endTimestamp, density=density, neutral={"value": 0})) - results["unit"] = schemas.TemplatePredefinedUnits.millisecond + helper.__time_value(results) return helper.dict_to_camel_case(results) From 9ddc0e5e4a1fa9f63037de16a77481252dcc44ab Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Jun 2022 10:39:30 +0200 Subject: [PATCH 116/221] feat(api): merge dev --- ee/api/chalicelib/core/metrics.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 
2d6aa7201..111671a01 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -2476,8 +2476,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(count)),0) AS value FROM (SELECT COUNT(session_id) AS count FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY session_id) AS groupped_data + WHERE {" AND ".join(ch_sub_query)}) AS groupped_data WHERE count>0;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} From d45fd1634d342c894fc78ba27ea2f785d008ac97 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Apr 2022 14:59:05 +0200 Subject: [PATCH 117/221] feat(api): EE fixed No of pages count widget --- ee/api/chalicelib/core/metrics.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 111671a01..2d6aa7201 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -2476,7 +2476,8 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(count)),0) AS value FROM (SELECT COUNT(session_id) AS count FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)}) AS groupped_data + WHERE {" AND ".join(ch_sub_query)} + GROUP BY session_id) AS groupped_data WHERE count>0;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} From 1e6c6fa1a7e1f99db7e959939a2768e1a26cef60 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Apr 2022 15:29:45 +0200 Subject: [PATCH 118/221] feat(db): EE remove pages_count column --- 
ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 1 + ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql new file mode 100644 index 000000000..412f3ae2a --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -0,0 +1 @@ +ALTER TABLE sessions DROP COLUMN pages_count; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql index 22cc6b876..712cbd6d4 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql @@ -14,7 +14,6 @@ CREATE TABLE IF NOT EXISTS sessions user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 
'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), datetime DateTime, duration UInt32, - pages_count UInt16, events_count UInt16, errors_count UInt16, utm_source Nullable(String), From c72120ac646ddb799640caad4657789dbbdbc0c1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 13:40:57 +0200 Subject: [PATCH 119/221] feat(api): s3 helper detect environment feat(api): support description for dashboards --- api/chalicelib/core/dashboards.py | 7 ++++--- api/chalicelib/utils/s3.py | 13 ++++++++----- api/schemas.py | 1 + .../helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql | 12 ++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 1 + 
scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql | 12 ++++++++++++ scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 + 7 files changed, 39 insertions(+), 8 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql create mode 100644 scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 7b7bfe252..bce5d3ad0 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -42,8 +42,8 @@ def get_templates(project_id, user_id): def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): with pg_client.PostgresClient() as cur: - pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned) - VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s) + pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description) + VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s) RETURNING *""" params = {"userId": user_id, "projectId": project_id, **data.dict()} if data.metrics is not None and len(data.metrics) > 0: @@ -134,7 +134,8 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo row = cur.fetchone() offset = row["count"] pg_query = f"""UPDATE dashboards - SET name = %(name)s + SET name = %(name)s, + description= %(description)s {", is_public = %(is_public)s" if data.is_public is not None else ""} {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""} WHERE dashboards.project_id = %(projectId)s diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index 67e1eafd2..b6575ccb5 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -5,11 +5,14 @@ import boto3 import botocore from botocore.client import Config -client = boto3.client('s3', endpoint_url=config("S3_HOST"), - aws_access_key_id=config("S3_KEY"), - 
aws_secret_access_key=config("S3_SECRET"), - config=Config(signature_version='s3v4'), - region_name=config("sessions_region")) +if not config("S3_HOST", default=False): + client = boto3.client('s3') +else: + client = boto3.client('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), + config=Config(signature_version='s3v4'), + region_name=config("sessions_region")) def exists(bucket, key): diff --git a/api/schemas.py b/api/schemas.py index bb697d03f..105ead87e 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,6 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) + description: str = Field(default=None) is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql new file mode 100644 index 000000000..e94ccc4e1 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -0,0 +1,12 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.6.1-ee' +$$ LANGUAGE sql IMMUTABLE; + + +ALTER TABLE IF EXISTS dashboards + ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 461a414fc..7d6bdece7 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -838,6 +838,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, + description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, is_pinned boolean NOT 
NULL DEFAULT FALSE, created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql new file mode 100644 index 000000000..c61efae19 --- /dev/null +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -0,0 +1,12 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.6.1' +$$ LANGUAGE sql IMMUTABLE; + + +ALTER TABLE IF EXISTS dashboards + ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 5a01226f1..a4b41fefe 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -992,6 +992,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, + description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, is_pinned boolean NOT NULL DEFAULT FALSE, created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), From 32fdd80784892fc2f32605a70c881c3237eb302c Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 27 Apr 2022 12:54:40 +0000 Subject: [PATCH 120/221] Vagrant for local contribution (#434) * chore(vagrant): initial vagrantfile * chore(vagrant): adding instructions after installation * chore(vagrant): Adding vagrant user to docker group * chore(vagrant): use local docker daemon for k3s * chore(vagrant): fix comment * chore(vagrant): adding hostname in /etc/hosts * chore(vagrant): fix doc * chore(vagrant): limiting cpu * chore(frontend): initialize dev env * chore(docker): adding dockerignore * chore(dockerfile): using cache for fasten build * chore(dockerignore): update * chore(docker): build optimizations * 
chore(build): all components build option * chore(build): utilities build fix * chore(scrpt): remove debug message * chore(vagrant): provision using stable branch always Signed-off-by: rjshrjndrn --- scripts/vagrant/Vagrantfile | 129 ++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 scripts/vagrant/Vagrantfile diff --git a/scripts/vagrant/Vagrantfile b/scripts/vagrant/Vagrantfile new file mode 100644 index 000000000..341d9792c --- /dev/null +++ b/scripts/vagrant/Vagrantfile @@ -0,0 +1,129 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : + +# All Vagrant configuration is done below. The "2" in Vagrant.configure +# configures the configuration version (we support older styles for +# backwards compatibility). Please don't change it unless you know what +# you're doing. +Vagrant.configure("2") do |config| + # The most common configuration options are documented and commented below. + # For a complete reference, please see the online documentation at + # https://docs.vagrantup.com. + + # Every Vagrant development environment requires a box. You can search for + # boxes at https://vagrantcloud.com/search. + config.vm.box = "peru/ubuntu-20.04-server-amd64" + config.vm.define "openreplay-dev" + + # Disable automatic box update checking. If you disable this, then + # boxes will only be checked for updates when the user runs + # `vagrant box outdated`. This is not recommended. + # config.vm.box_check_update = false + + # Create a forwarded port mapping which allows access to a specific port + # within the machine from a port on the host machine. In the example below, + # accessing "localhost:8080" will access port 80 on the guest machine. 
+ # NOTE: This will enable public access to the opened port + # config.vm.network "forwarded_port", guest: 80, host: 8080 + + # Create a forwarded port mapping which allows access to a specific port + # within the machine from a port on the host machine and only allow access + # via 127.0.0.1 to disable public access + # config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1" + + # Create a private network, which allows host-only access to the machine + # using a specific IP. + config.vm.network "private_network", type: "dhcp" + + # Create a public network, which generally matched to bridged network. + # Bridged networks make the machine appear as another physical device on + # your network. + # config.vm.network "public_network" + + # Share an additional folder to the guest VM. The first argument is + # the path on the host to the actual folder. The second argument is + # the path on the guest to mount the folder. And the optional third + # argument is a set of non-required options. + config.vm.synced_folder "./", "/home/vagrant/openreplay-dev/" + + # Provider-specific configuration so you can fine-tune various + # backing providers for Vagrant. These expose provider-specific options. + # Example for VirtualBox: + # + config.vm.provider "virtualbox" do |vb| + # Display the VirtualBox GUI when booting the machine + vb.gui = false + + # Customize the amount of memory on the VM: + vb.cpus = "2" + vb.memory = "4096" + end + # + # View the documentation for the provider you are using for more + # information on available options. + + # Enable provisioning with a shell script. Additional provisioners such as + # Ansible, Chef, Docker, Puppet and Salt are also available. Please see the + # documentation for more information about their specific syntax and use. 
+ config.vm.provision "shell", inline: <<-SHELL + set -x + + IP_ADDR=`ip r | tail -n1 | awk '{print $NF}'` + + # Updating host domainName + grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts + + apt-get update + apt-get install -y git curl + curl -fsSL https://get.docker.com | sh - + usermod -aG docker vagrant + + git clone https://github.com/openreplay/openreplay infra + cd infra/scripts/helmcharts + + # changing container runtime for k3s to docker + sudo -u vagrant git checkout -- init.sh + sed -i 's/INSTALL_K3S_EXEC=\\(.*\\)\\\"/INSTALL_K3S_EXEC=\\1 --docker\\\"/g' init.sh + + DOMAIN_NAME=openreplay.local bash init.sh + cp -rf /root/.kube /home/vagrant/ + cp -rf /home/vagrant/infra/scripts/helmcharts/vars.yaml /home/vagrant/openreplay-dev/openreplay/scripts/helmcharts/vars.yaml + chown -R vagrant:vagrant /home/vagrant + + cat <> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts' + + ## Linux (Paste the following command in terminal) + + sudo -- sh -c 'grep -q openreplay.local /etc/hosts || echo $IP_ADDR openreplay.local >> /etc/hosts && sudo sed -i "s/.*openreplay.local/${IP_ADDR} openreplay.local/g" /etc/hosts; grep openreplay.local /etc/hosts' + + ## Windows + + Use the following instructions if you’re running Windows 10 or Windows 8: + + Press the Windows key. + Type Notepad in the search field. + In the search results, right-click Notepad and select Run as administrator. + From Notepad, open the following file: + c:\\Windows\\System32\\Drivers\\etc\\hosts + add the below line in the hosts file + $IP_ADDR openreplay.local + Select File > Save to save your changes. 
+ + To Access Openreplay: + - Open your browser and go to "http://openreplay.local" + + EOF + SHELL +end From ea103f9589f45fa144908b5872085184188d0167 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Fri, 29 Apr 2022 11:58:07 +0200 Subject: [PATCH 121/221] chore(vagrant): Adding development readme Signed-off-by: rjshrjndrn --- scripts/vagrant/README.md | 69 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 scripts/vagrant/README.md diff --git a/scripts/vagrant/README.md b/scripts/vagrant/README.md new file mode 100644 index 000000000..ffe132c73 --- /dev/null +++ b/scripts/vagrant/README.md @@ -0,0 +1,69 @@ + + +### Installation + +- Vagrant: [https://www.vagrantup.com/downloads](https://www.vagrantup.com/downloads) +- VirtualBox: [https://www.virtualbox.org/wiki/Downloads](https://www.virtualbox.org/wiki/Downloads) + +### Configuration + +```bash +mkdir openreplay-contributions +cd openreplay-contributions +git clone https://github.com/openreplay/openreplay -b dev +cp -rf openreplay/scripts/vagrant/ . +vagrant up +``` + +### To access OpenReplay instance + +```bash +Add ip address from about output to your local resolver + +## Mac/Linux + +Copy paste the command from the vagrant output + +## Windows + +Use the following instructions if you’re running Windows 10 or Windows 8: + Press the Windows key. + Type Notepad in the search field. + In the search results, right-click Notepad and select Run as administrator. + From Notepad, open the following file: + c:\Windows\System32\Drivers\etc\hosts + add the below line in the hosts file + openreplay.local + Select File > Save to save your changes. 
+ +**Open browser** +http://openreplay.local +``` + +### To start developing + +- [Frontend](../../frontend/development.md) +- [API](../../api/development.md) +- [Backend](../../backend/development.md) + +### Notes + +It’ll be a good practice to take a snapshot once the initial setup is complete, so that if something is not working as expected, you can always fall back to a stable known version. +```bash +cd openreplay-dev +vagrant snapshot save +# For example +vagrant snapshot save openreplay-160-base +``` + +```bash +# To restore the snapshot +cd openreplay-dev +vagrant snapshot restore openreplay-160-base +``` + + From 1c671631e786f65ab068854ffa11d1c190b917a1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 14:06:00 +0200 Subject: [PATCH 122/221] feat(api): changed Dockerfile --- api/Dockerfile | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index f3b5e85f5..000576611 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,20 +1,7 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env ENV APP_NAME chalice -# Installing Nodejs -RUN apt update && apt install -y curl && \ - curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ - apt install -y nodejs && \ - apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* -RUN cd sourcemap-reader && \ - npm install - # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -22,5 +9,23 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* \ + +COPY requirements.txt requirements.txt +RUN pip install -r requirements.txt +WORKDIR /work_tmp +COPY sourcemap-reader/*.json . +RUN npm install + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/. 
+ ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh From c715a6084e6002f957fcd796246f644c3c30b35e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 14:16:36 +0200 Subject: [PATCH 123/221] feat(api): fixed description default value --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 105ead87e..091eae0c3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,7 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) - description: str = Field(default=None) + description: str = Field(default='') is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) From 1224e6054ede9ea37516a518fafddd48f632e156 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Apr 2022 16:08:38 +0200 Subject: [PATCH 124/221] feat(api): fixed description optional value --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 091eae0c3..1d92f5fce 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -888,7 +888,7 @@ class SavedSearchSchema(FunnelSchema): class CreateDashboardSchema(BaseModel): name: str = Field(..., min_length=1) - description: str = Field(default='') + description: Optional[str] = Field(default='') is_public: bool = Field(default=False) is_pinned: bool = Field(default=False) metrics: Optional[List[int]] = Field(default=[]) From 90143bcd31e803d137d481ad359c38c1f9f5f5fb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:00:40 +0200 Subject: [PATCH 125/221] feat(api): updated dependencies --- api/requirements.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index 198b535dd..d615851d1 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,15 +1,15 @@ 
-requests==2.26.0 -urllib3==1.26.6 -boto3==1.16.1 -pyjwt==1.7.1 -psycopg2-binary==2.8.6 +requests==2.27.1 +urllib3==1.26.9 +boto3==1.22.6 +pyjwt==2.3.0 +psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -fastapi==0.75.0 -uvicorn[standard]==0.17.5 +fastapi==0.75.2 +uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 -apscheduler==3.8.1 \ No newline at end of file +apscheduler==3.9.1 \ No newline at end of file From 1859fb8a6c4f96452736b175b335a4f39e5e43a3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:10:48 +0200 Subject: [PATCH 126/221] feat(api): EE updated dependencies --- ee/api/requirements.txt | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 5909d31c1..f14d6022d 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,16 +1,16 @@ -requests==2.26.0 -urllib3==1.26.6 -boto3==1.16.1 -pyjwt==1.7.1 -psycopg2-binary==2.8.6 +requests==2.27.1 +urllib3==1.26.9 +boto3==1.22.6 +pyjwt==2.3.0 +psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -clickhouse-driver==0.2.2 +clickhouse-driver==0.2.3 python3-saml==1.12.0 -fastapi==0.75.0 +fastapi==0.75.2 python-multipart==0.0.5 -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 -apscheduler==3.8.1 \ No newline at end of file +apscheduler==3.9.1 \ No newline at end of file From 1dcad02b9a0bd483c013d82e6aac85296276e385 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 13:14:25 +0200 Subject: [PATCH 127/221] feat(api): changed replay file URL --- api/chalicelib/core/sessions_mobs.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 8f61d436b..ccbda20bb 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -5,14 +5,23 @@ from 
chalicelib.utils.s3 import client def get_web(sessionId): - return client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': config("sessions_bucket"), - 'Key': str(sessionId) - }, - ExpiresIn=100000 - ) + return [ + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + }, + ExpiresIn=100000 + ), + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + "e" + }, + ExpiresIn=100000 + )] def get_ios(sessionId): From 9af6fc004b71555d94120a2298208ed08e7d31d4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:32:17 +0200 Subject: [PATCH 128/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/Dockerfile b/api/Dockerfile index 000576611..20d9f649a 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -17,9 +17,9 @@ RUN apt update && apt install -y curl && \ apt remove --purge -y curl && \ rm -rf /var/lib/apt/lists/* \ +WORKDIR /work_tmp COPY requirements.txt requirements.txt RUN pip install -r requirements.txt -WORKDIR /work_tmp COPY sourcemap-reader/*.json . RUN npm install From 9c5d96e35cdfa84d1b492462fa3b651e94c7da3b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:36:52 +0200 Subject: [PATCH 129/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/Dockerfile b/api/Dockerfile index 20d9f649a..ae2ded77c 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -20,7 +20,7 @@ RUN apt update && apt install -y curl && \ WORKDIR /work_tmp COPY requirements.txt requirements.txt RUN pip install -r requirements.txt -COPY sourcemap-reader/*.json . 
+COPY sourcemap-reader/*.json ./ RUN npm install WORKDIR /work From b0d3074cebd944af5d3818c12b6e02c8e8e9c3dd Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 4 May 2022 14:50:09 +0200 Subject: [PATCH 130/221] feat(api): changed Dockerfile --- api/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index ae2ded77c..682286786 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -18,10 +18,10 @@ RUN apt update && apt install -y curl && \ rm -rf /var/lib/apt/lists/* \ WORKDIR /work_tmp -COPY requirements.txt requirements.txt -RUN pip install -r requirements.txt -COPY sourcemap-reader/*.json ./ -RUN npm install +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt +COPY sourcemap-reader/*.json /work_tmp/ +RUN cd /work_tmp && npm install WORKDIR /work COPY . . From d9d2f08fb8bd40b0b137393d1ffa384c26866488 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 18:21:47 +0200 Subject: [PATCH 131/221] feat(DB): changed sessions_metadata sort expression --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 72 ++++++++++++++++++- .../clickhouse/create/sessions_metadata.sql | 2 +- 2 files changed, 72 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 412f3ae2a..a8f90613d 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -1 +1,71 @@ -ALTER TABLE sessions DROP COLUMN pages_count; \ No newline at end of file +ALTER TABLE sessions + DROP COLUMN pages_count; + +CREATE TABLE default.sessions_metadata_temp +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device 
Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 
'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime) + TTL datetime + INTERVAL 1 MONTH; + +INSERT INTO default.sessions_metadata_temp(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, + user_browser, user_browser_version, user_device, user_device_type, + user_country, + datetime, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, + metadata_4, + metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.sessions_metadata; + +DROP TABLE default.sessions_metadata; +RENAME TABLE default.sessions_metadata_temp TO default.sessions_metadata; \ No newline at end of file diff 
--git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index ddf8aed01..f6b77930e 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -27,5 +27,5 @@ CREATE TABLE IF NOT EXISTS sessions_metadata metadata_10 Nullable(String) ) ENGINE = MergeTree PARTITION BY toDate(datetime) - ORDER BY (session_id) + ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file From cf6320d4df18146be7b382e5d1b19a0684a0c10b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 20:37:37 +0200 Subject: [PATCH 132/221] feat(DB): traces/trails index feat(api): get all traces/trails --- ee/api/chalicelib/core/traces.py | 26 ++++++++++++++++++- ee/api/routers/ee.py | 11 +++++++- ee/api/schemas_ee.py | 10 +++++++ .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 2 ++ .../db/init_dbs/postgresql/init_schema.sql | 1 + 5 files changed, 48 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index fd0ae6c2b..d77b0f580 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -9,7 +9,8 @@ from pydantic import BaseModel, Field from starlette.background import BackgroundTask import app as main_app -from chalicelib.utils import pg_client +import schemas_ee +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from schemas import CurrentContext @@ -151,6 +152,29 @@ async def process_traces_queue(): await write_traces_batch(traces) +def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + """SELECT COUNT(*) AS count, + COALESCE(JSONB_AGG(full_traces) + FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions + FROM (SELECT *, 
ROW_NUMBER() OVER (ORDER BY created_at) AS rn + FROM traces + WHERE tenant_id=%(tenant_id)s + AND created_at>=%(startDate)s + AND created_at<=%(endDate)s + ORDER BY created_at) AS full_traces;""", + {"tenant_id": tenant_id, + "startDate": data.startDate, + "endDate": data.endDate, + "p_start": (data.page - 1) * data.limit, + "p_end": data.page * data.limit}) + ) + rows = cur.fetchall() + return helper.list_to_camel_case(rows) + + cron_jobs = [ {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60), "misfire_grace_time": 20} diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index 1a9589eaa..f63d0dd3a 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,6 +1,7 @@ -from chalicelib.core import roles +from chalicelib.core import roles, traces from chalicelib.core import unlock from chalicelib.utils import assist_helper +from chalicelib.utils.TimeUTC import TimeUTC unlock.check() @@ -58,3 +59,11 @@ def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_contex @app.get('/assist/credentials', tags=["assist"]) def get_assist_credentials(): return {"data": assist_helper.get_full_config()} + + +@app.post('/trails', tags=["traces", "trails"]) +def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': traces.get_all(tenant_id=context.tenant_id, data=data) + } diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 59a58f94b..06ae8f2ba 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -3,6 +3,7 @@ from typing import Optional, List from pydantic import BaseModel, Field import schemas +from chalicelib.utils.TimeUTC import TimeUTC class RolePayloadSchema(BaseModel): @@ -22,3 +23,12 @@ class CreateMemberSchema(schemas.CreateMemberSchema): class EditMemberSchema(schemas.EditMemberSchema): roleId: int = Field(...) 
+ + +class TrailSearchPayloadSchema(schemas._PaginatedSchema): + startDate: int = Field(default=TimeUTC.now(-7)) + endDate: int = Field(default=TimeUTC.now(1)) + user_id: Optional[int] = Field(default=None) + + class Config: + alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index e94ccc4e1..00d871cac 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -9,4 +9,6 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + +CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 7d6bdece7..d78a99c27 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -785,6 +785,7 @@ $$ ); CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id); CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id); + CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); From e95c5b915dd2474e5c16d57a55d36479fa55d6a9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 5 May 2022 20:42:08 +0200 Subject: [PATCH 133/221] feat(api): return createdAt with the list of users --- api/chalicelib/core/users.py | 2 ++ ee/api/chalicelib/core/users.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index ceada34f8..0ef2f2088 100644 --- 
a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -377,6 +377,7 @@ def get_members(tenant_id): users.email, users.role, users.name, + users.created_at, basic_authentication.generated_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, @@ -393,6 +394,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index b70f6a269..d34e2f5f9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -414,6 +414,7 @@ def get_members(tenant_id): users.email, users.role, users.name, + users.created_at, basic_authentication.generated_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, @@ -435,6 +436,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: From 7f9bc99bcfaa08ca8bdb60e23495f76849f24d5c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 11:56:03 +0200 Subject: [PATCH 134/221] feat(DB): traces/trails index feat(api): get all possible traces/trails actions feat(api): search traces/trails by actions feat(api): search traces/trails by user --- ee/api/chalicelib/core/traces.py | 26 +++++++++++++++---- ee/api/routers/ee.py | 5 ++++ ee/api/schemas_ee.py | 1 + .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 1 + 5 files changed, 29 insertions(+), 5 deletions(-) diff --git 
a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index d77b0f580..64c1c6df1 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -154,27 +154,43 @@ async def process_traces_queue(): def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): with pg_client.PostgresClient() as cur: + conditions = ["tenant_id=%(tenant_id)s", "created_at>=%(startDate)s", "created_at<=%(endDate)s"] + if data.user_id is not None: + conditions.append("user_id=%(user_id)s") + if data.action is not None: + conditions.append("action=%(action)s") cur.execute( cur.mogrify( - """SELECT COUNT(*) AS count, + f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(full_traces) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY created_at) AS rn FROM traces - WHERE tenant_id=%(tenant_id)s - AND created_at>=%(startDate)s - AND created_at<=%(endDate)s + WHERE {" AND ".join(conditions)} ORDER BY created_at) AS full_traces;""", {"tenant_id": tenant_id, "startDate": data.startDate, "endDate": data.endDate, "p_start": (data.page - 1) * data.limit, - "p_end": data.page * data.limit}) + "p_end": data.page * data.limit, + **data.dict()}) ) rows = cur.fetchall() return helper.list_to_camel_case(rows) +def get_available_actions(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify( + f"""SELECT DISTINCT action + FROM traces + WHERE tenant_id=%(tenant_id)s + ORDER BY 1""", + {"tenant_id": tenant_id})) + rows = cur.fetchall() + return [r["action"] for r in rows] + + cron_jobs = [ {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60), "misfire_grace_time": 20} diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index f63d0dd3a..9a79551b7 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -67,3 +67,8 @@ def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), return { 'data': 
traces.get_all(tenant_id=context.tenant_id, data=data) } + + +@app.post('/trails/actions', tags=["traces", "trails"]) +def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)): + return {'data': traces.get_available_actions(tenant_id=context.tenant_id)} diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 06ae8f2ba..50eb3d03f 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -29,6 +29,7 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): startDate: int = Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) + action: Optional[str] = Field(default=None) class Config: alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 00d871cac..b28f28b62 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -11,4 +11,5 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); +CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index d78a99c27..a59e25e54 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -786,6 +786,7 @@ $$ CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id); CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id); CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); + CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); CREATE TYPE metric_view_type AS ENUM 
('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); From 507462180e9c56bf60e1ad7f9b98486e66d55a52 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:07:03 +0200 Subject: [PATCH 135/221] feat(api): fixed return createdAt with the list of users --- api/chalicelib/core/users.py | 2 +- ee/api/chalicelib/core/users.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 0ef2f2088..40cc0f7db 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -394,7 +394,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: - r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index d34e2f5f9..cf2a808e7 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -436,7 +436,7 @@ def get_members(tenant_id): if len(r): r = helper.list_to_camel_case(r) for u in r: - r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) if u["invitationToken"]: u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) else: From 9100d2785439a555bdc9ebc5186979f3f4d4abb8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:11:38 +0200 Subject: [PATCH 136/221] feat(api): changed root path --- ee/api/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/app.py b/ee/api/app.py index 0041ec12e..505f1393c 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -16,7 +16,7 @@ from routers.crons import core_crons from routers.crons import core_dynamic_crons from routers.subs 
import dashboard, insights, metrics, v1_api_ee -app = FastAPI() +app = FastAPI(root_path="/api") @app.middleware('http') From 41d7d16d034eca846576aca18d390b25e041c051 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 12:16:07 +0200 Subject: [PATCH 137/221] feat(api): changed Dockerfile --- api/Dockerfile | 2 +- ee/api/Dockerfile | 33 +++++++++++++++++++-------------- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index 682286786..cc8f36ece 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -15,7 +15,7 @@ RUN apt update && apt install -y curl && \ curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ apt install -y nodejs && \ apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* \ + rm -rf /var/lib/apt/lists/* WORKDIR /work_tmp COPY requirements.txt /work_tmp/requirements.txt diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index aee6aecb2..c99e576e4 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,21 +1,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env ENV APP_NAME chalice -# Installing Nodejs -RUN apt update && apt install -y curl && \ - curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ - apt install -y nodejs && \ - apt remove --purge -y curl && \ - rm -rf /var/lib/apt/lists/* && \ - cd sourcemap-reader && \ - npm install - +RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -23,5 +10,23 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* + +WORKDIR /work_tmp +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt +COPY sourcemap-reader/*.json /work_tmp/ +RUN cd /work_tmp && npm install + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv /work_tmp/node_modules sourcemap-reader/. 
+ ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh From 9fcba8703e25063917a2ceced27f15ca10f095a5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 15:09:50 +0200 Subject: [PATCH 138/221] feat(api): EE updated authorizer --- ee/api/chalicelib/core/authorizers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py index 149d570ab..5adf3e61a 100644 --- a/ee/api/chalicelib/core/authorizers.py +++ b/ee/api/chalicelib/core/authorizers.py @@ -52,7 +52,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None): key=config("jwt_secret"), algorithm=config("jwt_algorithm") ) - return token.decode("utf-8") + return token def api_key_authorizer(token): From 18f0d2fbcabec88d165e0388c927fde55af32146 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 17:27:43 +0200 Subject: [PATCH 139/221] feat(api): search user trails by username feat(db): index to search user trails by username --- ee/api/chalicelib/core/traces.py | 34 ++++++++++++------- ee/api/schemas_ee.py | 4 ++- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 1 + 4 files changed, 26 insertions(+), 14 deletions(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 64c1c6df1..5fbfafc0c 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -9,6 +9,7 @@ from pydantic import BaseModel, Field from starlette.background import BackgroundTask import app as main_app +import schemas import schemas_ee from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -154,29 +155,36 @@ async def process_traces_queue(): def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): with pg_client.PostgresClient() as cur: - conditions = ["tenant_id=%(tenant_id)s", "created_at>=%(startDate)s", "created_at<=%(endDate)s"] + conditions = 
["traces.tenant_id=%(tenant_id)s", + "traces.created_at>=%(startDate)s", + "traces.created_at<=%(endDate)s"] + params = {"tenant_id": tenant_id, + "startDate": data.startDate, + "endDate": data.endDate, + "p_start": (data.page - 1) * data.limit, + "p_end": data.page * data.limit, + **data.dict()} if data.user_id is not None: conditions.append("user_id=%(user_id)s") if data.action is not None: conditions.append("action=%(action)s") + if data.query is not None and len(data.query) > 0: + conditions.append("users.name ILIKE %(query)s") + params["query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) cur.execute( cur.mogrify( f"""SELECT COUNT(*) AS count, - COALESCE(JSONB_AGG(full_traces) + COALESCE(JSONB_AGG(full_traces ORDER BY rn) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY created_at) AS rn - FROM traces + FROM (SELECT traces.*,users.email,users.name AS username, + ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn + FROM traces LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY created_at) AS full_traces;""", - {"tenant_id": tenant_id, - "startDate": data.startDate, - "endDate": data.endDate, - "p_start": (data.page - 1) * data.limit, - "p_end": data.page * data.limit, - **data.dict()}) + ORDER BY traces.created_at {data.order}) AS full_traces;""", params) ) - rows = cur.fetchall() - return helper.list_to_camel_case(rows) + rows = cur.fetchone() + return helper.dict_to_camel_case(rows) def get_available_actions(tenant_id): diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 50eb3d03f..9d1440b44 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -1,4 +1,4 @@ -from typing import Optional, List +from typing import Optional, List, Literal from pydantic import BaseModel, Field @@ -29,7 +29,9 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): startDate: int = 
Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) + query: Optional[str] = Field(default=None) action: Optional[str] = Field(default=None) + order: Literal["asc", "desc"] = Field(default="desc") class Config: alias_generator = schemas.attribute_to_camel_case diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index b28f28b62..46fd953bf 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -12,4 +12,5 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); +CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a59e25e54..c6d05dd34 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -257,6 +257,7 @@ $$ internal_id text NULL DEFAULT NULL ); CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; + CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); CREATE TABLE IF NOT EXISTS basic_authentication From 2fba643b7cb259fcb4b15087bf7b55cac44b8b50 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 17:43:55 +0200 Subject: [PATCH 140/221] feat(api): changed search user trails by username --- ee/api/chalicelib/core/traces.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 5fbfafc0c..35339a133 100644 --- a/ee/api/chalicelib/core/traces.py +++ 
b/ee/api/chalicelib/core/traces.py @@ -170,8 +170,9 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): conditions.append("action=%(action)s") if data.query is not None and len(data.query) > 0: conditions.append("users.name ILIKE %(query)s") + conditions.append("users.tenant_id = %(tenant_id)s") params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator._contains) cur.execute( cur.mogrify( f"""SELECT COUNT(*) AS count, From f7002ab2a0bd2f91e201451acb157d64256f9cb9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 18:30:59 +0200 Subject: [PATCH 141/221] feat(api): vault support --- ee/api/.gitignore | 1 - .../core/sessions_favorite_viewed.py | 74 +++++++++++++++++++ ee/api/chalicelib/utils/s3_extra.py | 30 ++++++++ 3 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 ee/api/chalicelib/core/sessions_favorite_viewed.py create mode 100644 ee/api/chalicelib/utils/s3_extra.py diff --git a/ee/api/.gitignore b/ee/api/.gitignore index c5a8d9ce4..fb839d5e6 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -207,7 +207,6 @@ Pipfile /chalicelib/core/mobile.py /chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py -/chalicelib/core/sessions_favorite_viewed.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py /chalicelib/core/significance.py diff --git a/ee/api/chalicelib/core/sessions_favorite_viewed.py b/ee/api/chalicelib/core/sessions_favorite_viewed.py new file mode 100644 index 000000000..bef7787d1 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_favorite_viewed.py @@ -0,0 +1,74 @@ +from chalicelib.core import sessions +from chalicelib.utils import pg_client, s3_extra +from decouple import config + + +def add_favorite_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + INSERT INTO public.user_favorite_sessions + (user_id, 
session_id) + VALUES + (%(userId)s,%(sessionId)s);""", + {"userId": user_id, "sessionId": session_id}) + ) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, + include_fav_viewed=True) + + +def remove_favorite_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + DELETE FROM public.user_favorite_sessions + WHERE + user_id = %(userId)s + AND session_id = %(sessionId)s;""", + {"userId": user_id, "sessionId": session_id}) + ) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, + include_fav_viewed=True) + + +def add_viewed_session(project_id, user_id, session_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + INSERT INTO public.user_viewed_sessions + (user_id, session_id) + VALUES + (%(userId)s,%(sessionId)s) + ON CONFLICT DO NOTHING;""", + {"userId": user_id, "sessionId": session_id}) + ) + + +def favorite_session(project_id, user_id, session_id): + if favorite_session_exists(user_id=user_id, session_id=session_id): + s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_D_VALUE', default='default')) + s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_D_VALUE', default='default')) + return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + s3_extra.tag_file(session_id=str(session_id), tag_value=config('RETENTION_L_VALUE', default='vault')) + s3_extra.tag_file(session_id=str(session_id) + "e", tag_value=config('RETENTION_L_VALUE', default='vault')) + return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + + +def view_session(project_id, user_id, session_id): + return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id) + + +def favorite_session_exists(user_id, session_id): + with 
pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + """SELECT + session_id + FROM public.user_favorite_sessions + WHERE + user_id = %(userId)s + AND session_id = %(sessionId)s""", + {"userId": user_id, "sessionId": session_id}) + ) + r = cur.fetchone() + return r is not None diff --git a/ee/api/chalicelib/utils/s3_extra.py b/ee/api/chalicelib/utils/s3_extra.py new file mode 100644 index 000000000..bd74d8277 --- /dev/null +++ b/ee/api/chalicelib/utils/s3_extra.py @@ -0,0 +1,30 @@ +from chalicelib.utils.s3 import client +from decouple import config + +def tag_file( session_id, tag_key='retention', tag_value='vault'): + return client.put_object_tagging( + Bucket=config("sessions_bucket"), + Key=session_id, + # VersionId='string', + # ContentMD5='string', + # ChecksumAlgorithm='CRC32'|'CRC32C'|'SHA1'|'SHA256', + Tagging={ + 'TagSet': [ + { + 'Key': tag_key, + 'Value': tag_value + }, + ] + }, + # ExpectedBucketOwner='string', + # RequestPayer='requester' + ) + + # generate_presigned_url( + # 'put_object', + # Params={ + # 'Bucket': bucket, + # 'Key': key + # }, + # ExpiresIn=expires_in + # ) From 23584b8be86e4bda2f074cd74bc09f1ae52351fe Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 6 May 2022 18:36:46 +0200 Subject: [PATCH 142/221] feat(alerts): changed Dockerfile.alerts --- api/Dockerfile.alerts | 15 +++++++++------ ee/api/Dockerfile.alerts | 14 +++++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index 76e8c262a..c7e8c7a37 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -1,13 +1,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -WORKDIR /work -COPY . . 
-RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh -ENV pg_minconn 2 ENV APP_NAME alerts - +ENV pg_minconn 2 # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 @@ -15,5 +10,13 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt + +WORKDIR /work +COPY . . +RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh + ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh \ No newline at end of file diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index 6aec0f98b..2864848e9 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -2,12 +2,8 @@ FROM python:3.9.10-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* -WORKDIR /work -COPY . . -RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh -ENV pg_minconn 2 ENV APP_NAME alerts +ENV pg_minconn 2 # Add Tini # Startup daemon @@ -16,5 +12,13 @@ ARG envarg ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini + +COPY requirements.txt /work_tmp/requirements.txt +RUN pip install -r /work_tmp/requirements.txt + +WORKDIR /work +COPY . . 
+RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh + ENTRYPOINT ["/tini", "--"] CMD ./entrypoint.sh \ No newline at end of file From 6bf5d1d65bc228bd740717332454302c88a1ea45 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 9 May 2022 15:30:28 +0200 Subject: [PATCH 143/221] feat(api): user trail limit changed --- ee/api/schemas_ee.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 9d1440b44..794dfdd64 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -26,6 +26,7 @@ class EditMemberSchema(schemas.EditMemberSchema): class TrailSearchPayloadSchema(schemas._PaginatedSchema): + limit: int = Field(default=200, gt=0) startDate: int = Field(default=TimeUTC.now(-7)) endDate: int = Field(default=TimeUTC.now(1)) user_id: Optional[int] = Field(default=None) From 2ed54261b6f9fbacc9fb3a9b2a863865f89e50a2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 10 May 2022 17:13:19 +0200 Subject: [PATCH 144/221] feat(api): fixed sourcemaps reader endpoint --- api/.env.default | 2 +- ee/api/.env.default | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/.env.default b/api/.env.default index 7dd248bec..30ff0b02d 100644 --- a/api/.env.default +++ b/api/.env.default @@ -44,6 +44,6 @@ sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/ +sourcemaps_reader=http://127.0.0.1:9000/sourcemaps stage=default-foss version_number=1.4.0 \ No newline at end of file diff --git a/ee/api/.env.default b/ee/api/.env.default index 094579f1b..8215908b2 100644 --- a/ee/api/.env.default +++ b/ee/api/.env.default @@ -53,6 +53,6 @@ sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/ +sourcemaps_reader=http://127.0.0.1:9000/sourcemaps stage=default-ee version_number=1.0.0 From 
6c0aca2f8c0c7e84e71281569d3c9ede2499556a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 12 May 2022 16:24:58 +0200 Subject: [PATCH 145/221] feat(DB): changed partition expression --- ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql | 2 +- ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql | 2 +- .../helm/db/init_dbs/clickhouse/create/sessions_metadata.sql | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql index 7781d2328..b9322a403 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/clicks.sql @@ -16,6 +16,6 @@ CREATE TABLE IF NOT EXISTS clicks label String, hesitation_time Nullable(UInt32) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql index eed67c990..fb4b2c881 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql @@ -17,6 +17,6 @@ CREATE TABLE IF NOT EXISTS customs payload Nullable(String), level Enum8('info'=0, 'error'=1) DEFAULT 'info' ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL 
datetime + INTERVAL 1 MONTH; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql index 4560f6500..98052071a 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/errors.sql @@ -18,6 +18,6 @@ CREATE TABLE IF NOT EXISTS errors message String, error_id String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql index 523d2d468..83b475d0f 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/inputs.sql @@ -15,6 +15,6 @@ CREATE TABLE IF NOT EXISTS inputs datetime DateTime, label String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql index 9770fb380..90a90a104 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/longtasks.sql @@ -20,7 +20,7 @@ CREATE TABLE IF NOT EXISTS longtasks container_name String, container_src String ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql index 71d9503cf..3902abd33 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/pages.sql @@ -35,6 +35,6 @@ CREATE TABLE IF NOT 
EXISTS pages dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), load_event_time Nullable(UInt16) MATERIALIZED if (greaterOrEquals(load_event_end, load_event_start), minus(load_event_end, load_event_start), Null) ) ENGINE = MergeTree -PARTITION BY toDate(datetime) +PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql index fa64967f4..650895662 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/performance.sql @@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS performance avg_used_js_heap_size UInt64, max_used_js_heap_size UInt64 ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql index cc2c7cd6d..bfd4f0ea1 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/resources.sql @@ -27,6 +27,6 @@ CREATE TABLE IF NOT EXISTS resources method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), status Nullable(UInt16) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql index 712cbd6d4..59df20242 100644 --- 
a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions.sql @@ -20,6 +20,6 @@ CREATE TABLE IF NOT EXISTS sessions utm_medium Nullable(String), utm_campaign Nullable(String) ) ENGINE = ReplacingMergeTree(duration) - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 1 MONTH; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index f6b77930e..2884b4515 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS sessions_metadata metadata_9 Nullable(String), metadata_10 Nullable(String) ) ENGINE = MergeTree - PARTITION BY toDate(datetime) + PARTITION BY toStartOfWeek(datetime) ORDER BY (project_id, datetime) TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file From c12cea6f6bbfe17eaf4864680b367a64fab297f0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 13 May 2022 15:49:17 +0200 Subject: [PATCH 146/221] feat(api): fixed CH client format --- ee/api/chalicelib/utils/ch_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index aa45699f7..a51230a19 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -26,7 +26,7 @@ class ClickHouseClient: return self.__client def format(self, query, params): - return self.__client.substitute_params(query, params) + return self.__client.substitute_params(query, params, self.__client.connection.context) def __exit__(self, *args): pass From 9c9452c530e409df420649fbbef01bc6d34fd2cf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 13 May 2022 19:15:31 +0200 Subject: [PATCH 147/221] 
feat(api): upgraded python base image feat(alerts): upgraded python base image --- api/Dockerfile | 2 +- api/Dockerfile.alerts | 2 +- api/Dockerfile.bundle | 2 +- ee/api/Dockerfile | 2 +- ee/api/Dockerfile.alerts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index cc8f36ece..4465b0432 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME chalice diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index c7e8c7a37..7d8dd8200 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME alerts diff --git a/api/Dockerfile.bundle b/api/Dockerfile.bundle index e5ccd23f6..2f58635f2 100644 --- a/api/Dockerfile.bundle +++ b/api/Dockerfile.bundle @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" WORKDIR /work COPY . . 
diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index c99e576e4..b5dffb40d 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME chalice diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index 2864848e9..ae8d308c8 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -1,4 +1,4 @@ -FROM python:3.9.10-slim +FROM python:3.9.12-slim LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* From 20f7c0fb70c7dc76235affa7b5c410a4d2653d5b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 16 May 2022 18:24:16 +0200 Subject: [PATCH 148/221] feat(DB): changed metrics category from Overview to Monitoring Essentials --- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 113 ++++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 40 +++---- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 112 +++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 40 +++---- 4 files changed, 265 insertions(+), 40 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 46fd953bf..d8624d06d 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -13,4 +13,117 @@ ALTER TABLE IF EXISTS dashboards CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'Monitoring Essentials', '{ + "col": 
1, + "row": 1, + "position": 0 +}', true, true, true, 'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. of Visited Pages', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'Monitoring Essentials', '{ + "col": 
1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index c6d05dd34..95f247af1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1269,102 +1269,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, 
is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'overview', '{ +VALUES ('Captured sessions', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'overview', '{ + ('Request Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'overview', '{ + ('Page Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'overview', '{ + ('Image Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'overview', '{ + ('DOM Content Load Start', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'overview', '{ + ('First Meaningful paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'overview', '{ + ('No. 
of Visited Pages', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'overview', '{ + ('Session Duration', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'overview', '{ + ('DOM Build Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'overview', '{ + ('Pages Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'overview', '{ + ('Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'overview', '{ + ('First Paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'overview', '{ + ('DOM Content Loaded', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'overview', '{ + ('Time Till First byte', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'overview', '{ + ('Time To Interactive', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'overview', '{ + ('Captured requests', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'overview', '{ + ('Time To Render', 'Monitoring Essentials', 
'{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'overview', '{ + ('Memory Consumption', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'overview', '{ + ('CPU Load', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'overview', '{ + ('Frame rate', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index c61efae19..6a4e151e9 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -9,4 +9,116 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 +}', true, true, true, 'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 
'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. of Visited Pages', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 
'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'Monitoring Essentials', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a4b41fefe..c3ee2fdb1 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1060,102 +1060,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'overview', '{ +VALUES ('Captured sessions', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'overview', '{ + ('Request Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'overview', '{ + ('Page Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'overview', '{ + ('Image Load Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 
'predefined', 'overview'), - ('DOM Content Load Start', 'overview', '{ + ('DOM Content Load Start', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'overview', '{ + ('First Meaningful paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'overview', '{ + ('No. of Visited Pages', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'overview', '{ + ('Session Duration', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'overview', '{ + ('DOM Build Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'overview', '{ + ('Pages Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'overview', '{ + ('Response Time', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'overview', '{ + ('First Paint', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'overview', '{ + ('DOM Content Loaded', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'overview', '{ + ('Time Till First byte', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 
'predefined', 'overview'), - ('Time To Interactive', 'overview', '{ + ('Time To Interactive', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'overview', '{ + ('Captured requests', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'overview', '{ + ('Time To Render', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'overview', '{ + ('Memory Consumption', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'overview', '{ + ('CPU Load', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'overview', '{ + ('Frame rate', 'Monitoring Essentials', '{ "col": 1, "row": 1, "position": 0 From a3ba925cea5371fc7058b8340937769a3494cb9a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 19:08:08 +0200 Subject: [PATCH 149/221] feat(api): centralized 'order' feat(api): transform 'order' casing --- api/chalicelib/core/alerts_processor.py | 4 ++-- api/chalicelib/core/errors.py | 2 +- api/chalicelib/core/sessions.py | 6 +++--- api/schemas.py | 12 +++++++++++- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index 56fde11da..ece75bfe5 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -99,10 +99,10 @@ def Build(a): j_s = True if a["seriesId"] is not None: a["filter"]["sort"] = "session_id" - a["filter"]["order"] = "DESC" + a["filter"]["order"] = schemas.SortOrderType.desc a["filter"]["startDate"] = -1 
a["filter"]["endDate"] = TimeUTC.now() - full_args, query_part= sessions.search_query_parts( + full_args, query_part = sessions.search_query_parts( data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index a7f863e79..983d091f8 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -463,7 +463,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): sort = __get_sort_key('datetime') if data.sort is not None: sort = __get_sort_key(data.sort) - order = "DESC" + order = schemas.SortOrderType.desc if data.order is not None: order = data.order extra_join = "" diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index adc549d1e..e717f1d07 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -201,12 +201,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = "DESC" + data.order = schemas.SortOrderType.desc else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" + g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" else: sort = 'start_ts' @@ -230,7 +230,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e full_args) else: if data.order is None: - data.order = "DESC" + data.order = schemas.SortOrderType.desc sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/api/schemas.py 
b/api/schemas.py index 1d92f5fce..ae3720624 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -618,17 +618,27 @@ class _PaginatedSchema(BaseModel): page: int = Field(default=1, gt=0) +class SortOrderType(str, Enum): + asc = "ASC" + desc = "DESC" + + class SessionsSearchPayloadSchema(_PaginatedSchema): events: List[_SessionSearchEventSchema] = Field([]) filters: List[SessionSearchFilterSchema] = Field([]) startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: Literal["asc", "desc"] = Field(default="desc") + order: Literal[SortOrderType] = Field(default=SortOrderType.desc) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) bookmarked: bool = Field(default=False) + @root_validator(pre=True) + def transform_order(cls, values): + if values.get("order") is not None: + values["order"] = values["order"].upper() + class Config: alias_generator = attribute_to_camel_case From b2732eb9be50fa76ad597dfd8eb8da077401103c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 19:43:18 +0200 Subject: [PATCH 150/221] feat(api): changed SearchSession payload schema --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index ae3720624..54a7bf9d9 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -629,7 +629,7 @@ class SessionsSearchPayloadSchema(_PaginatedSchema): startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: Literal[SortOrderType] = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.desc) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) bookmarked: bool = Field(default=False) From 254202ba851fdc1d86bfd0762f47700fefad0a98 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 18 May 2022 20:02:09 +0200 Subject: 
[PATCH 151/221] feat(api): fixed changed SearchSession payload schema --- api/schemas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/schemas.py b/api/schemas.py index 54a7bf9d9..ff42fd7d3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -638,6 +638,7 @@ class SessionsSearchPayloadSchema(_PaginatedSchema): def transform_order(cls, values): if values.get("order") is not None: values["order"] = values["order"].upper() + return values class Config: alias_generator = attribute_to_camel_case From c2ea4fb4b6d403d5b94bc12b440bef45013802d6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 May 2022 11:20:25 +0200 Subject: [PATCH 152/221] feat(api): metrics changed web vitals description feat(db): changed metric's monitoring essentials category to web vitals --- api/chalicelib/core/dashboards.py | 5 ++- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/init_schema.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/1.6.1/1.6.1.sql | 40 +++++++++---------- .../db/init_dbs/postgresql/init_schema.sql | 40 +++++++++---------- 5 files changed, 83 insertions(+), 82 deletions(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index bce5d3ad0..25dbdada3 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -6,8 +6,9 @@ from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC +# category name should be lower cased CATEGORY_DESCRIPTION = { - 'overview': 'High-level metrics and web vitals.', + 'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.', 'custom': 'Previously created custom metrics by me and my team.', 'errors': 'Keep a closer eye on errors and track their type, origin and domain.', 'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU 
usage and more.', @@ -33,7 +34,7 @@ def get_templates(project_id, user_id): cur.execute(pg_query) rows = cur.fetchall() for r in rows: - r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "") + r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "") for w in r["widgets"]: w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index d8624d06d..325d419ba 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -16,102 +16,102 @@ CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring 
Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 
'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 95f247af1..ec29b1dfc 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1269,102 +1269,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 
1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web 
vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql index 6a4e151e9..4f1c7c28f 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql @@ -11,102 +11,102 @@ ALTER TABLE IF EXISTS dashboards INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) -VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, 
true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', 
true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index c3ee2fdb1..91a590688 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1060,102 +1060,102 @@ LANGUAGE plpgsql; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) 
-VALUES ('Captured sessions', 'Monitoring Essentials', '{ +VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_sessions', 'predefined', 'overview'), - ('Request Load Time', 'Monitoring Essentials', '{ + ('Request Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), - ('Page Load Time', 'Monitoring Essentials', '{ + ('Page Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), - ('Image Load Time', 'Monitoring Essentials', '{ + ('Image Load Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), - ('DOM Content Load Start', 'Monitoring Essentials', '{ + ('DOM Content Load Start', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), - ('First Meaningful paint', 'Monitoring Essentials', '{ + ('First Meaningful paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), - ('No. of Visited Pages', 'Monitoring Essentials', '{ + ('No. 
of Visited Pages', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), - ('Session Duration', 'Monitoring Essentials', '{ + ('Session Duration', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), - ('DOM Build Time', 'Monitoring Essentials', '{ + ('DOM Build Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), - ('Pages Response Time', 'Monitoring Essentials', '{ + ('Pages Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), - ('Response Time', 'Monitoring Essentials', '{ + ('Response Time', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_response_time', 'predefined', 'overview'), - ('First Paint', 'Monitoring Essentials', '{ + ('First Paint', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), - ('DOM Content Loaded', 'Monitoring Essentials', '{ + ('DOM Content Loaded', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), - ('Time Till First byte', 'Monitoring Essentials', '{ + ('Time Till First byte', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), - ('Time To Interactive', 'Monitoring Essentials', '{ + ('Time To Interactive', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), - ('Captured requests', 'Monitoring Essentials', '{ + ('Captured requests', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'count_requests', 'predefined', 'overview'), - ('Time To Render', 'Monitoring Essentials', '{ + ('Time To Render', 'web 
vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), - ('Memory Consumption', 'Monitoring Essentials', '{ + ('Memory Consumption', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), - ('CPU Load', 'Monitoring Essentials', '{ + ('CPU Load', 'web vitals', '{ "col": 1, "row": 1, "position": 0 }', true, true, true, 'avg_cpu', 'predefined', 'overview'), - ('Frame rate', 'Monitoring Essentials', '{ + ('Frame rate', 'web vitals', '{ "col": 1, "row": 1, "position": 0 From 5968b559344de23de52d9575709192b85170d733 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 31 May 2022 10:14:55 +0100 Subject: [PATCH 153/221] feat(api): refactored user-auth --- api/auth/auth_jwt.py | 18 ++++++++++-------- api/chalicelib/core/users.py | 14 +++++++------- ee/api/chalicelib/core/users.py | 14 +++++++------- 3 files changed, 24 insertions(+), 22 deletions(-) diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index 1ac8d5d79..4eff80789 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -19,10 +19,14 @@ class JWTAuth(HTTPBearer): if not credentials.scheme == "Bearer": raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + auth_exists = jwt_payload is not None \ + and users.auth_exists(user_id=jwt_payload.get("userId", -1), + tenant_id=jwt_payload.get("tenantId", -1), + jwt_iat=jwt_payload.get("iat", 100), + jwt_aud=jwt_payload.get("aud", "")) if jwt_payload is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ - or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): + or not auth_exists: print("JWTAuth: Token issue") if jwt_payload is not None: print(jwt_payload) @@ 
-34,21 +38,19 @@ class JWTAuth(HTTPBearer): print("JWTAuth: iat is None") if jwt_payload is not None and jwt_payload.get("aud") is None: print("JWTAuth: aud is None") - if jwt_payload is not None and \ - not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): + if jwt_payload is not None and not auth_exists: print("JWTAuth: not users.auth_exists") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") - user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"]) + user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) if user is None: print("JWTAuth: User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") jwt_payload["authorizer_identity"] = "jwt" print(jwt_payload) request.state.authorizer_identity = "jwt" - request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"], - user_id=jwt_payload["userId"], + request.state.currentContext = CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), + user_id=jwt_payload.get("userId", -1), email=user["email"]) return request.state.currentContext diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 40cc0f7db..3a4067f68 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -564,13 +564,13 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): {"userId": user_id}) ) r = cur.fetchone() - return r is not None \ - and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + return r is not None \ + and r.get("jwt_iat") is not None \ + and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 
1 \ + or (jwt_aud.startswith("plugin") \ + and (r["changed_at"] is None \ + or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) + ) def authenticate(email, password, for_change_password=False, for_plugin=False): diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index cf2a808e7..5d28dc395 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -613,13 +613,13 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): {"userId": user_id, "tenant_id": tenant_id}) ) r = cur.fetchone() - return r is not None \ - and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + return r is not None \ + and r.get("jwt_iat") is not None \ + and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ + or (jwt_aud.startswith("plugin") \ + and (r["changed_at"] is None \ + or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) + ) def change_jwt_iat(user_id): From 10f26ab45cf437e03e8b4d977a55a6741fc43755 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 31 May 2022 13:46:13 +0100 Subject: [PATCH 154/221] feat(api): clean script --- ee/api/clean.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 59f723c80..861d1d9f1 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -31,7 +31,6 @@ rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/sessions.py rm -rf ./chalicelib/core/sessions_assignments.py -rm -rf ./chalicelib/core/sessions_favorite_viewed.py rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py rm -rf ./chalicelib/core/significance.py From 81503030e4348059d07bebbec1574577670d569d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem 
Date: Wed, 1 Jun 2022 19:51:42 +0100 Subject: [PATCH 155/221] feat(db): EE CH new structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 236 +++++++++++++++--- 1 file changed, 195 insertions(+), 41 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index a8f90613d..385908163 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -1,7 +1,81 @@ ALTER TABLE sessions DROP COLUMN pages_count; -CREATE TABLE default.sessions_metadata_temp + +CREATE TABLE IF NOT EXISTS events_s +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + 
dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED 
divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, session_id); + +CREATE TABLE IF NOT EXISTS sessions_s ( session_id UInt64, project_id UInt32, @@ -16,8 +90,66 @@ CREATE TABLE default.sessions_metadata_temp user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 
'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + +-- CREATE TABLE IF NOT EXISTS sessions_meta +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- tracker_version String, +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os String, +-- user_os_version Nullable(String), +-- user_browser String, +-- user_browser_version Nullable(String), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 
'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 
'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- duration UInt32, +-- pages_count UInt16, +-- events_count UInt16, +-- errors_count UInt16, +-- utm_source Nullable(String), +-- utm_medium Nullable(String), +-- utm_campaign Nullable(String), +-- user_id Nullable(String), +-- metadata_1 Nullable(String), +-- metadata_2 Nullable(String), +-- metadata_3 Nullable(String), +-- metadata_4 Nullable(String), +-- metadata_5 Nullable(String), +-- metadata_6 Nullable(String), +-- metadata_7 Nullable(String), +-- metadata_8 Nullable(String), +-- metadata_9 Nullable(String), +-- metadata_10 Nullable(String), +-- _timestamp DateTime DEFAULT now() +-- ) ENGINE = ReplacingMergeTree(_timestamp) +-- PARTITION BY toYYYYMMDD(datetime) +-- ORDER BY (project_id, datetime, session_id) +-- TTL datetime + INTERVAL 1 MONTH +-- SETTINGS index_granularity = 512; + +CREATE TABLE IF NOT EXISTS metadata_s +( + session_id UInt64, + project_id UInt32, + datetime DateTime, user_id Nullable(String), - user_anonymous_id Nullable(String), metadata_1 Nullable(String), metadata_2 Nullable(String), metadata_3 Nullable(String), @@ -27,45 +159,67 @@ CREATE TABLE default.sessions_metadata_temp metadata_7 Nullable(String), metadata_8 Nullable(String), metadata_9 Nullable(String), - metadata_10 Nullable(String) -) ENGINE = MergeTree + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toDate(datetime) - ORDER BY (project_id, datetime) - TTL datetime + INTERVAL 1 MONTH; + ORDER BY (project_id, datetime, session_id); -INSERT INTO default.sessions_metadata_temp(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, - user_os_version, - user_browser, 
user_browser_version, user_device, user_device_type, - user_country, - datetime, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, - metadata_4, - metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) -SELECT session_id, - project_id, - tracker_version, - rev_id, - user_uuid, - user_os, - user_os_version, - user_browser, - user_browser_version, - user_device, - user_device_type, - user_country, - datetime, - user_id, - user_anonymous_id, - metadata_1, - metadata_2, - metadata_3, - metadata_4, - metadata_5, - metadata_6, - metadata_7, - metadata_8, - metadata_9, - metadata_10 -FROM default.sessions_metadata; +CREATE TABLE IF NOT EXISTS autocomplete +( + project_id UInt32 NOT NULL, + type LowCardinality(String) NOT NULL, + value String NOT NULL, + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id, type) + TTL _timestamp + INTERVAL 1 MONTH; -DROP TABLE default.sessions_metadata; -RENAME TABLE default.sessions_metadata_temp TO default.sessions_metadata; \ No newline at end of file +CREATE MATERIALIZED VIEW sessions_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT * +FROM massive_split.sessions_s +WHERE datetime >= now() - INTERVAL 1 DAY + AND isNotNull(duration) + AND duration > 0; + +CREATE MATERIALIZED VIEW events_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT *, now() AS _timestamp +FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 1 DAY; + +CREATE MATERIALIZED VIEW sessions_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT * +FROM 
massive_split.sessions_s +WHERE datetime >= now() - INTERVAL 7 DAY + AND isNotNull(duration) + AND duration > 0; + +CREATE MATERIALIZED VIEW events_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT *, now() AS _timestamp +FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file From e92f14dc17f3558b5582b0132aa4de758016100f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Jun 2022 12:37:52 +0100 Subject: [PATCH 156/221] feat(db): EE CH new structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 385908163..0339fd4b8 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -222,4 +222,26 @@ CREATE MATERIALIZED VIEW events_l7d_mv AS SELECT *, now() AS _timestamp FROM massive_split.events_s +WHERE datetime >= now() - INTERVAL 7 DAY; + +CREATE MATERIALIZED VIEW metadata_l24h_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 DAY + POPULATE +AS +SELECT * +FROM massive_split.metadata_s +WHERE datetime >= now() - INTERVAL 1 DAY; + +CREATE MATERIALIZED VIEW metadata_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT * +FROM massive_split.metadata_s WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file From d86ca3c7ec19e7251d8d85f009841bdb3b112aae Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Jun 2022 16:56:37 +0100 Subject: [PATCH 157/221] feat(db): EE CH new 
structure --- .../db/init_dbs/clickhouse/1.6.1/1.6.1.sql | 223 +- .../db/init_dbs/clickhouse/1.6.1/fill.sql | 2878 +++++++++++++++++ .../db/init_dbs/clickhouse/1.6.1/queries.sql | 983 ++++++ 3 files changed, 3978 insertions(+), 106 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql index 0339fd4b8..f6ba9d751 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/1.6.1.sql @@ -2,6 +2,25 @@ ALTER TABLE sessions DROP COLUMN pages_count; +CREATE TABLE projects_metadata +( + project_id UInt32, + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id) + SETTINGS index_granularity = 512; + CREATE TABLE IF NOT EXISTS events_s ( session_id UInt64, @@ -12,7 +31,7 @@ CREATE TABLE IF NOT EXISTS events_s hesitation_time Nullable(UInt32), name Nullable(String), payload Nullable(String), - level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), message Nullable(String), error_id Nullable(String), @@ -70,85 +89,35 @@ CREATE TABLE IF NOT EXISTS events_s compression_ratio Nullable(Float32) MATERIALIZED 
divide(decoded_body_size, encoded_body_size), success Nullable(UInt8), method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), - status Nullable(UInt16) + status Nullable(UInt16), + _timestamp DateTime DEFAULT now() ) ENGINE = MergeTree PARTITION BY toYYYYMM(datetime) - ORDER BY (project_id, datetime, event_type, session_id); + ORDER BY (project_id, datetime, event_type, session_id) + TTL datetime + INTERVAL 1 MONTH; -CREATE TABLE IF NOT EXISTS sessions_s +CREATE TABLE IF NOT EXISTS sessions ( - session_id UInt64, - project_id UInt32, - tracker_version String, - rev_id Nullable(String), - user_uuid UUID, - user_os String, - user_os_version Nullable(String), - user_browser String, - user_browser_version Nullable(String), + session_id UInt64, + project_id UInt32, + tracker_version LowCardinality(String), + rev_id LowCardinality(Nullable(String)), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), user_device Nullable(String), user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 
'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), - datetime DateTime, - duration UInt32, - pages_count UInt16, - events_count UInt16, - errors_count UInt16, + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, utm_source Nullable(String), utm_medium Nullable(String), utm_campaign Nullable(String), - _timestamp DateTime 
DEFAULT now() -) ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMMDD(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 MONTH - SETTINGS index_granularity = 512; - --- CREATE TABLE IF NOT EXISTS sessions_meta --- ( --- session_id UInt64, --- project_id UInt32, --- tracker_version String, --- rev_id Nullable(String), --- user_uuid UUID, --- user_os String, --- user_os_version Nullable(String), --- user_browser String, --- user_browser_version Nullable(String), --- user_device Nullable(String), --- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), --- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 
'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), --- datetime DateTime, --- duration UInt32, --- pages_count UInt16, --- events_count UInt16, --- errors_count UInt16, --- utm_source Nullable(String), --- utm_medium Nullable(String), --- utm_campaign Nullable(String), --- user_id Nullable(String), --- metadata_1 Nullable(String), --- metadata_2 Nullable(String), --- metadata_3 Nullable(String), --- metadata_4 Nullable(String), --- metadata_5 Nullable(String), --- metadata_6 Nullable(String), --- metadata_7 Nullable(String), --- metadata_8 Nullable(String), --- metadata_9 Nullable(String), --- metadata_10 Nullable(String), --- _timestamp DateTime DEFAULT now() --- ) ENGINE = ReplacingMergeTree(_timestamp) --- PARTITION BY toYYYYMMDD(datetime) --- ORDER BY (project_id, datetime, session_id) --- TTL datetime + INTERVAL 1 MONTH --- SETTINGS index_granularity = 512; - -CREATE TABLE IF NOT EXISTS metadata_s -( - session_id UInt64, - project_id UInt32, - datetime DateTime, user_id 
Nullable(String), metadata_1 Nullable(String), metadata_2 Nullable(String), @@ -160,10 +129,12 @@ CREATE TABLE IF NOT EXISTS metadata_s metadata_8 Nullable(String), metadata_9 Nullable(String), metadata_10 Nullable(String), - _timestamp DateTime DEFAULT now() + _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toDate(datetime) - ORDER BY (project_id, datetime, session_id); + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; CREATE TABLE IF NOT EXISTS autocomplete ( @@ -176,35 +147,13 @@ CREATE TABLE IF NOT EXISTS autocomplete ORDER BY (project_id, type) TTL _timestamp + INTERVAL 1 MONTH; -CREATE MATERIALIZED VIEW sessions_l24h_mv - ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMMDD(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY - POPULATE -AS -SELECT * -FROM massive_split.sessions_s -WHERE datetime >= now() - INTERVAL 1 DAY - AND isNotNull(duration) - AND duration > 0; - -CREATE MATERIALIZED VIEW events_l24h_mv - ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMM(datetime) - ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY - POPULATE -AS -SELECT *, now() AS _timestamp -FROM massive_split.events_s -WHERE datetime >= now() - INTERVAL 1 DAY; CREATE MATERIALIZED VIEW sessions_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 7 DAY + SETTINGS index_granularity = 512 POPULATE AS SELECT * @@ -220,28 +169,90 @@ CREATE MATERIALIZED VIEW events_l7d_mv TTL datetime + INTERVAL 7 DAY POPULATE AS -SELECT *, now() AS _timestamp +SELECT * FROM massive_split.events_s WHERE datetime >= now() - INTERVAL 7 DAY; -CREATE MATERIALIZED VIEW metadata_l24h_mv + +CREATE MATERIALIZED VIEW sessions_info_l1m_mv ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY 
toYYYYMMDD(datetime) + PARTITION BY toYYYYMM(datetime) ORDER BY (project_id, datetime, session_id) - TTL datetime + INTERVAL 1 DAY + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512 POPULATE AS -SELECT * -FROM massive_split.metadata_s -WHERE datetime >= now() - INTERVAL 1 DAY; +SELECT project_id, + session_id, + datetime, + now() AS _timestamp, + toJSONString(map('project_id', toString(project_id), + 'session_id', toString(session_id), + 'user_uuid', toString(user_uuid), + 'user_id', user_id, + 'user_os', user_os, + 'user_browser', user_browser, + 'user_device', user_device, + --'user_device_type', user_device_type, +--'user_country', user_country, + 'start_ts', toString(datetime), + 'duration', toString(duration), + 'events_count', toString(events_count), + 'pages_count', toString(pages_count), + 'errors_count', toString(errors_count), + -- 'user_anonymous_id', user_anonymous_id, +-- 'platform', platform, +-- 'issue_score', issue_score, +-- issue_types, +-- favorite, +-- viewed, + 'metadata', CAST((arrayFilter(x->isNotNull(x), + arrayMap( + x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[1]), + NULL), + [ + [projects_meta.metadata_1,sessions.metadata_1], + [projects_meta.metadata_2,sessions.metadata_2], + [projects_meta.metadata_3,sessions.metadata_3], + [projects_meta.metadata_4,sessions.metadata_4], + [projects_meta.metadata_5,sessions.metadata_5], + [projects_meta.metadata_6,sessions.metadata_6], + [projects_meta.metadata_7,sessions.metadata_7], + [projects_meta.metadata_8,sessions.metadata_8], + [projects_meta.metadata_9,sessions.metadata_9], + [projects_meta.metadata_10,sessions.metadata_10] + ])), + arrayFilter(x->isNotNull(x), + arrayMap( + x->if(isNotNull(x[1]) AND isNotNull(x[2]), toString(x[2]), + NULL), + [ + [projects_meta.metadata_1,sessions.metadata_1], + [projects_meta.metadata_2,sessions.metadata_2], + [projects_meta.metadata_3,sessions.metadata_3], + [projects_meta.metadata_4,sessions.metadata_4], + 
[projects_meta.metadata_5,sessions.metadata_5], + [projects_meta.metadata_6,sessions.metadata_6], + [projects_meta.metadata_7,sessions.metadata_7], + [projects_meta.metadata_8,sessions.metadata_8], + [projects_meta.metadata_9,sessions.metadata_9], + [projects_meta.metadata_10,sessions.metadata_10] + ]))), 'Map(String,String)') + )) AS info +FROM massive_split.sessions + INNER JOIN projects_metadata USING (project_id) +WHERE datetime >= now() - INTERVAL 1 MONTH + AND isNotNull(duration) + AND duration > 0; -CREATE MATERIALIZED VIEW metadata_l7d_mv +CREATE MATERIALIZED VIEW sessions_info_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, session_id) TTL datetime + INTERVAL 7 DAY + SETTINGS index_granularity = 512 POPULATE AS SELECT * -FROM massive_split.metadata_s -WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file +FROM sessions_info_l1m_mv +WHERE datetime >= now() - INTERVAL 7 DAY; diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql new file mode 100644 index 000000000..e22b73848 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/fill.sql @@ -0,0 +1,2878 @@ +-- CREATE TABLE IF NOT EXISTS single_t.events +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), +-- tracker_version LowCardinality(String), +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os LowCardinality(String), +-- user_os_version LowCardinality(Nullable(String)), +-- user_browser LowCardinality(String), +-- user_browser_version LowCardinality(Nullable(String)), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 
'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 
'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- label Nullable(String), +-- hesitation_time Nullable(UInt32), +-- name Nullable(String), +-- payload Nullable(String), +-- level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), +-- source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), +-- message Nullable(String), +-- error_id Nullable(String), +-- duration Nullable(UInt16), +-- context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), +-- container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), +-- container_id Nullable(String), +-- container_name Nullable(String), +-- container_src Nullable(String), +-- url Nullable(String), +-- url_host Nullable(String) MATERIALIZED lower(domain(url)), +-- url_path Nullable(String) MATERIALIZED lower(pathFull(url)), +-- request_start Nullable(UInt16), +-- response_start Nullable(UInt16), +-- response_end Nullable(UInt16), +-- dom_content_loaded_event_start Nullable(UInt16), +-- dom_content_loaded_event_end Nullable(UInt16), +-- load_event_start Nullable(UInt16), +-- load_event_end Nullable(UInt16), +-- first_paint Nullable(UInt16), +-- first_contentful_paint Nullable(UInt16), +-- speed_index Nullable(UInt16), +-- visually_complete Nullable(UInt16), +-- time_to_interactive Nullable(UInt16), +-- ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), +-- minus(response_start, 
request_start), Null), +-- ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), +-- minus(response_end, request_start), Null), +-- response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), +-- minus(response_end, response_start), Null), +-- dom_building_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_start, response_end), +-- minus(dom_content_loaded_event_start, response_end), Null), +-- dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), +-- minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), +-- load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), +-- minus(load_event_end, load_event_start), Null), +-- min_fps Nullable(UInt8), +-- avg_fps Nullable(UInt8), +-- max_fps Nullable(UInt8), +-- min_cpu Nullable(UInt8), +-- avg_cpu Nullable(UInt8), +-- max_cpu Nullable(UInt8), +-- min_total_js_heap_size Nullable(UInt64), +-- avg_total_js_heap_size Nullable(UInt64), +-- max_total_js_heap_size Nullable(UInt64), +-- min_used_js_heap_size Nullable(UInt64), +-- avg_used_js_heap_size Nullable(UInt64), +-- max_used_js_heap_size Nullable(UInt64), +-- type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), +-- header_size Nullable(UInt16), +-- encoded_body_size Nullable(UInt32), +-- decoded_body_size Nullable(UInt32), +-- compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), +-- success Nullable(UInt8), +-- method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), +-- status Nullable(UInt16) +-- ) ENGINE = MergeTree +-- PARTITION BY toDate(datetime) +-- ORDER BY (project_id, datetime); +-- -- TTL datetime + INTERVAL 1 MONTH; +-- DROP TABLE single_t.events; 
+-- +-- INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, label, hesitation_time, name, payload, level, source, message, +-- error_id, duration, context, container_type, container_id, container_name, container_src, +-- url, request_start, response_start, response_end, dom_content_loaded_event_start, +-- dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, +-- first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, +-- avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, +-- max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, +-- type, header_size, encoded_body_size, decoded_body_size, success, method, status) +-- SELECT session_id, +-- project_id, +-- event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time, +-- name, +-- payload, +-- level, +-- source, +-- message, +-- error_id, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive, +-- min_fps, +-- avg_fps, +-- max_fps, +-- min_cpu, +-- avg_cpu, +-- max_cpu, +-- min_total_js_heap_size, +-- avg_total_js_heap_size, +-- max_total_js_heap_size, +-- min_used_js_heap_size, +-- avg_used_js_heap_size, +-- max_used_js_heap_size, +-- type, +-- header_size, +-- encoded_body_size, +-- decoded_body_size, +-- 
success, +-- method, +-- status +-- FROM ( +-- SELECT session_id, +-- project_id, +-- 'CLICK' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM clicks +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'ERROR' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- message, +-- error_id, +-- null AS duration, +-- null AS context, +-- null AS 
container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM errors +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'INPUT' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null 
AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM inputs +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'LONGTASK' event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src, +-- null AS url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM longtasks +-- UNION ALL +-- 
SELECT session_id, +-- project_id, +-- 'PAGE' event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- null AS duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- null AS type, +-- null AS header_size, +-- null AS encoded_body_size, +-- null AS decoded_body_size, +-- null AS success, +-- null AS method, +-- null AS status +-- FROM pages +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'PERFORMANCE' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label label, +-- null AS label hesitation_time, +-- null AS label name, +-- null AS label payload, +-- null AS label level, +-- null AS label source, +-- null AS label message, +-- null AS label error_id, +-- null AS label duration, +-- null AS label context, +-- null AS label container_type, +-- null AS label 
container_id, +-- null AS label container_name, +-- null AS label container_src, +-- null AS labelurl, +-- null AS label request_start, +-- null AS label response_start, +-- null AS label response_end, +-- null AS label dom_content_loaded_event_start, +-- null AS label dom_content_loaded_event_end, +-- null AS label load_event_startnull, +-- null AS label load_event_end, +-- null AS label first_paint, +-- null AS label first_contentful_paint, +-- null AS label speed_index, +-- null AS label visually_complete, +-- null AS label time_to_interactive, +-- min_fps, +-- avg_fps, +-- max_fps, +-- min_cpu, +-- avg_cpu, +-- max_cpu, +-- min_total_js_heap_size, +-- avg_total_js_heap_size, +-- max_total_js_heap_size, +-- min_used_js_heap_size, +-- avg_used_js_heap_size, +-- max_used_js_heap_size, +-- null AS label type, +-- null AS label header_size, +-- null AS label encoded_body_size, +-- null AS label decoded_body_size, +-- null AS label success, +-- null AS label method, +-- null AS label status +-- FROM performance +-- UNION ALL +-- SELECT session_id, +-- project_id, +-- 'RESOURCE' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- null AS label, +-- null AS hesitation_time, +-- null AS name, +-- null AS payload, +-- null AS level, +-- null AS source, +-- null AS message, +-- null AS error_id, +-- duration, +-- null AS context, +-- null AS container_type, +-- null AS container_id, +-- null AS container_name, +-- null AS container_src, +-- url, +-- null AS request_start, +-- null AS response_start, +-- null AS response_end, +-- null AS dom_content_loaded_event_start, +-- null AS dom_content_loaded_event_end, +-- null AS load_event_start, +-- null AS load_event_end, +-- null AS first_paint, +-- null AS first_contentful_paint, +-- null AS speed_index, +-- null AS visually_complete, +-- null AS 
time_to_interactive, +-- null AS min_fps, +-- null AS avg_fps, +-- null AS max_fps, +-- null AS min_cpu, +-- null AS avg_cpu, +-- null AS max_cpu, +-- null AS min_total_js_heap_size, +-- null AS avg_total_js_heap_size, +-- null AS max_total_js_heap_size, +-- null AS min_used_js_heap_size, +-- null AS avg_used_js_heap_size, +-- null AS max_used_js_heap_size, +-- type, +-- header_size, +-- encoded_body_size, +-- decoded_body_size, +-- success, +-- method, +-- status +-- FROM resources); +-- +-- +-- INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, label, hesitation_time) +-- SELECT session_id, +-- project_id, +-- 'CLICK' AS event_type, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label, +-- hesitation_time +-- FROM clicks; +-- +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, +-- user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, +-- source, name, message, error_id) +-- +-- SELECT 'ERROR' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- source, +-- name, +-- message, +-- error_id +-- FROM errors; +-- +-- +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, label) +-- +-- SELECT 'INPUT' AS event_type, +-- session_id, +-- project_id, +-- 
tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- label +-- FROM inputs; +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, duration, context, container_type, container_id, container_name, +-- container_src) +-- SELECT 'LONGTASK' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- duration, +-- context, +-- container_type, +-- container_id, +-- container_name, +-- container_src +-- FROM longtasks; +-- +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, url, request_start, response_start, response_end, +-- dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, +-- load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, +-- time_to_interactive) +-- SELECT 'PAGE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- url, +-- request_start, +-- response_start, +-- response_end, +-- dom_content_loaded_event_start, +-- dom_content_loaded_event_end, +-- load_event_start, +-- load_event_end, +-- first_paint, +-- first_contentful_paint, +-- speed_index, +-- visually_complete, +-- time_to_interactive +-- FROM pages; +-- +-- INSERT INTO 
eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, +-- min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, +-- min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size) +-- SELECT 'PERFORMANCE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- min_fps, +-- avg_fps, +-- max_fps, +-- min_cpu, +-- avg_cpu, +-- max_cpu, +-- min_total_js_heap_size, +-- avg_total_js_heap_size, +-- max_total_js_heap_size, +-- min_used_js_heap_size, +-- avg_used_js_heap_size, +-- max_used_js_heap_size +-- FROM performance; +-- +-- INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, +-- user_os_version, user_browser, user_browser_version, user_device, user_device_type, +-- user_country, datetime, url, type, duration, header_size, encoded_body_size, +-- decoded_body_size, success, method, status) +-- SELECT 'RESOURCE' AS event_type, +-- session_id, +-- project_id, +-- tracker_version, +-- rev_id, +-- user_uuid, +-- user_os, +-- user_os_version, +-- user_browser, +-- user_browser_version, +-- user_device, +-- user_device_type, +-- user_country, +-- datetime, +-- url, +-- type, +-- duration, +-- header_size, +-- encoded_body_size, +-- decoded_body_size, +-- success, +-- method, +-- status +-- FROM resources; +-- +-- +-- SELECT table, formatReadableSize(size) as size, rows, days, formatReadableSize(avgDaySize) as avgDaySize +-- FROM ( +-- SELECT table, +-- sum(bytes) AS size, +-- sum(rows) AS rows, +-- min(min_date) AS min_date, +-- max(max_date) AS max_date, +-- (max_date - min_date) AS days, +-- size / 
(max_date - min_date) AS avgDaySize +-- FROM system.parts +-- WHERE active +-- GROUP BY table +-- ORDER BY rows DESC +-- ); +-- +-- SELECT database, +-- table, +-- formatReadableSize(sum(bytes)) as size, +-- min(min_date) as min_date, +-- max(max_date) as max_date +-- FROM system.parts +-- WHERE active +-- GROUP BY database, table; +-- +-- SELECT count(*) +-- FROM single_t.events; +-- -- 449 484 932 +-- -- 449 484 932 +-- +-- SELECT (SELECT count(*) FROM clicks) + (SELECT count(*) FROM inputs) + (SELECT count(*) FROM longtasks) + +-- (SELECT count(*) FROM errors) + (SELECT count(*) FROM pages) + (SELECT count(*) FROM resources) + +-- (SELECT count(*) FROM performance) AS totl; +-- +-- +-- +-- CREATE TABLE IF NOT EXISTS single_t.events3 +-- ( +-- session_id UInt64, +-- project_id UInt32, +-- event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), +-- tracker_version LowCardinality(String), +-- rev_id Nullable(String), +-- user_uuid UUID, +-- user_os LowCardinality(String), +-- user_os_version LowCardinality(Nullable(String)), +-- user_browser LowCardinality(String), +-- user_browser_version LowCardinality(Nullable(String)), +-- user_device Nullable(String), +-- user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), +-- user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 
'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), +-- datetime DateTime, +-- label Nullable(String), +-- hesitation_time Nullable(UInt32), +-- name Nullable(String), +-- payload Nullable(String), +-- level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), +-- 
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), +-- message Nullable(String), +-- error_id Nullable(String), +-- duration Nullable(UInt16), +-- context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), +-- container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), +-- container_id Nullable(String), +-- container_name Nullable(String), +-- container_src Nullable(String), +-- url Nullable(String), +-- url_host Nullable(String) MATERIALIZED lower(domain(url)), +-- url_path Nullable(String) MATERIALIZED lower(pathFull(url)), +-- request_start Nullable(UInt16), +-- response_start Nullable(UInt16), +-- response_end Nullable(UInt16), +-- dom_content_loaded_event_start Nullable(UInt16), +-- dom_content_loaded_event_end Nullable(UInt16), +-- load_event_start Nullable(UInt16), +-- load_event_end Nullable(UInt16), +-- first_paint Nullable(UInt16), +-- first_contentful_paint Nullable(UInt16), +-- speed_index Nullable(UInt16), +-- visually_complete Nullable(UInt16), +-- time_to_interactive Nullable(UInt16), +-- ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), +-- minus(response_start, request_start), Null), +-- ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), +-- minus(response_end, request_start), Null), +-- response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), +-- minus(response_end, response_start), Null), +-- dom_building_time Nullable(UInt16) MATERIALIZED if( +-- greaterOrEquals(dom_content_loaded_event_start, response_end), +-- minus(dom_content_loaded_event_start, response_end), Null), +-- dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED 
if( +-- greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), +-- minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), +-- load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), +-- minus(load_event_end, load_event_start), Null), +-- min_fps Nullable(UInt8), +-- avg_fps Nullable(UInt8), +-- max_fps Nullable(UInt8), +-- min_cpu Nullable(UInt8), +-- avg_cpu Nullable(UInt8), +-- max_cpu Nullable(UInt8), +-- min_total_js_heap_size Nullable(UInt64), +-- avg_total_js_heap_size Nullable(UInt64), +-- max_total_js_heap_size Nullable(UInt64), +-- min_used_js_heap_size Nullable(UInt64), +-- avg_used_js_heap_size Nullable(UInt64), +-- max_used_js_heap_size Nullable(UInt64), +-- type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), +-- header_size Nullable(UInt16), +-- encoded_body_size Nullable(UInt32), +-- decoded_body_size Nullable(UInt32), +-- compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), +-- success Nullable(UInt8), +-- method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), +-- status Nullable(UInt16) +-- ) ENGINE = MergeTree +-- PARTITION BY toDate(datetime) +-- ORDER BY (project_id, datetime,event_type); +-- +-- -- INSERT INTO eng_t.events42(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, label, hesitation_time, name, payload, level, source, message, error_id, duration, context, container_type, container_id, container_name, container_src, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, time_to_interactive, 
min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, decoded_body_size, success, method, status) +-- -- SELECT session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, label, hesitation_time, name, payload, level, source, message, error_id, duration, context, container_type, container_id, container_name, container_src, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, time_to_interactive, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, decoded_body_size, success, method, status FROM single_t.events; + +CREATE TABLE IF NOT EXISTS single_t.events3 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 
'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 
'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time 
Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(BOOLEAN), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime, event_type); + +INSERT INTO eng_t.events4(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time) +SELECT session_id, + project_id, + 'CLICK' AS 
event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time +FROM clicks +WHERE mod(session_id, 2) = 1; + + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, + user_browser, user_browser_version, user_device, user_device_type, user_country, datetime, + source, name, message, error_id) + +SELECT 'ERROR' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + source, + name, + message, + error_id +FROM errors +WHERE mod(session_id, 2) = 0; +TRUNCATE TABLE eng_t.events4; +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label) + +SELECT 'INPUT' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label +FROM inputs +WHERE mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src) +SELECT 'LONGTASK' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, 
+ container_src +FROM longtasks +WHERE mod(session_id, 2) = 0; + + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive) +SELECT 'PAGE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive +FROM pages +WHERE mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size) +SELECT 'PERFORMANCE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size +FROM performance +WHERE mod(session_id, 2) = 0; + 
+INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status) +SELECT 'RESOURCE' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status +FROM resources +WHERE type != 'fetch' + AND mod(session_id, 2) = 0; + +INSERT INTO eng_t.events4(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status) +SELECT 'REQUEST' AS event_type, + session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status +FROM resources +WHERE type = 'fetch' + AND mod(session_id, 2) = 0; + +CREATE TABLE IF NOT EXISTS eng_t.events4 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + 
user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 
'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + 
speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(BOOLEAN), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = 
Join(ALL, INNER, session_id); + +TRUNCATE TABLE eng_t.events4; + + +-- merge metadata with events +CREATE DATABASE full_meerge; + +CREATE TABLE IF NOT EXISTS massive.events6 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 
'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + 
container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size 
Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime); +-- TTL datetime + INTERVAL 1 MONTH; +INSERT INTO massive.events6(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10) +SELECT session_id + 6651141467121565 * 3 AS session_id, + project_id, + 'CLICK' AS event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.clicks + LEFT JOIN default.sessions_metadata USING 
(session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, + datetime, source, name, message, error_id, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) + +SELECT 'ERROR' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + source, + name, + message, + error_id, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.errors + LEFT JOIN default.sessions_metadata USING (session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) + +SELECT 'INPUT' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.inputs + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, 
rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'LONGTASK' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, + container_src, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.longtasks + LEFT JOIN default.sessions_metadata USING (session_id); + + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'PAGE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + 
dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.pages + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'PERFORMANCE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.performance + LEFT JOIN default.sessions_metadata USING (session_id); + +INSERT INTO massive.events6(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, 
header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'RESOURCE' AS event_type, + session_id + 6651141467121565 * 3 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM default.resources + LEFT JOIN default.sessions_metadata USING (session_id) +WHERE type != 'fetch'; + +INSERT INTO massive2.events7(event_type, session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, + metadata_6, metadata_7, metadata_8, metadata_9, metadata_10) +SELECT 'REQUEST' AS event_type, + session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + if(status IS NOT NULL, status = 1, null) AS status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + 
metadata_9, + metadata_10 +FROM default.resources + LEFT JOIN default.sessions_metadata USING (session_id) +WHERE type = 'fetch' + AND mod(project_id, 2) = 0; + +-- -- TO GENERATE RANDOM USER IDS +-- INSERT INTO sessions_metadata(session_id, user_id, datetime, project_id, user_device_type) +-- SELECT session_id, +-- arrayElement( +-- array('Ze2wc7lvYi', 'NYd7m0Ytg8', 'qgNpvEkXap', 'wvWqM4Ow2G', 'n5Y6DK7ZdP', 'uW4SEYjXxI', 't4EfJiNxk9', +-- 'qWQ8WuIRLS', 'fnRWCwkFyB', '8wf298MFWR', 'G3A3DL0Fdd', 'cQcZHNNiAJ', 'MKcW2adQ38', 'OBzk9EFxVe', +-- '8SBiqoFail', '3Wh9Ur0eOr', 'z6KuuxiPXX', '7j4HaReEsF', 'Ros0kDOVeV', 'PvHi3cBkgV', 'HLjUo6oBlJ', +-- '4Tmi34faA0', 'O9ZATbPjaB', '7ATvuWQCIH', 'kXW4LHnW5X', 'HIHc9TTyTc', 'i5p9jRe7I0', '7dRnUEFoZO', +-- 'u3PDLkI5uG', 'HTYjxmDJCG', '6hKHjcKniO', 'qmPNUWgDIx', 'RfoN9oeYZD', 'HHXpBaYm3k', 'VdpZDfnL9J', +-- 'Qfwa1dPrrF', 'cgdD2GfFVT', 'iRvT6l7qj3', 'QokprB2GMV', 'umqISqbncX', '7bvRdQ4al3', 'VGKZAUIRjy', +-- 'SNTEGLKbCD', 'zfUaVSD8Jn', 'De7zUojKNt', 'lXiotVRkil', 'bQaDX5kESw', 'tngESCaH6I', 'uucUZvTpPd', +-- 'BFJpni8D3I'), mod(session_id, 50)) AS user_id, +-- datetime, +-- project_id, +-- user_device_type +-- FROM sessions +-- WHERE project_id = 2460; + +INSERT INTO massive2.sessions2(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, user_os_version, + user_browser, user_browser_version, user_device, user_device_type, user_country, + datetime, + duration, events_count, errors_count, utm_source, utm_medium, utm_campaign) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign +FROM default.sessions; + + +CREATE DATABASE massive2; +CREATE TABLE IF NOT EXISTS massive2.events7 +( + session_id UInt64, + project_id UInt32, + event_type 
Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 
'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start 
Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + 
decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime, event_type); + + + +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 43 +FROM massive.events6 +WHERE event_type = 'REQUEST' + AND mod(project_id, 2) = 0; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, 
user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 42 +FROM massive.events6 +WHERE event_type = 'REQUEST' + AND mod(project_id, 2) = 1; + +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 41 +FROM 
massive.events6 +WHERE event_type = 'RESOURCE' + AND mod(project_id, 2) = 0; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, type, duration, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + type, + duration, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 40 +FROM massive.events6 +WHERE event_type = 'RESOURCE' + AND mod(project_id, 2) = 1; + +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + 
metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'CLICK'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, user_id, + user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, + metadata_7, metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'PERFORMANCE'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, + metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, + riteration) +SELECT session_id, + project_id, + 
event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'PAGE'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, user_id, user_anonymous_id, metadata_1, metadata_2, + metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, + metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'INPUT'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, source, name, message, error_id, user_id, user_anonymous_id, + metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, + metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id + 6651141467121565 * 4 AS 
session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + source, + name, + message, + error_id, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'ERROR'; +INSERT INTO massive2.events7(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, context, container_type, container_id, container_name, + container_src, user_id, user_anonymous_id, metadata_1, metadata_2, metadata_3, metadata_4, + metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + context, + container_type, + container_id, + container_name, + container_src, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 4 +FROM massive.events6 +WHERE event_type = 'LONGTASK'; + +ALTER TABLE massive2.events7 + ADD COLUMN riteration UInt8 DEFAULT 0; +ALTER TABLE massive2.sessions2 + ADD COLUMN riteration UInt8 DEFAULT 0; + + + +INSERT INTO massive2.sessions2(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, user_os_version, + user_browser, user_browser_version, user_device, user_device_type, user_country, + datetime, + duration, events_count, errors_count, utm_source, utm_medium, utm_campaign, riteration) +SELECT 
session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + 4 +FROM massive.sessions; + +SELECT COUNT(*) +FROM massive2.events7; + +CREATE DATABASE massive30; +CREATE TABLE IF NOT EXISTS massive30.events30 +( + session_id UInt64, + project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + tracker_version LowCardinality(String), + rev_id Nullable(String), + user_uuid UUID, + user_os LowCardinality(String), + user_os_version LowCardinality(Nullable(String)), + user_browser LowCardinality(String), + user_browser_version LowCardinality(Nullable(String)), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 
'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 
'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + 
max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16), + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type); + +ALTER TABLE massive30.events30 + ADD COLUMN riteration UInt8; + +INSERT INTO massive30.events30(session_id, project_id, event_type, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, label, hesitation_time, name, payload, level, source, message, + error_id, duration, context, container_type, container_id, container_name, container_src, + url, request_start, response_start, response_end, dom_content_loaded_event_start, + dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, + first_contentful_paint, speed_index, 
visually_complete, time_to_interactive, min_fps, + avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, + avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, + avg_used_js_heap_size, max_used_js_heap_size, type, header_size, encoded_body_size, + decoded_body_size, success, method, status, user_id, user_anonymous_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, + metadata_9, metadata_10, riteration) +SELECT session_id, + project_id, + event_type, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + label, + hesitation_time, + name, + payload, + level, + source, + message, + error_id, + duration, + context, + container_type, + container_id, + container_name, + container_src, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + type, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status, + user_id, + user_anonymous_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10, + 9 AS riteration +FROM massive2.events7 +WHERE mod(project_id, 10) = 9; +-- ORDER BY datetime LIMIT 500000; + +DROP TABLE massive30.events30; + + +DESCRIBE TABLE massive2.events7; + + +-- ----------------------------------------------------- +CREATE DATABASE massive_split; +CREATE TABLE IF NOT EXISTS massive_split.events_s +( + session_id UInt64, + 
project_id UInt32, + event_type Enum8('CLICK'=0, 'INPUT'=1, 'PAGE'=2,'RESOURCE'=3,'REQUEST'=4,'PERFORMANCE'=5,'LONGTASK'=6,'ERROR'=7,'CUSTOM'=8), + datetime DateTime, + label Nullable(String), + hesitation_time Nullable(UInt32), + name Nullable(String), + payload Nullable(String), + level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null), + source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)), + message Nullable(String), + error_id Nullable(String), + duration Nullable(UInt16), + context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), + container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), + container_id Nullable(String), + container_name Nullable(String), + container_src Nullable(String), + url Nullable(String), + url_host Nullable(String) MATERIALIZED lower(domain(url)), + url_path Nullable(String) MATERIALIZED lower(pathFull(url)), + request_start Nullable(UInt16), + response_start Nullable(UInt16), + response_end Nullable(UInt16), + dom_content_loaded_event_start Nullable(UInt16), + dom_content_loaded_event_end Nullable(UInt16), + load_event_start Nullable(UInt16), + load_event_end Nullable(UInt16), + first_paint Nullable(UInt16), + first_contentful_paint Nullable(UInt16), + speed_index Nullable(UInt16), + visually_complete Nullable(UInt16), + time_to_interactive Nullable(UInt16), + ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start), + minus(response_start, request_start), Null), + ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start), + minus(response_end, request_start), Null), + response_time Nullable(UInt16) MATERIALIZED 
if(greaterOrEquals(response_end, response_start), + minus(response_end, response_start), Null), + dom_building_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_start, response_end), + minus(dom_content_loaded_event_start, response_end), Null), + dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if( + greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start), + minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null), + load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start), + minus(load_event_end, load_event_start), Null), + min_fps Nullable(UInt8), + avg_fps Nullable(UInt8), + max_fps Nullable(UInt8), + min_cpu Nullable(UInt8), + avg_cpu Nullable(UInt8), + max_cpu Nullable(UInt8), + min_total_js_heap_size Nullable(UInt64), + avg_total_js_heap_size Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + min_used_js_heap_size Nullable(UInt64), + avg_used_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), + type Nullable(Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4)), + header_size Nullable(UInt16), + encoded_body_size Nullable(UInt32), + decoded_body_size Nullable(UInt32), + compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size), + success Nullable(UInt8), + method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)), + status Nullable(UInt16) +) ENGINE = MergeTree + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, session_id); + +CREATE TABLE IF NOT EXISTS massive_split.sessions_s +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device 
Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 
'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + + +CREATE TABLE IF NOT EXISTS massive_split.metadata_s +( + session_id UInt64, + project_id UInt32, + datetime DateTime, + user_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime, session_id); + +INSERT INTO massive_split.sessions_s(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, user_device_type, + user_country, datetime, duration, pages_count, events_count, errors_count, + utm_source, utm_medium, utm_campaign, riteration) +SELECT session_id + 6651141467121565 * 4 AS session_id, + project_id, + tracker_version, + 
rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + mod(rand(), 100) AS pages_count, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + 4 AS riteration +FROM default.sessions; + +ALTER TABLE massive_split.sessions_s + ADD COLUMN riteration UInt8; + +INSERT INTO massive_split.metadata_s(session_id, project_id, datetime, user_id, metadata_1, metadata_2, metadata_3, + metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, + metadata_10) +SELECT session_id, + project_id, + datetime, + user_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM massive2.events7 AS s +LIMIT 1 BY session_id; + +INSERT INTO massive_split.events_s(session_id, project_id, event_type, datetime, label, hesitation_time, name, payload, + level, source, message, error_id, duration, context, container_type, container_id, + container_name, container_src, url, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, + load_event_end, first_paint, first_contentful_paint, speed_index, visually_complete, + time_to_interactive, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, + min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, type, + header_size, encoded_body_size, decoded_body_size, success, method, status) +SELECT session_id, + project_id, + event_type, + datetime, + label, + hesitation_time, + name, + payload, + level, + source, + message, + error_id, + duration, + context, + container_type, + container_id, + container_name, + container_src, + url, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + 
load_event_end, + first_paint, + first_contentful_paint, + speed_index, + visually_complete, + time_to_interactive, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + type, + header_size, + encoded_body_size, + decoded_body_size, + success, + method, + status +FROM massive2.events7; + +SELECT COUNT(*) +FROM massive_split.sessions_s; +SELECT COUNT(*) +FROM massive_split.metadata_s; +SELECT COUNT(*) +FROM massive_split.events_s; +SELECT COUNT(*) +FROM massive2.events7; +-- SELECT COUNT(*) FROM massive2.sessions2; + + +CREATE TABLE IF NOT EXISTS massive_split.sessions_meta +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 
'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + duration UInt32, + pages_count UInt16, + events_count UInt16, + errors_count UInt16, + utm_source Nullable(String), + utm_medium Nullable(String), + utm_campaign Nullable(String), + user_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 
Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, session_id) + TTL datetime + INTERVAL 1 MONTH + SETTINGS index_granularity = 512; + +INSERT INTO massive_split.sessions_meta(session_id, project_id, tracker_version, rev_id, user_uuid, user_os, + user_os_version, user_browser, user_browser_version, user_device, + user_device_type, user_country, datetime, duration, pages_count, events_count, + errors_count, utm_source, utm_medium, utm_campaign, user_id, metadata_1, + metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, + metadata_8, metadata_9, metadata_10) +SELECT session_id, + project_id, + tracker_version, + rev_id, + user_uuid, + user_os, + user_os_version, + user_browser, + user_browser_version, + user_device, + user_device_type, + user_country, + datetime, + duration, + pages_count, + events_count, + errors_count, + utm_source, + utm_medium, + utm_campaign, + user_id, + metadata_1, + metadata_2, + metadata_3, + metadata_4, + metadata_5, + metadata_6, + metadata_7, + metadata_8, + metadata_9, + metadata_10 +FROM massive_split.sessions_s AS s + LEFT JOIN massive_split.metadata_s AS m ON (s.project_id = m.project_id AND s.session_id = m.session_id); diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql new file mode 100644 index 000000000..556209c79 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.6.1/queries.sql @@ -0,0 +1,983 @@ +-- Q1 +SELECT session_id +-- FROM massive2.events7 +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 
'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + + +-- Q1.1 +SELECT session_id +FROM massive2.events7 +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.2 +SELECT session_id +FROM +-- massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.2.1 +SELECT session_id +FROM +-- massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) +events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) +WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND 
metadata_s.user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.3 +SELECT session_id +FROM +-- massive_split.events_s +-- events_l7d_mv +events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.4 +SELECT session_id +FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE 
'%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q1.5 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive2.events7 +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM massive2.events7 + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- 
Q2.2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING (session_id) + FROM events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.3 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' 
AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q2.5 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM 
massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.1 +SELECT session_id +FROM massive2.events7 +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.2 +SELECT session_id +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING (session_id) +FROM events_l24h_mv + INNER 
JOIN metadata_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.2.1 +SELECT session_id +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) +FROM events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) +WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST') + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' +-- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + AND metadata_s.user_id = 'uucUZvTpPd' +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.3 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime 
<= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.4 +SELECT session_id +FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q3.5 +SELECT session_id +-- FROM massive_split.events_s +-- FROM events_l7d_mv +FROM events_l24h_mv +WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- 
FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM massive2.events7 + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.2 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv +-- INNER JOIN metadata_l7d_mv USING 
(session_id) + FROM events_l24h_mv + INNER JOIN metadata_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND user_id = 'uucUZvTpPd') +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.2.1 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events_s +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events_s + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events_s.project_id = 2460 + AND events_s.datetime >= '2022-04-02 00:00:00' + AND events_s.datetime <= '2022-04-03 00:00:00' +-- AND events_s.datetime <= '2022-04-10 00:00:00' +-- AND events_s.datetime <= '2022-05-02 00:00:00' + AND (events_s.event_type = 'CLICK' OR events_s.event_type = 'REQUEST') + AND metadata_s.user_id = 'uucUZvTpPd' + AND metadata_s.project_id = 2460 + AND metadata_s.datetime >= '2022-04-02 00:00:00' + AND metadata_s.datetime <= '2022-04-03 00:00:00' + -- AND metadata_s.datetime <= '2022-04-10 00:00:00' +-- AND metadata_s.datetime <= '2022-05-02 00:00:00' + ) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.3 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 +-- FROM massive_split.events_s +-- FROM events_l7d_mv + FROM events_l24h_mv + INNER JOIN (SELECT DISTINCT session_id +-- FROM metadata_l7d_mv + FROM 
metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.4 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s USING (session_id) +-- INNER JOIN events_l7d_mv USING (session_id) + INNER JOIN events_l24h_mv USING (session_id) + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- Q4.5 +SELECT session_id +FROM (SELECT session_id, + datetime, + event_type = 'CLICK' AND label ILIKE '%invoice%' AS c1, + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%' AS c2 + FROM events_l24h_mv +-- FROM events_l7d_mv +-- FROM massive_split.events_s + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' 
+ AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND (event_type = 'CLICK' OR event_type = 'REQUEST') + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd')) +GROUP BY session_id +HAVING windowFunnel(99999)(datetime, c1, c2) = 2 +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM massive2.events7 AS events + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.1 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM massive2.events7 AS events + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.2 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events +-- 
INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.3 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- INNER JOIN massive_split.metadata_s USING (session_id) +-- FROM events_l7d_mv AS events +-- INNER JOIN metadata_l7d_mv AS metadata_s USING (session_id) + FROM events_l24h_mv AS events + INNER JOIN metadata_l24h_mv AS metadata_s USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.4 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 
00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.4-A +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.5 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, session_id + FROM (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- 
AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta + -- INNER JOIN massive_split.events_s AS events USING (session_id) +-- INNER JOIN events_l7d_mv AS events USING (session_id) + INNER JOIN events_l24h_mv AS events USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.6 +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + AND user_id = 'uucUZvTpPd') + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' 
AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QU1.6-A +SELECT user_id, COUNT(session_id) +FROM (SELECT user_id, + session_id +-- FROM massive_split.events_s AS events +-- FROM events_l7d_mv AS events + FROM events_l24h_mv AS events + INNER JOIN (SELECT DISTINCT session_id, + user_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' + -- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) AS meta USING (session_id) + WHERE events.project_id = 2460 + AND events.datetime >= '2022-04-02 00:00:00' + AND events.datetime <= '2022-04-03 00:00:00' +-- AND events.datetime <= '2022-04-10 00:00:00' +-- AND events.datetime <= '2022-05-02 00:00:00' + AND session_id IN (SELECT DISTINCT session_id +-- FROM massive_split.metadata_s +-- FROM metadata_l7d_mv + FROM metadata_l24h_mv + WHERE project_id = 2460 + AND datetime >= '2022-04-02 00:00:00' + AND datetime <= '2022-04-03 00:00:00' +-- AND datetime <= '2022-04-10 00:00:00' +-- AND datetime <= '2022-05-02 00:00:00' + ) + GROUP BY user_id, session_id + HAVING windowFunnel(99999)(datetime, event_type = 'CLICK' AND label ILIKE '%invoice%', + event_type = 'REQUEST' AND url ILIKE '%letsdeel.com/pay%') = 2 + ) AS filtred_sessions +GROUP BY user_id +LIMIT 10 +SETTINGS +max_threads = 4; + +-- QM4: +SELECT timestamp, + groupArray([toString(t.type), toString(t.count)]) AS types +FROM (SELECT toUnixTimestamp(toStartOfInterval(events7.datetime, INTERVAL 37565 second)) * 1000 AS timestamp, + events7.type, + COUNT(events7.session_id) AS count +-- FROM massive_split.events_s AS events7 +-- FROM events_l7d_mv AS events7 + FROM events_l24h_mv AS events7 + WHERE events7.project_id = toUInt32(2460) + AND toStartOfInterval(events7.datetime, INTERVAL 37565 second) >= '2022-04-02 00:00:00' + AND 
events7.datetime <= '2022-04-03 00:00:00' +-- AND events7.datetime <= '2022-04-10 00:00:00' +-- AND events7.datetime < '2022-05-02 00:00:00' + AND events7.event_type = 'RESOURCE' + GROUP BY timestamp, events7.type + ORDER BY timestamp) AS t +GROUP BY timestamp + SETTINGS + max_threads = 4; From 667fe3dd79f7050ea22c8f9f762241149ca629aa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 6 Jun 2022 19:33:26 +0200 Subject: [PATCH 158/221] feat(db): removed user's appearance feat(db): removed generated_password feat(api): merged account&client feat(api): cleaned account response feat(api): removed user's appearance feat(api): removed generated_password feat(api): limits endpoint feat(api): notifications/count endpoint --- api/chalicelib/core/license.py | 15 +---- api/chalicelib/core/notifications.py | 18 ++++++ api/chalicelib/core/signup.py | 4 +- api/chalicelib/core/users.py | 55 +++++----------- api/routers/core.py | 12 ++-- api/routers/core_dynamic.py | 42 ++++++------ api/schemas.py | 5 -- ee/api/chalicelib/core/license.py | 16 +---- ee/api/chalicelib/core/notifications.py | 21 ++++++ ee/api/chalicelib/core/signup.py | 4 +- ee/api/chalicelib/core/users.py | 56 ++++------------ ee/api/routers/core_dynamic.py | 43 ++++++------- .../{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} | 33 +++++++--- .../db/init_dbs/postgresql/init_schema.sql | 64 +------------------ .../{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} | 8 ++- .../db/init_dbs/postgresql/init_schema.sql | 64 +------------------ 16 files changed, 153 insertions(+), 307 deletions(-) rename ee/scripts/helm/db/init_dbs/postgresql/{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} (88%) rename scripts/helm/db/init_dbs/postgresql/{1.6.1/1.6.1.sql => 1.7.0/1.7.0.sql} (97%) diff --git a/api/chalicelib/core/license.py b/api/chalicelib/core/license.py index ab704778a..4a562ea7b 100644 --- a/api/chalicelib/core/license.py +++ b/api/chalicelib/core/license.py @@ -3,19 +3,10 @@ from chalicelib.utils import pg_client def 
get_status(tenant_id=None): with pg_client.PostgresClient() as cur: - cur.execute("SELECT * FROM public.tenants;") + # cur.execute("SELECT * FROM public.tenants;") + cur.execute("SELECT edition FROM public.tenants;") r = cur.fetchone() return { "hasActivePlan": True, - "current": { - "edition": r.get("edition", "").upper(), - "versionNumber": r.get("version_number", ""), - "license": "", - "expirationDate": -1 - }, - "count": { - "teamMember": r.get("t_users"), - "projects": r.get("t_projects"), - "capturedSessions": r.get("t_sessions") - } + "edition": r.get("edition", "").upper() } diff --git a/api/chalicelib/core/notifications.py b/api/chalicelib/core/notifications.py index 0d9b5be20..ce3c4d61a 100644 --- a/api/chalicelib/core/notifications.py +++ b/api/chalicelib/core/notifications.py @@ -25,6 +25,24 @@ def get_all(tenant_id, user_id): return rows +def get_all_count(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + SELECT COUNT(notifications.*) AS count + FROM public.notifications + LEFT JOIN (SELECT notification_id + FROM public.user_viewed_notifications + WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) + WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL + ORDER BY created_at DESC + LIMIT 100;""", + {"user_id": user_id}) + ) + row = cur.fetchone() + return row + + def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: return False diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index ab23eef68..4d320e0be 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -77,8 +77,8 @@ def create_step1(data: schemas.UserSignupSchema): RETURNING user_id,email,role,name ), au AS (INSERT - INTO 
public.basic_authentication (user_id, password, generated_password) - VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE) + INTO public.basic_authentication (user_id, password) + VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12))) ) INSERT INTO public.projects (name, active) VALUES (%(projectName)s, TRUE) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 3a4067f68..408fb03c1 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -21,10 +21,10 @@ def create_new_member(email, invitation_token, admin, name, owner=False): query = cur.mogrify(f"""\ WITH u AS (INSERT INTO public.users (email, role, name, data) VALUES (%(email)s, %(role)s, %(name)s, %(data)s) - RETURNING user_id,email,role,name,appearance + RETURNING user_id,email,role,name ), - au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) - VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) + au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at) + VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now())) RETURNING invitation_token ) SELECT u.user_id, @@ -32,7 +32,6 @@ def create_new_member(email, invitation_token, admin, name, owner=False): u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -61,7 +60,6 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False): email, role, name, - TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""", @@ -73,8 +71,7 @@ def 
restore_member(user_id, email, invitation_token, admin, name, owner=False): result = cur.fetchone() query = cur.mogrify("""\ UPDATE public.basic_authentication - SET generated_password = TRUE, - invitation_token = %(invitation_token)s, + SET invitation_token = %(invitation_token)s, invited_at = timezone('utc'::text, now()), change_pwd_expire_at = NULL, change_pwd_token = NULL @@ -132,11 +129,7 @@ def update(tenant_id, user_id, changes): else: sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s") else: - if key == "appearance": - sub_query_users.append(f"appearance = %(appearance)s::jsonb") - changes["appearance"] = json.dumps(changes[key]) - else: - sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s") + sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s") with pg_client.PostgresClient() as cur: if len(sub_query_users) > 0: @@ -151,11 +144,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""", {"user_id": user_id, **changes}) ) if len(sub_query_bauth) > 0: @@ -170,11 +161,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""", {"user_id": user_id, **changes}) ) @@ -244,15 +233,13 @@ def get(user_id, tenant_id): 
cur.execute( cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, email, role, - name, - basic_authentication.generated_password, + name, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - appearance, api_key FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE @@ -262,7 +249,7 @@ def get(user_id, tenant_id): {"userId": user_id}) ) r = cur.fetchone() - return helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + return helper.dict_to_camel_case(r) def generate_new_api_key(user_id): @@ -282,7 +269,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance"] + ALLOW_EDIT = ["name", "email", "admin"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -315,11 +302,6 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): return {"data": user} -def edit_appearance(user_id, tenant_id, changes): - updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) - return {"data": updated_user} - - def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -329,8 +311,7 @@ def get_by_email_only(email): 1 AS tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -353,8 +334,7 @@ def get_by_email_reset(email, reset_token): 1 AS tenant_id, users.email, users.role, - users.name, - 
basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -377,8 +357,7 @@ def get_members(tenant_id): users.email, users.role, users.name, - users.created_at, - basic_authentication.generated_password, + users.created_at, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -581,11 +560,9 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): 1 AS tenant_id, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) @@ -599,7 +576,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if r is not None: if for_change_password: return True - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) query = cur.mogrify( f"""UPDATE public.users SET jwt_iat = timezone('utc'::text, now()) diff --git a/api/routers/core.py b/api/routers/core.py index 813577b88..3008e94b7 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -966,6 +966,11 @@ def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.get_all(tenant_id=context.tenant_id, 
user_id=context.user_id)} +@app.get('/notifications/count', tags=['notifications']) +def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)} + + @app.get('/notifications/{notificationId}/view', tags=['notifications']) def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)} @@ -1075,13 +1080,6 @@ def edit_account(data: schemas.EditUserSchema = Body(...), editor_id=context.user_id) -@app.post('/account/appearance', tags=["account"]) -@app.put('/account/appearance', tags=["account"]) -def edit_account_appearance(data: schemas.EditUserAppearanceSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return users.edit_appearance(tenant_id=context.tenant_id, user_id=context.user_id, changes=data.dict()) - - @app.post('/account/password', tags=["account"]) @app.put('/account/password', tags=["account"]) def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index e7e87e76c..7791c5677 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -67,17 +67,17 @@ def login(data: schemas.UserLoginSchema = Body(...)): @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) + t = tenants.get_by_tenant_id(context.tenant_id) + if t is not None: + t.pop("createdAt") + t["tenantName"] = t.pop("name") return { 'data': { **r, - "limits": { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) - }, + **t, **license.get_status(context.tenant_id), "smtp": helper.has_smtp(), - "iceServers": 
assist.get_ice_servers() + # "iceServers": assist.get_ice_servers() } } @@ -199,29 +199,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = m_key=key, project_id=projectId)} -@app.get('/plans', tags=["plan"]) -def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": license.get_status(context.tenant_id) - } - - @public_app.get('/general_stats', tags=["private"], include_in_schema=False) def get_general_stats(): return {"data": {"sessions:": sessions.count_all()}} -@app.get('/client', tags=['projects']) -def get_client(context: schemas.CurrentContext = Depends(OR_context)): - r = tenants.get_by_tenant_id(context.tenant_id) - if r is not None: - r.pop("createdAt") - return { - 'data': r - } - - @app.get('/projects', tags=['projects']) def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True)} + + +@app.get('/limits', tags=['accounts']) +def get_limits(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': { + "limits": { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) + }, + } + } diff --git a/api/schemas.py b/api/schemas.py index ff42fd7d3..fc61999ad 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -38,15 +38,10 @@ class EditUserSchema(BaseModel): name: Optional[str] = Field(None) email: Optional[EmailStr] = Field(None) admin: Optional[bool] = Field(False) - appearance: Optional[dict] = Field({}) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) -class EditUserAppearanceSchema(BaseModel): - appearance: dict = Field(...) - - class ForgetPasswordPayloadSchema(_Grecaptcha): email: EmailStr = Field(...) 
diff --git a/ee/api/chalicelib/core/license.py b/ee/api/chalicelib/core/license.py index 2423567de..c1c9823d7 100644 --- a/ee/api/chalicelib/core/license.py +++ b/ee/api/chalicelib/core/license.py @@ -7,21 +7,11 @@ from chalicelib.utils import pg_client def get_status(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) + # cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) + cur.mogrify("SELECT edition FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) r = cur.fetchone() license = unlock.get_license() return { "hasActivePlan": unlock.is_valid(), - "current": { - "edition": r.get("edition", "").lower(), - "versionNumber": r.get("version_number", ""), - "license": license[0:2] + "*" * (len(license) - 4) + license[-2:], - "expirationDate": unlock.get_expiration_date(), - "teamMember": config("numberOfSeats", cast=int, default=0) - }, - "count": { - "teamMember": r.get("t_users"), - "projects": r.get("t_projects"), - "capturedSessions": r.get("t_sessions") - } + "edition": r.get("edition", "").lower(), } diff --git a/ee/api/chalicelib/core/notifications.py b/ee/api/chalicelib/core/notifications.py index 41c26b74c..5ba58f242 100644 --- a/ee/api/chalicelib/core/notifications.py +++ b/ee/api/chalicelib/core/notifications.py @@ -26,6 +26,27 @@ def get_all(tenant_id, user_id): return rows +def get_all_count(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + SELECT COUNT(notifications.*) + FROM public.notifications + LEFT JOIN (SELECT notification_id + FROM public.user_viewed_notifications + WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) + WHERE (notifications.tenant_id =%(tenant_id)s + OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS 
NULL + ORDER BY created_at DESC + LIMIT 100;""", + {"tenant_id": tenant_id, "user_id": user_id}) + ) + rows = helper.list_to_camel_case(cur.fetchall()) + for r in rows: + r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) + return rows + + def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: return False diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 4014f5e92..0415efc79 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -80,8 +80,8 @@ def create_step1(data: schemas.UserSignupSchema): RETURNING user_id,email,role,name,role_id ), au AS ( - INSERT INTO public.basic_authentication (user_id, password, generated_password) - VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE) + INSERT INTO public.basic_authentication (user_id, password) + VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12))) ) INSERT INTO public.projects (tenant_id, name, active) VALUES ((SELECT t.tenant_id FROM t), %(projectName)s, TRUE) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 5d28dc395..1f97fbd09 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -25,10 +25,10 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))) - RETURNING tenant_id,user_id,email,role,name,appearance, role_id + RETURNING tenant_id,user_id,email,role,name, role_id ), - au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) 
- VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) + au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at) + VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now())) RETURNING invitation_token ) SELECT u.user_id AS id, @@ -36,7 +36,6 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -74,7 +73,6 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own email, role, name, - TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -88,8 +86,7 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own result = cur.fetchone() query = cur.mogrify("""\ UPDATE public.basic_authentication - SET generated_password = TRUE, - invitation_token = %(invitation_token)s, + SET invitation_token = %(invitation_token)s, invited_at = timezone('utc'::text, now()), change_pwd_expire_at = NULL, change_pwd_token = NULL @@ -147,10 +144,7 @@ def update(tenant_id, user_id, changes): else: sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s") else: - if key == "appearance": - sub_query_users.append(f"appearance = %(appearance)s::jsonb") - changes["appearance"] = json.dumps(changes[key]) - elif helper.key_to_snake_case(key) == "role_id": + if helper.key_to_snake_case(key) == "role_id": sub_query_users.append("""role_id=(SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s 
AND name = 'Member' LIMIT 1), (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))""") @@ -171,11 +165,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -192,11 +184,9 @@ def update(tenant_id, user_id, changes): users.email, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -272,12 +262,10 @@ def get(user_id, tenant_id): users.user_id AS id, email, role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - appearance, api_key, origin, role_id, @@ -296,7 +284,7 @@ def get(user_id, tenant_id): {"userId": user_id, "tenant_id": tenant_id}) ) r = cur.fetchone() - return helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + return helper.dict_to_camel_case(r) def generate_new_api_key(user_id): @@ -316,7 +304,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"] + ALLOW_EDIT = ["name", "email", "admin", "roleId"] user = get(user_id=user_id_to_update, 
tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -349,11 +337,6 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): return {"data": user} -def edit_appearance(user_id, tenant_id, changes): - updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) - return {"data": updated_user} - - def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -363,8 +346,7 @@ def get_by_email_only(email): users.tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -389,8 +371,7 @@ def get_by_email_reset(email, reset_token): users.tenant_id, users.email, users.role, - users.name, - basic_authentication.generated_password, + users.name, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member @@ -414,8 +395,7 @@ def get_members(tenant_id): users.email, users.role, users.name, - users.created_at, - basic_authentication.generated_password, + users.created_at, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, @@ -642,11 +622,9 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): users.tenant_id, users.role, users.name, - basic_authentication.generated_password AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) 
AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, users.origin, users.role_id, roles.name AS role_name, @@ -678,7 +656,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if r is not None: if for_change_password: return True - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) jwt_iat = change_jwt_iat(r['id']) return { "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], @@ -698,11 +676,9 @@ def authenticate_sso(email, internal_id, exp=None): users.tenant_id, users.role, users.name, - False AS change_password, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance, origin, role_id FROM public.users AS users @@ -713,7 +689,7 @@ def authenticate_sso(email, internal_id, exp=None): r = cur.fetchone() if r is not None: - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + r = helper.dict_to_camel_case(r) jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) return authorizers.generate_jwt(r['id'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", @@ -740,11 +716,9 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id= u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - u.appearance, origin FROM u;""", {"tenant_id": tenant_id, "email": email, "internal_id": internal_id, @@ -774,7 +748,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in created_at= default, api_key= default, jwt_iat= NULL, - appearance= default, weekly_report= default WHERE user_id = %(user_id)s RETURNING * 
@@ -782,7 +755,6 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in au AS ( UPDATE public.basic_authentication SET password= default, - generated_password= default, invitation_token= default, invited_at= default, change_pwd_token= default, @@ -795,11 +767,9 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in u.email, u.role, u.name, - TRUE AS change_password, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - u.appearance, origin FROM u;""", {"tenant_id": tenant_id, "email": email, "internal_id": internal_id, diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 31ed1d099..6d7cf8e73 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -72,18 +72,18 @@ def login(data: schemas.UserLoginSchema = Body(...)): @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) + t = tenants.get_by_tenant_id(context.tenant_id) + if t is not None: + t.pop("createdAt") + t["tenantName"] = t.pop("name") return { 'data': { **r, - "limits": { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) - }, + **t, **license.get_status(context.tenant_id), "smtp": helper.has_smtp(), "saml2": SAML2_helper.is_saml2_available(), - "iceServers": assist.get_ice_servers() + # "iceServers": assist.get_ice_servers() } } @@ -209,30 +209,25 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = m_key=key, project_id=projectId)} -@app.get('/plans', tags=["plan"]) -def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": license.get_status(context.tenant_id) - } - - 
@public_app.get('/general_stats', tags=["private"], include_in_schema=False) def get_general_stats(): return {"data": {"sessions:": sessions.count_all()}} -@app.get('/client', tags=['projects']) -def get_client(context: schemas.CurrentContext = Depends(OR_context)): - r = tenants.get_by_tenant_id(context.tenant_id) - if r is not None: - r.pop("createdAt") - - return { - 'data': r - } - - @app.get('/projects', tags=['projects']) def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True, user_id=context.user_id)} + + +@app.get('/limits', tags=['accounts']) +def get_limits(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': { + "limits": { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) + } + } + } diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql similarity index 88% rename from ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql rename to ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 325d419ba..1ab026565 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,18 +1,25 @@ BEGIN; -CREATE OR REPLACE FUNCTION openreplay_version() +CREATE OR REPLACE +FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1-ee' -$$ LANGUAGE sql IMMUTABLE; +SELECT 'v1.6.1-ee' $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards - ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + ADD COLUMN IF NOT +EXISTS description text NOT NULL DEFAULT ''; -CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); -CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); -CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name 
gin_trgm_ops); +CREATE +INDEX IF NOT +EXISTS traces_created_at_idx ON traces (created_at); +CREATE +INDEX IF NOT +EXISTS traces_action_idx ON traces (action); +CREATE +INDEX IF NOT +EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) @@ -115,9 +122,9 @@ VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 - }', true, true, true, 'avg_fps', 'predefined', 'overview') -ON CONFLICT (predefined_key) DO UPDATE - SET name=excluded.name, + }', true, true, true, 'avg_fps', 'predefined', 'overview') ON CONFLICT (predefined_key) DO +UPDATE +SET name =excluded.name, category=excluded.category, default_config=excluded.default_config, is_predefined=excluded.is_predefined, @@ -126,4 +133,10 @@ ON CONFLICT (predefined_key) DO UPDATE metric_type=excluded.metric_type, view_type=excluded.view_type; +ALTER TABLE users + DROP COLUMN appearance; + +ALTER TABLE basic_authentication + DROP COLUMN generated_password; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index ec29b1dfc..efb4bb1d1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.0-ee' +SELECT 'v1.7.0-ee' $$ LANGUAGE sql IMMUTABLE; @@ -187,67 +187,6 @@ $$ name text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - 
"topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, api_key text UNIQUE default generate_api_key(20) not null, jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT'{}'::jsonb, @@ -264,7 +203,6 @@ $$ ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, invitation_token text NULL DEFAULT NULL, invited_at timestamp without time zone NULL DEFAULT NULL, change_pwd_token text NULL DEFAULT NULL, diff --git a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql 
b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql similarity index 97% rename from scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql rename to scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 4f1c7c28f..233f4fc5e 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.6.1/1.6.1.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -2,7 +2,7 @@ BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1' +SELECT 'v1.7.0' $$ LANGUAGE sql IMMUTABLE; @@ -121,4 +121,10 @@ ON CONFLICT (predefined_key) DO UPDATE metric_type=excluded.metric_type, view_type=excluded.view_type; +ALTER TABLE users + DROP COLUMN appearance; + +ALTER TABLE basic_authentication + DROP COLUMN generated_password; + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 91a590688..519997f72 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.0' +SELECT 'v1.7.0' $$ LANGUAGE sql IMMUTABLE; -- --- accounts.sql --- @@ -142,67 +142,6 @@ $$ name text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": 
false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, api_key text UNIQUE default generate_api_key(20) not null, jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT '{}'::jsonb, @@ -213,7 +152,6 @@ $$ ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, invitation_token text NULL DEFAULT NULL, invited_at timestamp without time zone NULL DEFAULT NULL, change_pwd_token text NULL DEFAULT NULL, From 06a52e505eecd60f59d7e79208c8fe3dc3dd2d65 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:12:08 +0200 Subject: [PATCH 159/221] feat(api): fixed edition feat(api): fixed expiration date feat(api): fixed change name feat(api): fixed change role feat(api): fixed has password feat(api): refactored edit user feat(api): refactored edit member --- api/chalicelib/core/license.py | 9 +-- api/chalicelib/core/signup.py 
| 4 +- api/chalicelib/core/telemetry.py | 5 +- api/chalicelib/core/tenants.py | 6 +- api/chalicelib/core/users.py | 49 +++++++++-------- api/chalicelib/utils/helper.py | 4 -- api/routers/core.py | 2 +- api/routers/core_dynamic.py | 4 +- api/schemas.py | 9 ++- ee/api/chalicelib/core/license.py | 13 ++--- ee/api/chalicelib/core/signup.py | 4 +- ee/api/chalicelib/core/telemetry.py | 9 +-- ee/api/chalicelib/core/tenants.py | 8 +-- ee/api/chalicelib/core/users.py | 55 +++++++++++-------- ee/api/routers/core_dynamic.py | 2 +- ee/api/schemas_ee.py | 11 +++- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 51 ++++++++++------- .../db/init_dbs/postgresql/init_schema.sql | 3 +- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 26 ++++++--- .../db/init_dbs/postgresql/init_schema.sql | 3 +- 20 files changed, 149 insertions(+), 128 deletions(-) diff --git a/api/chalicelib/core/license.py b/api/chalicelib/core/license.py index 4a562ea7b..469753878 100644 --- a/api/chalicelib/core/license.py +++ b/api/chalicelib/core/license.py @@ -1,12 +1,9 @@ -from chalicelib.utils import pg_client +EDITION = 'foss' def get_status(tenant_id=None): - with pg_client.PostgresClient() as cur: - # cur.execute("SELECT * FROM public.tenants;") - cur.execute("SELECT edition FROM public.tenants;") - r = cur.fetchone() return { "hasActivePlan": True, - "edition": r.get("edition", "").upper() + "edition": EDITION, + "expirationDate": -1 } diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index 4d320e0be..146da7305 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -67,8 +67,8 @@ def create_step1(data: schemas.UserSignupSchema): } query = f"""\ WITH t AS ( - INSERT INTO public.tenants (name, version_number, edition) - VALUES (%(organizationName)s, (SELECT openreplay_version()), 'fos') + INSERT INTO public.tenants (name, version_number) + VALUES (%(organizationName)s, (SELECT openreplay_version())) RETURNING api_key ), u AS ( diff --git 
a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index fa27fbe1c..e12200809 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -1,10 +1,11 @@ from chalicelib.utils import pg_client import requests +from chalicelib.core import license -def process_data(data, edition='fos'): +def process_data(data): return { - 'edition': edition, + 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], 'user_id': data["user_id"], diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index db154525c..e5b8cc63c 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -1,7 +1,7 @@ import schemas from chalicelib.utils import pg_client from chalicelib.utils import helper -from chalicelib.core import users +from chalicelib.core import users, license def get_by_tenant_id(tenant_id): @@ -13,7 +13,7 @@ def get_by_tenant_id(tenant_id): name, api_key, created_at, - edition, + '{license.EDITION}' AS edition, version_number, opt_out FROM public.tenants @@ -67,7 +67,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: - return {"error": "unauthorized"} + return {"errors": ["unauthorized, needs admin or owner"]} if data.name is None and data.opt_out is None: return {"errors": ["please provide 'name' of 'optOut' attribute for update"]} changes = {} diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 408fb03c1..0e9852e2d 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -4,6 +4,7 @@ import secrets from decouple import config from fastapi import BackgroundTasks +import schemas from chalicelib.core import authorizers, metadata, projects from chalicelib.core import tenants, assist from chalicelib.utils import dev, email_helper @@ -240,7 +241,8 @@ def get(user_id, tenant_id): (CASE 
WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, - api_key + api_key, + TRUE AS has_password FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.user_id = %(userId)s @@ -268,37 +270,36 @@ def generate_new_api_key(user_id): return helper.dict_to_camel_case(r) -def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin"] +def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id): user = get(user_id=user_id_to_update, tenant_id=tenant_id) - if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return {"errors": ["unauthorized"]} + _changes = {} if editor_id == user_id_to_update: - if user["superAdmin"]: - changes.pop("admin") - elif user["admin"] != changes["admin"]: - return {"errors": ["cannot change your own role"]} + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} - keys = list(changes.keys()) - for k in keys: - if k not in ALLOW_EDIT or changes[k] is None: - changes.pop(k) - keys = list(changes.keys()) + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email - if len(keys) > 0: - if "email" in keys and changes["email"] != user["email"]: - if email_exists(changes["email"]): - return {"errors": ["email already 
exists."]} - if get_deleted_user_by_email(changes["email"]) is not None: - return {"errors": ["email previously deleted."]} - if "admin" in keys: - changes["role"] = "admin" if changes.pop("admin") else "member" - if len(changes.keys()) > 0: - updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes) + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name - return {"data": updated_user} + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if len(_changes.keys()) > 0: + updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": updated_user} return {"data": user} diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 042b2a94b..2716cf111 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -365,10 +365,6 @@ def has_smtp(): return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0 -def get_edition(): - return "ee" if "ee" in config("ENTERPRISE_BUILD", default="").lower() else "foss" - - def old_search_payload_to_flat(values): # in case the old search body was passed if values.get("events") is not None: diff --git a/api/routers/core.py b/api/routers/core.py index 3008e94b7..5265287e6 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1076,7 +1076,7 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context @app.put('/account', tags=["account"]) def edit_account(data: schemas.EditUserSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 7791c5677..918d81541 100644 --- 
a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -24,7 +24,7 @@ def get_all_signup(): return {"data": {"tenants": tenants.tenants_exists(), "sso": None, "ssoProvider": None, - "edition": helper.get_edition()}} + "edition": license.EDITION}} @public_app.post('/login', tags=["authentication"]) @@ -181,7 +181,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/api/schemas.py b/api/schemas.py index fc61999ad..cb83789cd 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -12,7 +12,7 @@ def attribute_to_camel_case(snake_str): def transform_email(email: str) -> str: - return email.lower() if isinstance(email, str) else email + return email.lower().strip() if isinstance(email, str) else email class _Grecaptcha(BaseModel): @@ -37,7 +37,7 @@ class UserSignupSchema(UserLoginSchema): class EditUserSchema(BaseModel): name: Optional[str] = Field(None) email: Optional[EmailStr] = Field(None) - admin: Optional[bool] = Field(False) + admin: Optional[bool] = Field(None) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) @@ -127,13 +127,11 @@ class CreateMemberSchema(BaseModel): _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) -class EditMemberSchema(BaseModel): +class EditMemberSchema(EditUserSchema): name: str = Field(...) email: EmailStr = Field(...) admin: bool = Field(False) - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - class EditPasswordByInvitationSchema(BaseModel): invitation: str = Field(...) 
@@ -796,6 +794,7 @@ class MetricTableViewType(str, Enum): class MetricType(str, Enum): timeseries = "timeseries" table = "table" + predefined = "predefined" class TableMetricOfType(str, Enum): diff --git a/ee/api/chalicelib/core/license.py b/ee/api/chalicelib/core/license.py index c1c9823d7..c067d4758 100644 --- a/ee/api/chalicelib/core/license.py +++ b/ee/api/chalicelib/core/license.py @@ -1,17 +1,12 @@ -from decouple import config - from chalicelib.core import unlock -from chalicelib.utils import pg_client + +EDITION = 'ee' def get_status(tenant_id): - with pg_client.PostgresClient() as cur: - cur.execute( - # cur.mogrify("SELECT * FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) - cur.mogrify("SELECT edition FROM public.tenants WHERE tenant_id=%(tenant_id)s;", {"tenant_id": tenant_id})) - r = cur.fetchone() license = unlock.get_license() return { "hasActivePlan": unlock.is_valid(), - "edition": r.get("edition", "").lower(), + "edition": EDITION, + "expirationDate": unlock.get_expiration_date() } diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 0415efc79..605520df4 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -64,8 +64,8 @@ def create_step1(data: schemas.UserSignupSchema): "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})} query = """\ WITH t AS ( - INSERT INTO public.tenants (name, version_number, edition) - VALUES (%(companyName)s, (SELECT openreplay_version()), 'ee') + INSERT INTO public.tenants (name, version_number) + VALUES (%(companyName)s, (SELECT openreplay_version())) RETURNING tenant_id, api_key ), r AS ( diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index 9c82290fb..51fd55787 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -1,10 +1,11 @@ from chalicelib.utils import pg_client +from chalicelib.core import license import requests -def 
process_data(data, edition='fos'): +def process_data(data): return { - 'edition': edition, + 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], 'user_id': data["user_id"], @@ -56,7 +57,7 @@ def compute(): ) data = cur.fetchall() requests.post('https://api.openreplay.com/os/telemetry', - json={"stats": [process_data(d, edition='ee') for d in data]}) + json={"stats": [process_data(d) for d in data]}) def new_client(tenant_id): @@ -67,4 +68,4 @@ def new_client(tenant_id): FROM public.tenants WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id})) data = cur.fetchone() - requests.post('https://api.openreplay.com/os/signup', json=process_data(data, edition='ee')) \ No newline at end of file + requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index 45491f654..cecb8a9cf 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -1,4 +1,4 @@ -from chalicelib.core import users +from chalicelib.core import users, license from chalicelib.utils import helper from chalicelib.utils import pg_client @@ -12,7 +12,7 @@ def get_by_tenant_key(tenant_key): t.name, t.api_key, t.created_at, - t.edition, + '{license.EDITION}' AS edition, t.version_number, t.opt_out FROM public.tenants AS t @@ -32,7 +32,7 @@ def get_by_tenant_id(tenant_id): t.name, t.api_key, t.created_at, - t.edition, + '{license.EDITION}' AS edition, t.version_number, t.opt_out, t.user_id AS tenant_key @@ -90,7 +90,7 @@ def update(tenant_id, user_id, data): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: - return {"error": "unauthorized"} + return {"errors": ["unauthorized, needs admin or owner"]} if "name" not in data and "optOut" not in data: return {"errors": ["please provide 'name' of 'optOut' attribute for update"]} changes = {} diff --git 
a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 1f97fbd09..91c2384c4 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -4,6 +4,8 @@ import secrets from decouple import config from fastapi import BackgroundTasks +import schemas +import schemas_ee from chalicelib.core import authorizers, metadata, projects, roles from chalicelib.core import tenants, assist from chalicelib.utils import dev, SAML2_helper @@ -303,37 +305,44 @@ def generate_new_api_key(user_id): return helper.dict_to_camel_case(r) -def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "roleId"] +def edit(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id): user = get(user_id=user_id_to_update, tenant_id=tenant_id) - if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return {"errors": ["unauthorized"]} + _changes = {} if editor_id == user_id_to_update: - if user["superAdmin"]: - changes.pop("admin") - elif user["admin"] != changes["admin"]: - return {"errors": ["cannot change your own role"]} + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + if changes.roleId is not None: + if user["superAdmin"]: + changes.roleId = None + elif changes.roleId != user["roleId"]: + return {"errors": ["cannot change your own role"]} - keys = list(changes.keys()) - for k in keys: - if k not in ALLOW_EDIT or changes[k] is None: - changes.pop(k) - keys = list(changes.keys()) + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if 
get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email - if len(keys) > 0: - if "email" in keys and changes["email"] != user["email"]: - if email_exists(changes["email"]): - return {"errors": ["email already exists."]} - if get_deleted_user_by_email(changes["email"]) is not None: - return {"errors": ["email previously deleted."]} - if "admin" in keys: - changes["role"] = "admin" if changes.pop("admin") else "member" - if len(changes.keys()) > 0: - updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes) + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name - return {"data": updated_user} + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if changes.roleId is not None: + _changes["roleId"] = changes.roleId + + if len(_changes.keys()) > 0: + updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": updated_user} return {"data": user} diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 6d7cf8e73..667d42c79 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -187,7 +187,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 794dfdd64..0375521ad 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -1,6 +1,6 @@ from typing import Optional, List, Literal -from pydantic import 
BaseModel, Field +from pydantic import BaseModel, Field, EmailStr import schemas from chalicelib.utils.TimeUTC import TimeUTC @@ -21,7 +21,14 @@ class CreateMemberSchema(schemas.CreateMemberSchema): roleId: Optional[int] = Field(None) -class EditMemberSchema(schemas.EditMemberSchema): +class EditUserSchema(schemas.EditUserSchema): + roleId: Optional[int] = Field(None) + + +class EditMemberSchema(EditUserSchema): + name: str = Field(...) + email: EmailStr = Field(...) + admin: bool = Field(False) roleId: int = Field(...) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 1ab026565..01153848f 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,25 +1,42 @@ BEGIN; CREATE OR REPLACE -FUNCTION openreplay_version() + FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.6.1-ee' $$ LANGUAGE sql IMMUTABLE; +SELECT 'v1.6.1-ee' +$$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT -EXISTS description text NOT NULL DEFAULT ''; + EXISTS description text NOT NULL DEFAULT ''; CREATE -INDEX IF NOT -EXISTS traces_created_at_idx ON traces (created_at); + INDEX IF NOT + EXISTS traces_created_at_idx ON traces (created_at); CREATE -INDEX IF NOT -EXISTS traces_action_idx ON traces (action); + INDEX IF NOT + EXISTS traces_action_idx ON traces (action); CREATE -INDEX IF NOT -EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + INDEX IF NOT + EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); + + + +ALTER TABLE users + DROP COLUMN IF EXISTS appearance; + +ALTER TABLE basic_authentication + DROP COLUMN IF EXISTS generated_password; + +ALTER TABLE tenants + DROP COLUMN IF EXISTS edition; + +ALTER TABLE dashboards + ALTER COLUMN user_id DROP NOT NULL; + +COMMIT; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, 
metric_type, view_type) @@ -122,21 +139,13 @@ VALUES ('Captured sessions', 'web vitals', '{ "col": 1, "row": 1, "position": 0 - }', true, true, true, 'avg_fps', 'predefined', 'overview') ON CONFLICT (predefined_key) DO -UPDATE -SET name =excluded.name, + }', true, true, true, 'avg_fps', 'predefined', 'overview') +ON CONFLICT (predefined_key) DO UPDATE + SET name =excluded.name, category=excluded.category, default_config=excluded.default_config, is_predefined=excluded.is_predefined, is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; - -ALTER TABLE users - DROP COLUMN appearance; - -ALTER TABLE basic_authentication - DROP COLUMN generated_password; - -COMMIT; \ No newline at end of file + view_type=excluded.view_type; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index efb4bb1d1..5bf02f4e1 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -147,7 +147,6 @@ $$ api_key text UNIQUE default generate_api_key(20) not null, created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, - edition varchar(3) NOT NULL, version_number text NOT NULL, license text NULL, opt_out bool NOT NULL DEFAULT FALSE, @@ -777,7 +776,7 @@ $$ ( dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + user_id integer REFERENCES users (user_id) ON DELETE SET NULL, name text NOT NULL, description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 
233f4fc5e..3f5552640 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -9,6 +9,22 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS dashboards ADD COLUMN IF NOT EXISTS description text NOT NULL DEFAULT ''; + + +ALTER TABLE users + DROP COLUMN IF EXISTS appearance; + +ALTER TABLE basic_authentication + DROP COLUMN IF EXISTS generated_password; + +ALTER TABLE tenants + DROP COLUMN IF EXISTS edition; + +ALTER TABLE dashboards + ALTER COLUMN user_id DROP NOT NULL; + +COMMIT; + INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) VALUES ('Captured sessions', 'web vitals', '{ @@ -119,12 +135,4 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; - -ALTER TABLE users - DROP COLUMN appearance; - -ALTER TABLE basic_authentication - DROP COLUMN generated_password; - -COMMIT; \ No newline at end of file + view_type=excluded.view_type; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 519997f72..6cbd17dc8 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -121,7 +121,6 @@ $$ name text NOT NULL, api_key text NOT NULL DEFAULT generate_api_key(20), created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - edition varchar(3) NOT NULL, version_number text NOT NULL, license text NULL, opt_out bool NOT NULL DEFAULT FALSE, @@ -928,7 +927,7 @@ $$ ( dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + user_id integer REFERENCES users (user_id) ON 
DELETE SET NULL, name text NOT NULL, description text NOT NULL DEFAULT '', is_public boolean NOT NULL DEFAULT TRUE, From 7d426ee79a4334c49c6c25677f0039b282ab6e73 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:18:22 +0200 Subject: [PATCH 160/221] feat(api): fixed notifications count query --- api/chalicelib/core/notifications.py | 4 +--- ee/api/chalicelib/core/notifications.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/notifications.py b/api/chalicelib/core/notifications.py index ce3c4d61a..c3eadcccd 100644 --- a/api/chalicelib/core/notifications.py +++ b/api/chalicelib/core/notifications.py @@ -34,9 +34,7 @@ def get_all_count(tenant_id, user_id): LEFT JOIN (SELECT notification_id FROM public.user_viewed_notifications WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) - WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL - ORDER BY created_at DESC - LIMIT 100;""", + WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""", {"user_id": user_id}) ) row = cur.fetchone() diff --git a/ee/api/chalicelib/core/notifications.py b/ee/api/chalicelib/core/notifications.py index 5ba58f242..0069063c7 100644 --- a/ee/api/chalicelib/core/notifications.py +++ b/ee/api/chalicelib/core/notifications.py @@ -36,9 +36,7 @@ def get_all_count(tenant_id, user_id): FROM public.user_viewed_notifications WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id) WHERE (notifications.tenant_id =%(tenant_id)s - OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL - ORDER BY created_at DESC - LIMIT 100;""", + OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""", {"tenant_id": 
tenant_id, "user_id": user_id}) ) rows = helper.list_to_camel_case(cur.fetchall()) From b5a646b2332c806824b55af279f11d82cd613ecc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 18:34:52 +0200 Subject: [PATCH 161/221] feat(api): EE fixed edition --- ee/api/routers/core_dynamic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 667d42c79..196764ad9 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -24,7 +24,7 @@ def get_all_signup(): return {"data": {"tenants": tenants.tenants_exists(), "sso": SAML2_helper.is_saml2_available(), "ssoProvider": SAML2_helper.get_saml2_provider(), - "edition": helper.get_edition()}} + "edition": license.EDITION}} @public_app.post('/login', tags=["authentication"]) From 8d49a588e41ff41c610eff349f9273d858e8820e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 2022 19:17:55 +0200 Subject: [PATCH 162/221] feat(api): funnel widget --- api/chalicelib/core/custom_metrics.py | 7 ++++++- api/chalicelib/core/funnels.py | 16 ++++++++++++++++ api/schemas.py | 1 + 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 3e7fc100a..f26fdb6a9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -2,7 +2,7 @@ import json from typing import Union import schemas -from chalicelib.core import sessions +from chalicelib.core import sessions, funnels from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -43,6 +43,11 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): + if data.metric_type == schemas.MetricType.funnel: + if len(data.series) == 0: + return {} + return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, 
data=data.series[0].filter) + series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: return series_charts diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 16e95989d..e0eb99dc3 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -251,6 +251,22 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu "totalDropDueToIssues": total_drop_due_to_issues}} +# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): +def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema): + data.events = filter_stages(__parse_events(data.events)) + data.events = __fix_stages(data.events) + if len(data.events) == 0: + return {"stages": [], "totalDropDueToIssues": 0} + insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) + insights = helper.list_to_camel_case(insights) + if len(insights) > 0: + if total_drop_due_to_issues > insights[0]["sessionsCount"]: + total_drop_due_to_issues = insights[0]["sessionsCount"] + insights[-1]["dropDueToIssues"] = total_drop_due_to_issues + return {"stages": insights, + "totalDropDueToIssues": total_drop_due_to_issues} + + def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: diff --git a/api/schemas.py b/api/schemas.py index cb83789cd..0902fb269 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -795,6 +795,7 @@ class MetricType(str, Enum): timeseries = "timeseries" table = "table" predefined = "predefined" + funnel = "funnel" class TableMetricOfType(str, Enum): From 4912841a9e5aeeb4836e3bc88bbe264604ab1c79 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Jun 
2022 20:10:40 +0200 Subject: [PATCH 163/221] feat(api): funnel widget issues --- api/chalicelib/core/custom_metrics.py | 19 +++++++++++++ api/chalicelib/core/funnels.py | 13 +++++++++ api/routers/subs/metrics.py | 28 +++++++++++++++++-- api/schemas.py | 8 ++++++ .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 12 ++++---- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 ++ .../db/init_dbs/postgresql/init_schema.sql | 12 ++++---- 8 files changed, 80 insertions(+), 15 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index f26fdb6a9..0e9061a11 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -110,6 +110,25 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi return results +def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + results.append({"seriesId": s.series_id, "seriesName": s.name, + **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}) + + return results + + def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): results = [] if data.series is None: diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index e0eb99dc3..22774fb16 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -296,6 +296,19 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe 
last_stage=len(data.events)))} +# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): +def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema): + data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) + if len(data.events) < 0: + return {"issues": []} + + return { + "issues": helper.dict_to_camel_case( + significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, + last_stage=len(data.events)))} + + def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): with pg_client.PostgresClient() as cur: cur.execute( diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index a33b75d0b..57e3b28f7 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -1,7 +1,7 @@ from fastapi import Body, Depends import schemas -from chalicelib.core import dashboards, custom_metrics +from chalicelib.core import dashboards, custom_metrics, funnels from or_dependencies import OR_context from routers.base import get_routers @@ -107,13 +107,24 @@ def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchem @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) -def try_custom_metric_sessions(projectId: int, - data: schemas.CustomMetricSessionsPayloadSchema = Body(...), +def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) return {"data": data} +@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) +def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: 
schemas.CurrentContext = Depends(OR_context)): + if len(data.series) == 0: + return {"data": []} + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp + data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter) + return {"data": data} + + @app.post('/{projectId}/metrics', tags=["dashboard"]) @app.put('/{projectId}/metrics', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) @@ -149,6 +160,17 @@ def get_custom_metric_sessions(projectId: int, metric_id: int, return {"data": data} +@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) +def get_custom_metric__funnel_issues(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...), diff --git a/api/schemas.py b/api/schemas.py index 0902fb269..c1979811e 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -872,6 +872,14 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema): series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1) + @root_validator(pre=True) + def transform_series(cls, values): + if values.get("series") is not None and len(values["series"]) > 1 and values.get( + "metric_type") == MetricType.funnel.value: + 
values["series"] = [values["series"][0]] + + return values + class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema): series_id: Optional[int] = Field(None) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 01153848f..1fb572626 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -37,6 +37,7 @@ ALTER TABLE dashboards ALTER COLUMN user_id DROP NOT NULL; COMMIT; +ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 5bf02f4e1..d044f1636 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -201,11 +201,11 @@ $$ CREATE TABLE IF NOT EXISTS basic_authentication ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + password text DEFAULT NULL, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, changed_at timestamp, UNIQUE (user_id) ); @@ -726,7 +726,7 @@ $$ CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at); CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action); - CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); + CREATE TYPE metric_type AS ENUM ('timeseries','table', 
'predefined','funnel'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); CREATE TABLE IF NOT EXISTS metrics ( diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 3f5552640..b0275a3a8 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -25,6 +25,8 @@ ALTER TABLE dashboards COMMIT; +ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; + INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type) VALUES ('Captured sessions', 'web vitals', '{ diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 6cbd17dc8..f870b7824 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -150,11 +150,11 @@ $$ CREATE TABLE basic_authentication ( user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + password text DEFAULT NULL, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, changed_at timestamp, UNIQUE (user_id) ); @@ -877,7 +877,7 @@ $$ CREATE INDEX jobs_start_at_idx ON jobs (start_at); CREATE INDEX jobs_project_id_idx ON jobs (project_id); - CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); + CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined', 
'funnel'); CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); CREATE TABLE metrics ( From bf60c83f3b323c300decece0ad9fbc1dff4fe89d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 8 Jun 2022 17:21:13 +0200 Subject: [PATCH 164/221] feat(api): errors widget --- api/chalicelib/core/custom_metrics.py | 28 +++++++++++++++++++++++++-- api/chalicelib/core/errors.py | 15 ++++++-------- api/routers/core.py | 2 +- api/routers/subs/metrics.py | 23 +++++++++++++++++----- api/schemas.py | 10 +++++++--- ee/api/chalicelib/core/errors.py | 21 +++++++++----------- 6 files changed, 67 insertions(+), 32 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 0e9061a11..7c04b6c6b 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -2,7 +2,7 @@ import json from typing import Union import schemas -from chalicelib.core import sessions, funnels +from chalicelib.core import sessions, funnels, errors from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -42,11 +42,16 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): return results -def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): +def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): if data.metric_type == schemas.MetricType.funnel: if len(data.series) == 0: return {} return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) + elif data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.issues \ + and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ + and data.metric_format == schemas.MetricFormatType.errors_list: + return errors.search(data.series[0].filter, 
project_id=project_id, user_id=user_id) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: @@ -129,6 +134,25 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric return results +def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + results.append({"seriesId": s.series_id, "seriesName": s.name, + **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}) + + return results + + def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): results = [] if data.series is None: diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 983d091f8..2026f9232 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -425,10 +425,9 @@ def __get_sort_key(key): def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): - empty_response = {"data": { - 'total': 0, - 'errors': [] - }} + empty_response = {'total': 0, + 'errors': [] + } platform = None for f in data.filters: @@ -544,7 +543,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): rows = cur.fetchall() total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: - return {"data": {"count": total}} + return {"count": total} if total == 0: rows = [] @@ -592,10 +591,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): and 
(r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] offset -= len(rows) return { - "data": { - 'total': total - offset, - 'errors': helper.list_to_camel_case(rows) - } + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) } diff --git a/api/routers/core.py b/api/routers/core.py index 5265287e6..2a38d0a75 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -903,7 +903,7 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), @app.post('/{projectId}/errors/search', tags=['errors']) def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return errors.search(data, projectId, user_id=context.user_id) + return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index 57e3b28f7..e00d2d4f7 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -102,7 +102,7 @@ def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_c @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.merged_live(project_id=projectId, data=data)} + return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)} @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) @@ -162,10 +162,23 @@ def get_custom_metric_sessions(projectId: int, metric_id: int, @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) -def get_custom_metric__funnel_issues(projectId: int, metric_id: int, - data: schemas.CustomMetricSessionsPayloadSchema = Body(...), 
- context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) +def get_custom_metric_funnel_issues(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) +def get_custom_metric_errors_list(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} diff --git a/api/schemas.py b/api/schemas.py index c1979811e..d1b84e915 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -479,6 +479,11 @@ class IssueType(str, Enum): js_exception = 'js_exception' +class MetricFormatType(str, Enum): + session_count = 'sessionCount' + errors_list = 'errors' + + class __MixedSearchFilter(BaseModel): is_event: bool = Field(...) @@ -761,8 +766,7 @@ class MobileSignPayloadSchema(BaseModel): keys: List[str] = Field(...) 
-class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema): - # class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema): +class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema, SearchErrorsSchema): startDate: Optional[int] = Field(None) endDate: Optional[int] = Field(None) sort: Optional[str] = Field(None) @@ -836,7 +840,7 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): metric_type: MetricType = Field(MetricType.timeseries) metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id) metric_value: List[IssueType] = Field([]) - metric_format: Optional[str] = Field(None) + metric_format: Optional[MetricFormatType] = Field(None) # metricFraction: float = Field(None, gt=0, lt=1) # This is used to handle wrong values sent by the UI diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index ecf1aeda2..9477f8ec7 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -83,7 +83,7 @@ def __rearrange_chart_details(start_at, end_at, density, chart): for i in range(len(chart)): chart[i] = {"timestamp": chart[i][0], "count": chart[i][1]} chart = metrics.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density, - neutral={"count": 0}) + neutral={"count": 0}) return chart @@ -466,10 +466,9 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): - empty_response = {"data": { - 'total': 0, - 'errors': [] - }} + empty_response = {'total': 0, + 'errors': [] + } platform = None for f in data.filters: @@ -585,7 +584,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): rows = cur.fetchall() total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: - return {"data": {"count": total}} + return {"count": total} if total == 0: rows = [] @@ -633,10 +632,8 @@ def 
search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] offset -= len(rows) return { - "data": { - 'total': total - offset, - 'errors': helper.list_to_camel_case(rows) - } + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) } @@ -790,8 +787,8 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo for i in range(len(r["chart"])): r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]} r["chart"] = metrics.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, - end_time=data.endDate, - density=data.density, neutral={"count": 0}) + end_time=data.endDate, + density=data.density, neutral={"count": 0}) offset = len(rows) rows = [r for r in rows if r["stack"] is None or (len(r["stack"]) == 0 or len(r["stack"]) > 1 From 53fc845f9a9febdf65b81fc3f6f571700435d965 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 8 Jun 2022 19:03:06 +0200 Subject: [PATCH 165/221] feat(api): errors widget chart feat(api): funnels widget chart --- api/chalicelib/core/custom_metrics.py | 61 +++++++++++++++++++-------- api/chalicelib/core/events.py | 10 ++++- api/chalicelib/core/sessions.py | 4 +- 3 files changed, 54 insertions(+), 21 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 7c04b6c6b..5f7e12656 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -42,16 +42,34 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): return results +def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema): + return data.metric_type == schemas.MetricType.funnel + + +def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): + if len(data.series) == 0: + return {} + return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, 
data=data.series[0].filter) + + +def __is_errors_list(data): + return data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.issues \ + and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ + and data.metric_format == schemas.MetricFormatType.errors_list + + +def __get_errors_list(project_id, user_id, data): + if len(data.series) == 0: + return [] + return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) + + def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): - if data.metric_type == schemas.MetricType.funnel: - if len(data.series) == 0: - return {} - return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) - elif data.metric_type == schemas.MetricType.table \ - and data.metric_of == schemas.TableMetricOfType.issues \ - and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ - and data.metric_format == schemas.MetricFormatType.errors_list: - return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) + if __is_funnel_chart(data): + return __get_funnel_chart(project_id=project_id, data=data) + elif __is_errors_list(data): + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: @@ -85,15 +103,22 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa if metric is None: return None metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) - series_charts = __try_live(project_id=project_id, data=metric) - if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: - return series_charts - results = [{}] * 
len(series_charts[0]) - for i in range(len(results)): - for j, series_chart in enumerate(series_charts): - results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], - metric.series[j].name: series_chart[i]["count"]} - return results + + return merged_live(project_id=project_id, data=metric, user_id=user_id) + # if __is_funnel_chart(metric): + # return __get_funnel_chart(project_id=project_id, data=metric) + # elif __is_errors_list(metric): + # return __get_errors_list(project_id=project_id, user_id=user_id, data=metric) + # + # series_charts = __try_live(project_id=project_id, data=metric) + # if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: + # return series_charts + # results = [{}] * len(series_charts[0]) + # for i in range(len(results)): + # for j, series_chart in enumerate(series_charts): + # results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], + # metric.series[j].name: series_chart[i]["count"]} + # return results def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 272b86002..d07cf1042 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -435,7 +435,15 @@ def __get_autocomplete_table(value, project_id): query = cur.mogrify(" UNION ".join(sub_queries) + ";", {"project_id": project_id, "value": helper.string_to_sql_like(value), "svalue": helper.string_to_sql_like("^" + value)}) - cur.execute(query) + try: + cur.execute(query) + except Exception as err: + print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") + print(query.decode('UTF-8')) + print("--------- VALUE -----------") + print(value) + print("--------------------") + raise err results = helper.list_to_camel_case(cur.fetchall()) return results diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 
e717f1d07..7543f9c9d 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -254,9 +254,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e cur.execute(main_query) except Exception as err: print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") - print(main_query) + print(main_query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(data.dict()) + print(data.json()) print("--------------------") raise err if errors_only: From 23a98d83d7b5e4cb97b41047e6caae37b848b855 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:13:05 +0200 Subject: [PATCH 166/221] feat(api): table of sessions widget --- api/chalicelib/core/custom_metrics.py | 26 ++++++++++++++++++++++++-- api/schemas.py | 1 + 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 5f7e12656..d6ebebc76 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -48,7 +48,10 @@ def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema): def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): if len(data.series) == 0: - return {} + return { + "stages": [], + "totalDropDueToIssues": 0 + } return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) @@ -61,15 +64,34 @@ def __is_errors_list(data): def __get_errors_list(project_id, user_id, data): if len(data.series) == 0: - return [] + return { + "total": 0, + "errors": [] + } return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) +def __is_sessions_list(data): + return data.metric_type == schemas.MetricType.table \ + and data.metric_of == schemas.TableMetricOfType.sessions + + +def __get_sessions_list(project_id, user_id, data): + if len(data.series) == 0: + return { + "total": 0, + "sessions": [] + } + return 
sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) + + def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None): if __is_funnel_chart(data): return __get_funnel_chart(project_id=project_id, data=data) elif __is_errors_list(data): return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + elif __is_sessions_list(data): + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) series_charts = __try_live(project_id=project_id, data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: diff --git a/api/schemas.py b/api/schemas.py index d1b84e915..ab063a9b9 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -810,6 +810,7 @@ class TableMetricOfType(str, Enum): user_id = FilterType.user_id.value issues = FilterType.issue.value visited_url = EventType.location.value + sessions = "SESSIONS" class TimeseriesMetricOfType(str, Enum): From fa7a57eb3f369c157586a5c86303d0d58015a9f8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:42:52 +0200 Subject: [PATCH 167/221] feat(api): changed slowest_domains response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index 05c5233f8..fd5809d06 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1721,7 +1721,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT resources.url_host AS domain, - AVG(resources.duration) AS avg + AVG(resources.duration) AS value FROM events.resources INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY resources.url_host @@ -1740,7 +1740,7 @@ def get_slowest_domains(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), avg = cur.fetchone()["avg"] else: avg = 0 - return {"avg": avg, "partition": rows} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 2d6aa7201..5c4db36f1 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1661,7 +1661,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT resources.url_host AS domain, - COALESCE(avgOrNull(resources.duration),0) AS avg + COALESCE(avgOrNull(resources.duration),0) AS value FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY resources.url_host @@ -1675,7 +1675,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, "partition": rows} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), From 597da9fc11b38f9ab56a73d673884748d6a55253 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:53:55 +0200 Subject: [PATCH 168/221] feat(api): changed speed_location response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index fd5809d06..cf9fc6a1c 100644 --- a/api/chalicelib/core/metrics.py +++ 
b/api/chalicelib/core/metrics.py @@ -1069,7 +1069,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- pg_sub_query.append("pages.speed_index>0") with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS avg + pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS value FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY sessions.user_country @@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- avg = cur.fetchone()["avg"] else: avg = 0 - return {"avg": avg, "chart": helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond} def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 5c4db36f1..9beb13cc1 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1046,7 +1046,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS avg + ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS value FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY pages.user_country @@ -1059,7 +1059,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, "chart": 
helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond} def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), From 3f35b01a5e95ba2b4f3e812e0e5397cfbb6d227b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 13:54:25 +0200 Subject: [PATCH 169/221] feat(api): changed speed_location response --- api/chalicelib/core/metrics.py | 2 +- ee/api/chalicelib/core/metrics.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index cf9fc6a1c..abed83cbb 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1073,7 +1073,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY sessions.user_country - ORDER BY avg,sessions.user_country;""" + ORDER BY value, sessions.user_country;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 9beb13cc1..9bbfe13ef 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1050,7 +1050,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY pages.user_country - ORDER BY avg,pages.user_country;""" + ORDER BY value ,pages.user_country;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} From 26ce0c8e8637f09b4cc1205884bec12daff61422 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 
14:09:13 +0200 Subject: [PATCH 170/221] feat(api): changed crashes response --- api/chalicelib/core/metrics.py | 4 ++-- ee/api/chalicelib/core/metrics.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index abed83cbb..87b870bbe 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1500,7 +1500,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), pg_sub_query_chart.append("m_issues.type = 'crash'") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COUNT(sessions) AS count + COUNT(sessions) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT sessions.session_id @@ -1558,7 +1558,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), versions.append({v["version"]: v["count"] / (r["total"] / 100)}) r["versions"] = versions - return {"chart": rows, "browsers": browsers} + return {"chart": rows, "browsers": browsers,"unit": schemas.TemplatePredefinedUnits.count} def __get_neutral(rows, add_All_if_empty=True): diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 9bbfe13ef..640394b3c 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1460,7 +1460,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(sessions.session_id) AS count + COUNT(sessions.session_id) AS value FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1514,8 +1514,9 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), 
result = {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"count": 0}), - "browsers": browsers} + neutral={"value": 0}), + "browsers": browsers, + "unit": schemas.TemplatePredefinedUnits.count} return result From 5e85da6533b42558f46df455d39c1592b94913e9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 14:12:21 +0200 Subject: [PATCH 171/221] feat(api): changed pages_response_time_distribution response --- api/chalicelib/core/metrics.py | 7 ++++--- ee/api/chalicelib/core/metrics.py | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index 87b870bbe..c78200363 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -1171,7 +1171,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( else: quantiles = [0 for i in range(len(quantiles_keys))] result = { - "avg": avg, + "value": avg, "total": sum(r["count"] for r in rows), "chart": [], "percentiles": [{ @@ -1179,7 +1179,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( "responseTime": int(quantiles[i]) } for i, v in enumerate(quantiles_keys) ], - "extremeValues": [{"count": 0}] + "extremeValues": [{"count": 0}], + "unit": schemas.TemplatePredefinedUnits.millisecond } rows = helper.list_to_camel_case(rows) _99 = result["percentiles"][-1]["responseTime"] @@ -1558,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), versions.append({v["version"]: v["count"] / (r["total"] / 100)}) r["versions"] = versions - return {"chart": rows, "browsers": browsers,"unit": schemas.TemplatePredefinedUnits.count} + return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count} def __get_neutral(rows, add_All_if_empty=True): diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 
640394b3c..a86af9315 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1133,7 +1133,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)}) result = { - "avg": avg, + "value": avg, "total": sum(r["count"] for r in rows), "chart": [], "percentiles": [{ @@ -1142,7 +1142,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( quantiles[0]["values"][i] if quantiles[0]["values"][i] is not None and not math.isnan( quantiles[0]["values"][i]) else 0)} for i, v in enumerate(quantiles_keys) ], - "extremeValues": [{"count": 0}] + "extremeValues": [{"count": 0}], + "unit": schemas.TemplatePredefinedUnits.millisecond } if len(rows) > 0: rows = helper.list_to_camel_case(rows) From 0fd7d1d80c2b63be8dce982ca8e1c74f9842dca8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 17:23:17 +0200 Subject: [PATCH 172/221] feat(api): changes feat(db): changes --- api/chalicelib/core/telemetry.py | 8 +++++--- ee/api/chalicelib/core/telemetry.py | 10 ++++++---- .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 14 ++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 2 +- .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 14 ++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 2 +- 6 files changed, 41 insertions(+), 9 deletions(-) diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index e12200809..8098c9cd7 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -8,7 +8,8 @@ def process_data(data): 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], - 'user_id': data["user_id"], + 'user_id': data["tenant_key"], + 'tenant_key': data["tenant_key"], 'owner_email': None if data["opt_out"] else data["email"], 'organization_name': None if data["opt_out"] else 
data["name"], 'users_count': data["t_users"], @@ -28,7 +29,7 @@ def compute(): t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0), t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0), t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0) - RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);""" ) data = cur.fetchone() @@ -40,6 +41,7 @@ def new_client(): cur.execute( f"""SELECT *, (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email - FROM public.tenants;""") + FROM public.tenants + LIMIT 1;""") data = cur.fetchone() requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index 51fd55787..a002f8501 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -8,7 +8,8 @@ def process_data(data): 'edition': license.EDITION, 'tracking': data["opt_out"], 'version': data["version_number"], - 'user_id': data["user_id"], + 'user_id': data["tenant_key"], + 'tenant_key': data["tenant_key"], 'owner_email': None if data["opt_out"] else data["email"], 'organization_name': None if data["opt_out"] else data["name"], 'users_count': data["t_users"], @@ -51,7 +52,7 @@ def compute(): FROM public.tenants ) AS all_tenants WHERE tenants.tenant_id = all_tenants.tenant_id - RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, (SELECT openreplay_version()) AS version_number, (SELECT email FROM public.users WHERE role = 'owner' AND users.tenant_id=tenants.tenant_id LIMIT 1);""" ) @@ -64,8 +65,9 @@ def new_client(tenant_id): with 
pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""SELECT *, - (SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s) AS email + (SELECT email FROM public.users WHERE tenant_id=%(tenant_id)s AND role='owner' LIMIT 1) AS email FROM public.tenants - WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id})) + WHERE tenant_id=%(tenant_id)s + LIMIT 1;""", {"tenant_id": tenant_id})) data = cur.fetchone() requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 1fb572626..6569ef682 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -36,6 +36,20 @@ ALTER TABLE tenants ALTER TABLE dashboards ALTER COLUMN user_id DROP NOT NULL; +DO +$$ + BEGIN + IF EXISTS(SELECT * + FROM information_schema.columns + WHERE table_name = 'tenants' + and column_name = 'user_id') + THEN + ALTER TABLE tenants + RENAME COLUMN user_id TO tenant_key; + END IF; + END +$$; + COMMIT; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index d044f1636..50cd912fa 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -142,7 +142,7 @@ $$ CREATE TABLE IF NOT EXISTS tenants ( tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id text NOT NULL DEFAULT generate_api_key(20), + tenant_key text NOT NULL DEFAULT generate_api_key(20), name text NOT NULL, api_key text UNIQUE default generate_api_key(20) not null, created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index b0275a3a8..8f07edf0f 
100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -23,6 +23,20 @@ ALTER TABLE tenants ALTER TABLE dashboards ALTER COLUMN user_id DROP NOT NULL; +DO +$$ + BEGIN + IF EXISTS(SELECT * + FROM information_schema.columns + WHERE table_name = 'tenants' + and column_name = 'user_id') + THEN + ALTER TABLE tenants + RENAME COLUMN user_id TO tenant_key; + END IF; + END +$$; + COMMIT; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index f870b7824..b26483e9b 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -117,7 +117,7 @@ $$ CREATE TABLE tenants ( tenant_id integer NOT NULL DEFAULT 1, - user_id text NOT NULL DEFAULT generate_api_key(20), + tenant_key text NOT NULL DEFAULT generate_api_key(20), name text NOT NULL, api_key text NOT NULL DEFAULT generate_api_key(20), created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), From 779c85dfda97ab92edf2ebd6209d3eb3bb124fa0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Jun 2022 17:37:49 +0200 Subject: [PATCH 173/221] feat(api): changes --- ee/api/chalicelib/core/tenants.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index cecb8a9cf..71119fd13 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -16,9 +16,9 @@ def get_by_tenant_key(tenant_key): t.version_number, t.opt_out FROM public.tenants AS t - WHERE t.user_id = %(user_id)s AND t.deleted_at ISNULL + WHERE t.tenant_key = %(tenant_key)s AND t.deleted_at ISNULL LIMIT 1;""", - {"user_id": tenant_key}) + {"tenant_key": tenant_key}) ) return helper.dict_to_camel_case(cur.fetchone()) @@ -35,7 +35,7 @@ def 
get_by_tenant_id(tenant_id): '{license.EDITION}' AS edition, t.version_number, t.opt_out, - t.user_id AS tenant_key + t.tenant_key FROM public.tenants AS t WHERE t.tenant_id = %(tenantId)s AND t.deleted_at ISNULL LIMIT 1;""", From 8aec59549589ebb51918e5ebfc15f049596f5ed7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 11:35:25 +0200 Subject: [PATCH 174/221] feat(api): changed connexion pool configuration feat(alerts): changed connexion pool configuration --- api/.env.default | 3 ++- api/Dockerfile.alerts | 1 + api/chalicelib/utils/pg_client.py | 4 +++- ee/api/.env.default | 3 ++- ee/api/Dockerfile.alerts | 1 + 5 files changed, 9 insertions(+), 3 deletions(-) diff --git a/api/.env.default b/api/.env.default index 30ff0b02d..aa14fc993 100644 --- a/api/.env.default +++ b/api/.env.default @@ -36,7 +36,8 @@ pg_password=asayerPostgres pg_port=5432 pg_user=postgres pg_timeout=30 -pg_minconn=45 +pg_minconn=20 +pg_maxconn=50 PG_RETRY_MAX=50 PG_RETRY_INTERVAL=2 put_S3_TTL=20 diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index 7d8dd8200..65668f79b 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -3,6 +3,7 @@ LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" ENV APP_NAME alerts ENV pg_minconn 2 +ENV pg_maxconn 10 # Add Tini # Startup daemon ENV TINI_VERSION v0.19.0 diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 3d60dda5c..1c4625873 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -52,7 +52,9 @@ def make_pool(): except (Exception, psycopg2.DatabaseError) as error: print("Error while closing all connexions to PostgreSQL", error) try: - postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), 100, **PG_CONFIG) + postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), + config("pg_maxconn", cast=int, default=80), + **PG_CONFIG) if (postgreSQL_pool): 
print("Connection pool created successfully") except (Exception, psycopg2.DatabaseError) as error: diff --git a/ee/api/.env.default b/ee/api/.env.default index 8215908b2..7687566d7 100644 --- a/ee/api/.env.default +++ b/ee/api/.env.default @@ -45,7 +45,8 @@ pg_password=asayerPostgres pg_port=5432 pg_user=postgres pg_timeout=30 -pg_minconn=45 +pg_minconn=20 +pg_maxconn=50 PG_RETRY_MAX=50 PG_RETRY_INTERVAL=2 put_S3_TTL=20 diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index ae8d308c8..1deff0a57 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -4,6 +4,7 @@ LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* ENV APP_NAME alerts ENV pg_minconn 2 +ENV pg_maxconn 10 # Add Tini # Startup daemon From 119ecd774335c688c3a38d01e24b50c33c25630a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 11:53:47 +0200 Subject: [PATCH 175/221] feat(api): ignore weekly report if SMTP not configured --- api/chalicelib/core/weekly_report.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 3d857ccc0..bebdd9f6e 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -29,6 +29,9 @@ def edit_config(user_id, weekly_report): def cron(): + if not helper.has_smtp(): + print("!!! 
No SMTP configuration found, ignoring weekly report") + return with pg_client.PostgresClient(long_query=True) as cur: params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), "1_week_ago": TimeUTC.midnight(delta_days=-7), From 63d2fce3b5738143f6e7a14c6cf5f0cd4ec4759a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 12:31:29 +0200 Subject: [PATCH 176/221] feat(api): fixed weekly report feat(api): optimised weekly report --- api/chalicelib/core/weekly_report.py | 13 +++++++------ ee/api/.gitignore | 1 - ee/api/clean.sh | 1 - .../helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 1 + .../helm/db/init_dbs/postgresql/init_schema.sql | 1 + scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- scripts/helm/db/init_dbs/postgresql/init_schema.sql | 2 ++ 7 files changed, 12 insertions(+), 9 deletions(-) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index bebdd9f6e..88e785b94 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -33,7 +33,8 @@ def cron(): print("!!! 
No SMTP configuration found, ignoring weekly report") return with pg_client.PostgresClient(long_query=True) as cur: - params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), + params = {"tomorrow": TimeUTC.midnight(delta_days=1), + "3_days_ago": TimeUTC.midnight(delta_days=-3), "1_week_ago": TimeUTC.midnight(delta_days=-7), "2_week_ago": TimeUTC.midnight(delta_days=-14), "5_week_ago": TimeUTC.midnight(delta_days=-35)} @@ -46,18 +47,18 @@ def cron(): COALESCE(week_0_issues.count, 0) AS this_week_issues_count, COALESCE(week_1_issues.count, 0) AS past_week_issues_count, COALESCE(month_1_issues.count, 0) AS past_month_issues_count - FROM public.projects + FROM (SELECT project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects INNER JOIN LATERAL ( SELECT sessions.project_id FROM public.sessions WHERE sessions.project_id = projects.project_id AND start_ts >= %(3_days_ago)s + AND start_ts < %(tomorrow)s LIMIT 1) AS recently_active USING (project_id) INNER JOIN LATERAL ( SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails FROM public.users - WHERE users.tenant_id = projects.tenant_id - AND users.deleted_at ISNULL + WHERE users.deleted_at ISNULL AND users.weekly_report ) AS users ON (TRUE) LEFT JOIN LATERAL ( @@ -66,6 +67,7 @@ def cron(): INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count @@ -82,8 +84,7 @@ def cron(): WHERE sessions.project_id = projects.project_id AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT - ) AS month_1_issues ON (TRUE) - WHERE projects.deleted_at ISNULL;"""), params) + ) AS month_1_issues ON (TRUE);"""), 
params) projects_data = cur.fetchall() emails_to_send = [] for p in projects_data: diff --git a/ee/api/.gitignore b/ee/api/.gitignore index fb839d5e6..a0bd649f3 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -214,7 +214,6 @@ Pipfile /chalicelib/core/socket_ios.py /chalicelib/core/sourcemaps.py /chalicelib/core/sourcemaps_parser.py -/chalicelib/core/weekly_report.py /chalicelib/saml /chalicelib/utils/html/ /chalicelib/utils/__init__.py diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 861d1d9f1..549228366 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -38,7 +38,6 @@ rm -rf ./chalicelib/core/slack.py rm -rf ./chalicelib/core/socket_ios.py rm -rf ./chalicelib/core/sourcemaps.py rm -rf ./chalicelib/core/sourcemaps_parser.py -rm -rf ./chalicelib/core/weekly_report.py rm -rf ./chalicelib/saml rm -rf ./chalicelib/utils/html/ rm -rf ./chalicelib/utils/__init__.py diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 6569ef682..7b5169c3c 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -51,6 +51,7 @@ $$ $$; COMMIT; +CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 50cd912fa..76ed78d87 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -258,6 +258,7 @@ $$ CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key); + CREATE INDEX IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects 
(project_id) WHERE deleted_at IS NULL; DROP TRIGGER IF EXISTS on_insert_or_update ON projects; CREATE TRIGGER on_insert_or_update AFTER INSERT OR UPDATE diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 8f07edf0f..00bf4ec1d 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -38,7 +38,7 @@ $$ $$; COMMIT; - +CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index b26483e9b..f06ff4f9a 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -201,6 +201,8 @@ $$ ); CREATE INDEX projects_project_key_idx ON public.projects (project_key); + CREATE INDEX projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL; + CREATE TRIGGER on_insert_or_update AFTER INSERT OR UPDATE ON projects From 0dd7914375e1539ec487e8208964474c7c795eaa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 12:33:36 +0200 Subject: [PATCH 177/221] feat(api): EE changed weekly report feat(api): changed login response --- api/routers/core_dynamic.py | 2 - ee/api/chalicelib/core/weekly_report.py | 245 ++++++++++++++++++++++++ ee/api/routers/core_dynamic.py | 2 - 3 files changed, 245 insertions(+), 4 deletions(-) create mode 100644 ee/api/chalicelib/core/weekly_report.py diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 918d81541..a407e2833 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -52,9 +52,7 
@@ def login(data: schemas.UserLoginSchema = Body(...)): c = tenants.get_by_tenant_id(tenant_id) c.pop("createdAt") c["smtp"] = helper.has_smtp() - c["iceServers"] = assist.get_ice_servers() r["smtp"] = c["smtp"] - r["iceServers"] = c["iceServers"] return { 'jwt': r.pop('jwt'), 'data': { diff --git a/ee/api/chalicelib/core/weekly_report.py b/ee/api/chalicelib/core/weekly_report.py new file mode 100644 index 000000000..e652010d4 --- /dev/null +++ b/ee/api/chalicelib/core/weekly_report.py @@ -0,0 +1,245 @@ +from chalicelib.utils import pg_client, helper, email_helper +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.helper import get_issue_title + +LOWEST_BAR_VALUE = 3 + + +def get_config(user_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""\ + SELECT users.weekly_report + FROM public.users + WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s + LIMIT 1;""", {"user_id": user_id})) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit_config(user_id, weekly_report): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""\ + UPDATE public.users + SET weekly_report= %(weekly_report)s + WHERE users.deleted_at ISNULL + AND users.user_id=%(user_id)s + RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report})) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def cron(): + if not helper.has_smtp(): + print("!!! 
No SMTP configuration found, ignoring weekly report") + return + with pg_client.PostgresClient(long_query=True) as cur: + params = {"tomorrow": TimeUTC.midnight(delta_days=1), + "3_days_ago": TimeUTC.midnight(delta_days=-3), + "1_week_ago": TimeUTC.midnight(delta_days=-7), + "2_week_ago": TimeUTC.midnight(delta_days=-14), + "5_week_ago": TimeUTC.midnight(delta_days=-35)} + cur.execute(cur.mogrify("""\ + SELECT project_id, + name AS project_name, + users.emails AS emails, + TO_CHAR(DATE_TRUNC('day', now()) - INTERVAL '1 week', 'Mon. DDth, YYYY') AS period_start, + TO_CHAR(DATE_TRUNC('day', now()), 'Mon. DDth, YYYY') AS period_end, + COALESCE(week_0_issues.count, 0) AS this_week_issues_count, + COALESCE(week_1_issues.count, 0) AS past_week_issues_count, + COALESCE(month_1_issues.count, 0) AS past_month_issues_count + FROM (SELECT tenant_id, project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects + INNER JOIN LATERAL ( + SELECT sessions.project_id + FROM public.sessions + WHERE sessions.project_id = projects.project_id + AND start_ts >= %(3_days_ago)s + AND start_ts < %(tomorrow)s + LIMIT 1) AS recently_active USING (project_id) + INNER JOIN LATERAL ( + SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails + FROM public.users + WHERE users.tenant_id = projects.tenant_id + AND users.deleted_at ISNULL + AND users.weekly_report + ) AS users ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp < %(tomorrow)s + ) AS week_0_issues ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL 
'1 week') * 1000)::BIGINT + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + ) AS week_1_issues ON (TRUE) + LEFT JOIN LATERAL ( + SELECT COUNT(1) AS count + FROM events_common.issues + INNER JOIN public.sessions USING (session_id) + WHERE sessions.project_id = projects.project_id + AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + ) AS month_1_issues ON (TRUE);"""), params) + projects_data = cur.fetchall() + emails_to_send = [] + for p in projects_data: + params["project_id"] = p["project_id"] + print(f"checking {p['project_name']} : {p['project_id']}") + if len(p["emails"]) == 0 \ + or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0: + print('ignore') + continue + print("valid") + p["past_week_issues_evolution"] = helper.__decimal_limit( + helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1) + p["past_month_issues_evolution"] = helper.__decimal_limit( + helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1) + cur.execute(cur.mogrify(""" + SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short, + TO_CHAR(timestamp_i, 'Mon. 
DD, YYYY') AS day_long, + ( + SELECT COUNT(*) + FROM events_common.issues INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT + AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT + ) AS issues_count + FROM generate_series( + DATE_TRUNC('day', now()) - INTERVAL '7 days', + DATE_TRUNC('day', now()) - INTERVAL '1 day', + '1 day'::INTERVAL + ) AS timestamp_i + ORDER BY timestamp_i;""", params)) + days_partition = cur.fetchall() + max_days_partition = max(x['issues_count'] for x in days_partition) + for d in days_partition: + if max_days_partition <= 0: + d["value"] = LOWEST_BAR_VALUE + else: + d["value"] = d["issues_count"] * 100 / max_days_partition + d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE + cur.execute(cur.mogrify("""\ + SELECT type, COUNT(*) AS count + FROM events_common.issues INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT + GROUP BY type + ORDER BY count DESC, type + LIMIT 4;""", params)) + issues_by_type = cur.fetchall() + max_issues_by_type = sum(i["count"] for i in issues_by_type) + for i in issues_by_type: + i["type"] = get_issue_title(i["type"]) + if max_issues_by_type <= 0: + i["value"] = LOWEST_BAR_VALUE + else: + i["value"] = i["count"] * 100 / max_issues_by_type + cur.execute(cur.mogrify("""\ + SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short, + TO_CHAR(timestamp_i, 'Mon. 
DD, YYYY') AS day_long, + COALESCE((SELECT JSONB_AGG(sub) + FROM ( + SELECT type, COUNT(*) AS count + FROM events_common.issues + INNER JOIN public.issues USING (issue_id) + WHERE project_id = %(project_id)s + AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT + AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT + GROUP BY type + ORDER BY count + ) AS sub), '[]'::JSONB) AS partition + FROM generate_series( + DATE_TRUNC('day', now()) - INTERVAL '7 days', + DATE_TRUNC('day', now()) - INTERVAL '1 day', + '1 day'::INTERVAL + ) AS timestamp_i + GROUP BY timestamp_i + ORDER BY timestamp_i;""", params)) + issues_breakdown_by_day = cur.fetchall() + for i in issues_breakdown_by_day: + i["sum"] = sum(x["count"] for x in i["partition"]) + for j in i["partition"]: + j["type"] = get_issue_title(j["type"]) + max_days_partition = max(i["sum"] for i in issues_breakdown_by_day) + for i in issues_breakdown_by_day: + for j in i["partition"]: + if max_days_partition <= 0: + j["value"] = LOWEST_BAR_VALUE + else: + j["value"] = j["count"] * 100 / max_days_partition + j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE + cur.execute(cur.mogrify(""" + SELECT type, + COUNT(*) AS issue_count, + COUNT(DISTINCT session_id) AS sessions_count, + (SELECT COUNT(DISTINCT sessions.session_id) + FROM public.sessions + INNER JOIN events_common.issues AS sci USING (session_id) + INNER JOIN public.issues AS si USING (issue_id) + WHERE si.project_id = %(project_id)s + AND sessions.project_id = %(project_id)s + AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 weeks') * 1000)::BIGINT + AND si.type = mi.type + AND sessions.duration IS NOT NULL + ) AS last_week_sessions_count, + (SELECT COUNT(DISTINCT sci.session_id) + FROM public.sessions + INNER JOIN events_common.issues AS sci USING 
(session_id) + INNER JOIN public.issues AS si USING (issue_id) + WHERE si.project_id = %(project_id)s + AND sessions.project_id = %(project_id)s + AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 weeks') * 1000)::BIGINT + AND si.type = mi.type + AND sessions.duration IS NOT NULL + ) AS last_month_sessions_count + FROM events_common.issues + INNER JOIN public.issues AS mi USING (issue_id) + INNER JOIN public.sessions USING (session_id) + WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL + AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + GROUP BY type + ORDER BY issue_count DESC;""", params)) + issues_breakdown_list = cur.fetchall() + if len(issues_breakdown_list) > 4: + others = {"type": "Others", + "sessions_count": sum(i["sessions_count"] for i in issues_breakdown_list[4:]), + "issue_count": sum(i["issue_count"] for i in issues_breakdown_list[4:]), + "last_week_sessions_count": sum( + i["last_week_sessions_count"] for i in issues_breakdown_list[4:]), + "last_month_sessions_count": sum( + i["last_month_sessions_count"] for i in issues_breakdown_list[4:])} + issues_breakdown_list = issues_breakdown_list[:4] + issues_breakdown_list.append(others) + for i in issues_breakdown_list: + i["type"] = get_issue_title(i["type"]) + i["last_week_sessions_evolution"] = helper.__decimal_limit( + helper.__progress(i["sessions_count"], i["last_week_sessions_count"]), 1) + i["last_month_sessions_evolution"] = helper.__decimal_limit( + helper.__progress(i["sessions_count"], i["last_month_sessions_count"]), 1) + i["sessions_count"] = f'{i["sessions_count"]:,}' + keep_types = [i["type"] for i in issues_breakdown_list] + for i in issues_breakdown_by_day: + keep = [] + for j in i["partition"]: + if j["type"] in keep_types: + 
keep.append(j) + i["partition"] = keep + emails_to_send.append({"email": p.pop("emails"), + "data": { + **p, + "days_partition": days_partition, + "issues_by_type": issues_by_type, + "issues_breakdown_by_day": issues_breakdown_by_day, + "issues_breakdown_list": issues_breakdown_list + }}) + print(f">>> Sending weekly report to {len(emails_to_send)} email-group") + for e in emails_to_send: + email_helper.weekly_report2(recipients=e["email"], data=e["data"]) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 196764ad9..89f6a9bc9 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -57,9 +57,7 @@ def login(data: schemas.UserLoginSchema = Body(...)): c = tenants.get_by_tenant_id(tenant_id) c.pop("createdAt") c["smtp"] = helper.has_smtp() - c["iceServers"] = assist.get_ice_servers() r["smtp"] = c["smtp"] - r["iceServers"] = c["iceServers"] return { 'jwt': r.pop('jwt'), 'data': { From 557d855ae588e7078d964a56fe9c0bd027031f2f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:29:54 +0200 Subject: [PATCH 178/221] feat(api): changed login response --- api/chalicelib/core/users.py | 2 +- api/routers/core.py | 33 +++++++++++++++++++++-- api/routers/core_dynamic.py | 39 +--------------------------- ee/api/chalicelib/core/users.py | 2 +- ee/api/routers/core_dynamic.py | 46 +++------------------------------ 5 files changed, 37 insertions(+), 85 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 0e9852e2d..082e9aca9 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -557,7 +557,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, 1 AS tenant_id, users.role, users.name, diff --git a/api/routers/core.py b/api/routers/core.py index 2a38d0a75..c997229ba 100644 --- a/api/routers/core.py 
+++ b/api/routers/core.py @@ -1,7 +1,8 @@ from typing import Union from decouple import config -from fastapi import Depends, Body, BackgroundTasks +from fastapi import Depends, Body, BackgroundTasks, HTTPException +from starlette import status import schemas from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \ @@ -13,7 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, users, \ custom_metrics, saved_search from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import email_helper +from chalicelib.utils import email_helper, helper, captcha from chalicelib.utils.TimeUTC import TimeUTC from or_dependencies import OR_context from routers.base import get_routers @@ -21,6 +22,34 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() +@public_app.post('/login', tags=["authentication"]) +def login(data: schemas.UserLoginSchema = Body(...)): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid captcha." + ) + + r = users.authenticate(data.email, data.password, for_plugin=False) + if r is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="You’ve entered invalid Email or Password." 
+ ) + if "errors" in r: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=r["errors"][0] + ) + r["smtp"] = helper.has_smtp() + return { + 'jwt': r.pop('jwt'), + 'data': { + "user": r + } + } + + @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index a407e2833..06cd2937a 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -1,17 +1,15 @@ from typing import Optional from decouple import config -from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from fastapi import Body, Depends, BackgroundTasks from starlette.responses import RedirectResponse import schemas -from chalicelib.core import assist from chalicelib.core import integrations_manager from chalicelib.core import sessions from chalicelib.core import tenants, users, metadata, projects, license from chalicelib.core import webhook from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha from chalicelib.utils import helper from or_dependencies import OR_context from routers.base import get_routers @@ -27,41 +25,6 @@ def get_all_signup(): "edition": license.EDITION}} -@public_app.post('/login', tags=["authentication"]) -def login(data: schemas.UserLoginSchema = Body(...)): - if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid captcha." - ) - - r = users.authenticate(data.email, data.password, for_plugin=False) - if r is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="You’ve entered invalid Email or Password." 
- ) - - tenant_id = r.pop("tenantId") - - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["smtp"] = helper.has_smtp() - r["smtp"] = c["smtp"] - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 91c2384c4..6a51a1d80 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -627,7 +627,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.tenant_id, users.role, users.name, diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 89f6a9bc9..73e597b52 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -1,17 +1,17 @@ from typing import Optional from decouple import config -from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from fastapi import Body, Depends, BackgroundTasks from starlette.responses import RedirectResponse import schemas import schemas_ee from chalicelib.core import integrations_manager from chalicelib.core import sessions -from chalicelib.core import tenants, users, metadata, projects, license, assist +from chalicelib.core import tenants, users, metadata, projects, license from chalicelib.core import webhook from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha, SAML2_helper +from chalicelib.utils import SAML2_helper from chalicelib.utils import helper from or_dependencies import OR_context from routers.base import get_routers 
@@ -27,46 +27,6 @@ def get_all_signup(): "edition": license.EDITION}} -@public_app.post('/login', tags=["authentication"]) -def login(data: schemas.UserLoginSchema = Body(...)): - if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid captcha." - ) - - r = users.authenticate(data.email, data.password, for_plugin=False) - if r is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="You’ve entered invalid Email or Password." - ) - if "errors" in r: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail=r["errors"][0] - ) - - tenant_id = r.pop("tenantId") - - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["smtp"] = helper.has_smtp() - r["smtp"] = c["smtp"] - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - @app.get('/account', tags=['accounts']) def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) From 40d60f7769b6b1fb52a8338511f2062e30bb54ef Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:44:05 +0200 Subject: [PATCH 179/221] feat(api): fixed login response --- api/chalicelib/core/users.py | 2 +- ee/api/chalicelib/core/users.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 082e9aca9..8eb08bd35 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -586,7 +586,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): {"user_id": r["id"]}) cur.execute(query) return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], + "jwt": authorizers.generate_jwt(r['userId'], 
r['tenantId'], TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"), "email": email, diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 6a51a1d80..49081b12a 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -666,7 +666,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): if for_change_password: return True r = helper.dict_to_camel_case(r) - jwt_iat = change_jwt_iat(r['id']) + jwt_iat = change_jwt_iat(r['userId']) return { "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], TimeUTC.datetime_to_timestamp(jwt_iat), @@ -681,7 +681,7 @@ def authenticate_sso(email, internal_id, exp=None): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.tenant_id, users.role, users.name, @@ -699,7 +699,7 @@ def authenticate_sso(email, internal_id, exp=None): if r is not None: r = helper.dict_to_camel_case(r) - jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) return authorizers.generate_jwt(r['id'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", exp=(exp + jwt_iat // 1000) if exp is not None else None) From 6c377bc4e57c169bbf56320b6b5d52c8670218e6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 15:49:24 +0200 Subject: [PATCH 180/221] feat(api): fixed login response --- ee/api/chalicelib/core/users.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 49081b12a..e5d5d17c9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -668,7 +668,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): r = helper.dict_to_camel_case(r) jwt_iat = 
change_jwt_iat(r['userId']) return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], + "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], TimeUTC.datetime_to_timestamp(jwt_iat), aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"), "email": email, @@ -700,7 +700,7 @@ def authenticate_sso(email, internal_id, exp=None): if r is not None: r = helper.dict_to_camel_case(r) jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) - return authorizers.generate_jwt(r['id'], r['tenantId'], + return authorizers.generate_jwt(r['userId'], r['tenantId'], jwt_iat, aud=f"front:{helper.get_stage_name()}", exp=(exp + jwt_iat // 1000) if exp is not None else None) return None From 405d83d4e0c36d468a27262d3934135896166cea Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 16:31:08 +0200 Subject: [PATCH 181/221] feat(api): optimised weekly report --- api/chalicelib/core/weekly_report.py | 10 +++++----- ee/api/chalicelib/core/weekly_report.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 88e785b94..952bf584b 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -66,7 +66,7 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= %(1_week_ago)s AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( @@ -74,16 +74,16 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM 
DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(2_week_ago)s ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(5_week_ago)s ) AS month_1_issues ON (TRUE);"""), params) projects_data = cur.fetchall() emails_to_send = [] diff --git a/ee/api/chalicelib/core/weekly_report.py b/ee/api/chalicelib/core/weekly_report.py index e652010d4..90256d795 100644 --- a/ee/api/chalicelib/core/weekly_report.py +++ b/ee/api/chalicelib/core/weekly_report.py @@ -67,7 +67,7 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT + AND issues.timestamp >= %(1_week_ago)s AND issues.timestamp < %(tomorrow)s ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( @@ -75,16 +75,16 @@ def cron(): FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(2_week_ago)s ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id - AND 
issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT - AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT + AND issues.timestamp <= %(1_week_ago)s + AND issues.timestamp >= %(5_week_ago)s ) AS month_1_issues ON (TRUE);"""), params) projects_data = cur.fetchall() emails_to_send = [] From 421a1f110457be25821e3e8077dffdba40ef7fe6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:19:51 +0200 Subject: [PATCH 182/221] feat(api): custom metrics config --- api/chalicelib/core/custom_metrics.py | 6 ++++-- api/schemas.py | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index d6ebebc76..a2794f22e 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -228,9 +228,11 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} query = cur.mogrify(f"""\ WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, - view_type, metric_type, metric_of, metric_value, metric_format) + view_type, metric_type, metric_of, metric_value, + metric_format, default_config) VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, - %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, %(metric_format)s) + %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, + %(metric_format)s, %(default_config)s) RETURNING *) INSERT INTO metric_series(metric_id, index, name, filter) diff --git a/api/schemas.py b/api/schemas.py index ab063a9b9..5c1a33927 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -874,8 +874,15 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema): alias_generator = attribute_to_camel_case +class CustomMetricsConfigSchema(BaseModel): + col: Optional[int] = 
Field(default=2) + row: Optional[int] = Field(default=2) + position: Optional[int] = Field(default=0) + + class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema): series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1) + config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema()) @root_validator(pre=True) def transform_series(cls, values): From bafae833d5f3d35410d54f56e4927ab5f1abb8fb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:36:22 +0200 Subject: [PATCH 183/221] feat(api): limited long task DB --- api/chalicelib/core/sessions.py | 6 +++--- api/chalicelib/utils/pg_client.py | 11 ++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 7543f9c9d..5f9a6d3ab 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1199,7 +1199,7 @@ def get_session_ids_by_user_ids(project_id, user_ids): def delete_sessions_by_session_ids(session_ids): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: query = cur.mogrify( """\ DELETE FROM public.sessions @@ -1213,7 +1213,7 @@ def delete_sessions_by_session_ids(session_ids): def delete_sessions_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: query = cur.mogrify( """\ DELETE FROM public.sessions @@ -1227,6 +1227,6 @@ def delete_sessions_by_user_ids(project_id, user_ids): def count_all(): - with pg_client.PostgresClient(long_query=True) as cur: + with pg_client.PostgresClient(unlimited_query=True) as cur: row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") return row.get("count", 0) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 1c4625873..2abc9f6c7 100644 --- a/api/chalicelib/utils/pg_client.py +++ 
b/api/chalicelib/utils/pg_client.py @@ -76,12 +76,17 @@ class PostgresClient: cursor = None long_query = False - def __init__(self, long_query=False): + def __init__(self, long_query=False, unlimited_query=False): self.long_query = long_query - if long_query: + if unlimited_query: + long_config = dict(_PG_CONFIG) + long_config["application_name"] += "-UNLIMITED" + self.connection = psycopg2.connect(**long_config) + elif long_query: long_config = dict(_PG_CONFIG) long_config["application_name"] += "-LONG" - self.connection = psycopg2.connect(**_PG_CONFIG) + long_config["options"] = f"-c statement_timeout={config('pg_long_timeout', cast=int, default=5*60) * 1000}" + self.connection = psycopg2.connect(**long_config) else: self.connection = postgreSQL_pool.getconn() From 46e7f5b83eda38f42ecf9ae55e603ad7453c429a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Jun 2022 17:51:47 +0200 Subject: [PATCH 184/221] feat(api): custom metrics config --- api/chalicelib/core/custom_metrics.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index a2794f22e..c3bdb134d 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -225,7 +225,9 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa _data[f"filter_{i}"] = s.filter.json() series_len = len(data.series) data.series = None - params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} + params = {"user_id": user_id, "project_id": project_id, + "default_config": json.dumps(data.config.dict()), + **data.dict(), **_data} query = cur.mogrify(f"""\ WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, view_type, metric_type, metric_of, metric_value, From 03e0dbf0e438a865d5c8e634fe9ba9a43c432e08 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 13:19:24 +0200 Subject: [PATCH 185/221] feat(api): 
optimised get session details --- api/chalicelib/core/events.py | 7 +++---- api/chalicelib/core/resources.py | 13 ++++++++++--- api/chalicelib/core/sessions.py | 6 ++++-- ee/api/chalicelib/core/resources.py | 12 +++++++++--- 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index d07cf1042..8f978c40a 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -472,14 +472,13 @@ def search(text, event_type, project_id, source, key): return {"data": rows} -def get_errors_by_session_id(session_id): +def get_errors_by_session_id(session_id, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT er.*,ur.*, er.timestamp - s.start_ts AS time FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) - WHERE - er.session_id = %(session_id)s - ORDER BY timestamp;""", {"session_id": session_id})) + WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s + ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) errors = cur.fetchall() for e in errors: e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) diff --git a/api/chalicelib/core/resources.py b/api/chalicelib/core/resources.py index d85e56b6f..1e2f4718e 100644 --- a/api/chalicelib/core/resources.py +++ b/api/chalicelib/core/resources.py @@ -1,8 +1,10 @@ from chalicelib.utils import helper, pg_client +from decouple import config -def get_by_session_id(session_id, project_id): +def get_by_session_id(session_id, project_id, start_ts, duration): with pg_client.PostgresClient() as cur: + delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000 ch_query = """\ SELECT timestamp AS datetime, @@ -16,8 +18,13 @@ def get_by_session_id(session_id, project_id): success, COALESCE(status, CASE WHEN success THEN 200 END) AS status FROM events.resources 
INNER JOIN sessions USING (session_id) - WHERE session_id = %(session_id)s AND project_id= %(project_id)s;""" - params = {"session_id": session_id, "project_id": project_id} + WHERE session_id = %(session_id)s + AND project_id= %(project_id)s + AND sessions.start_ts=%(start_ts)s + AND resources.timestamp>=%(res_start_ts)s + AND resources.timestamp>=%(res_end_ts)s;""" + params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, + "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, } cur.execute(cur.mogrify(ch_query, params)) rows = cur.fetchall() return helper.list_to_camel_case(rows) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 5f9a6d3ab..9894d24c4 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -85,7 +85,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ else: data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] # to keep only the first stack data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if @@ -94,7 +94,9 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["start_ts"], + duration=data["duration"]) data['metadata'] = 
__group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id) diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py index 4e4f1c4e8..7666e66cf 100644 --- a/ee/api/chalicelib/core/resources.py +++ b/ee/api/chalicelib/core/resources.py @@ -1,16 +1,22 @@ from chalicelib.utils import helper from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC +from decouple import config -def get_by_session_id(session_id, project_id): +def get_by_session_id(session_id, project_id, start_ts, duration): with ch_client.ClickHouseClient() as ch: + delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000 ch_query = """\ SELECT datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status FROM resources - WHERE session_id = toUInt64(%(session_id)s) AND project_id=%(project_id)s;""" - params = {"session_id": session_id, "project_id": project_id} + WHERE session_id = toUInt64(%(session_id)s) + AND project_id=%(project_id)s + AND datetime >= toDateTime(%(res_start_ts)s / 1000) + AND datetime <= toDateTime(%(res_end_ts)s / 1000);""" + params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, + "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, } rows = ch.execute(query=ch_query, params=params) results = [] for r in rows: From 7d4596c074ae936cafdfcb692b14a5bb9d9f35ef Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 15:24:21 +0200 Subject: [PATCH 186/221] feat(api): get sessions details fix --- api/chalicelib/core/sessions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 9894d24c4..ae18ac888 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -95,7 
+95,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["start_ts"], + start_ts=data["startTs"], duration=data["duration"]) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) From c68edbc705113c0125c73a7974fad1af3da8a2bb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 15:59:54 +0200 Subject: [PATCH 187/221] feat(api): fixed login --- api/chalicelib/core/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 8eb08bd35..794121064 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -583,7 +583,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): SET jwt_iat = timezone('utc'::text, now()) WHERE user_id = %(user_id)s RETURNING jwt_iat;""", - {"user_id": r["id"]}) + {"user_id": r["userId"]}) cur.execute(query) return { "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], From 531b1124396b84c5207e61f4c6926d5a67de4676 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 16:07:56 +0200 Subject: [PATCH 188/221] feat(api): fixed custom metrics timestamp issue --- api/chalicelib/core/custom_metrics.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index c3bdb134d..f7de1a7c9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -52,6 +52,8 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): "stages": [], "totalDropDueToIssues": 0 } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return 
funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) @@ -68,6 +70,8 @@ def __get_errors_list(project_id, user_id, data): "total": 0, "errors": [] } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) @@ -78,10 +82,13 @@ def __is_sessions_list(data): def __get_sessions_list(project_id, user_id, data): if len(data.series) == 0: + print("empty series") return { "total": 0, "sessions": [] } + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) From f296b2734607b1a7f4ec6fdb43de0e1f19062c0b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 18:24:03 +0200 Subject: [PATCH 189/221] feat(api): optimised get issues for get session-details --- api/chalicelib/core/events.py | 6 +++--- api/chalicelib/core/issues.py | 8 +++++--- api/chalicelib/core/sessions.py | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 8f978c40a..dd9562de1 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -28,8 +28,8 @@ def __merge_cells(rows, start, count, replacement): return rows -def __get_grouped_clickrage(rows, session_id): - click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage") +def __get_grouped_clickrage(rows, session_id, project_id): + click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) if len(click_rage_issues) == 0: return rows @@ -63,7 +63,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False): ) rows = cur.fetchall() if group_clickrage: - rows = __get_grouped_clickrage(rows=rows, session_id=session_id) + 
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) cur.execute(cur.mogrify(""" SELECT diff --git a/api/chalicelib/core/issues.py b/api/chalicelib/core/issues.py index e1aa54712..e4ac11745 100644 --- a/api/chalicelib/core/issues.py +++ b/api/chalicelib/core/issues.py @@ -44,16 +44,18 @@ def get(project_id, issue_id): return helper.dict_to_camel_case(data) -def get_by_session_id(session_id, issue_type=None): +def get_by_session_id(session_id, project_id, issue_type=None): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ SELECT * FROM events_common.issues INNER JOIN public.issues USING (issue_id) - WHERE session_id = %(session_id)s {"AND type = %(type)s" if issue_type is not None else ""} + WHERE session_id = %(session_id)s + AND project_id= %(project_id)s + {"AND type = %(type)s" if issue_type is not None else ""} ORDER BY timestamp;""", - {"session_id": session_id, "type": issue_type}) + {"session_id": session_id, "project_id": project_id, "type": issue_type}) ) return helper.list_to_camel_case(cur.fetchall()) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index ae18ac888..b3cd81c1d 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -99,7 +99,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ duration=data["duration"]) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id) + data['issues'] = issues.get_by_session_id(session_id=session_id,project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) From b4b3a6c26e34024ed1e6adf9f6a09e8860281750 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:20:16 +0200 Subject: [PATCH 190/221] feat(api): custom metrics fixed templates response --- 
api/chalicelib/core/dashboards.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 25dbdada3..bdd0518e0 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -38,6 +38,9 @@ def get_templates(project_id, user_id): for w in r["widgets"]: w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) + for s in w["series"]: + s["filter"] = helper.old_search_payload_to_flat(s["filter"]) + return helper.list_to_camel_case(rows) From 1448cb45e944baa13073bde57aa4f0bb7bd216c8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:26:00 +0200 Subject: [PATCH 191/221] feat(api): metrics table of errors --- api/chalicelib/core/custom_metrics.py | 4 +--- api/schemas.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index f7de1a7c9..2967b7fec 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -59,9 +59,7 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema): def __is_errors_list(data): return data.metric_type == schemas.MetricType.table \ - and data.metric_of == schemas.TableMetricOfType.issues \ - and len(data.metric_value) == 1 and data.metric_value[0] == schemas.IssueType.js_exception \ - and data.metric_format == schemas.MetricFormatType.errors_list + and data.metric_of == schemas.TableMetricOfType.errors def __get_errors_list(project_id, user_id, data): diff --git a/api/schemas.py b/api/schemas.py index 5c1a33927..715bf0f84 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -481,7 +481,6 @@ class IssueType(str, Enum): class MetricFormatType(str, Enum): session_count = 'sessionCount' - errors_list = 'errors' class __MixedSearchFilter(BaseModel): @@ -811,6 +810,7 @@ class TableMetricOfType(str, Enum): 
issues = FilterType.issue.value visited_url = EventType.location.value sessions = "SESSIONS" + errors = IssueType.js_exception.value class TimeseriesMetricOfType(str, Enum): From c7c6cd2187b8d49559e395f2923ebf2d3110e885 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Jun 2022 19:56:27 +0200 Subject: [PATCH 192/221] feat(api):metrics get sessions related to issue --- api/chalicelib/core/custom_metrics.py | 31 +++++++++++++++++++++++++++ api/routers/core.py | 2 +- api/routers/subs/metrics.py | 12 +++++++++++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 2967b7fec..b92934912 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -500,3 +500,34 @@ def change_state(project_id, metric_id, user_id, status): {"metric_id": metric_id, "status": status, "user_id": user_id}) ) return get(metric_id=metric_id, project_id=project_id, user_id=user_id) + + +def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, + data: schemas.CustomMetricSessionsPayloadSchema + # , range_value=None, start_date=None, end_date=None + ): + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) + if metric is None: + return None + results = [] + for s in metric.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + issues = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) + issues = issues.get("significant", []) + issues.get("insignificant", []) + issue = None + for i in issues: + if i.get("issueId", "") == issue_id: + issue = i + break + results.append({"seriesId": s.series_id, "seriesName": s.name, + "sessions": 
sessions.search2_pg(user_id=user_id, project_id=project_id, + issue=issue, data=s.filter) + if issue is not None else {"total": 0, "sessions": []}, + "issue": issue}) + return results diff --git a/api/routers/core.py b/api/routers/core.py index c997229ba..3f3d91e80 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -772,7 +772,7 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. @app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) -def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, +def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) if issue is None: diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index e00d2d4f7..c68eec3e9 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -172,6 +172,18 @@ def get_custom_metric_funnel_issues(projectId: int, metric_id: int, return {"data": data} +@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) +def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id, + metric_id=metric_id, issue_id=issueId, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) def get_custom_metric_errors_list(projectId: int, metric_id: int, 
From 5b1185b87235a515297b64b81b0afb2a8f68f052 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 14:56:46 +0200 Subject: [PATCH 193/221] feat(api): metric-funnel changed response --- api/chalicelib/core/custom_metrics.py | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index b92934912..5a7fdcea6 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -174,16 +174,13 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page - results.append({"seriesId": s.series_id, "seriesName": s.name, - **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}) - - return results + return {"seriesId": s.series_id, "seriesName": s.name, + **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)} def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): @@ -193,16 +190,13 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSe metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page - results.append({"seriesId": s.series_id, "seriesName": s.name, - **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results + return {"seriesId": s.series_id, "seriesName": s.name, + **errors.search(data=s.filter, 
project_id=project_id, user_id=user_id)} def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): @@ -512,7 +506,6 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None - results = [] for s in metric.series: s.filter.startDate = data.startTimestamp s.filter.endDate = data.endTimestamp @@ -525,9 +518,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, if i.get("issueId", "") == issue_id: issue = i break - results.append({"seriesId": s.series_id, "seriesName": s.name, - "sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, - issue=issue, data=s.filter) - if issue is not None else {"total": 0, "sessions": []}, - "issue": issue}) - return results + return {"seriesId": s.series_id, "seriesName": s.name, + "sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, + issue=issue, data=s.filter) + if issue is not None else {"total": 0, "sessions": []}, + "issue": issue} From 7d4d0fadbde016a7d2502a0d90045d0ef4da1424 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 15:07:39 +0200 Subject: [PATCH 194/221] feat(api): requirements upgrade --- api/requirements.txt | 6 +++--- ee/api/requirements.txt | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index d615851d1..f08b6db46 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,14 +1,14 @@ requests==2.27.1 urllib3==1.26.9 -boto3==1.22.6 -pyjwt==2.3.0 +boto3==1.24.8 +pyjwt==2.4.0 psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 -fastapi==0.75.2 +fastapi==0.78.0 uvicorn[standard]==0.17.6 python-decouple==3.6 pydantic[email]==1.8.2 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index f14d6022d..e96ed6ae5 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt 
@@ -1,14 +1,14 @@ requests==2.27.1 urllib3==1.26.9 -boto3==1.22.6 -pyjwt==2.3.0 +boto3==1.24.8 +pyjwt==2.4.0 psycopg2-binary==2.9.3 elasticsearch==7.9.1 jira==3.1.1 clickhouse-driver==0.2.3 python3-saml==1.12.0 -fastapi==0.75.2 +fastapi==0.78.0 python-multipart==0.0.5 uvicorn[standard]==0.17.6 python-decouple==3.6 From 03dbf42d1118ddb337c7c04cbd4cf2bbf01a3c46 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 17:19:58 +0200 Subject: [PATCH 195/221] feat(assist): FOSS assist search --- utilities/server.js | 2 ++ utilities/servers/websocket.js | 53 +++++++++++++++++++++++----------- utilities/utils/helper.js | 30 +++++++++++++++++-- 3 files changed, 66 insertions(+), 19 deletions(-) diff --git a/utilities/server.js b/utilities/server.js index b0eadcccd..ad03aafab 100644 --- a/utilities/server.js +++ b/utilities/server.js @@ -7,6 +7,8 @@ const HOST = '0.0.0.0'; const PORT = 9001; const wsapp = express(); +wsapp.use(express.json()); +wsapp.use(express.urlencoded({extended: true})); wsapp.use(request_logger("[wsapp]")); wsapp.use(`/assist/${process.env.S3_KEY}`, socket.wsRouter); diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index d2399477e..799304f20 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,12 +28,26 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractUserIdFromRequest = function (req) { +const extractFiltersFromRequest = function (req) { + let filters = {}; if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; + 
filters.userID = [req.query.userId]; } - return undefined; + filters = {...filters, ...req.body}; + let _filters = {} + for (let k of Object.keys(filters)) { + if (filters[k] !== undefined && filters[k] !== null) { + _filters[k] = filters[k]; + if (!Array.isArray(_filters[k])) { + _filters[k] = [_filters[k]]; + } + for (let i = 0; i < _filters[k].length; i++) { + _filters[k][i] = String(_filters[k][i]); + } + } + } + return Object.keys(_filters).length > 0 ? _filters : undefined; } const extractProjectKeyFromRequest = function (req) { @@ -57,18 +71,18 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); - + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -80,21 +94,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); 
let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -106,10 +122,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -119,8 +136,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -133,11 +150,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); 
+wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = extractFiltersFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -147,8 +165,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -161,6 +179,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 98322c417..070463e00 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -24,7 +24,33 @@ const request_logger = (identity) => { next(); } }; - +const isValidSession = function (sessionInfo, filters) { + let foundAll = true; + for (const [key, values] of Object.entries(filters)) { + let found = false; + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } + } + } + 
foundAll &&= found; + if (!found) { + break; + } + } + return foundAll; +} +const hasFilters = function (filters) { + return filters !== undefined && Object.keys(filters).length > 0; +} module.exports = { - extractPeerId, request_logger + extractPeerId, request_logger, isValidSession, hasFilters }; \ No newline at end of file From 58aea53101d691042b28099e4bb33b71fa1e7bc9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 18:01:34 +0200 Subject: [PATCH 196/221] feat(assist): assist upgrade uWebSockets feat(assist): assist upgrade SocketIo --- ee/utilities/package-lock.json | 97 ++++++++++++++++------------------ ee/utilities/package.json | 4 +- utilities/package-lock.json | 93 +++++++++++++------------------- utilities/package.json | 2 +- 4 files changed, 85 insertions(+), 111 deletions(-) diff --git a/ee/utilities/package-lock.json b/ee/utilities/package-lock.json index 98ef3f745..19699560a 100644 --- a/ee/utilities/package-lock.json +++ b/ee/utilities/package-lock.json @@ -13,9 +13,9 @@ "@socket.io/redis-adapter": "^7.1.0", "express": "^4.17.1", "redis": "^4.0.3", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2", - "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" + "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0" } }, "node_modules/@maxmind/geoip2-node": { @@ -83,14 +83,6 @@ "@node-redis/client": "^1.0.0" } }, - "node_modules/@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/@socket.io/redis-adapter": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz", @@ -121,9 +113,9 @@ "integrity": 
"sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "node_modules/@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -332,9 +324,9 @@ } }, "node_modules/engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -352,12 +344,9 @@ } }, "node_modules/engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "dependencies": { - "@socket.io/base64-arraybuffer": "~1.0.2" - }, + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==", "engines": { "node": ">=10.0.0" } @@ -667,7 +656,7 @@ "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "integrity": 
"sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } @@ -869,15 +858,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, "engines": { @@ -902,6 +891,11 @@ "node": ">=10.0.0" } }, + "node_modules/socket.io/node_modules/socket.io-adapter": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" + }, "node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -1092,11 +1086,6 @@ "integrity": "sha512-HGQ8YooJ8Mx7l28tD7XjtB3ImLEjlUxG1wC1PAjxu6hPJqjPshUZxAICzDqDjtIbhDTf48WXXUcx8TQJB1XTKA==", "requires": {} }, - "@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==" - }, "@socket.io/redis-adapter": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.1.0.tgz", @@ 
-1124,9 +1113,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "accepts": { "version": "1.3.8", @@ -1281,9 +1270,9 @@ "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "requires": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -1298,12 +1287,9 @@ } }, "engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "requires": { - "@socket.io/base64-arraybuffer": "~1.0.2" - } + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==" }, "escape-html": { "version": "1.0.3", @@ -1546,7 +1532,7 @@ "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + 
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "on-finished": { "version": "2.3.0", @@ -1696,16 +1682,23 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "requires": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" + }, + "dependencies": { + "socket.io-adapter": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" + } } }, "socket.io-adapter": { @@ -1774,7 +1767,7 @@ }, "uWebSockets.js": { "version": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#a58e810e47a23696410f6073c8c905dc38f75da5", - "from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.6.0" + "from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.10.0" }, "vary": { "version": "1.1.2", diff --git a/ee/utilities/package.json b/ee/utilities/package.json index 99c2666da..bd35ec6a6 100644 --- a/ee/utilities/package.json +++ b/ee/utilities/package.json @@ -22,8 +22,8 @@ "@socket.io/redis-adapter": "^7.1.0", "express": "^4.17.1", "redis": "^4.0.3", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2", - "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" + 
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0" } } diff --git a/utilities/package-lock.json b/utilities/package-lock.json index d4ef1c007..e8d8d3129 100644 --- a/utilities/package-lock.json +++ b/utilities/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "express": "^4.17.1", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2" } }, @@ -26,14 +26,6 @@ "maxmind": "^4.2.0" } }, - "node_modules/@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", @@ -50,9 +42,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "node_modules/@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -232,9 +224,9 @@ } }, "node_modules/engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": 
"sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -252,12 +244,9 @@ } }, "node_modules/engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "dependencies": { - "@socket.io/base64-arraybuffer": "~1.0.2" - }, + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==", "engines": { "node": ">=10.0.0" } @@ -549,7 +538,7 @@ "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } @@ -706,15 +695,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, "engines": { @@ -722,9 +711,9 @@ } }, 
"node_modules/socket.io-adapter": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz", - "integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" }, "node_modules/socket.io-parser": { "version": "4.0.4", @@ -916,11 +905,6 @@ "maxmind": "^4.2.0" } }, - "@socket.io/base64-arraybuffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", - "integrity": "sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ==" - }, "@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", @@ -937,9 +921,9 @@ "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, "@types/node": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", - "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, "accepts": { "version": "1.3.8", @@ -1074,9 +1058,9 @@ "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "engine.io": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.3.tgz", - "integrity": "sha512-rqs60YwkvWTLLnfazqgZqLa/aKo+9cueVfEi/dZ8PyGyaf8TLOxj++4QMIgeG3Gn0AhrWiFXvghsoY9L9h25GA==", + "version": "6.2.0", + "resolved": 
"https://registry.npmjs.org/engine.io/-/engine.io-6.2.0.tgz", + "integrity": "sha512-4KzwW3F3bk+KlzSOY57fj/Jx6LyRQ1nbcyIadehl+AnXjKT7gDO0ORdRi/84ixvMKTym6ZKuxvbzN62HDDU1Lg==", "requires": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -1106,12 +1090,9 @@ } }, "engine.io-parser": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.3.tgz", - "integrity": "sha512-BtQxwF27XUNnSafQLvDi0dQ8s3i6VgzSoQMJacpIcGNrlUdfHSKbgm3jmjCVvQluGzqwujQMPAoMai3oYSTurg==", - "requires": { - "@socket.io/base64-arraybuffer": "~1.0.2" - } + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz", + "integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==" }, "escape-html": { "version": "1.0.3", @@ -1314,7 +1295,7 @@ "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "on-finished": { "version": "2.3.0", @@ -1423,15 +1404,15 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "socket.io": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.4.1.tgz", - "integrity": "sha512-s04vrBswdQBUmuWJuuNTmXUVJhP0cVky8bBDhdkf8y0Ptsu7fKU2LuLbts9g+pdmAdyMMn8F/9Mf1/wbtUN0fg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.1.tgz", + "integrity": "sha512-0y9pnIso5a9i+lJmsCdtmTTgJFFSvNQKDnPQRz28mGNnxbmqYg2QPtJTLFxhymFZhAIn50eHAKzJeiNaKr+yUQ==", "requires": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.1.0", - "socket.io-adapter": "~2.3.3", + "engine.io": "~6.2.0", + "socket.io-adapter": "~2.4.0", "socket.io-parser": "~4.0.4" }, 
"dependencies": { @@ -1451,9 +1432,9 @@ } }, "socket.io-adapter": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.3.3.tgz", - "integrity": "sha512-Qd/iwn3VskrpNO60BeRyCyr8ZWw9CPZyitW4AQwmRZ8zCiyDiL+znRnWX6tDHXnWn1sJrM1+b6Mn6wEDJJ4aYQ==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz", + "integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg==" }, "socket.io-parser": { "version": "4.0.4", diff --git a/utilities/package.json b/utilities/package.json index 73b92d0f2..cb6fb2b65 100644 --- a/utilities/package.json +++ b/utilities/package.json @@ -20,7 +20,7 @@ "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "express": "^4.17.1", - "socket.io": "^4.4.1", + "socket.io": "^4.5.1", "ua-parser-js": "^1.0.2" } } From 181195ffdef7eb6b6c7d812b8b868b89f5a3e216 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 18:01:52 +0200 Subject: [PATCH 197/221] feat(assist): assist refactored --- utilities/servers/websocket.js | 18 +++--------------- utilities/utils/helper.js | 17 ++++++++++++++++- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 799304f20..8ef276939 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession, objectToObjectOfArrays} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -34,20 +34,8 @@ const extractFiltersFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); 
filters.userID = [req.query.userId]; } - filters = {...filters, ...req.body}; - let _filters = {} - for (let k of Object.keys(filters)) { - if (filters[k] !== undefined && filters[k] !== null) { - _filters[k] = filters[k]; - if (!Array.isArray(_filters[k])) { - _filters[k] = [_filters[k]]; - } - for (let i = 0; i < _filters[k].length; i++) { - _filters[k][i] = String(_filters[k][i]); - } - } - } - return Object.keys(_filters).length > 0 ? _filters : undefined; + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? filters : undefined; } const extractProjectKeyFromRequest = function (req) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 070463e00..a874efa65 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -51,6 +51,21 @@ const isValidSession = function (sessionInfo, filters) { const hasFilters = function (filters) { return filters !== undefined && Object.keys(filters).length > 0; } +const objectToObjectOfArrays = function (obj) { + let _obj = {} + for (let k of Object.keys(obj)) { + if (obj[k] !== undefined && obj[k] !== null) { + _obj[k] = obj[k]; + if (!Array.isArray(_obj[k])) { + _obj[k] = [_obj[k]]; + } + for (let i = 0; i < _obj[k].length; i++) { + _obj[k][i] = String(_obj[k][i]); + } + } + } + return _obj; +} module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters + extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays }; \ No newline at end of file From c6a6a77e7197172e12c574474c65957319f0fe0d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 19:37:04 +0200 Subject: [PATCH 198/221] feat(assist): EE assist search --- ee/utilities/.gitignore | 5 ++- ee/utilities/server.js | 7 ++- ee/utilities/servers/websocket-cluster.js | 46 +++++++++----------- ee/utilities/servers/websocket.js | 47 +++++++++----------- ee/utilities/utils/helper-ee.js | 53 +++++++++++++++++++++++ 5 files changed, 102 
insertions(+), 56 deletions(-) create mode 100644 ee/utilities/utils/helper-ee.js diff --git a/ee/utilities/.gitignore b/ee/utilities/.gitignore index 0aaf625c9..f54e439ba 100644 --- a/ee/utilities/.gitignore +++ b/ee/utilities/.gitignore @@ -10,6 +10,7 @@ build.sh servers/peerjs-server.js servers/sourcemaps-handler.js servers/sourcemaps-server.js -#servers/websocket.js -/utils /Dockerfile +/utils/geoIP.js +/utils/HeapSnapshot.js +/utils/helper.js diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 429b37c25..fc319d79c 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -16,8 +16,9 @@ const PREFIX = process.env.prefix || `/assist` if (process.env.uws !== "true") { let wsapp = express(); + wsapp.use(express.json()); + wsapp.use(express.urlencoded({extended: true})); wsapp.use(request_logger("[wsapp]")); - wsapp.use(request_logger("[app]")); wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => { res.statusCode = 200; res.end("ok!"); @@ -73,10 +74,14 @@ if (process.env.uws !== "true") { } } uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js 
b/ee/utilities/servers/websocket-cluster.js index 0b8a56699..4b3cb0a42 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,7 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractFiltersFromRequest} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); @@ -59,19 +60,6 @@ const uniqueSessions = function (data) { return resArr; } -const extractUserIdFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getQuery("userId")) { - debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - return req.getQuery("userId"); - } - } else if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; - } - return undefined; -} - const extractProjectKeyFromRequest = function (req) { if (process.env.uws === "true") { if (req.getParameter(0)) { @@ -103,7 +91,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); @@ -111,10 +99,11 @@ const socketsList = async function (req, res) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && 
item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -126,21 +115,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -152,10 +143,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let 
liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -165,8 +157,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -180,11 +172,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -194,8 +187,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -209,6 +202,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, 
socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 51fa4cc41..63f38b94e 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,7 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); +const {extractFiltersFromRequest} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -42,19 +43,6 @@ const createSocketIOServer = function (server, prefix) { } } -const extractUserIdFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getQuery("userId")) { - debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - return req.getQuery("userId"); - } - } else if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - return req.query.userId; - } - return undefined; -} - const extractProjectKeyFromRequest = function (req) { if (process.env.uws === "true") { if (req.getParameter(0)) { @@ -86,18 +74,18 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId = extractUserIdFromRequest(req); - + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of 
connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -109,21 +97,23 @@ const socketsList = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { + if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { - if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo + && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(sessionId); } } @@ -135,10 +125,11 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId = 
extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -148,8 +139,8 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -162,11 +153,12 @@ const socketsLive = async function (req, res) { respond(res, liveSessions); } wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let userId = extractUserIdFromRequest(req); + let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -176,8 +168,8 @@ const socketsLiveByProject = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - if (userId) { - if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + if (hasFilters(filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -190,6 +182,7 @@ const socketsLiveByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } 
wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js new file mode 100644 index 000000000..522158a01 --- /dev/null +++ b/ee/utilities/utils/helper-ee.js @@ -0,0 +1,53 @@ +const {objectToObjectOfArrays} = require('./helper'); +const getBodyFromUWSResponse = async function (res) { + return new Promise(((resolve, reject) => { + let buffer; + res.onData((ab, isLast) => { + let chunk = Buffer.from(ab); + if (buffer) { + buffer = Buffer.concat([buffer, chunk]); + } else { + buffer = Buffer.concat([chunk]); + } + if (isLast) { + let json; + try { + json = JSON.parse(buffer); + } catch (e) { + console.error(e); + /* res.close calls onAborted */ + try { + res.close(); + } catch (e2) { + console.error(e2); + } + json = {}; + } + resolve(json); + } + }); + })); +} +const extractFiltersFromRequest = async function (req, res) { + let filters = {}; + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + filters.userID = [req.getQuery("userId")]; + } + + let body = await getBodyFromUWSResponse(res); + filters = {...filters, ...body}; + } else { + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + filters.userID = [req.query.userId]; + } + filters = {...filters, ...req.body}; + } + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? 
filters : undefined; +} +module.exports = { + extractFiltersFromRequest +}; \ No newline at end of file From 43184d5c43ad4646ae45269b676e61f15b3f983e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 19:42:16 +0200 Subject: [PATCH 199/221] feat(assist): assist refactored --- ee/utilities/clean.sh | 8 ++++++++ ee/utilities/prepare-dev.sh | 2 ++ ee/utilities/utils/helper-ee.js | 10 +++------- utilities/servers/websocket.js | 12 +----------- utilities/utils/helper.js | 11 ++++++++++- 5 files changed, 24 insertions(+), 19 deletions(-) create mode 100755 ee/utilities/clean.sh create mode 100755 ee/utilities/prepare-dev.sh diff --git a/ee/utilities/clean.sh b/ee/utilities/clean.sh new file mode 100755 index 000000000..3e8ec080b --- /dev/null +++ b/ee/utilities/clean.sh @@ -0,0 +1,8 @@ +rm -rf ./utils/geoIP.js +rm -rf ./utils/HeapSnapshot.js +rm -rf ./utils/helper.js + +rm -rf servers/peerjs-server.js +rm -rf servers/sourcemaps-handler.js +rm -rf servers/sourcemaps-server.js +rm -rf build.sh \ No newline at end of file diff --git a/ee/utilities/prepare-dev.sh b/ee/utilities/prepare-dev.sh new file mode 100755 index 000000000..2daecbfc1 --- /dev/null +++ b/ee/utilities/prepare-dev.sh @@ -0,0 +1,2 @@ +#!/bin/bash +rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./ \ No newline at end of file diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 522158a01..7853d67ad 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -1,4 +1,4 @@ -const {objectToObjectOfArrays} = require('./helper'); +const helper = require('./helper'); const getBodyFromUWSResponse = async function (res) { return new Promise(((resolve, reject) => { let buffer; @@ -39,13 +39,9 @@ const extractFiltersFromRequest = async function (req, res) { let body = await getBodyFromUWSResponse(res); filters = {...filters, ...body}; } else { - if (req.query.userId) { - debug && 
console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; - } - filters = {...filters, ...req.body}; + return helper.extractFiltersFromRequest(req); } - filters = objectToObjectOfArrays({...filters, ...req.body}); + filters = helper.objectToObjectOfArrays({...filters, ...req.body}); return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 8ef276939..5658bbd57 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,7 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, objectToObjectOfArrays} = require('../utils/helper'); +const {extractPeerId, hasFilters, isValidSession, extractFiltersFromRequest} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,16 +28,6 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractFiltersFromRequest = function (req) { - let filters = {}; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; - } - filters = objectToObjectOfArrays({...filters, ...req.body}); - return Object.keys(filters).length > 0 ? 
filters : undefined; -} - const extractProjectKeyFromRequest = function (req) { if (req.params.projectKey) { debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index a874efa65..54fbfd8ef 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -66,6 +66,15 @@ const objectToObjectOfArrays = function (obj) { } return _obj; } +const extractFiltersFromRequest = function (req) { + let filters = {}; + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + filters.userID = [req.query.userId]; + } + filters = objectToObjectOfArrays({...filters, ...req.body}); + return Object.keys(filters).length > 0 ? filters : undefined; +} module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays + extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays, extractFiltersFromRequest }; \ No newline at end of file From ef609aa196fef6ce1753841e0391524c3209b67a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 20:09:36 +0200 Subject: [PATCH 200/221] feat(api): search live sessions --- api/chalicelib/core/assist.py | 35 ++++++++++++++++++++++++++--------- api/routers/core.py | 13 ++++++++++--- api/schemas.py | 16 ++++++++++++++++ 3 files changed, 52 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index b2926fd0c..e656c0728 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,6 +1,7 @@ import requests from decouple import config +import schemas from chalicelib.core import projects SESSION_PROJECTION_COLS = """s.project_id, @@ -19,14 +20,29 @@ SESSION_PROJECTION_COLS = """s.project_id, """ -def get_live_sessions_ws(project_id, user_id=None): +def get_live_sessions_ws_user_id(project_id, user_id): + data = { + "filter": {"userId": user_id} + } + return 
__get_live_sessions_ws(project_id=project_id, data=data) + + +def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema): + data = { + "filter": {}, + "pagination": {"limit": body.limit, "page": body.page}, + "sort": {"key": body.sort, "order": body.order} + } + for f in body.filters: + data["filter"][f.type] = f.value + return __get_live_sessions_ws(project_id=project_id, data=data) + + +def __get_live_sessions_ws(project_id, data): project_key = projects.get_project_key(project_id) - params = {} - if user_id and len(user_id) > 0: - params["userId"] = user_id try: - connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params, - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.post(config("assist") % config("S3_KEY") + f"/{project_key}", json=data, + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! issue with the peer-server") print(connected_peers.text) @@ -53,7 +69,7 @@ def get_live_sessions_ws(project_id, user_id=None): def get_live_session_by_id(project_id, session_id): - all_live = get_live_sessions_ws(project_id) + all_live = __get_live_sessions_ws(project_id, data={"filter": {"sessionId": session_id}}) for l in all_live: if str(l.get("sessionID")) == str(session_id): return l @@ -64,8 +80,9 @@ def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) try: - connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}", - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.post(config("assistList") % config("S3_KEY") + f"/{project_key}", + json={"filter": {"sessionId": session_id}}, + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) diff --git a/api/routers/core.py b/api/routers/core.py index 3f3d91e80..7ad57334e 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1,4 +1,4 @@ -from typing import Union +from typing import Union, Optional from decouple import config from fastapi import Depends, Body, BackgroundTasks, HTTPException @@ -773,7 +773,7 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. @app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) if issue is None: return {"errors": ["issue not found"]} @@ -859,7 +859,14 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/assist/sessions', tags=["assist"]) def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId, user_id=userId) + data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId) + return {'data': data} + + +@app.post('/{projectId}/assist/sessions', tags=["assist"]) +def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions_ws(projectId, body=data) return {'data': data} diff --git a/api/schemas.py b/api/schemas.py index 715bf0f84..3fb9a6805 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1008,3 +1008,19 @@ class CustomMetricAndTemplate(BaseModel): class Config: alias_generator = attribute_to_camel_case + + +class LiveSessionsSearchPayloadSchema(_PaginatedSchema): + filters: List[SessionSearchFilterSchema] = Field([]) + sort: str = 
Field(default="startTs") + order: SortOrderType = Field(default=SortOrderType.desc) + group_by_user: bool = Field(default=False) + + @root_validator(pre=True) + def transform_order(cls, values): + if values.get("order") is not None: + values["order"] = values["order"].upper() + return values + + class Config: + alias_generator = attribute_to_camel_case \ No newline at end of file From 0aa94bbc3c2b307dcd65baeb35fb81557e8f79cc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Jun 2022 20:12:03 +0200 Subject: [PATCH 201/221] feat(assist): assist changed search payload --- ee/utilities/utils/helper-ee.js | 4 ++-- utilities/utils/helper.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 7853d67ad..18fca5fe4 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -37,11 +37,11 @@ const extractFiltersFromRequest = async function (req, res) { } let body = await getBodyFromUWSResponse(res); - filters = {...filters, ...body}; + filters = {...filters, ...(body.filter || {})}; } else { return helper.extractFiltersFromRequest(req); } - filters = helper.objectToObjectOfArrays({...filters, ...req.body}); + filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 54fbfd8ef..531cf9f64 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -72,7 +72,7 @@ const extractFiltersFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.userID = [req.query.userId]; } - filters = objectToObjectOfArrays({...filters, ...req.body}); + filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { From ccf951f8e403921bfada4ecd408ae71826a5319a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 15:05:41 +0200 Subject: [PATCH 202/221] feat(api): optimized live session check feat(assist): optimized live session check feat(assist): sort feat(assist): pagination --- api/chalicelib/core/assist.py | 5 +-- ee/utilities/server.js | 2 + ee/utilities/servers/websocket-cluster.js | 36 +++++++-------- ee/utilities/servers/websocket.js | 38 +++++++--------- ee/utilities/utils/helper-ee.js | 24 ++++++++++ utilities/servers/websocket.js | 36 ++++++++------- utilities/utils/helper.js | 55 ++++++++++++++++++++--- 7 files changed, 127 insertions(+), 69 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index e656c0728..f647e95f1 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -80,9 +80,8 @@ def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) try: - connected_peers = requests.post(config("assistList") % config("S3_KEY") + f"/{project_key}", - json={"filter": {"sessionId": session_id}}, - timeout=config("assistTimeout", cast=int, default=5)) + connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index fc319d79c..327a664a0 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -77,11 +77,13 @@ if (process.env.uws !== "true") { uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 4b3cb0a42..57ba2ab6c 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,8 +1,12 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); -const {extractFiltersFromRequest} = require('../utils/helper-ee'); +const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + 
extractFiltersFromRequest +} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); @@ -60,20 +64,6 @@ const uniqueSessions = function (data) { return resArr; } -const extractProjectKeyFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getParameter(0)) { - debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); - return req.getParameter(0); - } - } else if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - - const getAvailableRooms = async function () { return io.of('/').adapter.allRooms(); } @@ -120,12 +110,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -144,6 +135,7 @@ const socketsListByProject = async function (req, res) { } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -169,7 +161,7 @@ const socketsLive = async function 
(req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -177,12 +169,13 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -199,10 +192,11 @@ const socketsLiveByProject = async function (req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []); } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 63f38b94e..8c34bd91a 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,8 +1,12 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = 
require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession} = require('../utils/helper'); -const {extractFiltersFromRequest} = require('../utils/helper-ee'); +const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + extractFiltersFromRequest +} = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -43,20 +47,6 @@ const createSocketIOServer = function (server, prefix) { } } -const extractProjectKeyFromRequest = function (req) { - if (process.env.uws === "true") { - if (req.getParameter(0)) { - debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); - return req.getParameter(0); - } - } else if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - - const getAvailableRooms = async function () { return io.sockets.adapter.rooms.keys(); } @@ -102,12 +92,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -122,10 +113,11 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + 
respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -150,7 +142,7 @@ const socketsLive = async function (req, res) { } } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -158,12 +150,13 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = await extractFiltersFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -179,10 +172,11 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await 
io.in(peerId).fetchSockets(); diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 18fca5fe4..2ea57a421 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -28,6 +28,28 @@ const getBodyFromUWSResponse = async function (res) { }); })); } +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else { + return helper.extractProjectKeyFromRequest(req); + } + return undefined; +} +const extractSessionIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(1)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(1)}`); + return req.getParameter(1); + } + } else { + return helper.extractSessionIdFromRequest(req); + } + return undefined; +} const extractFiltersFromRequest = async function (req, res) { let filters = {}; if (process.env.uws === "true") { @@ -45,5 +67,7 @@ const extractFiltersFromRequest = async function (req, res) { return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { + extractProjectKeyFromRequest, + extractSessionIdFromRequest, extractFiltersFromRequest }; \ No newline at end of file diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 5658bbd57..27e8fba4a 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -1,7 +1,15 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, extractFiltersFromRequest} = require('../utils/helper'); +const { + extractPeerId, + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + hasFilters, + isValidSession, + extractPayloadFromRequest, + sortPaginate +} = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; @@ -28,14 +36,6 @@ const createSocketIOServer = function (server, prefix) { }); } -const extractProjectKeyFromRequest = function (req) { - if (req.params.projectKey) { - debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); - return req.params.projectKey; - } - return undefined; -} - const getAvailableRooms = async function () { return io.sockets.adapter.rooms.keys(); @@ -49,7 +49,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -60,7 +60,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && 
isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -77,12 +77,13 @@ wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); - let filters = extractFiltersFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === _projectKey) { + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -100,11 +101,12 @@ const socketsListByProject = async function (req, res) { respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -125,7 +127,7 @@ const socketsLive = async function (req, res) { } } } - respond(res, liveSessions); + respond(res, sortPaginate(liveSessions, filters)); } wsRouter.get(`/sockets-live`, socketsLive); wsRouter.post(`/sockets-live`, socketsLive); @@ -133,7 +135,7 @@ wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let 
_projectKey = extractProjectKeyFromRequest(req); - let filters = extractFiltersFromRequest(req); + let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -154,7 +156,7 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 531cf9f64..f47a7f540 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -24,6 +24,20 @@ const request_logger = (identity) => { next(); } }; +const extractProjectKeyFromRequest = function (req) { + if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} +const extractSessionIdFromRequest = function (req) { + if (req.params.sessionId) { + debug && console.log(`[WS]where sessionId=${req.params.sessionId}`); + return req.params.sessionId; + } + return undefined; +} const isValidSession = function (sessionInfo, filters) { let foundAll = true; for (const [key, values] of Object.entries(filters)) { @@ -49,7 +63,7 @@ const isValidSession = function (sessionInfo, filters) { return foundAll; } const hasFilters = function (filters) { - return filters !== undefined && Object.keys(filters).length > 0; + return filters && filters.filter && Object.keys(filters.filter).length > 0; } const objectToObjectOfArrays = function (obj) { let _obj = {} @@ -66,15 +80,44 @@ const objectToObjectOfArrays = function (obj) { } return _obj; } -const extractFiltersFromRequest = function (req) { - let filters = {}; +const extractPayloadFromRequest = function (req) { + let filters = { + "filter": {}, + "sort": {"key": undefined, "order": 
false}, + "pagination": {"limit": undefined, "page": undefined} + }; if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); - filters.userID = [req.query.userId]; + filters.filter.userID = [req.query.userId]; } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); - return Object.keys(filters).length > 0 ? filters : undefined; + return filters; +} +const sortPaginate = function (list, filters) { + list.sort((a, b) => { + let aV = (a[filters.sort.key] || a["timestamp"]); + let bV = (b[filters.sort.key] || b["timestamp"]); + return aV > bV ? 1 : aV < bV ? -1 : 0; + }) + + if (filters.sort.order) { + list.reverse(); + } + + if (filters.pagination.page && filters.pagination.limit) { + return list.slice((filters.pagination.page - 1) * filters.pagination.limit, + filters.pagination.page * filters.pagination.limit); + } + return list; } module.exports = { - extractPeerId, request_logger, isValidSession, hasFilters, objectToObjectOfArrays, extractFiltersFromRequest + extractPeerId, + request_logger, + extractProjectKeyFromRequest, + extractSessionIdFromRequest, + isValidSession, + hasFilters, + objectToObjectOfArrays, + extractPayloadFromRequest, + sortPaginate }; \ No newline at end of file From fbe37babbc576dc48955e68b129668bedf29a08c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 16:03:37 +0200 Subject: [PATCH 203/221] feat(assist): sessions search handle nested objects --- utilities/utils/helper.js | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index f47a7f540..10c03c830 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -43,15 +43,22 @@ const isValidSession = function (sessionInfo, filters) { for (const [key, values] of Object.entries(filters)) { let found = false; for (const [skey, svalue] of Object.entries(sessionInfo)) { - if (skey.toLowerCase() === 
key.toLowerCase()) { - for (let v of values) { - if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (svalue !== undefined && svalue !== null) { + if (svalue.constructor === Object) { + if (isValidSession(svalue, {key: values})) { found = true; break; } - } - if (found) { - break; + } else if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } } } } @@ -82,10 +89,19 @@ const objectToObjectOfArrays = function (obj) { } const extractPayloadFromRequest = function (req) { let filters = { + "query": {}, "filter": {}, "sort": {"key": undefined, "order": false}, "pagination": {"limit": undefined, "page": undefined} }; + if (req.query.q) { + debug && console.log(`[WS]where q=${req.query.q}`); + filters.query.value = [req.query.q]; + } + if (req.query.key) { + debug && console.log(`[WS]where key=${req.query.key}`); + filters.query.key = [req.query.key]; + } if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; From c0c1a86209b11ec608b8b9ac82af22d5be8a5343 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 17:15:02 +0200 Subject: [PATCH 204/221] feat(assist): autocomplete --- ee/utilities/server.js | 1 + ee/utilities/servers/websocket-cluster.js | 33 +++++++++++++++++---- ee/utilities/servers/websocket.js | 36 +++++++++++++++++++---- ee/utilities/utils/helper-ee.js | 4 +-- utilities/servers/websocket.js | 23 +++++++++++++++ utilities/utils/helper.js | 18 ++++++++++++ 6 files changed, 102 insertions(+), 13 deletions(-) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 327a664a0..480a2b27e 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -84,6 +84,7 @@ if (process.env.uws !== "true") { uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, 
uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 57ba2ab6c..6dd69a4bc 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -5,7 +5,7 @@ const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../ut const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); @@ -81,7 +81,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); @@ -111,7 +111,7 @@ const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -139,7 +139,7 @@ wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && 
console.log("[WS]looking for all available LIVE sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -170,7 +170,7 @@ const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -198,6 +198,29 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { diff --git 
a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 8c34bd91a..dac389fa8 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -5,7 +5,7 @@ const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../ut const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -64,7 +64,7 @@ const respond = function (res, data) { const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -93,7 +93,7 @@ const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -121,7 +121,7 @@ wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -151,7 +151,7 @@ const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); let _sessionId = 
extractSessionIdFromRequest(req); - let filters = await extractFiltersFromRequest(req, res); + let filters = await extractPayloadFromRequest(req, res); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { @@ -178,6 +178,29 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -361,6 +384,7 @@ module.exports = { socketsList, socketsListByProject, socketsLive, - socketsLiveByProject + socketsLiveByProject, + autocomplete } }; \ No newline at end of file diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 2ea57a421..b29fbffef 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -50,7 +50,7 @@ const extractSessionIdFromRequest = function (req) { } return undefined; } -const extractFiltersFromRequest = 
async function (req, res) { +const extractPayloadFromRequest = async function (req, res) { let filters = {}; if (process.env.uws === "true") { if (req.getQuery("userId")) { @@ -69,5 +69,5 @@ const extractFiltersFromRequest = async function (req, res) { module.exports = { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractFiltersFromRequest + extractPayloadFromRequest }; \ No newline at end of file diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 27e8fba4a..3587d9ad8 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -161,6 +161,29 @@ const socketsLiveByProject = async function (req, res) { wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +const autocomplete = async function (req, res) { + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let filters = extractPayloadFromRequest(req); + let results = []; + if (filters.query && Object.keys(filters.query).length > 0) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey} = extractPeerId(peerId); + if (projectKey === _projectKey) { + let connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) { + results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)]; + } + } + } + } + } + respond(res, results); +} +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 10c03c830..ff7d560f7 100644 --- a/utilities/utils/helper.js +++ 
b/utilities/utils/helper.js @@ -69,6 +69,23 @@ const isValidSession = function (sessionInfo, filters) { } return foundAll; } +const getValidAttributes = function (sessionInfo, query) { + let matches = []; + let deduplicate = []; + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (svalue !== undefined && svalue !== null) { + if (svalue.constructor === Object) { + matches = [...matches, ...getValidAttributes(svalue, query)] + } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) + && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 + && deduplicate.indexOf(skey + '_' + svalue) < 0) { + matches.push({"type": skey, "value": svalue}); + deduplicate.push(skey + '_' + svalue); + } + } + } + return matches; +} const hasFilters = function (filters) { return filters && filters.filter && Object.keys(filters.filter).length > 0; } @@ -129,6 +146,7 @@ const sortPaginate = function (list, filters) { module.exports = { extractPeerId, request_logger, + getValidAttributes, extractProjectKeyFromRequest, extractSessionIdFromRequest, isValidSession, From 4fe3f87d46daee9395627b97af156e9735c0de5e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 17:22:43 +0200 Subject: [PATCH 205/221] feat(api): assist autocomplete --- api/chalicelib/core/assist.py | 28 ++++++++++++++++++++++++++++ api/routers/core.py | 6 ++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index f647e95f1..1804da669 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -102,6 +102,34 @@ def is_live(project_id, session_id, project_key=None): return str(session_id) in connected_peers +def autocomplete(project_id, q: str, key: str = None): + project_key = projects.get_project_key(project_id) + params = {"q": q} + if key: + params["key"] = key + try: + results = requests.get(config("assistList") % config("S3_KEY") + 
f"/{project_key}/autocomplete", + params=params, timeout=config("assistTimeout", cast=int, default=5)) + if results.status_code != 200: + print("!! issue with the peer-server") + print(results.text) + return {"errors": [f"Something went wrong wile calling assist:{results.text}"]} + results = results.json().get("data", []) + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + return {"errors": ["Assist request timeout"]} + except Exception as e: + print("issue getting Assist response") + print(str(e)) + print("expected JSON, received:") + try: + print(results.text) + except: + print("couldn't get response") + return {"errors": ["Something went wrong wile calling assist"]} + return results + + def get_ice_servers(): return config("iceServers") if config("iceServers", default=None) is not None \ and len(config("iceServers")) > 0 else None diff --git a/api/routers/core.py b/api/routers/core.py index 7ad57334e..2ac949057 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -136,10 +136,12 @@ def events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType, schemas.PerformanceEventType, schemas.FetchFilterType, schemas.GraphqlFilterType] = None, - key: str = None, - source: str = None, context: schemas.CurrentContext = Depends(OR_context)): + key: str = None, source: str = None, live: bool = False, + context: schemas.CurrentContext = Depends(OR_context)): if len(q) == 0: return {"data": []} + if live: + return assist.autocomplete(project_id=projectId, q=q, key=key) if type in [schemas.FetchFilterType._url]: type = schemas.EventType.request elif type in [schemas.GraphqlFilterType._name]: From bd9dbc93931aaab34eafd42f651b401ce5cb2134 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 18:45:31 +0200 Subject: [PATCH 206/221] feat(assist): payload extraction debug --- ee/utilities/utils/helper-ee.js | 2 ++ utilities/utils/helper.js | 2 ++ 2 files changed, 4 insertions(+) diff --git 
a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index b29fbffef..273212954 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -64,6 +64,8 @@ const extractPayloadFromRequest = async function (req, res) { return helper.extractFiltersFromRequest(req); } filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); + debug && console.log("payload/filters:") + debug && console.log(filters) return Object.keys(filters).length > 0 ? filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index ff7d560f7..ae26b228b 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -124,6 +124,8 @@ const extractPayloadFromRequest = function (req) { filters.filter.userID = [req.query.userId]; } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); + debug && console.log("payload/filters:") + debug && console.log(filters) return filters; } const sortPaginate = function (list, filters) { From a59a8c01332f3ef1d10798016ac3d70833b9b6f6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 19:04:43 +0200 Subject: [PATCH 207/221] feat(assist): changed debug --- ee/utilities/utils/helper-ee.js | 2 +- utilities/utils/helper.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 273212954..6ae4039bb 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -65,7 +65,7 @@ const extractPayloadFromRequest = async function (req, res) { } filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); debug && console.log("payload/filters:") - debug && console.log(filters) + debug && console.log(JSON.stringify(filters)) return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index ae26b228b..2e4a327a2 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -125,7 +125,7 @@ const extractPayloadFromRequest = function (req) { } filters = objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); debug && console.log("payload/filters:") - debug && console.log(filters) + debug && console.log(JSON.stringify(filters)) return filters; } const sortPaginate = function (list, filters) { From ab02495f6314c3316b5e4bcf0b5d70902f767740 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 19:25:50 +0200 Subject: [PATCH 208/221] feat(api): changed assist search payload --- api/schemas.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/api/schemas.py b/api/schemas.py index 3fb9a6805..77a5db26d 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1010,11 +1010,34 @@ class CustomMetricAndTemplate(BaseModel): alias_generator = attribute_to_camel_case +class LiveFilterType(str, Enum): + user_os = FilterType.user_os.value + user_browser = FilterType.user_browser.value + user_device = FilterType.user_device.value + user_country = FilterType.user_country.value + user_id = FilterType.user_id.value + user_anonymous_id = FilterType.user_anonymous_id.value + rev_id = FilterType.rev_id.value + page_title = "pageTitle" + # + # platform = "PLATFORM" + # metadata = "METADATA" + # issue = "ISSUE" + # events_count = "EVENTS_COUNT" + # utm_source = "UTM_SOURCE" + # utm_medium = "UTM_MEDIUM" + # utm_campaign = "UTM_CAMPAIGN" + + +class LiveSessionSearchFilterSchema(BaseModel): + value: Union[List[str], str] = Field(...) + type: LiveFilterType = Field(...) 
+ + class LiveSessionsSearchPayloadSchema(_PaginatedSchema): - filters: List[SessionSearchFilterSchema] = Field([]) - sort: str = Field(default="startTs") + filters: List[LiveSessionSearchFilterSchema] = Field([]) + sort: str = Field(default="timestamp") order: SortOrderType = Field(default=SortOrderType.desc) - group_by_user: bool = Field(default=False) @root_validator(pre=True) def transform_order(cls, values): @@ -1023,4 +1046,4 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema): return values class Config: - alias_generator = attribute_to_camel_case \ No newline at end of file + alias_generator = attribute_to_camel_case From 47fb100b4f863e063508635280a35dbe9e3aa4ba Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 20:24:32 +0200 Subject: [PATCH 209/221] feat(assist): fixed multiple values filter support for search --- utilities/utils/helper.js | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 2e4a327a2..6e4e1cb5b 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -91,14 +91,16 @@ const hasFilters = function (filters) { } const objectToObjectOfArrays = function (obj) { let _obj = {} - for (let k of Object.keys(obj)) { - if (obj[k] !== undefined && obj[k] !== null) { - _obj[k] = obj[k]; - if (!Array.isArray(_obj[k])) { - _obj[k] = [_obj[k]]; - } - for (let i = 0; i < _obj[k].length; i++) { - _obj[k][i] = String(_obj[k][i]); + if (obj) { + for (let k of Object.keys(obj)) { + if (obj[k] !== undefined && obj[k] !== null) { + _obj[k] = obj[k]; + if (!Array.isArray(_obj[k])) { + _obj[k] = [_obj[k]]; + } + for (let i = 0; i < _obj[k].length; i++) { + _obj[k][i] = String(_obj[k][i]); + } } } } @@ -123,7 +125,8 @@ const extractPayloadFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; } - filters = objectToObjectOfArrays({...filters, 
...(req.body.filter || {})}); + filters.filters = objectToObjectOfArrays(filters.filter); + filters = {...filters, ...(req.body.filter || {})}; debug && console.log("payload/filters:") debug && console.log(JSON.stringify(filters)) return filters; From d1ef7ea1c736156653ae02b658f05ebdaf145532 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 21:56:59 +0200 Subject: [PATCH 210/221] feat(assist): full search feat(api): live sessions full search --- api/chalicelib/core/assist.py | 5 +++- api/schemas.py | 27 ++++++++++------- ee/utilities/servers/websocket-cluster.js | 8 ++--- ee/utilities/servers/websocket.js | 8 ++--- ee/utilities/utils/helper-ee.js | 34 +++++++++++++++++---- utilities/servers/websocket.js | 6 ++-- utilities/utils/helper.js | 36 +++++++++++++++-------- 7 files changed, 84 insertions(+), 40 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 1804da669..5ff067229 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -34,7 +34,10 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche "sort": {"key": body.sort, "order": body.order} } for f in body.filters: - data["filter"][f.type] = f.value + if f.type == schemas.LiveFilterType.metadata: + data["filter"][f.source] = f.value + else: + data["filter"][f.type.value] = f.value return __get_live_sessions_ws(project_id=project_id, data=data) diff --git a/api/schemas.py b/api/schemas.py index 77a5db26d..45a7bc3d8 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1018,25 +1018,32 @@ class LiveFilterType(str, Enum): user_id = FilterType.user_id.value user_anonymous_id = FilterType.user_anonymous_id.value rev_id = FilterType.rev_id.value - page_title = "pageTitle" - # - # platform = "PLATFORM" - # metadata = "METADATA" - # issue = "ISSUE" - # events_count = "EVENTS_COUNT" - # utm_source = "UTM_SOURCE" - # utm_medium = "UTM_MEDIUM" - # utm_campaign = "UTM_CAMPAIGN" + page_title = "PAGETITLE" + 
session_id = "SESSIONID" + metadata = "METADATA" + user_UUID = "USERUUID" + tracker_version = "TRACKERVERSION" + user_browser_version = "USERBROWSERVERSION" + user_device_type = "USERDEVICETYPE", + timestamp = "TIMESTAMP" class LiveSessionSearchFilterSchema(BaseModel): value: Union[List[str], str] = Field(...) type: LiveFilterType = Field(...) + source: Optional[str] = Field(None) + + @root_validator + def validator(cls, values): + if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value: + assert values.get("source") is not None, "source should not be null for METADATA type" + assert len(values.get("source")) > 0, "source should not be empty for METADATA type" + return values class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: str = Field(default="timestamp") + sort: LiveFilterType = Field(default=LiveFilterType.timestamp) order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 6dd69a4bc..f414939fe 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -93,7 +93,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -122,7 +122,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && 
isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -150,7 +150,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -181,7 +181,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index dac389fa8..686b62293 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -75,7 +75,7 @@ const socketsList = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -104,7 +104,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of 
connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -132,7 +132,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -162,7 +162,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 6ae4039bb..41fe456cb 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -1,4 +1,5 @@ const helper = require('./helper'); +let debug = process.env.debug === "1" || false; const getBodyFromUWSResponse = async function (res) { return new Promise(((resolve, reject) => { let buffer; @@ -51,21 +52,42 @@ const extractSessionIdFromRequest = function (req) { return undefined; } const extractPayloadFromRequest = async function (req, res) { - let filters = {}; + let filters = { + "query": {}, + "filter": {} + }; if (process.env.uws === "true") { + if (req.getQuery("q")) 
{ + debug && console.log(`[WS]where q=${req.getQuery("q")}`); + filters.query.value = [req.getQuery("q")]; + } + if (req.getQuery("key")) { + debug && console.log(`[WS]where key=${req.getQuery("key")}`); + filters.query.key = [req.getQuery("key")]; + } if (req.getQuery("userId")) { debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); filters.userID = [req.getQuery("userId")]; } let body = await getBodyFromUWSResponse(res); - filters = {...filters, ...(body.filter || {})}; + filters = { + ...filters, + "sort": { + "key": body.sort && body.sort.key ? body.sort.key : undefined, + "order": body.sort && body.sort.order === "DESC" + }, + "pagination": { + "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, + "page": body.pagination && body.pagination.page ? body.pagination.page : undefined + } + } + filters.filter = {...filters.filter, ...(body.filter || {})}; } else { - return helper.extractFiltersFromRequest(req); + return helper.extractPayloadFromRequest(req); } - filters = helper.objectToObjectOfArrays({...filters, ...(req.body.filter || {})}); - debug && console.log("payload/filters:") - debug && console.log(JSON.stringify(filters)) + filters.filter = helper.objectToObjectOfArrays(filters.filter); + debug && console.log("payload/filters:" + JSON.stringify(filters)) return Object.keys(filters).length > 0 ? 
filters : undefined; } module.exports = { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 3587d9ad8..59d221042 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -89,7 +89,7 @@ const socketsListByProject = async function (req, res) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo - && isValidSession(item.handshake.query.sessionInfo, filters)) { + && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(sessionId); } } @@ -117,7 +117,7 @@ const socketsLive = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { @@ -146,7 +146,7 @@ const socketsLiveByProject = async function (req, res) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (hasFilters(filters)) { - if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters)) { + if (item.handshake.query.sessionInfo && isValidSession(item.handshake.query.sessionInfo, filters.filter)) { liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 6e4e1cb5b..de002f89e 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -44,8 +44,8 @@ const isValidSession = function (sessionInfo, filters) { let found = false; for (const [skey, svalue] of 
Object.entries(sessionInfo)) { if (svalue !== undefined && svalue !== null) { - if (svalue.constructor === Object) { - if (isValidSession(svalue, {key: values})) { + if (typeof (svalue) === "object") { + if (isValidSession(svalue, {[key]: values})) { found = true; break; } @@ -74,7 +74,7 @@ const getValidAttributes = function (sessionInfo, query) { let deduplicate = []; for (const [skey, svalue] of Object.entries(sessionInfo)) { if (svalue !== undefined && svalue !== null) { - if (svalue.constructor === Object) { + if (typeof (svalue) === "object") { matches = [...matches, ...getValidAttributes(svalue, query)] } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 @@ -110,8 +110,14 @@ const extractPayloadFromRequest = function (req) { let filters = { "query": {}, "filter": {}, - "sort": {"key": undefined, "order": false}, - "pagination": {"limit": undefined, "page": undefined} + "sort": { + "key": req.body.sort && req.body.sort.key ? req.body.sort.key : undefined, + "order": req.body.sort && req.body.sort.order === "DESC" + }, + "pagination": { + "limit": req.body.pagination && req.body.pagination.limit ? req.body.pagination.limit : undefined, + "page": req.body.pagination && req.body.pagination.page ? 
req.body.pagination.page : undefined + } }; if (req.query.q) { debug && console.log(`[WS]where q=${req.query.q}`); @@ -125,17 +131,23 @@ const extractPayloadFromRequest = function (req) { debug && console.log(`[WS]where userId=${req.query.userId}`); filters.filter.userID = [req.query.userId]; } - filters.filters = objectToObjectOfArrays(filters.filter); - filters = {...filters, ...(req.body.filter || {})}; - debug && console.log("payload/filters:") - debug && console.log(JSON.stringify(filters)) + filters.filter = objectToObjectOfArrays(filters.filter); + filters.filter = {...filters.filter, ...(req.body.filter || {})}; + debug && console.log("payload/filters:" + JSON.stringify(filters)) return filters; } const sortPaginate = function (list, filters) { + let skey = "timestamp"; + if (list.length > 0 && filters.sort.key) { + for (let key of Object.keys(list[0])) { + if (key.toLowerCase() == filters.sort.key.toLowerCase()) { + skey = key; + break; + } + } + } list.sort((a, b) => { - let aV = (a[filters.sort.key] || a["timestamp"]); - let bV = (b[filters.sort.key] || b["timestamp"]); - return aV > bV ? 1 : aV < bV ? -1 : 0; + return a[skey] > b[skey] ? 1 : a[skey] < b[skey] ? 
-1 : 0; }) if (filters.sort.order) { From 2e5acdabc36156f7758c5d89325fd0f42d62b014 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Jun 2022 22:44:41 +0200 Subject: [PATCH 211/221] feat(assist): full autocomplete feat(assist): solved endpoints conflicts feat(api): live sessions full autocomplete --- api/chalicelib/core/assist.py | 2 +- api/or_dependencies.py | 4 ++- ee/utilities/server.js | 3 +- ee/utilities/servers/websocket-cluster.js | 44 ++++++++++++++--------- ee/utilities/servers/websocket.js | 43 +++++++++++++--------- ee/utilities/utils/helper-ee.js | 41 ++++++++++----------- utilities/servers/websocket.js | 38 ++++++++++++-------- utilities/utils/helper.js | 22 +++++++++--- 8 files changed, 122 insertions(+), 75 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 5ff067229..977d98826 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -130,7 +130,7 @@ def autocomplete(project_id, q: str, key: str = None): except: print("couldn't get response") return {"errors": ["Something went wrong wile calling assist"]} - return results + return {"data": results} def get_ice_servers(): diff --git a/api/or_dependencies.py b/api/or_dependencies.py index 7eee72c49..824670687 100644 --- a/api/or_dependencies.py +++ b/api/or_dependencies.py @@ -33,7 +33,9 @@ class ORRoute(APIRoute): if isinstance(response, JSONResponse): response: JSONResponse = response body = json.loads(response.body.decode('utf8')) - if response.status_code == 200 and body is not None and body.get("errors") is not None: + if response.status_code == 200 \ + and body is not None and isinstance(body, dict) \ + and body.get("errors") is not None: if "not found" in body["errors"][0]: response.status_code = status.HTTP_404_NOT_FOUND else: diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 480a2b27e..93d6d2a2e 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -75,16 +75,17 @@ if 
(process.env.uws !== "true") { } uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive)); + uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); - uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete)); socket.start(uapp); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index f414939fe..2062e5794 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,7 +1,14 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, sortPaginate} = 
require('../utils/helper'); +const { + extractPeerId, + hasFilters, + isValidSession, + sortPaginate, + getValidAttributes, + uniqueAutocomplete +} = require('../utils/helper'); const { extractProjectKeyFromRequest, extractSessionIdFromRequest, @@ -104,8 +111,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -133,9 +138,6 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -163,8 +165,6 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -194,14 +194,11 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let _projectKey = extractProjectKeyFromRequest(req); - let filters = extractPayloadFromRequest(req); + let filters = await 
extractPayloadFromRequest(req); let results = []; if (filters.query && Object.keys(filters.query).length > 0) { let rooms = await getAvailableRooms(); @@ -217,9 +214,8 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -292,6 +288,21 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + + module.exports = { wsRouter, start: (server, prefix) => { @@ -420,6 +431,7 @@ module.exports = { socketsList, socketsListByProject, socketsLive, - socketsLiveByProject + socketsLiveByProject, + autocomplete } }; \ No newline at end of file diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 686b62293..02267fb66 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,11 +1,18 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const {extractPeerId, hasFilters, isValidSession, sortPaginate} = require('../utils/helper'); +const { + extractPeerId, + hasFilters, + 
isValidSession, + sortPaginate, + getValidAttributes, + uniqueAutocomplete +} = require('../utils/helper'); const { extractProjectKeyFromRequest, extractSessionIdFromRequest, - extractPayloadFromRequest + extractPayloadFromRequest, } = require('../utils/helper-ee'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -86,8 +93,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -115,9 +120,6 @@ const socketsListByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -144,8 +146,6 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -174,14 +174,11 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let 
_projectKey = extractProjectKeyFromRequest(req); - let filters = extractPayloadFromRequest(req); + let filters = await extractPayloadFromRequest(req); let results = []; if (filters.query && Object.keys(filters.query).length > 0) { let rooms = await getAvailableRooms(); @@ -197,9 +194,8 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -270,6 +266,21 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + + module.exports = { wsRouter, start: (server, prefix) => { diff --git a/ee/utilities/utils/helper-ee.js b/ee/utilities/utils/helper-ee.js index 41fe456cb..dc821b94a 100644 --- a/ee/utilities/utils/helper-ee.js +++ b/ee/utilities/utils/helper-ee.js @@ -17,11 +17,11 @@ const getBodyFromUWSResponse = async function (res) { } catch (e) { console.error(e); /* res.close calls onAborted */ - try { - res.close(); - } catch (e2) { - console.error(e2); - } + // try { + // res.close(); + // } catch (e2) { + // console.error(e2); + // } json = {}; } resolve(json); @@ -59,30 +59,31 @@ const 
extractPayloadFromRequest = async function (req, res) { if (process.env.uws === "true") { if (req.getQuery("q")) { debug && console.log(`[WS]where q=${req.getQuery("q")}`); - filters.query.value = [req.getQuery("q")]; + filters.query.value = req.getQuery("q"); } if (req.getQuery("key")) { debug && console.log(`[WS]where key=${req.getQuery("key")}`); - filters.query.key = [req.getQuery("key")]; + filters.query.key = req.getQuery("key"); } if (req.getQuery("userId")) { debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); - filters.userID = [req.getQuery("userId")]; + filters.filter.userID = [req.getQuery("userId")]; } - - let body = await getBodyFromUWSResponse(res); - filters = { - ...filters, - "sort": { - "key": body.sort && body.sort.key ? body.sort.key : undefined, - "order": body.sort && body.sort.order === "DESC" - }, - "pagination": { - "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, - "page": body.pagination && body.pagination.page ? body.pagination.page : undefined + if (!filters.query.value) { + let body = await getBodyFromUWSResponse(res); + filters = { + ...filters, + "sort": { + "key": body.sort && body.sort.key ? body.sort.key : undefined, + "order": body.sort && body.sort.order === "DESC" + }, + "pagination": { + "limit": body.pagination && body.pagination.limit ? body.pagination.limit : undefined, + "page": body.pagination && body.pagination.page ? 
body.pagination.page : undefined + } } + filters.filter = {...filters.filter, ...(body.filter || {})}; } - filters.filter = {...filters.filter, ...(body.filter || {})}; } else { return helper.extractPayloadFromRequest(req); } diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 59d221042..4feac9f3f 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -8,7 +8,9 @@ const { hasFilters, isValidSession, extractPayloadFromRequest, - sortPaginate + sortPaginate, + getValidAttributes, + uniqueAutocomplete } = require('../utils/helper'); const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); @@ -71,8 +73,6 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/sockets-list`, socketsList); -wsRouter.post(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -100,9 +100,6 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); -wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); -wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -129,18 +126,17 @@ const socketsLive = async function (req, res) { } respond(res, sortPaginate(liveSessions, filters)); } -wsRouter.get(`/sockets-live`, socketsLive); -wsRouter.post(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); let _projectKey = extractProjectKeyFromRequest(req); + let _sessionId = extractSessionIdFromRequest(req); let filters = extractPayloadFromRequest(req); let liveSessions = {}; let rooms = await getAvailableRooms(); for (let 
peerId of rooms) { - let {projectKey} = extractPeerId(peerId); - if (projectKey === _projectKey) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -158,11 +154,9 @@ const socketsLiveByProject = async function (req, res) { } respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); } -wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); -wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); const autocomplete = async function (req, res) { - debug && console.log("[WS]looking for available LIVE sessions"); + debug && console.log("[WS]autocomplete"); let _projectKey = extractProjectKeyFromRequest(req); let filters = extractPayloadFromRequest(req); let results = []; @@ -180,9 +174,9 @@ const autocomplete = async function (req, res) { } } } - respond(res, results); + respond(res, uniqueAutocomplete(results)); } -wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); + const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -253,6 +247,20 @@ function extractSessionInfo(socket) { } } +wsRouter.get(`/sockets-list`, socketsList); +wsRouter.post(`/sockets-list`, socketsList); +wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject); +wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject); + +wsRouter.get(`/sockets-live`, socketsLive); +wsRouter.post(`/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live/:projectKey/autocomplete`, autocomplete); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); 
+wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject); + module.exports = { wsRouter, start: (server, prefix) => { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index de002f89e..32232d36d 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -51,7 +51,7 @@ const isValidSession = function (sessionInfo, filters) { } } else if (skey.toLowerCase() === key.toLowerCase()) { for (let v of values) { - if (svalue.toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { found = true; break; } @@ -77,7 +77,7 @@ const getValidAttributes = function (sessionInfo, query) { if (typeof (svalue) === "object") { matches = [...matches, ...getValidAttributes(svalue, query)] } else if ((query.key === undefined || skey.toLowerCase() === query.key.toLowerCase()) - && svalue.toLowerCase().indexOf(query.value.toLowerCase()) >= 0 + && String(svalue).toLowerCase().indexOf(query.value.toLowerCase()) >= 0 && deduplicate.indexOf(skey + '_' + svalue) < 0) { matches.push({"type": skey, "value": svalue}); deduplicate.push(skey + '_' + svalue); @@ -121,11 +121,11 @@ const extractPayloadFromRequest = function (req) { }; if (req.query.q) { debug && console.log(`[WS]where q=${req.query.q}`); - filters.query.value = [req.query.q]; + filters.query.value = req.query.q; } if (req.query.key) { debug && console.log(`[WS]where key=${req.query.key}`); - filters.query.key = [req.query.key]; + filters.query.key = req.query.key; } if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); @@ -160,6 +160,17 @@ const sortPaginate = function (list, filters) { } return list; } +const uniqueAutocomplete = function (list) { + let _list = []; + let deduplicate = []; + for (let e of list) { + if (deduplicate.indexOf(e.type + "_" + e.value) < 0) { + _list.push(e); + deduplicate.push(e.type + "_" + e.value) + } 
+ } + return _list; +} module.exports = { extractPeerId, request_logger, @@ -170,5 +181,6 @@ module.exports = { hasFilters, objectToObjectOfArrays, extractPayloadFromRequest, - sortPaginate + sortPaginate, + uniqueAutocomplete }; \ No newline at end of file From 4e22038137a843065525f4e6e25b18ea4c150596 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 11:53:49 +0200 Subject: [PATCH 212/221] feat(assist): changed pagination response feat(assist): allow nested-key sort feat(api): support new live sessions pagination response --- api/chalicelib/core/assist.py | 7 ++++--- utilities/utils/helper.js | 33 ++++++++++++++++++++++----------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 977d98826..5cc2d70db 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -63,11 +63,12 @@ def __get_live_sessions_ws(project_id, data): except: print("couldn't get response") live_peers = [] - - for s in live_peers: + _live_peers = live_peers + if "sessions" in live_peers: + _live_peers = live_peers["sessions"] + for s in _live_peers: s["live"] = True s["projectId"] = project_id - live_peers = sorted(live_peers, key=lambda l: l.get("timestamp", 0), reverse=True) return live_peers diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 32232d36d..854f491a9 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -136,29 +136,40 @@ const extractPayloadFromRequest = function (req) { debug && console.log("payload/filters:" + JSON.stringify(filters)) return filters; } -const sortPaginate = function (list, filters) { - let skey = "timestamp"; - if (list.length > 0 && filters.sort.key) { - for (let key of Object.keys(list[0])) { - if (key.toLowerCase() == filters.sort.key.toLowerCase()) { - skey = key; - break; +const getValue = function (obj, key) { + if (obj !== undefined && obj !== null) { + let val; + for (let k of 
Object.keys(obj)) { + if (typeof (obj[k]) === "object") { + val = getValue(obj[k], key); + } else if (k.toLowerCase() === key.toLowerCase()) { + val = obj[k]; + } + + if (val !== undefined) { + return val; } } } + return undefined; +} +const sortPaginate = function (list, filters) { + const total = list.length; list.sort((a, b) => { - return a[skey] > b[skey] ? 1 : a[skey] < b[skey] ? -1 : 0; - }) + const vA = getValue(a, filters.sort.key || "timestamp"); + const vB = getValue(b, filters.sort.key || "timestamp"); + return vA > vB ? 1 : vA < vB ? -1 : 0; + }); if (filters.sort.order) { list.reverse(); } if (filters.pagination.page && filters.pagination.limit) { - return list.slice((filters.pagination.page - 1) * filters.pagination.limit, + list = list.slice((filters.pagination.page - 1) * filters.pagination.limit, filters.pagination.page * filters.pagination.limit); } - return list; + return {"total": total, "sessions": list}; } const uniqueAutocomplete = function (list) { let _list = []; From 6cc737218773b50508f72d679fb35b28c89cada5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 12:27:51 +0200 Subject: [PATCH 213/221] feat(api): support nested-key-sort for live sessions --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 45a7bc3d8..2fdcae4c3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1043,7 +1043,7 @@ class LiveSessionSearchFilterSchema(BaseModel): class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: LiveFilterType = Field(default=LiveFilterType.timestamp) + sort: Union[LiveFilterType, str] = Field(default=LiveFilterType.timestamp) order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) From f76c621350fd400bef5abd2f346c315bb49504cb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 14:02:20 +0200 Subject: [PATCH 214/221] feat(assist): support 
null&empty values for search feat(assist): changed single-session search feat(api): support null&empty values for live sessions search feat(api): support key-mapping for different names feat(api): support platform live-sessions search --- api/chalicelib/core/assist.py | 43 ++++++++++++++++------- api/schemas.py | 19 ++++++++-- ee/utilities/servers/websocket-cluster.js | 10 ++++-- ee/utilities/servers/websocket.js | 10 ++++-- utilities/servers/websocket.js | 10 ++++-- utilities/utils/helper.js | 28 ++++++++------- 6 files changed, 85 insertions(+), 35 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 5cc2d70db..bfacd9295 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -22,7 +22,7 @@ SESSION_PROJECTION_COLS = """s.project_id, def get_live_sessions_ws_user_id(project_id, user_id): data = { - "filter": {"userId": user_id} + "filter": {"userId": user_id} if user_id else {} } return __get_live_sessions_ws(project_id=project_id, data=data) @@ -73,18 +73,9 @@ def __get_live_sessions_ws(project_id, data): def get_live_session_by_id(project_id, session_id): - all_live = __get_live_sessions_ws(project_id, data={"filter": {"sessionId": session_id}}) - for l in all_live: - if str(l.get("sessionID")) == str(session_id): - return l - return None - - -def is_live(project_id, session_id, project_key=None): - if project_key is None: - project_key = projects.get_project_key(project_id) + project_key = projects.get_project_key(project_id) try: - connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}", timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") @@ -103,7 +94,33 @@ def is_live(project_id, session_id, project_key=None): except: print("couldn't get response") return False - return str(session_id) in connected_peers + return connected_peers + + +def is_live(project_id, session_id, project_key=None): + if project_key is None: + project_key = projects.get_project_key(project_id) + try: + connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}", + timeout=config("assistTimeout", cast=int, default=5)) + if connected_peers.status_code != 200: + print("!! issue with the peer-server") + print(connected_peers.text) + return False + connected_peers = connected_peers.json().get("data") + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + return False + except Exception as e: + print("issue getting Assist response") + print(str(e)) + print("expected JSON, received:") + try: + print(connected_peers.text) + except: + print("couldn't get response") + return False + return str(session_id) == connected_peers def autocomplete(project_id, q: str, key: str = None): diff --git a/api/schemas.py b/api/schemas.py index 2fdcae4c3..c2d2c5497 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1018,6 +1018,7 @@ class LiveFilterType(str, Enum): user_id = FilterType.user_id.value user_anonymous_id = FilterType.user_anonymous_id.value rev_id = FilterType.rev_id.value + platform = FilterType.platform.value page_title = "PAGETITLE" session_id = "SESSIONID" metadata = "METADATA" @@ -1025,7 +1026,6 @@ class LiveFilterType(str, Enum): tracker_version = "TRACKERVERSION" user_browser_version = "USERBROWSERVERSION" user_device_type = "USERDEVICETYPE", - timestamp = "TIMESTAMP" class LiveSessionSearchFilterSchema(BaseModel): @@ -1043,13 +1043,26 @@ class LiveSessionSearchFilterSchema(BaseModel): class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: Union[LiveFilterType, str] 
= Field(default=LiveFilterType.timestamp) + sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP") order: SortOrderType = Field(default=SortOrderType.desc) @root_validator(pre=True) - def transform_order(cls, values): + def transform(cls, values): if values.get("order") is not None: values["order"] = values["order"].upper() + if values.get("filters") is not None: + i = 0 + while i < len(values["filters"]): + if values["filters"][i]["values"] is None or len(values["filters"][i]["values"]) == 0: + del values["filters"][i] + else: + i += 1 + for i in values["filters"]: + if i.get("type") == LiveFilterType.platform.value: + i["type"] = LiveFilterType.user_device_type.value + if values.get("sort") is not None: + if values["sort"].lower() == "startts": + values["sort"] = "TIMESTAMP" return values class Config: diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 2062e5794..95cb13740 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -136,7 +136,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? liveSessions[_projectKey] + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -192,7 +195,10 @@ const socketsLiveByProject = async function (req, res) { liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []); } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? 
liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 02267fb66..d2db03e61 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -118,7 +118,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -172,7 +175,10 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 4feac9f3f..1e676a02c 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -98,7 +98,10 @@ const socketsListByProject = async function (req, res) { } } } - respond(res, liveSessions[_projectKey] || []); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? liveSessions[_projectKey] + : liveSessions[_projectKey].length > 0 ? 
liveSessions[_projectKey][0] + : null); } const socketsLive = async function (req, res) { @@ -152,7 +155,10 @@ const socketsLiveByProject = async function (req, res) { } } } - respond(res, sortPaginate(liveSessions[_projectKey] || [], filters)); + liveSessions[_projectKey] = liveSessions[_projectKey] || []; + respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters) + : liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0] + : null); } const autocomplete = async function (req, res) { diff --git a/utilities/utils/helper.js b/utilities/utils/helper.js index 854f491a9..c976d1b5c 100644 --- a/utilities/utils/helper.js +++ b/utilities/utils/helper.js @@ -42,22 +42,24 @@ const isValidSession = function (sessionInfo, filters) { let foundAll = true; for (const [key, values] of Object.entries(filters)) { let found = false; - for (const [skey, svalue] of Object.entries(sessionInfo)) { - if (svalue !== undefined && svalue !== null) { - if (typeof (svalue) === "object") { - if (isValidSession(svalue, {[key]: values})) { - found = true; - break; - } - } else if (skey.toLowerCase() === key.toLowerCase()) { - for (let v of values) { - if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { + if (values !== undefined && values !== null) { + for (const [skey, svalue] of Object.entries(sessionInfo)) { + if (svalue !== undefined && svalue !== null) { + if (typeof (svalue) === "object") { + if (isValidSession(svalue, {[key]: values})) { found = true; break; } - } - if (found) { - break; + } else if (skey.toLowerCase() === key.toLowerCase()) { + for (let v of values) { + if (String(svalue).toLowerCase().indexOf(v.toLowerCase()) >= 0) { + found = true; + break; + } + } + if (found) { + break; + } } } } From 9fb5e7c4d100c107162786a7307049b1a39a8437 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 16:34:02 +0200 Subject: [PATCH 215/221] feat(api): fixed typo --- api/schemas.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index c2d2c5497..c65236cd3 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1053,7 +1053,7 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema): if values.get("filters") is not None: i = 0 while i < len(values["filters"]): - if values["filters"][i]["values"] is None or len(values["filters"][i]["values"]) == 0: + if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0: del values["filters"][i] else: i += 1 From 891c7600a76cbb9e68cf9a9d89d953a405af9533 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 17:49:57 +0200 Subject: [PATCH 216/221] feat(api): custom metrics errors pagination feat(api): custom metrics sessions pagination --- api/chalicelib/core/custom_metrics.py | 4 ++++ api/schemas.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 5a7fdcea6..5f6f1ac94 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -70,6 +70,8 @@ def __get_errors_list(project_id, user_id, data): } data.series[0].filter.startDate = data.startTimestamp data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.page = data.page + data.series[0].filter.limit = data.limit return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) @@ -87,6 +89,8 @@ def __get_sessions_list(project_id, user_id, data): } data.series[0].filter.startDate = data.startTimestamp data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.page = data.page + data.series[0].filter.limit = data.limit return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id) diff --git a/api/schemas.py b/api/schemas.py index c65236cd3..bacceea78 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -826,7 +826,7 @@ class 
CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema): alias_generator = attribute_to_camel_case -class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema): +class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema): density: int = Field(7) class Config: From 7beb08f3984b96803de814004e8f0e6883eaee18 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 19:12:06 +0200 Subject: [PATCH 217/221] feat(db): migrate old funnels to new metric-funnels --- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 27 ++++++++++++++++++- .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 27 ++++++++++++++++++- 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 7b5169c3c..9c7e75b95 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -164,4 +164,29 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; \ No newline at end of file + view_type=excluded.view_type; + +BEGIN; +DO +$$ + BEGIN + IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND + EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + THEN + ALTER TABLE IF EXISTS metrics + ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; + WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter) + SELECT project_id, user_id, name, 'funnel', is_public, filter + FROM funnels + WHERE deleted_at ISNULL + RETURNING metric_id,_funnel_filter) + INSERT + INTO metric_series(metric_id, name, filter, index) + SELECT metric_id, 'Series 1', _funnel_filter, 0 + FROM f_t_m; + ALTER TABLE IF EXISTS metrics + DROP COLUMN IF EXISTS _funnel_filter; + END IF; + END +$$; +COMMIT; \ No 
newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 00bf4ec1d..bec9bdff3 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -151,4 +151,29 @@ ON CONFLICT (predefined_key) DO UPDATE is_template=excluded.is_template, is_public=excluded.is_public, metric_type=excluded.metric_type, - view_type=excluded.view_type; \ No newline at end of file + view_type=excluded.view_type; + +BEGIN; +DO +$$ + BEGIN + IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND + EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + THEN + ALTER TABLE IF EXISTS metrics + ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; + WITH f_t_m AS (INSERT INTO metrics (project_id, user_id, name, metric_type, is_public, _funnel_filter) + SELECT project_id, user_id, name, 'funnel', is_public, filter + FROM funnels + WHERE deleted_at ISNULL + RETURNING metric_id,_funnel_filter) + INSERT + INTO metric_series(metric_id, name, filter, index) + SELECT metric_id, 'Series 1', _funnel_filter, 0 + FROM f_t_m; + ALTER TABLE IF EXISTS metrics + DROP COLUMN IF EXISTS _funnel_filter; + END IF; + END +$$; +COMMIT; \ No newline at end of file From 4d111d6f4af8ca90b80dff4057c4fa5bb8898100 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 16 Jun 2022 19:18:52 +0200 Subject: [PATCH 218/221] feat(db): migrate to v1.7.0: fixed cross-database references issue --- ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index 9c7e75b95..ee01e24e0 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -171,7 
+171,7 @@ DO $$ BEGIN IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND - EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL)) THEN ALTER TABLE IF EXISTS metrics ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; diff --git a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql index bec9bdff3..2ed45dea7 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -158,7 +158,7 @@ DO $$ BEGIN IF (NOT EXISTS(SELECT 1 FROM metrics WHERE metric_type = 'funnel') AND - EXISTS(SELECT 1 FROM app.public.funnels WHERE deleted_at ISNULL)) + EXISTS(SELECT 1 FROM funnels WHERE deleted_at ISNULL)) THEN ALTER TABLE IF EXISTS metrics ADD COLUMN IF NOT EXISTS _funnel_filter jsonb NULL; From 1ee50b62eddca315e32501ffe1004c3d7a4f953e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Jun 2022 10:53:43 +0200 Subject: [PATCH 219/221] feat(api): full dependencies upgrade --- api/requirements.txt | 10 +++++----- ee/api/requirements.txt | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index f08b6db46..dd79c5324 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,15 +1,15 @@ -requests==2.27.1 +requests==2.28.0 urllib3==1.26.9 -boto3==1.24.8 +boto3==1.24.11 pyjwt==2.4.0 psycopg2-binary==2.9.3 -elasticsearch==7.9.1 -jira==3.1.1 +elasticsearch==8.2.3 +jira==3.2.0 fastapi==0.78.0 uvicorn[standard]==0.17.6 python-decouple==3.6 -pydantic[email]==1.8.2 +pydantic[email]==1.9.1 apscheduler==3.9.1 \ No newline at end of file diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index e96ed6ae5..1593a9206 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,16 +1,16 @@ -requests==2.27.1 +requests==2.28.0 urllib3==1.26.9 -boto3==1.24.8 +boto3==1.24.11 
pyjwt==2.4.0 psycopg2-binary==2.9.3 -elasticsearch==7.9.1 -jira==3.1.1 -clickhouse-driver==0.2.3 +elasticsearch==8.2.3 +jira==3.2.0 +clickhouse-driver==0.2.4 python3-saml==1.12.0 fastapi==0.78.0 python-multipart==0.0.5 uvicorn[standard]==0.17.6 python-decouple==3.6 -pydantic[email]==1.8.2 +pydantic[email]==1.9.1 apscheduler==3.9.1 \ No newline at end of file From 38b65537c7fe5c30e6dba0adb1f0bab6faddb68f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Jun 2022 11:31:31 +0200 Subject: [PATCH 220/221] feat(api): fixed Elasticsearch upgrade --- api/chalicelib/core/log_tool_elasticsearch.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/log_tool_elasticsearch.py b/api/chalicelib/core/log_tool_elasticsearch.py index f82dd57c1..680c28e23 100644 --- a/api/chalicelib/core/log_tool_elasticsearch.py +++ b/api/chalicelib/core/log_tool_elasticsearch.py @@ -1,4 +1,5 @@ -from elasticsearch import Elasticsearch, RequestsHttpConnection +# from elasticsearch import Elasticsearch, RequestsHttpConnection +from elasticsearch import Elasticsearch from chalicelib.core import log_tools import base64 import logging @@ -65,7 +66,7 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15): "use_ssl": use_ssl, "verify_certs": False, "ca_certs": False, - "connection_class": RequestsHttpConnection, + # "connection_class": RequestsHttpConnection, "timeout": timeout } if api_key_id is not None and len(api_key_id) > 0: From c10140b8d1a7397f27a2eec5f8e657360d6513ac Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Jun 2022 12:39:21 +0200 Subject: [PATCH 221/221] feat(api): changed empty funnel response --- api/chalicelib/core/significance.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index a868ef2d3..9bd0fa966 100644 --- a/api/chalicelib/core/significance.py +++ 
b/api/chalicelib/core/significance.py @@ -24,7 +24,6 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} - def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: """ Add minimal timestamp @@ -293,7 +292,6 @@ def pearson_corr(x: list, y: list): return r, confidence, False - def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage): """ Returns two lists with binary values 0/1: @@ -363,7 +361,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ return transitions, errors, all_errors, n_sess_affected - def get_affected_users_for_all_issues(rows, first_stage, last_stage): """ @@ -415,7 +412,6 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts - def count_sessions(rows, n_stages): session_counts = {i: set() for i in range(1, n_stages + 1)} for ind, row in enumerate(rows): @@ -467,7 +463,6 @@ def get_stages(stages, rows): return stages_list - def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False): """ @@ -544,7 +539,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues - def get_top_insights(filter_d, project_id): output = [] stages = filter_d.get("events", []) @@ -582,9 +576,8 @@ def get_top_insights(filter_d, project_id): return stages_list, total_drop_due_to_issues - def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None): - output = dict({'critical_issues_count': 0}) + output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) stages = filter_d.get("events", []) # The result of the multi-stage query rows = 
get_stages_and_events(filter_d=filter_d, project_id=project_id)