diff --git a/api/chalicelib/core/autocomplete.py b/api/chalicelib/core/autocomplete.py index 3ad845a14..d5bd022f8 100644 --- a/api/chalicelib/core/autocomplete.py +++ b/api/chalicelib/core/autocomplete.py @@ -1,5 +1,5 @@ import schemas -from chalicelib.core import countries +from chalicelib.core import countries, events, metadata from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.event_filter_definition import Event @@ -36,6 +36,7 @@ def __get_autocomplete_table(value, project_id): WHERE project_id = %(project_id)s AND type= '{e}' AND value ILIKE %(svalue)s + ORDER BY value LIMIT 5)""") if len(value) > 2: sub_queries.append(f"""(SELECT type, value @@ -43,6 +44,7 @@ def __get_autocomplete_table(value, project_id): WHERE project_id = %(project_id)s AND type= '{e}' AND value ILIKE %(value)s + ORDER BY value LIMIT 5)""") with pg_client.PostgresClient() as cur: query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";", @@ -70,7 +72,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value IN %(value)s ORDER BY value""" @@ -79,7 +81,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(svalue)s ORDER BY value LIMIT 5) @@ -88,7 +90,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(value)s ORDER BY value LIMIT 5);""" @@ -96,7 +98,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(svalue)s ORDER BY value LIMIT 10;""" @@ -122,6 +124,8 @@ def __generic_autocomplete_metas(typename): if typename == schemas.FilterType.user_country: params["value"] = tuple(countries.get_country_code_autocomplete(text)) + if len(params["value"]) == 0: + return [] query = cur.mogrify(__generic_query(typename, value_length=len(text)), params) cur.execute(query) @@ -129,3 +133,194 @@ def __generic_autocomplete_metas(typename): return rows return f + + +def __pg_errors_query(source=None, value_length=None): + if value_length is None or value_length > 2: + return f"""((SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = 
%(project_id)s + AND lg.message ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" + return f"""((SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" + + +def __search_pg_errors(project_id, value, key=None, source=None): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(__pg_errors_query(source, + value_length=len(value)), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "source": source})) + results = helper.list_to_camel_case(cur.fetchall()) + return results + + +def __search_pg_errors_ios(project_id, value, key=None, source=None): + if len(value) > 2: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(value)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + LIMIT 5);""" + else: + query = f"""(SELECT DISTINCT 
ON(lg.reason) + lg.reason AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{events.EventType.ERROR_IOS.ui_type}' AS type + FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5);""" + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) + results = helper.list_to_camel_case(cur.fetchall()) + return results + + +def __search_pg_metadata(project_id, value, key=None, source=None): + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys(): + return [] + sub_from = [] + if key is not None: + meta_keys = {key: meta_keys[key]} + + for k in meta_keys.keys(): + colname = metadata.index_to_colname(meta_keys[k]) + if len(value) > 2: + sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5) + UNION + (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(value)s LIMIT 5)) + """) + else: + sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5)""") + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify(f"""\ + SELECT key, value, 'METADATA' AS TYPE + FROM({" UNION ALL ".join(sub_from)}) AS all_metas + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) + results = helper.list_to_camel_case(cur.fetchall()) + return results diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index abed5d8cf..c5668db21 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -1,16 +1,13 @@ from typing import Optional import schemas +from chalicelib.core import autocomplete from chalicelib.core import issues -from chalicelib.core import metadata from chalicelib.core import sessions_metas - from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.event_filter_definition import SupportedFilter, Event -from chalicelib.core import autocomplete - def get_customs_by_sessionId2_pg(session_id, project_id): with pg_client.PostgresClient() as cur: @@ -101,203 +98,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False, event_ty return rows -def __pg_errors_query(source=None, value_length=None): - if value_length is None or value_length > 2: - return f"""((SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s 
USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5));""" - return f"""((SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5));""" - - -def __search_pg_errors(project_id, value, key=None, source=None): - now = TimeUTC.now() - - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(__pg_errors_query(source, - value_length=len(value)), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value), - "source": source})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : errors") - return results - - -def __search_pg_errors_ios(project_id, value, key=None, source=None): - now = TimeUTC.now() - if len(value) > 2: - query = f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s 
USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(value)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - LIMIT 5);""" - else: - query = f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - LIMIT 5);""" - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : errors") - return results - - -def __search_pg_metadata(project_id, value, key=None, source=None): - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys(): - return [] - sub_from = [] - if key is not None: - meta_keys = {key: meta_keys[key]} - - for k in meta_keys.keys(): - colname = metadata.index_to_colname(meta_keys[k]) - if len(value) > 2: - sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(svalue)s LIMIT 5) - UNION - (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(value)s LIMIT 5)) - """) - else: - sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(svalue)s LIMIT 5)""") - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(f"""\ - SELECT key, value, 'METADATA' AS TYPE - FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) - results = helper.list_to_camel_case(cur.fetchall()) - return results - - -class event_type: +class EventType: CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label") INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label") LOCATION = 
Event(ui_type=schemas.EventType.location, table="events.pages", column="path") @@ -319,46 +120,46 @@ class event_type: SUPPORTED_TYPES = { - event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK), - query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)), - event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT), - query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)), - event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION), - query=autocomplete.__generic_query( - typename=event_type.LOCATION.ui_type)), - event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM), - query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)), - event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST), + EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK), + query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)), + EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT), + query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)), + EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION), query=autocomplete.__generic_query( - typename=event_type.REQUEST.ui_type)), - event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL), - query=autocomplete.__generic_query( - typename=event_type.GRAPHQL.ui_type)), - event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION), - query=autocomplete.__generic_query( - typename=event_type.STATEACTION.ui_type)), - event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors, - query=None), - event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata, - query=None), - # IOS - event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS), - query=autocomplete.__generic_query( - typename=event_type.CLICK_IOS.ui_type)), - event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS), - query=autocomplete.__generic_query( - typename=event_type.INPUT_IOS.ui_type)), - event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS), - query=autocomplete.__generic_query( - typename=event_type.VIEW_IOS.ui_type)), - event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS), + typename=EventType.LOCATION.ui_type)), + EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM), + query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)), + EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST), + query=autocomplete.__generic_query( + typename=EventType.REQUEST.ui_type)), + EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL), + query=autocomplete.__generic_query( + typename=EventType.GRAPHQL.ui_type)), + EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION), query=autocomplete.__generic_query( - typename=event_type.CUSTOM_IOS.ui_type)), - 
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS), - query=autocomplete.__generic_query( - typename=event_type.REQUEST_IOS.ui_type)), - event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios, - query=None), + typename=EventType.STATEACTION.ui_type)), + EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors, + query=None), + EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_pg_metadata, + query=None), + # IOS + EventType.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_IOS), + query=autocomplete.__generic_query( + typename=EventType.CLICK_IOS.ui_type)), + EventType.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_IOS), + query=autocomplete.__generic_query( + typename=EventType.INPUT_IOS.ui_type)), + EventType.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_IOS), + query=autocomplete.__generic_query( + typename=EventType.VIEW_IOS.ui_type)), + EventType.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_IOS), + query=autocomplete.__generic_query( + typename=EventType.CUSTOM_IOS.ui_type)), + EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS), + query=autocomplete.__generic_query( + typename=EventType.REQUEST_IOS.ui_type)), + EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors_ios, + query=None), } @@ -366,7 +167,7 @@ def get_errors_by_session_id(session_id, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT er.*,ur.*, er.timestamp - s.start_ts AS time - FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) + FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) errors = cur.fetchall() @@ -383,11 +184,9 @@ def search(text, event_type, project_id, source, key): rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) # for IOS events autocomplete # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): - # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - # source=source) + # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - source=source) + rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): return sessions_metas.search(text, event_type, project_id) elif event_type.endswith("_IOS") \ diff --git a/api/chalicelib/core/events_ios.py b/api/chalicelib/core/events_ios.py index bae48599f..c5205ba15 100644 --- a/api/chalicelib/core/events_ios.py +++ b/api/chalicelib/core/events_ios.py @@ -7,8 +7,8 @@ def get_customs_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f"""\ SELECT c.*, - '{events.event_type.CUSTOM_IOS.ui_type}' AS type - FROM {events.event_type.CUSTOM_IOS.table} AS c + 
'{events.EventType.CUSTOM_IOS.ui_type}' AS type + FROM {events.EventType.CUSTOM_IOS.table} AS c WHERE c.session_id = %(session_id)s ORDER BY c.timestamp;""", @@ -23,8 +23,8 @@ def get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT c.*, - '{events.event_type.CLICK_IOS.ui_type}' AS type - FROM {events.event_type.CLICK_IOS.table} AS c + '{events.EventType.CLICK_IOS.ui_type}' AS type + FROM {events.EventType.CLICK_IOS.table} AS c WHERE c.session_id = %(session_id)s ORDER BY c.timestamp;""", @@ -35,8 +35,8 @@ def get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT i.*, - '{events.event_type.INPUT_IOS.ui_type}' AS type - FROM {events.event_type.INPUT_IOS.table} AS i + '{events.EventType.INPUT_IOS.ui_type}' AS type + FROM {events.EventType.INPUT_IOS.table} AS i WHERE i.session_id = %(session_id)s ORDER BY i.timestamp;""", @@ -46,8 +46,8 @@ def get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT v.*, - '{events.event_type.VIEW_IOS.ui_type}' AS type - FROM {events.event_type.VIEW_IOS.table} AS v + '{events.EventType.VIEW_IOS.ui_type}' AS type + FROM {events.EventType.VIEW_IOS.table} AS v WHERE v.session_id = %(session_id)s ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id})) @@ -61,7 +61,7 @@ def get_crashes_by_session_id(session_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f""" SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time - FROM {events.event_type.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id) + FROM {events.EventType.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id) WHERE cr.session_id = %(session_id)s ORDER BY timestamp;""", {"session_id": session_id})) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 4618b2918..b7604a050 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -490,7 +490,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr extra_constraints.append( sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -644,45 +644,45 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr **sh.multi_values(event.value, value_key=e_k), **sh.multi_values(event.source, value_key=s_k)} - if event_type == events.event_type.CLICK.ui_type: - event_from = event_from % f"{events.event_type.CLICK.table} AS main " + if event_type == events.EventType.CLICK.ui_type: + event_from = event_from % f"{events.EventType.CLICK.table} AS main " if not is_any: if event.operator == schemas.ClickEventExtraOperator._on_selector: event_where.append( sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) else: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.INPUT.ui_type: - event_from = event_from % f"{events.event_type.INPUT.table} AS main " + elif event_type == events.EventType.INPUT.ui_type: + event_from = 
event_from % f"{events.EventType.INPUT.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value, value_key=e_k)) if event.source is not None and len(event.source) > 0: event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, value_key=f"custom{i}")) full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")} - elif event_type == events.event_type.LOCATION.ui_type: - event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + elif event_type == events.EventType.LOCATION.ui_type: + event_from = event_from % f"{events.EventType.LOCATION.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.CUSTOM.ui_type: - event_from = event_from % f"{events.event_type.CUSTOM.table} AS main " + elif event_type == events.EventType.CUSTOM.ui_type: + event_from = event_from % f"{events.EventType.CUSTOM.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.REQUEST.ui_type: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + elif event_type == events.EventType.REQUEST.ui_type: + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, value_key=e_k)) # elif event_type == events.event_type.GRAPHQL.ui_type: # event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " @@ -690,14 +690,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # event_where.append( # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, # value_key=e_k)) - elif event_type == events.event_type.STATEACTION.ui_type: - event_from = event_from % f"{events.event_type.STATEACTION.table} AS main " + elif event_type == events.EventType.STATEACTION.ui_type: + event_from = event_from % f"{events.EventType.STATEACTION.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.ERROR.ui_type: - event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" + elif event_type == events.EventType.ERROR.ui_type: + event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" event.source = list(set(event.source)) if not is_any and event.value not in [None, "*", ""]: event_where.append( @@ -708,52 +708,52 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # ----- IOS - elif event_type == events.event_type.CLICK_IOS.ui_type: - event_from = event_from % 
f"{events.event_type.CLICK_IOS.table} AS main " + elif event_type == events.EventType.CLICK_IOS.ui_type: + event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.INPUT_IOS.ui_type: - event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main " + elif event_type == events.EventType.INPUT_IOS.ui_type: + event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) if event.source is not None and len(event.source) > 0: event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, value_key="custom{i}")) full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")} - elif event_type == events.event_type.VIEW_IOS.ui_type: - event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main " + elif event_type == events.EventType.VIEW_IOS.ui_type: + event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main " + elif event_type == events.EventType.CUSTOM_IOS.ui_type: + event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.REQUEST_IOS.ui_type: - event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main " + elif event_type == events.EventType.REQUEST_IOS.ui_type: + event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.ERROR_IOS.ui_type: - event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" + elif event_type == events.EventType.ERROR_IOS.ui_type: + event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" if not is_any and event.value not in [None, "*", ""]: event_where.append( sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", event.value, value_key=e_k)) elif event_type == schemas.PerformanceEventType.fetch_failed: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", + 
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, value_key=e_k)) col = performance_event.get_col(event_type) colname = col["column"] @@ -778,7 +778,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr schemas.PerformanceEventType.location_avg_cpu_load, schemas.PerformanceEventType.location_avg_memory_usage ]: - event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + event_from = event_from % f"{events.EventType.LOCATION.table} AS main " col = performance_event.get_col(event_type) colname = col["column"] tname = "main" @@ -789,7 +789,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr f"{tname}.timestamp <= %(endDate)s"] if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", event.value, value_key=e_k)) e_k += "_custom" full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} @@ -798,7 +798,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", event.source, value_key=e_k)) elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) " + event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " if not isinstance(event.value[0].value, list): event.value[0].value = [event.value[0].value] if not isinstance(event.value[1].value, list): @@ -820,14 +820,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if not is_any: event_where.append( sh.multi_conditions( - f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s", + f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", event.value[0].value, value_key=e_k1)) s_op = sh.get_sql_operator(event.value[1].operator) is_any = sh.isAny_opreator(event.value[1].operator) if not is_any: event_where.append( sh.multi_conditions( - f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s", + f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", event.value[1].value, value_key=e_k2)) e_k += "_custom" @@ -837,7 +837,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr event.source, value_key=e_k)) elif event_type == schemas.EventType.request_details: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " apply = False for j, f in enumerate(event.filters): is_any = sh.isAny_opreator(f.operator) @@ -849,7 +849,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} if f.type == schemas.FetchFilterType._url: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text", + sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) apply = True elif f.type == schemas.FetchFilterType._status_code: 
@@ -881,7 +881,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if not apply: continue elif event_type == schemas.EventType.graphql: - event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " + event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " for j, f in enumerate(event.filters): is_any = sh.isAny_opreator(f.operator) if is_any or len(f.value) == 0: @@ -892,7 +892,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} if f.type == schemas.GraphqlFilterType._name: event_where.append( - sh.multi_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value, + sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, value_key=e_k_f)) elif f.type == schemas.GraphqlFilterType._method: event_where.append( @@ -973,7 +973,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") if errors_only: - extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" + extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") extra_constraints.append("ser.project_id = %(project_id)s") # if error_status != schemas.ErrorStatus.all: diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 291fcbb4e..a2389da71 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -86,11 +86,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values["maxDuration"] = f["value"][1] elif filter_type == schemas.FilterType.referrer: # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"] + filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"] # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: if meta_keys is None: meta_keys = metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} @@ -135,31 +135,31 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: extra_from = [] op = sh.get_sql_operator(s["operator"]) event_type = s["type"].upper() - if event_type == events.event_type.CLICK.ui_type: - next_table = events.event_type.CLICK.table - next_col_name = events.event_type.CLICK.column - elif event_type == events.event_type.INPUT.ui_type: - next_table = events.event_type.INPUT.table - next_col_name = events.event_type.INPUT.column - elif event_type == events.event_type.LOCATION.ui_type: - next_table = events.event_type.LOCATION.table - next_col_name = events.event_type.LOCATION.column - elif event_type == events.event_type.CUSTOM.ui_type: - next_table = events.event_type.CUSTOM.table - next_col_name = events.event_type.CUSTOM.column + if event_type == events.EventType.CLICK.ui_type: + next_table = events.EventType.CLICK.table + 
next_col_name = events.EventType.CLICK.column + elif event_type == events.EventType.INPUT.ui_type: + next_table = events.EventType.INPUT.table + next_col_name = events.EventType.INPUT.column + elif event_type == events.EventType.LOCATION.ui_type: + next_table = events.EventType.LOCATION.table + next_col_name = events.EventType.LOCATION.column + elif event_type == events.EventType.CUSTOM.ui_type: + next_table = events.EventType.CUSTOM.table + next_col_name = events.EventType.CUSTOM.column # IOS -------------- - elif event_type == events.event_type.CLICK_IOS.ui_type: - next_table = events.event_type.CLICK_IOS.table - next_col_name = events.event_type.CLICK_IOS.column - elif event_type == events.event_type.INPUT_IOS.ui_type: - next_table = events.event_type.INPUT_IOS.table - next_col_name = events.event_type.INPUT_IOS.column - elif event_type == events.event_type.VIEW_IOS.ui_type: - next_table = events.event_type.VIEW_IOS.table - next_col_name = events.event_type.VIEW_IOS.column - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - next_table = events.event_type.CUSTOM_IOS.table - next_col_name = events.event_type.CUSTOM_IOS.column + elif event_type == events.EventType.CLICK_IOS.ui_type: + next_table = events.EventType.CLICK_IOS.table + next_col_name = events.EventType.CLICK_IOS.column + elif event_type == events.EventType.INPUT_IOS.ui_type: + next_table = events.EventType.INPUT_IOS.table + next_col_name = events.EventType.INPUT_IOS.column + elif event_type == events.EventType.VIEW_IOS.ui_type: + next_table = events.EventType.VIEW_IOS.table + next_col_name = events.EventType.VIEW_IOS.column + elif event_type == events.EventType.CUSTOM_IOS.ui_type: + next_table = events.EventType.CUSTOM_IOS.table + next_col_name = events.EventType.CUSTOM_IOS.column else: print("=================UNDEFINED") continue diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 6b6bcf287..bb08fb994 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -215,7 +215,7 @@ Pipfile.lock /chalicelib/core/metadata.py /chalicelib/core/mobile.py /chalicelib/core/sessions_assignments.py -/chalicelib/core/sessions_metas.py +#/chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py #exp /chalicelib/core/significance.py /chalicelib/core/socket_ios.py diff --git a/ee/api/chalicelib/core/autocomplete_exp.py b/ee/api/chalicelib/core/autocomplete_exp.py index 2abe97e8e..cf241f99e 100644 --- a/ee/api/chalicelib/core/autocomplete_exp.py +++ b/ee/api/chalicelib/core/autocomplete_exp.py @@ -1,6 +1,7 @@ import schemas +from chalicelib.core import countries, events, metadata from chalicelib.utils import ch_client -from chalicelib.utils import helper +from chalicelib.utils import helper, exp_ch_helper from chalicelib.utils.event_filter_definition import Event TABLE = "experimental.autocomplete" @@ -19,7 +20,17 @@ def __get_autocomplete_table(value, project_id): schemas.EventType.input] autocomplete_events.sort() sub_queries = [] + c_list = [] for e in autocomplete_events: + if e == schemas.FilterType.user_country: + c_list = countries.get_country_code_autocomplete(value) + if len(c_list) > 0: + sub_queries.append(f"""(SELECT DISTINCT ON(value) type, value + FROM {TABLE} + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value IN %(c_list)s)""") + continue sub_queries.append(f"""(SELECT type, value FROM {TABLE} WHERE project_id = %(project_id)s @@ -37,8 +48,10 @@ def __get_autocomplete_table(value, project_id): LIMIT 5)""") with ch_client.ClickHouseClient() as cur: query = " UNION DISTINCT ".join(sub_queries) + 
";" - params = {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)} + params = {"project_id": project_id, + "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "c_list": tuple(c_list)} results = [] try: results = cur.execute(query=query, params=params) @@ -55,12 +68,21 @@ def __get_autocomplete_table(value, project_id): def __generic_query(typename, value_length=None): + if typename == schemas.FilterType.user_country: + return f"""SELECT DISTINCT value, type + FROM {TABLE} + WHERE + project_id = %(project_id)s + AND type='{typename.upper()}' + AND value IN %(value)s + ORDER BY value""" + if value_length is None or value_length > 2: return f"""(SELECT DISTINCT value, type FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(svalue)s ORDER BY value LIMIT 5) @@ -69,7 +91,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(value)s ORDER BY value LIMIT 5);""" @@ -77,7 +99,7 @@ def __generic_query(typename, value_length=None): FROM {TABLE} WHERE project_id = %(project_id)s - AND type='{typename}' + AND type='{typename.upper()}' AND value ILIKE %(svalue)s ORDER BY value LIMIT 10;""" @@ -105,3 +127,124 @@ def __generic_autocomplete_metas(typename): return results return f + + +def __pg_errors_query(source=None, value_length=None): + MAIN_TABLE = exp_ch_helper.get_main_js_errors_sessions_table() + if value_length is None or value_length > 2: + return f"""((SELECT DISTINCT ON(message) + message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND message ILIKE %(svalue)s + AND event_type = 'ERROR' + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(name) + name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND name ILIKE %(svalue)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(message) + message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND message ILIKE %(value)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(name) + name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND name ILIKE %(value)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" + return f"""((SELECT DISTINCT ON(message) + message AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND message ILIKE %(svalue)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON(name) + name AS value, + source, + '{events.EventType.ERROR.ui_type}' AS type + FROM {MAIN_TABLE} + WHERE + project_id = %(project_id)s + AND name ILIKE %(svalue)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" + + +def __search_pg_errors(project_id, value, key=None, source=None): + with ch_client.ClickHouseClient() as cur: + query = cur.format(__pg_errors_query(source, value_length=len(value)), + 
{"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "source": source}) + results = cur.execute(query) + return helper.list_to_camel_case(results) + + +def __search_pg_errors_ios(project_id, value, key=None, source=None): + # TODO: define this when ios events are supported in CH + return [] + + +def __search_pg_metadata(project_id, value, key=None, source=None): + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys(): + return [] + sub_from = [] + if key is not None: + meta_keys = {key: meta_keys[key]} + + for k in meta_keys.keys(): + colname = metadata.index_to_colname(meta_keys[k]) + if len(value) > 2: + sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM {exp_ch_helper.get_main_sessions_table()} + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5) + UNION DISTINCT + (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM {exp_ch_helper.get_main_sessions_table()} + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(value)s LIMIT 5)) + """) + else: + sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM {exp_ch_helper.get_main_sessions_table()} + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5)""") + with ch_client.ClickHouseClient() as cur: + query = cur.format(f"""SELECT key, value, 'METADATA' AS TYPE + FROM({" UNION ALL ".join(sub_from)}) AS all_metas + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)}) + results = cur.execute(query) + return helper.list_to_camel_case(results) diff --git a/ee/api/chalicelib/core/errors_exp.py b/ee/api/chalicelib/core/errors_exp.py index 1fb201492..eb4331cb6 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/ee/api/chalicelib/core/errors_exp.py @@ -744,7 +744,7 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n else: table_name = "" if type_condition: - ch_sub_query.append(f"{table_name}event_type='ERROR'") + ch_sub_query.append(f"{table_name}EventType='ERROR'") if time_constraint: ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"] @@ -920,7 +920,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): params["maxDuration"] = f.value[1] elif filter_type == schemas.FilterType.referrer: - # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + # extra_from += f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)" if is_any: referrer_constraint = 'isNotNull(s.base_referrer)' else: @@ -1062,7 +1062,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence FROM {MAIN_EVENTS_TABLE} WHERE project_id=%(project_id)s - AND event_type='ERROR' + AND EventType='ERROR' GROUP BY error_id) AS time_details ON details.error_id=time_details.error_id INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py index 7ad9e830d..1d40b7673 100644 --- a/ee/api/chalicelib/core/events.py +++ b/ee/api/chalicelib/core/events.py @@ -1,14 +1,14 @@ +from typing import Optional + +from decouple 
import config + import schemas from chalicelib.core import issues -from chalicelib.core import metadata from chalicelib.core import sessions_metas - from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.event_filter_definition import SupportedFilter, Event -from decouple import config - if config("EXP_AUTOCOMPLETE", cast=bool, default=False): from . import autocomplete_exp as autocomplete else: from . import autocomplete @@ -45,7 +45,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):     for c in click_rage_issues:         merge_count = c.get("payload")         if merge_count is not None: -            merge_count = merge_count.get("count", 3) +            merge_count = merge_count.get("count", 3)         else:             merge_count = 3     for i in range(len(rows)): @@ -58,246 +58,53 @@ def __get_grouped_clickrage(rows, session_id, project_id):     return rows -def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False): +def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):     with pg_client.PostgresClient() as cur: -        cur.execute(cur.mogrify("""\ -                SELECT -                    c.*, -                    'CLICK' AS type -                FROM events.clicks AS c -                WHERE -                    c.session_id = %(session_id)s -                ORDER BY c.timestamp;""", -                                {"project_id": project_id, "session_id": session_id}) -                    ) -        rows = cur.fetchall() -        if group_clickrage: -            rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) - -        cur.execute(cur.mogrify(""" -                SELECT -                    i.*, -                    'INPUT' AS type -                FROM events.inputs AS i -                WHERE -                    i.session_id = %(session_id)s -                ORDER BY i.timestamp;""", -                                {"project_id": project_id, "session_id": session_id}) -                    ) -        rows += cur.fetchall() -        cur.execute(cur.mogrify("""\ -                SELECT -                    l.*, -                    l.path AS value, -                    l.path AS url, -                    'LOCATION' AS type -                FROM events.pages AS l -                WHERE -                    l.session_id = %(session_id)s -                ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) -        rows += cur.fetchall() +        rows = [] +        if event_type is None or event_type == schemas.EventType.click: +            cur.execute(cur.mogrify("""\ +                    SELECT +                        c.*, +                        'CLICK' AS type +                    FROM events.clicks AS c +                    WHERE +                        c.session_id = %(session_id)s +                    ORDER BY c.timestamp;""", +                                    {"project_id": project_id, "session_id": session_id}) +                        ) +            rows += cur.fetchall() +            if group_clickrage: +                rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) +        if event_type is None or event_type == schemas.EventType.input: +            cur.execute(cur.mogrify(""" +                    SELECT +                        i.*, +                        'INPUT' AS type +                    FROM events.inputs AS i +                    WHERE +                        i.session_id = %(session_id)s +                    ORDER BY i.timestamp;""", +                                    {"project_id": project_id, "session_id": session_id}) +                        ) +            rows += cur.fetchall() +        if event_type is None or event_type == schemas.EventType.location: +            cur.execute(cur.mogrify("""\ +                    SELECT +                        l.*, +                        l.path AS value, +                        l.path AS url, +                        'LOCATION' AS type +                    FROM events.pages AS l +                    WHERE +                        l.session_id = %(session_id)s +                    ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) +            rows += cur.fetchall()     rows = helper.list_to_camel_case(rows)     rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))     return rows -def __pg_errors_query(source=None, value_length=None): -    if value_length is None or value_length > 2: -        return f"""((SELECT DISTINCT ON(lg.message) -                       lg.message AS value, -                       source, -                       '{event_type.ERROR.ui_type}' AS type -                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) -                    WHERE -                      s.project_id = 
%(project_id)s - AND lg.message ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5));""" - return f"""((SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION DISTINCT - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5));""" - - -def __search_pg_errors(project_id, value, key=None, source=None): - now = TimeUTC.now() - - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(__pg_errors_query(source, - value_length=len(value)), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value), - "source": source})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : errors") - return results - - -def __search_pg_errors_ios(project_id, value, key=None, source=None): - now = TimeUTC.now() - if len(value) > 2: - query = f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s 
- AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(value)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - LIMIT 5);""" - else: - query = f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.reason ILIKE %(svalue)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.project_id = %(project_id)s - AND lg.name ILIKE %(svalue)s - LIMIT 5);""" - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : errors") - return results - - -def __search_pg_metadata(project_id, value, key=None, source=None): - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys(): - return [] - sub_from = [] - if key is not None: - meta_keys = {key: meta_keys[key]} - - for k in meta_keys.keys(): - colname = metadata.index_to_colname(meta_keys[k]) - if len(value) > 2: - sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(svalue)s LIMIT 5) - UNION - (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(value)s LIMIT 5)) - """) - else: - sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key - FROM public.sessions - WHERE project_id = %(project_id)s - AND {colname} ILIKE %(svalue)s LIMIT 5)""") - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(f"""\ - SELECT key, value, 'METADATA' AS TYPE - FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) - results = helper.list_to_camel_case(cur.fetchall()) - return results - - -class event_type: +class EventType: CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label") INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label") LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", 
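The __search_pg_errors, __search_pg_errors_ios and __search_pg_metadata helpers removed above (re-homed in autocomplete.py, per the earlier hunks) all feed ILIKE with the same pair of parameters: svalue built from "^" + value for an anchored prefix match, and value for a plain contains match. A minimal sketch of that convention, using a simplified stand-in for helper.string_to_sql_like (the real helper lives in chalicelib.utils.helper and may handle wildcards and escaping differently):

def to_sql_like(text: str) -> str:
    # stand-in for helper.string_to_sql_like: escape ILIKE metacharacters,
    # treat a leading "^" as "anchor at the start of the value"
    text = text.replace("%", r"\%").replace("_", r"\_")
    if text.startswith("^"):
        return text[1:] + "%"      # prefix match: 'TypeError%'
    return "%" + text + "%"        # contains match: '%TypeError%'

params = {
    "svalue": to_sql_like("^" + "TypeError"),  # bound to ... ILIKE %(svalue)s
    "value": to_sql_like("TypeError"),         # bound to ... ILIKE %(value)s
}
print(params)  # {'svalue': 'TypeError%', 'value': '%TypeError%'}

The broader contains-match subqueries are only added when len(value) > 2, which is why both parameters appear in the long branch and only svalue in the short one.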
column="path") @@ -319,46 +126,46 @@ class event_type: SUPPORTED_TYPES = { - event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK), - query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)), - event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT), - query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)), - event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION), - query=autocomplete.__generic_query( - typename=event_type.LOCATION.ui_type)), - event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM), - query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)), - event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST), + EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK), + query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)), + EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT), + query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)), + EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION), query=autocomplete.__generic_query( - typename=event_type.REQUEST.ui_type)), - event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL), - query=autocomplete.__generic_query( - typename=event_type.GRAPHQL.ui_type)), - event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION), - query=autocomplete.__generic_query( - typename=event_type.STATEACTION.ui_type)), - event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors, - query=None), - event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata, - query=None), - # IOS - event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS), - query=autocomplete.__generic_query( - typename=event_type.CLICK_IOS.ui_type)), - event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS), - query=autocomplete.__generic_query( - typename=event_type.INPUT_IOS.ui_type)), - event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS), - query=autocomplete.__generic_query( - typename=event_type.VIEW_IOS.ui_type)), - event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS), + typename=EventType.LOCATION.ui_type)), + EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM), + query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)), + EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST), + query=autocomplete.__generic_query( + typename=EventType.REQUEST.ui_type)), + EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL), + query=autocomplete.__generic_query( + typename=EventType.GRAPHQL.ui_type)), + EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION), query=autocomplete.__generic_query( - typename=event_type.CUSTOM_IOS.ui_type)), - event_type.REQUEST_IOS.ui_type: 
SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS), - query=autocomplete.__generic_query( - typename=event_type.REQUEST_IOS.ui_type)), - event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios, - query=None), + typename=EventType.STATEACTION.ui_type)), + EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors, + query=None), + EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_pg_metadata, + query=None), + # IOS + EventType.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_IOS), + query=autocomplete.__generic_query( + typename=EventType.CLICK_IOS.ui_type)), + EventType.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_IOS), + query=autocomplete.__generic_query( + typename=EventType.INPUT_IOS.ui_type)), + EventType.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_IOS), + query=autocomplete.__generic_query( + typename=EventType.VIEW_IOS.ui_type)), + EventType.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_IOS), + query=autocomplete.__generic_query( + typename=EventType.CUSTOM_IOS.ui_type)), + EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS), + query=autocomplete.__generic_query( + typename=EventType.REQUEST_IOS.ui_type)), + EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors_ios, + query=None), } @@ -366,7 +173,7 @@ def get_errors_by_session_id(session_id, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT er.*,ur.*, er.timestamp - s.start_ts AS time - FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) + FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) errors = cur.fetchall() @@ -383,11 +190,9 @@ def search(text, event_type, project_id, source, key): rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) # for IOS events autocomplete # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): - # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - # source=source) + # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - source=source) + rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): return sessions_metas.search(text, event_type, project_id) elif event_type.endswith("_IOS") \ diff --git a/ee/api/chalicelib/core/metrics_exp.py b/ee/api/chalicelib/core/metrics_exp.py index c41676d4a..268be3d1c 100644 --- a/ee/api/chalicelib/core/metrics_exp.py +++ b/ee/api/chalicelib/core/metrics_exp.py @@ -210,10 +210,10 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="errors", 
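SUPPORTED_TYPES above is a plain registry keyed by ui_type, and search() dispatches through it, falling back to the "_IOS" variant and then to sessions_metas. A rough, self-contained sketch of that dispatch; SupportedFilter here is a stand-in for the class from event_filter_definition, and the handler is a placeholder:

from collections import namedtuple

# stand-in for chalicelib.utils.event_filter_definition.SupportedFilter
SupportedFilter = namedtuple("SupportedFilter", ["get", "query"])

def click_autocomplete(project_id, value, key=None, source=None):
    # placeholder for autocomplete.__generic_autocomplete(EventType.CLICK)
    return [{"type": "CLICK", "value": value}]

SUPPORTED_TYPES = {"CLICK": SupportedFilter(get=click_autocomplete, query=None)}

def search(text, event_type, project_id, source=None, key=None):
    if event_type in SUPPORTED_TYPES:
        return SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
    if event_type + "_IOS" in SUPPORTED_TYPES:   # mobile fallback, as in the hunk above
        return SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text,
                                                        key=key, source=source)
    return []  # the real function also falls through to sessions_metas.search here

print(search("Add to cart", "CLICK", project_id=1))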
data=args) - ch_sub_query.append("errors.event_type = 'ERROR'") + ch_sub_query.append("errors.EventType = 'ERROR'") ch_sub_query.append("errors.source = 'js_exception'") ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args) - ch_sub_query_chart.append("errors.event_type = 'ERROR'") + ch_sub_query_chart.append("errors.EventType = 'ERROR'") ch_sub_query_chart.append("errors.source = 'js_exception'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -270,9 +270,9 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="errors", data=args) - ch_sub_query.append("errors.event_type='ERROR'") + ch_sub_query.append("errors.EventType='ERROR'") ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args) - ch_sub_query_chart.append("errors.event_type='ERROR'") + ch_sub_query_chart.append("errors.EventType='ERROR'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query_chart += meta_condition @@ -290,7 +290,7 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), toUnixTimestamp(MAX(datetime))*1000 AS lastOccurrenceAt, toUnixTimestamp(MIN(datetime))*1000 AS firstOccurrenceAt FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors - WHERE event_type='ERROR' AND project_id=%(project_id)s + WHERE EventType='ERROR' AND project_id=%(project_id)s GROUP BY error_id) AS errors_time USING(error_id) ORDER BY sessions DESC, count DESC LIMIT 10;""" params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, @@ -344,7 +344,7 @@ def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("(pages.dom_content_loaded_event_end>0 OR pages.first_contentful_paint_time>0)") @@ -377,7 +377,7 @@ def get_application_activity(project_id, startTimestamp=TimeUTC.now(delta_days=- def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **args): result = {} ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -390,7 +390,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a result = {**result, **row} ch_sub_query = __get_basic_constraints(table_name="resources", data=args) - # ch_sub_query.append("events.event_type='RESOURCE'") + # ch_sub_query.append("events.EventType='RESOURCE'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("resources.type= %(type)s") @@ -457,10 +457,10 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), density=7, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query = __get_basic_constraints(table_name="resources", data=args) - # ch_sub_query.append("events.event_type='RESOURCE'") + # 
ch_sub_query.append("events.EventType='RESOURCE'") ch_sub_query.append("resources.type = 'img'") ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) - # ch_sub_query_chart.append("events.event_type='RESOURCE'") + # ch_sub_query_chart.append("events.EventType='RESOURCE'") ch_sub_query_chart.append("resources.type = 'img'") ch_sub_query_chart.append("resources.url_hostpath IN %(url)s") meta_condition = __get_meta_constraint(args) @@ -527,7 +527,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi img_constraints = [] request_constraints = [] ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) - # ch_sub_query_chart.append("event_type='RESOURCE'") + # ch_sub_query_chart.append("EventType='RESOURCE'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -577,7 +577,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi end_time=endTimestamp, density=density, neutral={"avg": 0})] ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") ch_sub_query_chart += meta_condition ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, @@ -835,7 +835,7 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) - # ch_sub_query_chart.append("events.event_type='RESOURCE'") + # ch_sub_query_chart.append("events.EventType='RESOURCE'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -922,7 +922,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- endTimestamp=TimeUTC.now(), density=19, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") if url is not None: ch_sub_query_chart.append(f"pages.url_path = %(value)s") ch_sub_query_chart.append("isNotNull(pages.dom_building_time)") @@ -1045,7 +1045,7 @@ def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query.append("isNotNull(pages.speed_index)") ch_sub_query.append("pages.speed_index>0") meta_condition = __get_meta_constraint(args) @@ -1074,7 +1074,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 endTimestamp=TimeUTC.now(), density=7, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") ch_sub_query_chart.append("isNotNull(pages.response_time)") 
ch_sub_query_chart.append("pages.response_time>0") meta_condition = __get_meta_constraint(args) @@ -1110,7 +1110,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=20, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query.append("isNotNull(pages.response_time)") ch_sub_query.append("pages.response_time>0") meta_condition = __get_meta_constraint(args) @@ -1249,7 +1249,7 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1 def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), value=None, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1281,7 +1281,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") ch_sub_query_chart.append("isNotNull(pages.visually_complete)") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -1316,7 +1316,7 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d endTimestamp=TimeUTC.now(), value=None, density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query.append("isNotNull(pages.response_time)") ch_sub_query.append("pages.response_time>0") sch_sub_query = ch_sub_query[:] @@ -1351,7 +1351,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="performance", round_start=True, data=args) - ch_sub_query_chart.append("performance.event_type='PERFORMANCE'") + ch_sub_query_chart.append("performance.EventType='PERFORMANCE'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -1384,7 +1384,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="performance", round_start=True, data=args) - ch_sub_query_chart.append("performance.event_type='PERFORMANCE'") + ch_sub_query_chart.append("performance.EventType='PERFORMANCE'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -1417,7 +1417,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="performance", round_start=True, data=args) - 
ch_sub_query_chart.append("performance.event_type='PERFORMANCE'") + ch_sub_query_chart.append("performance.EventType='PERFORMANCE'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -1532,7 +1532,7 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=6, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args) - ch_sub_query.append("requests.event_type='REQUEST'") + ch_sub_query.append("requests.EventType='REQUEST'") ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1577,7 +1577,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC. endTimestamp=TimeUTC.now(), density=6, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args) - ch_sub_query.append("requests.event_type='REQUEST'") + ch_sub_query.append("requests.EventType='REQUEST'") ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1658,7 +1658,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): ch_sub_query = __get_basic_constraints(table_name="requests", data=args) - ch_sub_query.append("requests.event_type = 'REQUEST'") + ch_sub_query.append("requests.EventType = 'REQUEST'") ch_sub_query.append("requests.success = 0") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1731,7 +1731,7 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=- def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, **args): ch_sub_query = __get_basic_constraints(table_name="requests", data=args) - ch_sub_query.append("requests.event_type = 'REQUEST'") + ch_sub_query.append("requests.EventType = 'REQUEST'") ch_sub_query.append("intDiv(requests.status, 100) != 2") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1758,7 +1758,7 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now endTimestamp=TimeUTC.now(), platform=None, **args): ch_sub_query = __get_basic_constraints(table_name="requests", data=args) - ch_sub_query.append("requests.event_type = 'REQUEST'") + ch_sub_query.append("requests.EventType = 'REQUEST'") ch_sub_query.append(f"intDiv(requests.status, 100) == {status}") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -1799,17 +1799,17 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="events", round_start=True, data=args) - ch_sub_query_chart.append("(events.event_type = 'REQUEST' OR events.event_type = 'ERROR')") - ch_sub_query_chart.append("(events.status>200 OR events.event_type = 'ERROR')") + ch_sub_query_chart.append("(events.EventType = 'REQUEST' OR events.EventType = 'ERROR')") + ch_sub_query_chart.append("(events.status>200 OR events.EventType = 'ERROR')") meta_condition = 
__get_meta_constraint(args) ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - SUM(events.event_type = 'REQUEST' AND intDiv(events.status, 100) == 4) AS _4xx, - SUM(events.event_type = 'REQUEST' AND intDiv(events.status, 100) == 5) AS _5xx, - SUM(events.event_type = 'ERROR' AND events.source == 'js_exception') AS js, - SUM(events.event_type = 'ERROR' AND events.source != 'js_exception') AS integrations + SUM(events.EventType = 'REQUEST' AND intDiv(events.status, 100) == 4) AS _4xx, + SUM(events.EventType = 'REQUEST' AND intDiv(events.status, 100) == 5) AS _5xx, + SUM(events.EventType = 'ERROR' AND events.source == 'js_exception') AS js, + SUM(events.EventType = 'ERROR' AND events.source != 'js_exception') AS integrations FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS events WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1834,7 +1834,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) ch_sub_query_chart_response_end = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart_response_end.append("pages.event_type='LOCATION'") + ch_sub_query_chart_response_end.append("pages.EventType='LOCATION'") ch_sub_query_chart_response_end.append("isNotNull(pages.response_end)") ch_sub_query_chart_response_end.append("pages.response_end>0") meta_condition = __get_meta_constraint(args) @@ -1876,7 +1876,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args) - ch_sub_query_chart.append("errors.event_type='ERROR'") + ch_sub_query_chart.append("errors.EventType='ERROR'") ch_sub_query_chart.append("errors.source == 'js_exception'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -2012,9 +2012,9 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1) endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args) - ch_sub_query.append("requests.event_type='REQUEST'") + ch_sub_query.append("requests.EventType='REQUEST'") ch_sub_query.append("requests.success = 0") - sch_sub_query = ["rs.project_id =toUInt16(%(project_id)s)", "rs.event_type='REQUEST'"] + sch_sub_query = ["rs.project_id =toUInt16(%(project_id)s)", "rs.EventType='REQUEST'"] meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition # sch_sub_query += meta_condition @@ -2073,7 +2073,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("pages.load_event_end>0") @@ -2111,7 +2111,7 @@ def get_performance_avg_page_load_time(ch, project_id, 
startTimestamp=TimeUTC.no ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") ch_sub_query_chart += meta_condition ch_sub_query_chart.append("pages.load_event_end>0") @@ -2308,7 +2308,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("pages.dom_content_loaded_event_end>0") @@ -2325,7 +2325,7 @@ def __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTim **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -2371,7 +2371,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("pages.first_contentful_paint_time>0") @@ -2390,7 +2390,7 @@ def __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTim **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -2438,7 +2438,7 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition @@ -2459,7 +2459,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition @@ -2553,11 +2553,11 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del 
endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: @@ -2592,11 +2592,11 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_ endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: @@ -2631,12 +2631,12 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: @@ -2672,12 +2672,12 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no endTimestamp=TimeUTC.now(), value=None, density=19, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: @@ -2716,12 +2716,12 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = 
__get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: @@ -2760,12 +2760,12 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density) ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) - ch_sub_query_chart.append("pages.event_type='LOCATION'") + ch_sub_query_chart.append("pages.EventType='LOCATION'") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query = __get_basic_constraints(table_name="pages", data=args) - ch_sub_query.append("pages.event_type='LOCATION'") + ch_sub_query.append("pages.EventType='LOCATION'") ch_sub_query += meta_condition if value is not None: diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 5e26c5865..4aec1778d 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -493,7 +493,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr extra_constraints.append( sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -647,45 +647,45 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr **sh.multi_values(event.value, value_key=e_k), **sh.multi_values(event.source, value_key=s_k)} - if event_type == events.event_type.CLICK.ui_type: - event_from = event_from % f"{events.event_type.CLICK.table} AS main " + if event_type == events.EventType.CLICK.ui_type: + event_from = event_from % f"{events.EventType.CLICK.table} AS main " if not is_any: if event.operator == schemas.ClickEventExtraOperator._on_selector: event_where.append( sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) else: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.INPUT.ui_type: - event_from = event_from % f"{events.event_type.INPUT.table} AS main " + elif event_type == events.EventType.INPUT.ui_type: + event_from = event_from % f"{events.EventType.INPUT.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value, value_key=e_k)) if event.source is not None and len(event.source) > 0: event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, value_key=f"custom{i}")) full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")} - elif event_type == events.event_type.LOCATION.ui_type: - event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + elif event_type == events.EventType.LOCATION.ui_type: + event_from = event_from % f"{events.EventType.LOCATION.table} AS main " if not is_any: event_where.append( - 
sh.multi_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.CUSTOM.ui_type: - event_from = event_from % f"{events.event_type.CUSTOM.table} AS main " + elif event_type == events.EventType.CUSTOM.ui_type: + event_from = event_from % f"{events.EventType.CUSTOM.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.REQUEST.ui_type: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + elif event_type == events.EventType.REQUEST.ui_type: + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value, + sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, value_key=e_k)) # elif event_type == events.event_type.GRAPHQL.ui_type: # event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " @@ -693,14 +693,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # event_where.append( # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, # value_key=e_k)) - elif event_type == events.event_type.STATEACTION.ui_type: - event_from = event_from % f"{events.event_type.STATEACTION.table} AS main " + elif event_type == events.EventType.STATEACTION.ui_type: + event_from = event_from % f"{events.EventType.STATEACTION.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.ERROR.ui_type: - event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" + elif event_type == events.EventType.ERROR.ui_type: + event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" event.source = list(set(event.source)) if not is_any and event.value not in [None, "*", ""]: event_where.append( @@ -711,52 +711,52 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # ----- IOS - elif event_type == events.event_type.CLICK_IOS.ui_type: - event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main " + elif event_type == events.EventType.CLICK_IOS.ui_type: + event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.INPUT_IOS.ui_type: - event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main " + elif event_type == events.EventType.INPUT_IOS.ui_type: + event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s", + 
sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) if event.source is not None and len(event.source) > 0: event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, value_key="custom{i}")) full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")} - elif event_type == events.event_type.VIEW_IOS.ui_type: - event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main " + elif event_type == events.EventType.VIEW_IOS.ui_type: + event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main " + elif event_type == events.EventType.CUSTOM_IOS.ui_type: + event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.REQUEST_IOS.ui_type: - event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main " + elif event_type == events.EventType.REQUEST_IOS.ui_type: + event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s", event.value, value_key=e_k)) - elif event_type == events.event_type.ERROR_IOS.ui_type: - event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" + elif event_type == events.EventType.ERROR_IOS.ui_type: + event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" if not is_any and event.value not in [None, "*", ""]: event_where.append( sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", event.value, value_key=e_k)) elif event_type == schemas.PerformanceEventType.fetch_failed: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " if not is_any: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, value_key=e_k)) col = performance_event.get_col(event_type) colname = col["column"] @@ -781,7 +781,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr schemas.PerformanceEventType.location_avg_cpu_load, schemas.PerformanceEventType.location_avg_memory_usage ]: - event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + event_from = event_from % f"{events.EventType.LOCATION.table} AS main " col = performance_event.get_col(event_type) colname = col["column"] tname = "main" @@ -792,7 +792,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr f"{tname}.timestamp <= %(endDate)s"] if not is_any: event_where.append( - 
sh.multi_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", event.value, value_key=e_k)) e_k += "_custom" full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} @@ -801,7 +801,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", event.source, value_key=e_k)) elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) " + event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " if not isinstance(event.value[0].value, list): event.value[0].value = [event.value[0].value] if not isinstance(event.value[1].value, list): @@ -823,14 +823,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if not is_any: event_where.append( sh.multi_conditions( - f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s", + f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", event.value[0].value, value_key=e_k1)) s_op = sh.get_sql_operator(event.value[1].operator) is_any = sh.isAny_opreator(event.value[1].operator) if not is_any: event_where.append( sh.multi_conditions( - f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s", + f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", event.value[1].value, value_key=e_k2)) e_k += "_custom" @@ -840,7 +840,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr event.source, value_key=e_k)) elif event_type == schemas.EventType.request_details: - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + event_from = event_from % f"{events.EventType.REQUEST.table} AS main " apply = False for j, f in enumerate(event.filters): is_any = sh.isAny_opreator(f.operator) @@ -852,7 +852,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} if f.type == schemas.FetchFilterType._url: event_where.append( - sh.multi_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text", + sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) apply = True elif f.type == schemas.FetchFilterType._status_code: @@ -884,7 +884,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if not apply: continue elif event_type == schemas.EventType.graphql: - event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " + event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " for j, f in enumerate(event.filters): is_any = sh.isAny_opreator(f.operator) if is_any or len(f.value) == 0: @@ -895,7 +895,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} if f.type == schemas.GraphqlFilterType._name: event_where.append( - sh.multi_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value, + 
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, value_key=e_k_f)) elif f.type == schemas.GraphqlFilterType._method: event_where.append( @@ -976,7 +976,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") if errors_only: - extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" + extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") extra_constraints.append("ser.project_id = %(project_id)s") # if error_status != schemas.ErrorStatus.all: diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 3e37f7eb2..3258b388a 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -345,7 +345,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d WHERE ev.datetime >= toDateTime(%(startDate)s / 1000) AND ev.datetime <= toDateTime(%(endDate)s / 1000) AND ev.project_id = %(project_id)s - AND ev.event_type = 'LOCATION'""" + AND ev.EventType = 'LOCATION'""" elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, operator=schemas.SearchEventOperator._is)) @@ -451,7 +451,7 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): event.filters is None or len(event.filters) == 0)) -def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType]): +def __get_event_type(EventType: Union[schemas.EventType, schemas.PerformanceEventType]): defs = { schemas.EventType.click: "CLICK", schemas.EventType.input: "INPUT", @@ -470,7 +470,7 @@ def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEve } if event_type not in defs: - raise Exception(f"unsupported event_type:{event_type}") + raise Exception(f"unsupported EventType:{event_type}") return defs.get(event_type) @@ -624,7 +624,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, ss_constraints.append( _multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -777,32 +777,32 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, **_multiple_values(event.value, value_key=e_k), **_multiple_values(event.source, value_key=s_k)} - if event_type == events.event_type.CLICK.ui_type: + if event_type == events.EventType.CLICK.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - _column = events.event_type.CLICK.column - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + _column = events.EventType.CLICK.column + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": 
f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1] - elif event_type == events.event_type.INPUT.ui_type: + elif event_type == events.EventType.INPUT.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - _column = events.event_type.INPUT.column - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + _column = events.EventType.INPUT.column + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, @@ -813,80 +813,80 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, value_key=f"custom{i}")) full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")} - elif event_type == events.event_type.LOCATION.ui_type: + elif event_type == events.EventType.LOCATION.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " _column = 'url_path' - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1] - elif event_type == events.event_type.CUSTOM.ui_type: + elif event_type == events.EventType.CUSTOM.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - _column = events.event_type.CUSTOM.column - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + _column = events.EventType.CUSTOM.column + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1] - elif event_type == events.event_type.REQUEST.ui_type: + elif event_type == events.EventType.REQUEST.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} 
AS main " _column = 'url_path' - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1] - # elif event_type == events.event_type.GRAPHQL.ui_type: + # elif EventType == events.EventType.GRAPHQL.ui_type: # event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main" - # event_where.append(f"main.event_type='GRAPHQL'") + # event_where.append(f"main.EventType='GRAPHQL'") # events_conditions.append({"type": event_where[-1]}) # if not is_any: # event_where.append( - # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, + # _multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k})s", event.value, # value_key=e_k)) # events_conditions[-1]["condition"] = event_where[-1] - elif event_type == events.event_type.STATEACTION.ui_type: + elif event_type == events.EventType.STATEACTION.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - _column = events.event_type.STATEACTION.column - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + _column = events.EventType.STATEACTION.column + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.EventType='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1] # TODO: isNot for ERROR - elif event_type == events.event_type.ERROR.ui_type: + elif event_type == events.EventType.ERROR.ui_type: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main" events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s" - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) event.source = tuple(event.source) events_conditions[-1]["condition"] = [] @@ -906,14 +906,14 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, elif event_type == schemas.PerformanceEventType.fetch_failed: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " _column = 'url_path' - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.EventType='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) events_conditions[-1]["condition"] = [] if not is_any: if is_not: event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", 
event.value, value_key=e_k)) - events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) + events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"}) events_conditions_not[-1]["condition"] = event_where[-1] else: event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", @@ -925,13 +925,13 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) - # elif event_type == schemas.PerformanceEventType.fetch_duration: - # event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + # elif event_type == schemas.PerformanceEventType.fetch_duration: + # event_from = event_from % f"{events.EventType.REQUEST.table} AS main " # if not is_any: # event_where.append( # _multiple_conditions(f"main.url_path {op} %({e_k})s", # event.value, value_key=e_k)) - # col = performance_event.get_col(event_type) + # col = performance_event.get_col(event_type) # colname = col["column"] # tname = "main" # e_k += "_custom" @@ -944,7 +944,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, schemas.PerformanceEventType.location_largest_contentful_paint_time, schemas.PerformanceEventType.location_ttfb]: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.event_type='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) events_conditions[-1]["condition"] = [] col = performance_event.get_col(event_type) @@ -967,7 +967,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, elif event_type in [schemas.PerformanceEventType.location_avg_cpu_load, schemas.PerformanceEventType.location_avg_memory_usage]: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.event_type='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) events_conditions[-1]["condition"] = [] col = performance_event.get_col(event_type) @@ -989,10 +989,10 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, # TODO: no isNot for TimeBetweenEvents elif event_type == schemas.PerformanceEventType.time_between_events: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - # event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) " - event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'") + # event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " + event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'") events_conditions.append({"type": event_where[-1]}) - event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'") + event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'") events_conditions.append({"type": event_where[-1]}) if not isinstance(event.value[0].value, list): @@ -1016,7 +1016,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, if not is_any: 
event_where.append( _multiple_conditions( - f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s", + f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", event.value[0].value, value_key=e_k1)) events_conditions[-2]["condition"] = event_where[-1] s_op = __get_sql_operator(event.value[1].operator) @@ -1024,7 +1024,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, if not is_any: event_where.append( _multiple_conditions( - f"main.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s", + f"main.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", event.value[1].value, value_key=e_k2)) events_conditions[-1]["condition"] = event_where[-1] @@ -1040,7 +1040,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, # TODO: no isNot for RequestDetails elif event_type == schemas.EventType.request_details: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - event_where.append(f"main.event_type='{__get_event_type(event_type)}'") + event_where.append(f"main.event_type='{__get_event_type(event_type)}'") events_conditions.append({"type": event_where[-1]}) apply = False events_conditions[-1]["condition"] = [] @@ -1093,7 +1093,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, # TODO: no isNot for GraphQL elif event_type == schemas.EventType.graphql: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " - event_where.append(f"main.event_type='GRAPHQL'") + event_where.append(f"main.event_type='GRAPHQL'") events_conditions.append({"type": event_where[-1]}) events_conditions[-1]["condition"] = [] for j, f in enumerate(event.filters): @@ -1106,7 +1106,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)} if f.type == schemas.GraphqlFilterType._name: event_where.append( - _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value, + _multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) elif f.type == schemas.GraphqlFilterType._method: @@ -1287,7 +1287,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") # if errors_only: - # extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" + # extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" # extra_constraints.append("ser.source = 'js_exception'") # extra_constraints.append("ser.project_id = %(project_id)s") # if error_status != schemas.ErrorStatus.all: diff --git a/ee/api/chalicelib/core/sessions_metas.py b/ee/api/chalicelib/core/sessions_metas.py new file mode 100644 index 000000000..65d3eb6db --- /dev/null +++ b/ee/api/chalicelib/core/sessions_metas.py @@ -0,0 +1,76 @@ +import schemas +from chalicelib.utils.event_filter_definition import SupportedFilter +from decouple import config + +if config("EXP_AUTOCOMPLETE", cast=bool, default=False): + from . import autocomplete_exp as autocomplete +else: + from . 
import autocomplete as autocomplete + +SUPPORTED_TYPES = { + schemas.FilterType.user_os: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)), + schemas.FilterType.user_browser: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)), + schemas.FilterType.user_device: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)), + schemas.FilterType.user_country: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)), + schemas.FilterType.user_id: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)), + schemas.FilterType.user_anonymous_id: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)), + schemas.FilterType.rev_id: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)), + schemas.FilterType.referrer: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)), + schemas.FilterType.utm_campaign: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)), + schemas.FilterType.utm_medium: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)), + schemas.FilterType.utm_source: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)), + # IOS + schemas.FilterType.user_os_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios)), + schemas.FilterType.user_device_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas( + typename=schemas.FilterType.user_device_ios), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_ios)), + schemas.FilterType.user_country_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios)), + schemas.FilterType.user_id_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios), + 
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios)), + schemas.FilterType.user_anonymous_id_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios)), + schemas.FilterType.rev_id_ios: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios)), + +} + + +def search(text: str, meta_type: schemas.FilterType, project_id: int): + rows = [] + if meta_type not in list(SUPPORTED_TYPES.keys()): + return {"errors": ["unsupported type"]} + rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) + # for IOS events autocomplete + # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): + # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) + return {"data": rows} diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index bcc47da4c..290afcfd1 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -92,11 +92,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values["maxDuration"] = f["value"][1] elif filter_type == schemas.FilterType.referrer: # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"] + filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"] # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: if meta_keys is None: meta_keys = metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} @@ -141,31 +141,31 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: extra_from = [] op = sessions.__get_sql_operator(s["operator"]) event_type = s["type"].upper() - if event_type == events.event_type.CLICK.ui_type: - next_table = events.event_type.CLICK.table - next_col_name = events.event_type.CLICK.column - elif event_type == events.event_type.INPUT.ui_type: - next_table = events.event_type.INPUT.table - next_col_name = events.event_type.INPUT.column - elif event_type == events.event_type.LOCATION.ui_type: - next_table = events.event_type.LOCATION.table - next_col_name = events.event_type.LOCATION.column - elif event_type == events.event_type.CUSTOM.ui_type: - next_table = events.event_type.CUSTOM.table - next_col_name = events.event_type.CUSTOM.column + if event_type == events.EventType.CLICK.ui_type: + next_table = events.EventType.CLICK.table + next_col_name = events.EventType.CLICK.column + elif event_type == events.EventType.INPUT.ui_type: + next_table = events.EventType.INPUT.table + next_col_name = events.EventType.INPUT.column + elif event_type == events.EventType.LOCATION.ui_type: + next_table = events.EventType.LOCATION.table + next_col_name = events.EventType.LOCATION.column + elif event_type == events.EventType.CUSTOM.ui_type: + next_table = events.EventType.CUSTOM.table + next_col_name = events.EventType.CUSTOM.column # IOS -------------- 
- elif event_type == events.event_type.CLICK_IOS.ui_type: - next_table = events.event_type.CLICK_IOS.table - next_col_name = events.event_type.CLICK_IOS.column - elif event_type == events.event_type.INPUT_IOS.ui_type: - next_table = events.event_type.INPUT_IOS.table - next_col_name = events.event_type.INPUT_IOS.column - elif event_type == events.event_type.VIEW_IOS.ui_type: - next_table = events.event_type.VIEW_IOS.table - next_col_name = events.event_type.VIEW_IOS.column - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - next_table = events.event_type.CUSTOM_IOS.table - next_col_name = events.event_type.CUSTOM_IOS.column + elif event_type == events.EventType.CLICK_IOS.ui_type: + next_table = events.EventType.CLICK_IOS.table + next_col_name = events.EventType.CLICK_IOS.column + elif event_type == events.EventType.INPUT_IOS.ui_type: + next_table = events.EventType.INPUT_IOS.table + next_col_name = events.EventType.INPUT_IOS.column + elif event_type == events.EventType.VIEW_IOS.ui_type: + next_table = events.EventType.VIEW_IOS.table + next_col_name = events.EventType.VIEW_IOS.column + elif event_type == events.EventType.CUSTOM_IOS.ui_type: + next_table = events.EventType.CUSTOM_IOS.table + next_col_name = events.EventType.CUSTOM_IOS.column else: print("=================UNDEFINED") continue diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py index 895dbfbe3..47ed2086d 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -92,11 +92,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values["maxDuration"] = f["value"][1] elif filter_type == schemas.FilterType.referrer: # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"] + filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"] # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k)) - elif filter_type == events.event_type.METADATA.ui_type: + elif filter_type == events.EventType.METADATA.ui_type: if meta_keys is None: meta_keys = metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} @@ -141,31 +141,31 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: extra_from = [] op = sessions.__get_sql_operator(s["operator"]) event_type = s["type"].upper() - if event_type == events.event_type.CLICK.ui_type: - next_table = events.event_type.CLICK.table - next_col_name = events.event_type.CLICK.column - elif event_type == events.event_type.INPUT.ui_type: - next_table = events.event_type.INPUT.table - next_col_name = events.event_type.INPUT.column - elif event_type == events.event_type.LOCATION.ui_type: - next_table = events.event_type.LOCATION.table - next_col_name = events.event_type.LOCATION.column - elif event_type == events.event_type.CUSTOM.ui_type: - next_table = events.event_type.CUSTOM.table - next_col_name = events.event_type.CUSTOM.column + if event_type == events.EventType.CLICK.ui_type: + next_table = events.EventType.CLICK.table + next_col_name = events.EventType.CLICK.column + elif event_type == events.EventType.INPUT.ui_type: + next_table = events.EventType.INPUT.table + next_col_name = events.EventType.INPUT.column + elif event_type == 
events.EventType.LOCATION.ui_type: + next_table = events.EventType.LOCATION.table + next_col_name = events.EventType.LOCATION.column + elif event_type == events.EventType.CUSTOM.ui_type: + next_table = events.EventType.CUSTOM.table + next_col_name = events.EventType.CUSTOM.column # IOS -------------- - elif event_type == events.event_type.CLICK_IOS.ui_type: - next_table = events.event_type.CLICK_IOS.table - next_col_name = events.event_type.CLICK_IOS.column - elif event_type == events.event_type.INPUT_IOS.ui_type: - next_table = events.event_type.INPUT_IOS.table - next_col_name = events.event_type.INPUT_IOS.column - elif event_type == events.event_type.VIEW_IOS.ui_type: - next_table = events.event_type.VIEW_IOS.table - next_col_name = events.event_type.VIEW_IOS.column - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - next_table = events.event_type.CUSTOM_IOS.table - next_col_name = events.event_type.CUSTOM_IOS.column + elif event_type == events.EventType.CLICK_IOS.ui_type: + next_table = events.EventType.CLICK_IOS.table + next_col_name = events.EventType.CLICK_IOS.column + elif event_type == events.EventType.INPUT_IOS.ui_type: + next_table = events.EventType.INPUT_IOS.table + next_col_name = events.EventType.INPUT_IOS.column + elif event_type == events.EventType.VIEW_IOS.ui_type: + next_table = events.EventType.VIEW_IOS.table + next_col_name = events.EventType.VIEW_IOS.column + elif event_type == events.EventType.CUSTOM_IOS.ui_type: + next_table = events.EventType.CUSTOM_IOS.table + next_col_name = events.EventType.CUSTOM_IOS.column else: print("=================UNDEFINED") continue diff --git a/ee/api/chalicelib/utils/exp_ch_helper.py b/ee/api/chalicelib/utils/exp_ch_helper.py index 02de9addd..81bb123ea 100644 --- a/ee/api/chalicelib/utils/exp_ch_helper.py +++ b/ee/api/chalicelib/utils/exp_ch_helper.py @@ -8,19 +8,19 @@ if config("EXP_7D_MV", cast=bool, default=True): print(">>> Using experimental last 7 days materialized views") -def get_main_events_table(timestamp): +def get_main_events_table(timestamp=0): return "experimental.events_l7d_mv" \ if config("EXP_7D_MV", cast=bool, default=True) \ and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events" -def get_main_sessions_table(timestamp): +def get_main_sessions_table(timestamp=0): return "experimental.sessions_l7d_mv" \ if config("EXP_7D_MV", cast=bool, default=True) \ and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions" -def get_main_resources_table(timestamp): +def get_main_resources_table(timestamp=0): return "experimental.resources_l7d_mv" \ if config("EXP_7D_MV", cast=bool, default=True) \ and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources" @@ -43,6 +43,8 @@ def get_user_viewed_errors_table(timestamp=0): def get_main_js_errors_sessions_table(timestamp=0): - return "experimental.js_errors_sessions_mv" # \ + return get_main_events_table(timestamp=timestamp) + # enable this when js_errors_sessions_mv is fixed + # return "experimental.js_errors_sessions_mv" # \ # if config("EXP_7D_MV", cast=bool, default=True) \ # and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events" diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 49b803d0f..9d9bb3cec 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -36,7 +36,7 @@ rm -rf ./chalicelib/core/log_tool_sumologic.py rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/sessions_assignments.py -rm -rf ./chalicelib/core/sessions_metas.py +#exp rm -rf 
./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py #exp rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/socket_ios.py diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index a8feb6b53..90cb2905f 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,6 +1,4 @@ -from typing import Union - -from chalicelib.core import roles, traces, projects, sourcemaps, assist_records, sessions +from chalicelib.core import roles, traces, assist_records, sessions from chalicelib.core import unlock, signals from chalicelib.utils import assist_helper @@ -120,9 +118,9 @@ def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentCont @app.post('/{projectId}/signals', tags=['signals']) def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): data = signals.handle_frontend_signals_queued(project_id=projectId, user_id=context.user_id, data=data) if "errors" in data: return data - return {'data': data} \ No newline at end of file + return {'data': data} diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index e229653aa..c5ac9d6b7 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -212,7 +212,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMM(datetime) + PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, event_type, session_id, message_id) TTL datetime + INTERVAL 7 DAY POPULATE @@ -285,7 +285,7 @@ WHERE datetime >= now() - INTERVAL 7 DAY; CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) - PARTITION BY toYYYYMM(datetime) + PARTITION BY toYYYYMMDD(datetime) ORDER BY (project_id, datetime, type, session_id, message_id) TTL datetime + INTERVAL 7 DAY POPULATE
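
A note on the exp_ch_helper.py and materialized-view hunks above: with the timestamp argument now defaulting to 0, callers that omit it always resolve to the full experimental.* tables, and the last-7-days views (now partitioned by day via toYYYYMMDD) are only selected when the requested range starts inside the 7-day TTL window. The following is a minimal, self-contained sketch of that selection rule; the decouple config flag and the TimeUTC.now helper used by the real module are replaced here with a plain constant and a millisecond-based stand-in, so treat it as an illustration rather than the module itself.

    import time

    # Stand-in for config("EXP_7D_MV", cast=bool, default=True) read via decouple.
    EXP_7D_MV = True


    def _now_ms(delta_days=0):
        # Stand-in for TimeUTC.now(delta_days=...): milliseconds since epoch,
        # shifted by the given number of days.
        return int((time.time() + delta_days * 86400) * 1000)


    def get_main_events_table(timestamp=0):
        # With the new default of 0, a call without a timestamp can never satisfy
        # "timestamp >= now - 7 days", so it falls back to the full table; the
        # last-7-days materialized view is only used for ranges inside its TTL.
        if EXP_7D_MV and timestamp >= _now_ms(delta_days=-7):
            return "experimental.events_l7d_mv"
        return "experimental.events"


    if __name__ == "__main__":
        print(get_main_events_table())                         # experimental.events
        print(get_main_events_table(_now_ms(delta_days=-1)))   # experimental.events_l7d_mv
        print(get_main_events_table(_now_ms(delta_days=-30)))  # experimental.events

The same rule applies to get_main_sessions_table and get_main_resources_table, and it is also why get_main_js_errors_sessions_table can simply delegate with timestamp=timestamp until js_errors_sessions_mv is fixed.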
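
On the new ee/api/chalicelib/core/sessions_metas.py: meta autocomplete is resolved through a lookup table rather than a chain of conditionals; every supported schemas.FilterType maps to a SupportedFilter whose callables are built by autocomplete.__generic_autocomplete_metas, and search() either dispatches through that table or returns an "unsupported type" error. Below is a stripped-down sketch of the same dispatch pattern; the filter-type strings, the SupportedFilter stand-in and the dummy factory are hypothetical simplifications, not the real schemas or autocomplete module.

    from typing import Callable, Dict, List, NamedTuple


    class SupportedFilter(NamedTuple):
        # Stand-in for chalicelib.utils.event_filter_definition.SupportedFilter.
        get: Callable[..., List[dict]]
        query: Callable[..., List[dict]]


    def generic_autocomplete(typename):
        # Mirrors the factory shape of __generic_autocomplete_metas: it returns a
        # closure bound to one filter type; the real one queries the autocomplete table.
        def get(project_id, text):
            return [{"type": typename, "value": text}]  # placeholder rows
        return get


    SUPPORTED_TYPES: Dict[str, SupportedFilter] = {
        "userOs": SupportedFilter(get=generic_autocomplete("userOs"),
                                  query=generic_autocomplete("userOs")),
        "userBrowser": SupportedFilter(get=generic_autocomplete("userBrowser"),
                                       query=generic_autocomplete("userBrowser")),
    }


    def search(text, meta_type, project_id):
        # Unsupported types short-circuit with an error payload, as in the new module.
        if meta_type not in SUPPORTED_TYPES:
            return {"errors": ["unsupported type"]}
        return {"data": SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)}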