diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 0d467295f..9fe8c5611 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -1,8 +1,9 @@ # This action will push the chalice changes to aws on: + workflow_dispatch: push: branches: - - dev + - api-v1.5.5 paths: - api/** diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml index 990ce3c8a..c0a540efb 100644 --- a/.github/workflows/frontend.yaml +++ b/.github/workflows/frontend.yaml @@ -3,7 +3,7 @@ on: workflow_dispatch: push: branches: - - dev + - api-v1.5.5 paths: - frontend/** diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index e4579826f..56fde11da 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -102,7 +102,7 @@ def Build(a): a["filter"]["order"] = "DESC" a["filter"]["startDate"] = -1 a["filter"]["endDate"] = TimeUTC.now() - full_args, query_part, sort = sessions.search_query_parts( + full_args, query_part= sessions.search_query_parts( data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 70f563ec8..c6fb35713 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -64,14 +64,17 @@ def get_live_sessions(project_id, filters=None): return helper.list_to_camel_case(results) -def get_live_sessions_ws(project_id): +def get_live_sessions_ws(project_id, user_id=None): project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") - if connected_peers.status_code != 200: - print("!! issue with the peer-server") - print(connected_peers.text) - return [] + params = {} + if user_id and len(user_id) > 0: + params["userId"] = user_id try: + connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params) + if connected_peers.status_code != 200: + print("!! issue with the peer-server") + print(connected_peers.text) + return [] live_peers = connected_peers.json().get("data", []) except Exception as e: print("issue getting Live-Assist response") @@ -101,12 +104,12 @@ def get_live_session_by_id(project_id, session_id): def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}") - if connected_peers.status_code != 200: - print("!! issue with the peer-server") - print(connected_peers.text) - return False try: + connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}") + if connected_peers.status_code != 200: + print("!! 
issue with the peer-server") + print(connected_peers.text) + return False connected_peers = connected_peers.json().get("data", []) except Exception as e: print("issue getting Assist response") diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 4b6554c2b..a7f863e79 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -2,7 +2,7 @@ import json import schemas from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.metrics_helper import __get_step_size @@ -399,7 +399,10 @@ def get_details_chart(project_id, error_id, user_id, **data): def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", endTime_arg_name="endDate", chart=False, step_size_name="step_size", project_key="project_id"): - ch_sub_query = [f"{project_key} =%(project_id)s"] + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] if time_constraint: ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", f"timestamp < %({endTime_arg_name})s"] @@ -415,21 +418,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') -@dev.timed -def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} + platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: @@ -437,8 +437,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", "pe.project_id=%(project_id)s"] - pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True) - pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None @@ -446,13 +446,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s data.startDate = TimeUTC.now(-30) if data.endDate is None: data.endDate = TimeUTC.now(1) - if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status) + error_status=data.status) if len(statuses) == 0: return empty_response - error_ids = [e["error_id"] for e in 
statuses] + error_ids = [e["errorId"] for e in statuses] with pg_client.PostgresClient() as cur: if data.startDate is None: data.startDate = TimeUTC.now(-7) @@ -473,6 +474,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s "project_id": project_id, "userId": user_id, "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status if data.limit is not None and data.page is not None: params["errors_offset"] = (data.page - 1) * data.limit params["errors_limit"] = data.limit @@ -483,11 +487,15 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s if error_ids is not None: params["error_ids"] = tuple(error_ids) pg_sub_query.append("error_id IN %(error_ids)s") - if favorite_only: + if data.bookmarked: pg_sub_query.append("ufe.user_id = %(userId)s") extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - main_pg_query = f"""\ - SELECT full_count, + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, error_id, name, message, @@ -522,7 +530,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s COUNT(session_id) AS count FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id) + FROM events.errors WHERE {" AND ".join(pg_sub_query_chart)} ) AS sessions ON (TRUE) GROUP BY timestamp @@ -557,16 +565,16 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), "user_id": user_id}) cur.execute(query=query) - statuses = cur.fetchall() + statuses = helper.list_to_camel_case(cur.fetchall()) statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: r.pop("full_count") if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] r["viewed"] = statuses[r["error_id"]]["viewed"] r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 7abaa4fe9..933e3f800 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -97,7 +97,55 @@ def __get_data_for_extend(data): return data["data"] -def __pg_errors_query(source=None): +def __pg_errors_query(source=None, value_length=None): + if value_length is None or value_length > 2: + return f"""((SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS 
type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION + (SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" return f"""((SELECT DISTINCT ON(lg.message) lg.message AS value, source, @@ -120,30 +168,6 @@ def __pg_errors_query(source=None): AND lg.name ILIKE %(svalue)s AND lg.project_id = %(project_id)s {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION - (SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} LIMIT 5));""" @@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value), - "source": source})) + cur.mogrify(__pg_errors_query(source, + value_length=len(value) \ + if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "source": source})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None): def __search_pg_errors_ios(project_id, value, key=None, source=None): now = TimeUTC.now() + if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s 
+ AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(value)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + LIMIT 5);""" + else: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5);""" with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.reason ILIKE %(value)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - LIMIT 5);""", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None): for k in meta_keys.keys(): colname = metadata.index_to_colname(meta_keys[k]) - sub_from.append( - f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)") + if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2: + sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE 
project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5) + UNION + (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(value)s LIMIT 5)) + """) + else: + sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5)""") with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) return results -def __generic_query(typename): - return f"""\ - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(svalue)s - LIMIT 5) - UNION - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(value)s - LIMIT 5)""" +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f"""(SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 5) + UNION + (SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(value)s + LIMIT 5);""" + return f"""SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 10;""" def __generic_autocomplete(event: Event): def f(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(__generic_query(event.ui_type), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + cur.execute( + cur.mogrify( + __generic_query(event.ui_type, + value_length=len(value) \ + if SUPPORTED_TYPES[event.ui_type].change_by_length \ + else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) return helper.list_to_camel_case(cur.fetchall()) return f @@ -263,142 +360,96 @@ class event_type: SUPPORTED_TYPES = { event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK), query=__generic_query(typename=event_type.CLICK.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT), query=__generic_query(typename=event_type.INPUT.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION), query=__generic_query(typename=event_type.LOCATION.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM), query=__generic_query(typename=event_type.CUSTOM.ui_type), - value_limit=3, - 
starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST), query=__generic_query(typename=event_type.REQUEST.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL), query=__generic_query(typename=event_type.GRAPHQL.ui_type), - value_limit=3, - starts_with="/", - starts_limit=4, - ignore_if_starts_with=[]), + change_by_length=True), event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION), query=__generic_query(typename=event_type.STATEACTION.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata, - query=None, - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), # IOS event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS), query=__generic_query(typename=event_type.CLICK_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS), query=__generic_query(typename=event_type.INPUT_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS), query=__generic_query(typename=event_type.VIEW_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS), query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS), query=__generic_query(typename=event_type.REQUEST_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), - event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + change_by_length=True), + event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios, + query=None, change_by_length=True), } -def __get_merged_queries(queries, value, project_id): - if len(queries) == 0: - return [] - now = TimeUTC.now() - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}") - return results - - def __get_autocomplete_table(value, project_id): + autocomplete_events = [schemas.FilterType.rev_id, + schemas.EventType.click, + schemas.FilterType.user_device, + 
schemas.FilterType.user_id, + schemas.FilterType.user_browser, + schemas.FilterType.user_os, + schemas.EventType.custom, + schemas.FilterType.user_country, + schemas.EventType.location, + schemas.EventType.input] + autocomplete_events.sort() + sub_queries = [] + for e in autocomplete_events: + sub_queries.append(f"""(SELECT type, value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(svalue)s + LIMIT 5)""") + if len(value) > 2: + sub_queries.append(f"""(SELECT type, value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(value)s + LIMIT 5)""") with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT project_id, type, value - FROM (SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(svalue)s - UNION - SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(value)s) AS u - WHERE Row_ID <= 5) AS sfa - ORDER BY sfa.type;""", - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + query = cur.mogrify(" UNION ".join(sub_queries) + ";", + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)}) + cur.execute(query) results = helper.list_to_camel_case(cur.fetchall()) return results -def search_pg2(text, event_type, project_id, source, key): +def search(text, event_type, project_id, source, key): if not event_type: return {"data": __get_autocomplete_table(text, project_id)} if event_type in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) - if event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - source=source) + # for IOS events autocomplete + # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): + # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, + # source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index e63ea6efc..1dc9e3347 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -201,7 +201,7 @@ def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.Funnel data.events = filter_stages(data.events) data.events = __fix_stages(data.events) if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) + f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: return {"errors": ["funnel not found"]} get_start_end_time(filter_d=f["filter"], range_value=data.range_value, @@ -217,16 +217,21 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat return {"errors": ["funnel not found"]} get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id) + insights = 
helper.list_to_camel_case(insights) if len(insights) > 0: + # fix: this fix for huge drop count + if total_drop_due_to_issues > insights[0]["sessionsCount"]: + total_drop_due_to_issues = insights[0]["sessionsCount"] + # end fix insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"data": {"stages": helper.list_to_camel_case(insights), + return {"data": {"stages": insights, "totalDropDueToIssues": total_drop_due_to_issues}} def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema): data.events = filter_stages(__parse_events(data.events)) if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) + f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: return {"errors": ["funnel not found"]} get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, @@ -235,9 +240,14 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"]) data.events = __fix_stages(data.events) insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) + insights = helper.list_to_camel_case(insights) if len(insights) > 0: + # fix: this fix for huge drop count + if total_drop_due_to_issues > insights[0]["sessionsCount"]: + total_drop_due_to_issues = insights[0]["sessionsCount"] + # end fix insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"data": {"stages": helper.list_to_camel_case(insights), + return {"data": {"stages": insights, "totalDropDueToIssues": total_drop_due_to_issues}} @@ -256,7 +266,7 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe data.events = filter_stages(data.events) data.events = __fix_stages(data.events) if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) + f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: return {"errors": ["funnel not found"]} get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, @@ -307,7 +317,7 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None, start_date=None, end_date=None): if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) + f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: return {"errors": ["funnel not found"]} data.startDate = data.startDate if data.startDate is not None else start_date diff --git a/api/chalicelib/core/resources.py b/api/chalicelib/core/resources.py index 6b9ba9170..6a7e395f8 100644 --- a/api/chalicelib/core/resources.py +++ b/api/chalicelib/core/resources.py @@ -13,7 +13,8 @@ def get_by_session_id(session_id): header_size, encoded_body_size, decoded_body_size, - success + success, + COALESCE(status, CASE WHEN success THEN 200 END) AS status FROM events.resources WHERE session_id = %(session_id)s;""" params = {"session_id": session_id} diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index aafa00570..1903cc08b 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -169,10 +169,10 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed def search2_pg(data: 
schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status="ALL", count_only=False, issue=None): - full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, - favorite_only=data.bookmarked, issue=issue, project_id=project_id, - user_id=user_id) + error_status=schemas.ErrorStatus.all, count_only=False, issue=None): + full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, + favorite_only=data.bookmarked, issue=issue, project_id=project_id, + user_id=user_id) if data.limit is not None and data.page is not None: full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit @@ -199,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e COUNT(DISTINCT s.user_uuid) AS count_users {query_part};""", full_args) elif data.group_by_user: + g_sort = "count(full_sessions)" + if data.order is None: + data.order = "DESC" + else: + data.order = data.order.upper() + if data.sort is not None and data.sort != 'sessionsCount': + sort = helper.key_to_snake_case(data.sort) + g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" + else: + sort = 'start_ts' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) @@ -207,52 +218,58 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions - GROUP BY user_id - ORDER BY user_sessions_count DESC) AS users_sessions;""", + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ) AS filtred_sessions + ) AS full_sessions + GROUP BY user_id + ) AS users_sessions;""", full_args) else: + if data.order is None: + data.order = "DESC" + sort = 'session_id' + if data.sort is not None and data.sort != "session_id": + sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) + else: + sort = 'session_id' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY 
s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""", + ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""", full_args) - # print("--------------------") # print(main_query) # print("--------------------") + try: + cur.execute(main_query) + except Exception as err: + print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") + print(main_query) + print("--------- PAYLOAD -----------") + print(data.dict()) + print("--------------------") + raise err + if errors_only: + return helper.list_to_camel_case(cur.fetchall()) - cur.execute(main_query) sessions = cur.fetchone() if count_only: return helper.dict_to_camel_case(sessions) total = sessions["count"] sessions = sessions["sessions"] - # sessions = [] - # total = cur.rowcount - # row = cur.fetchone() - # limit = 200 - # while row is not None and len(sessions) < limit: - # if row.get("favorite"): - # limit += 1 - # sessions.append(row) - # row = cur.fetchone() - if errors_only: - return sessions if data.group_by_user: for i, s in enumerate(sessions): sessions[i] = {**s.pop("last_session")[0], **s} @@ -283,9 +300,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0: data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, operator=schemas.SearchEventOperator._is)) - full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) + full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, + favorite_only=False, issue=None, project_id=project_id, + user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] with pg_client.PostgresClient() as cur: @@ -368,6 +385,19 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d return sessions +def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): + return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, + schemas.EventType.graphql_details] \ + or event.type in [schemas.PerformanceEventType.location_dom_complete, + schemas.PerformanceEventType.location_largest_contentful_paint_time, + schemas.PerformanceEventType.location_ttfb, + schemas.PerformanceEventType.location_avg_cpu_load, + schemas.PerformanceEventType.location_avg_memory_usage + ] and (event.source is None or len(event.source) == 0) \ + or event.type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and ( + event.filters is None or len(event.filters) == 0)) + + def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None): ss_constraints = [] full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, @@ -377,10 +407,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr "s.duration IS NOT NULL" ] extra_from = "" - fav_only_join = "" - if favorite_only and not errors_only: - fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id" - # extra_constraints.append("fs.user_id = %(userId)s") events_query_part = "" if len(data.filters) > 0: meta_keys = None @@ -587,6 +613,13 @@ def search_query_parts(data, error_status, errors_only, 
favorite_only, issue, pr value_key=f_k)) # --------------------------------------------------------------------------- if len(data.events) > 0: + valid_events_count = 0 + for event in data.events: + is_any = _isAny_opreator(event.operator) + if not isinstance(event.value, list): + event.value = [event.value] + if __is_valid_event(is_any=is_any, event=event): + valid_events_count += 1 events_query_from = [] event_index = 0 or_events = data.events_order == schemas.SearchEventOrder._or @@ -597,16 +630,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr is_any = _isAny_opreator(event.operator) if not isinstance(event.value, list): event.value = [event.value] - if not is_any and len(event.value) == 0 and event_type not in [schemas.EventType.request_details, - schemas.EventType.graphql_details] \ - or event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ] and (event.source is None or len(event.source) == 0) \ - or event_type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and ( - event.filters is None or len(event.filters) == 0): + if not __is_valid_event(is_any=is_any, event=event): continue op = __get_sql_operator(event.operator) is_not = False @@ -618,6 +642,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", "ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"] + if favorite_only and not errors_only: + event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)" + event_where.append("fs.user_id = %(userId)s") else: event_from = "%s" event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", @@ -922,7 +949,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr """) else: events_query_from.append(f"""\ - (SELECT main.session_id, MIN(main.timestamp) AS timestamp + (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp FROM {event_from} WHERE {" AND ".join(event_where)} GROUP BY 1 @@ -936,16 +963,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr MIN(timestamp) AS first_event_ts, MAX(timestamp) AS last_event_ts FROM ({events_joiner.join(events_query_from)}) AS u - GROUP BY 1 - {fav_only_join}""" + GROUP BY 1""" else: events_query_part = f"""SELECT event_0.session_id, MIN(event_0.timestamp) AS first_event_ts, MAX(event_{event_index - 1}.timestamp) AS last_event_ts FROM {events_joiner.join(events_query_from)} - GROUP BY 1 - {fav_only_join}""" + GROUP BY 1""" else: data.events = [] # --------------------------------------------------------------------------- @@ -959,19 +984,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # elif data.platform == schemas.PlatformType.desktop: # extra_constraints.append( # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - if data.order is None: - data.order = "DESC" - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - else: - sort = 
'session_id' + if errors_only: extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") - if error_status != "ALL": + extra_constraints.append("ser.project_id = %(project_id)s") + if error_status != schemas.ErrorStatus.all: extra_constraints.append("ser.status = %(error_status)s") - full_args["status"] = error_status.lower() + full_args["error_status"] = error_status if favorite_only: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" extra_constraints.append("ufe.user_id = %(userId)s") @@ -1009,7 +1029,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr {extra_from} WHERE {" AND ".join(extra_constraints)}""" - return full_args, query_part, sort + return full_args, query_part def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): @@ -1108,48 +1128,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date): return helper.list_to_camel_case(rows) -def get_favorite_sessions(project_id, user_id, include_viewed=False): - with pg_client.PostgresClient() as cur: - query_part = cur.mogrify(f"""\ - FROM public.sessions AS s - LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id - WHERE fs.user_id = %(userId)s""", - {"projectId": project_id, "userId": user_id} - ) - - extra_query = b"" - if include_viewed: - extra_query = cur.mogrify(""",\ - COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""", - {"projectId": project_id, "userId": user_id}) - - cur.execute(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count, - TRUE AS favorite - {extra_query.decode('UTF-8')} - {query_part.decode('UTF-8')} - ORDER BY s.session_id - LIMIT 50;""") - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - def get_user_sessions(project_id, user_id, start_date, end_date): with pg_client.PostgresClient() as cur: constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index 1d342d03f..07aad2ee4 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -80,32 +80,41 @@ def get_top_key_values(project_id): return helper.dict_to_CAPITAL_keys(row) -def __generic_query(typename): - return f"""\ - SELECT value, type - FROM ((SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type ='{typename}' - AND value ILIKE %(svalue)s - ORDER BY value - LIMIT 5) +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f""" (SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value ILIKE %(svalue)s + ORDER BY value + LIMIT 5) UNION - (SELECT value, type + (SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s AND type ='{typename}' AND value ILIKE %(value)s ORDER BY value - LIMIT 5)) AS met""" + LIMIT 5);""" + return f""" SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value 
ILIKE %(svalue)s + ORDER BY value + LIMIT 10;""" def __generic_autocomplete(typename): def f(project_id, text): with pg_client.PostgresClient() as cur: - query = cur.mogrify(__generic_query(typename), + query = cur.mogrify(__generic_query(typename, + value_length=len(text) \ + if SUPPORTED_TYPES[typename].change_by_length else None), {"project_id": project_id, "value": helper.string_to_sql_like(text), "svalue": helper.string_to_sql_like("^" + text)}) @@ -120,124 +129,73 @@ SUPPORTED_TYPES = { schemas.FilterType.user_os: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os), query=__generic_query(typename=schemas.FilterType.user_os), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_browser: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_browser), query=__generic_query(typename=schemas.FilterType.user_browser), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_device), query=__generic_query(typename=schemas.FilterType.user_device), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country), query=__generic_query(typename=schemas.FilterType.user_country), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id), query=__generic_query(typename=schemas.FilterType.user_id), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id), query=__generic_query(typename=schemas.FilterType.user_anonymous_id), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.rev_id), query=__generic_query(typename=schemas.FilterType.rev_id), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.referrer: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.referrer), query=__generic_query(typename=schemas.FilterType.referrer), - value_limit=5, - starts_with="/", - starts_limit=5, - ignore_if_starts_with=[]), + change_by_length=True), schemas.FilterType.utm_campaign: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign), query=__generic_query(typename=schemas.FilterType.utm_campaign), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_medium: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_medium), query=__generic_query(typename=schemas.FilterType.utm_medium), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_source: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_source), 
query=__generic_query(typename=schemas.FilterType.utm_source), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), # IOS schemas.FilterType.user_os_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios), query=__generic_query(typename=schemas.FilterType.user_os_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device_ios: SupportedFilter( get=__generic_autocomplete( typename=schemas.FilterType.user_device_ios), query=__generic_query(typename=schemas.FilterType.user_device_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios), query=__generic_query(typename=schemas.FilterType.user_country_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios), query=__generic_query(typename=schemas.FilterType.user_id_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios), query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios), query=__generic_query(typename=schemas.FilterType.rev_id_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), } @@ -247,6 +205,7 @@ def search(text, meta_type, project_id): if meta_type not in list(SUPPORTED_TYPES.keys()): return {"errors": ["unsupported type"]} rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) - if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): - rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) + # for IOS events autocomplete + # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): + # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) return {"data": rows} diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 035890e2f..2e698dcfd 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -528,7 +528,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) split = issue.split('__^__') issues_dict['significant' if is_sign else 'insignificant'].append({ "type": split[0], - "title": get_issue_title(split[0]), + "title": helper.get_issue_title(split[0]), "affected_sessions": affected_sessions[issue], "unaffected_sessions": session_counts[1] - affected_sessions[issue], "lost_conversions": lost_conversions, @@ -641,27 +641,3 @@ def get_overview(filter_d, project_id, first_stage=None, last_stage=None): output['stages'] = stages_list output['criticalIssuesCount'] = n_critical_issues return output - - -def get_issue_title(issue_type): - return {'click_rage': "Click Rage", - 'dead_click': "Dead Click", - 'excessive_scrolling': "Excessive 
Scrolling", - 'bad_request': "Bad Request", - 'missing_resource': "Missing Image", - 'memory': "High Memory Usage", - 'cpu': "High CPU", - 'slow_resource': "Slow Resource", - 'slow_page_load': "Slow Page Performance", - 'crash': "Crash", - 'ml_cpu': "High CPU", - 'ml_memory': "High Memory Usage", - 'ml_dead_click': "Dead Click", - 'ml_click_rage': "Click Rage", - 'ml_mouse_thrashing': "Mouse Thrashing", - 'ml_excessive_scrolling': "Excessive Scrolling", - 'ml_slow_resources': "Slow Resource", - 'custom': "Custom Event", - 'js_exception': "Error", - 'custom_event_error': "Custom Error", - 'js_error': "Error"}.get(issue_type, issue_type) diff --git a/api/chalicelib/utils/event_filter_definition.py b/api/chalicelib/utils/event_filter_definition.py index 4c132cb13..b21d49b9c 100644 --- a/api/chalicelib/utils/event_filter_definition.py +++ b/api/chalicelib/utils/event_filter_definition.py @@ -6,10 +6,7 @@ class Event: class SupportedFilter: - def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with): + def __init__(self, get, query, change_by_length): self.get = get self.query = query - self.valueLimit = value_limit - self.startsWith = starts_with - self.startsLimit = starts_limit - self.ignoreIfStartsWith = ignore_if_starts_with + self.change_by_length = change_by_length diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 8e1f5788c..8cfab8a3f 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -213,11 +213,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator if value is None: return value if op == schemas.SearchEventOperator._starts_with: - return value + '%' + return f"{value}%" elif op == schemas.SearchEventOperator._ends_with: - return '%' + value + return f"%{value}" elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains: - return '%' + value + '%' + return f"%{value}%" return value diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index c598d8971..6e4118689 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -5,11 +5,12 @@ import psycopg2.extras from decouple import config from psycopg2 import pool -PG_CONFIG = {"host": config("pg_host"), - "database": config("pg_dbname"), - "user": config("pg_user"), - "password": config("pg_password"), - "port": config("pg_port", cast=int)} +_PG_CONFIG = {"host": config("pg_host"), + "database": config("pg_dbname"), + "user": config("pg_user"), + "password": config("pg_password"), + "port": config("pg_port", cast=int)} +PG_CONFIG = dict(_PG_CONFIG) if config("pg_timeout", cast=int, default=0) > 0: PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}" @@ -63,7 +64,7 @@ class PostgresClient: def __init__(self, long_query=False): self.long_query = long_query if long_query: - self.connection = psycopg2.connect(**PG_CONFIG) + self.connection = psycopg2.connect(**_PG_CONFIG) else: self.connection = postgreSQL_pool.getconn() diff --git a/api/routers/core.py b/api/routers/core.py index 999222c3e..97a749429 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -21,13 +21,6 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() -@app.get('/{projectId}/sessions2/favorite', tags=["sessions"]) -def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return { - 'data': 
sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True) - } - - @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): @@ -126,7 +119,7 @@ def events_search(projectId: int, q: str, else: return {"data": []} - result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key) + result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key) return result @@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext = return {'data': sessions_metas.get_top_key_values(projectId)} -@app.get('/{projectId}/sessions/filters/search', tags=["sessions"]) -def get_session_filters_meta(projectId: int, q: str, type: str, - context: schemas.CurrentContext = Depends(OR_context)): - meta_type = type - if len(meta_type) == 0: - return {"data": []} - if len(q) == 0: - return {"data": []} - return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q) - - @app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) @app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str, @@ -838,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) - return {'data': data} - - -@app.post('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) +def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions_ws(projectId, user_id=userId) return {'data': data} @@ -902,12 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), @app.post('/{projectId}/errors/search', tags=['errors']) -def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False, - data: schemas.SearchErrorsSchema = Body(...), +def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(favorite, str): - favorite = True if len(favorite) == 0 else False - return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite) + return errors.search(data, projectId, user_id=context.user_id) @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/schemas.py b/api/schemas.py index 767a53866..77cb78c05 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -11,6 +11,10 @@ def attribute_to_camel_case(snake_str): return components[0] + ''.join(x.title() for x in components[1:]) +def transform_email(email: str) -> str: + return email.lower() if isinstance(email, str) else email + + class _Grecaptcha(BaseModel): g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response') @@ -18,6 +22,7 @@ class _Grecaptcha(BaseModel): class 
UserLoginSchema(_Grecaptcha): email: EmailStr = Field(...) password: str = Field(...) + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) class UserSignupSchema(UserLoginSchema): @@ -31,17 +36,21 @@ class UserSignupSchema(UserLoginSchema): class EditUserSchema(BaseModel): name: Optional[str] = Field(None) - email: Optional[str] = Field(None) + email: Optional[EmailStr] = Field(None) admin: Optional[bool] = Field(False) appearance: Optional[dict] = Field({}) + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) + class EditUserAppearanceSchema(BaseModel): appearance: dict = Field(...) class ForgetPasswordPayloadSchema(_Grecaptcha): - email: str = Field(...) + email: EmailStr = Field(...) + + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) class EditUserPasswordSchema(BaseModel): @@ -70,7 +79,9 @@ class CurrentAPIContext(BaseModel): class CurrentContext(CurrentAPIContext): user_id: int = Field(...) - email: str = Field(...) + email: EmailStr = Field(...) + + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) class AddSlackSchema(BaseModel): @@ -115,15 +126,19 @@ class CreateEditWebhookSchema(BaseModel): class CreateMemberSchema(BaseModel): userId: Optional[int] = Field(None) name: str = Field(...) - email: str = Field(...) + email: EmailStr = Field(...) admin: bool = Field(False) + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) + class EditMemberSchema(BaseModel): name: str = Field(...) - email: str = Field(...) + email: EmailStr = Field(...) admin: bool = Field(False) + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) + class EditPasswordByInvitationSchema(BaseModel): invitation: str = Field(...) @@ -244,6 +259,8 @@ class EmailPayloadSchema(BaseModel): link: str = Field(...) message: str = Field(...) + _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) + class MemberInvitationPayloadSchema(BaseModel): auth: str = Field(...) @@ -252,6 +269,8 @@ class MemberInvitationPayloadSchema(BaseModel): client_id: str = Field(...) sender_name: str = Field(...) 
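The `_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)` lines being added throughout this file all reuse one module-level normalizer as a pydantic (v1) field validator. A minimal, self-contained sketch of the pattern — the model name here is illustrative, not one of the schemas in this diff:

```python
from pydantic import BaseModel, EmailStr, validator  # pydantic v1 API


def normalize_email(email: str) -> str:
    # pre=True runs this before EmailStr validation, so lower-case
    # only when a string actually arrived.
    return email.lower() if isinstance(email, str) else email


class InviteSchema(BaseModel):  # hypothetical model, for illustration only
    email: EmailStr  # EmailStr needs the `email-validator` extra installed

    # allow_reuse=True lets the same function back validators in several
    # model classes without pydantic raising a duplicate-validator error.
    _normalize_email = validator('email', pre=True, allow_reuse=True)(normalize_email)


print(InviteSchema(email="Admin@Example.COM").email)  # -> admin@example.com
```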
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) + class Config: alias_generator = attribute_to_camel_case @@ -600,7 +619,7 @@ class SessionsSearchPayloadSchema(BaseModel): startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: str = Field(default="DESC") + order: Literal["asc", "desc"] = Field(default="desc") events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) limit: int = Field(default=200, gt=0, le=200) @@ -690,8 +709,24 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): rangeValue: Optional[str] = Field(None) +class ErrorStatus(str, Enum): + all = 'all' + unresolved = 'unresolved' + resolved = 'resolved' + ignored = 'ignored' + + +class ErrorSort(str, Enum): + occurrence = 'occurrence' + users_count = 'users' + sessions_count = 'sessions' + + class SearchErrorsSchema(SessionsSearchPayloadSchema): + sort: ErrorSort = Field(default=ErrorSort.occurrence) density: Optional[int] = Field(7) + status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) + query: Optional[str] = Field(default=None) class MetricPayloadSchema(BaseModel): diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go index b087878b9..1fe717531 100644 --- a/backend/pkg/url/assets/url.go +++ b/backend/pkg/url/assets/url.go @@ -9,16 +9,17 @@ import ( func getSessionKey(sessionID uint64) string { // Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID - return strconv.FormatUint(sessionID>>50, 10) + return strconv.FormatUint(sessionID>>50, 10) } func ResolveURL(baseurl string, rawurl string) string { + rawurl = strings.Trim(rawurl, " ") if !isRelativeCachable(rawurl) { return rawurl } base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls - u, _ := url.Parse(rawurl) // TODO: handle errors ? - if base == nil || u == nil { + u, _ := url.Parse(rawurl) // TODO: handle errors ? 
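`ResolveURL` here trims stray whitespace, then resolves a possibly-relative URL against the recording's base URL, falling back to the raw value when either side fails to parse. Python's standard library has the same resolution semantics, which makes the intent easy to check; a rough sketch, not code from this diff (the Go version additionally short-circuits via `isRelativeCachable`):

```python
from urllib.parse import urljoin, urlparse


def resolve_url(base_url: str, raw_url: str) -> str:
    raw_url = raw_url.strip(" ")  # mirrors strings.Trim(rawurl, " ")
    if not urlparse(base_url).scheme:
        return raw_url  # unusable base: fall back to the raw value
    # Same semantics as base.ResolveReference(u) in Go's net/url.
    return urljoin(base_url, raw_url)


assert resolve_url("https://example.com/app/", "img/logo.png ") == "https://example.com/app/img/logo.png"
assert resolve_url("https://example.com/app/", "https://cdn.example.com/x.js") == "https://cdn.example.com/x.js"
```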
+ if base == nil || u == nil { return rawurl } return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl) @@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string { } func GetCachePathForAssets(sessionID uint64, rawurl string) string { - return getCachePathWithKey(sessionID, rawurl) + return getCachePathWithKey(sessionID, rawurl) } - func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string { fullURL, cachable := GetFullCachableURL(baseURL, relativeURL) if !cachable { return fullURL } - u := url.URL{ - Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), - Host: r.assetsURL.Host, - Scheme: r.assetsURL.Scheme, + u := url.URL{ + Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), + Host: r.assetsURL.Host, + Scheme: r.assetsURL.Scheme, } return u.String() } - diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index f70ac873e..8531d89a3 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -3,7 +3,7 @@ import json import schemas from chalicelib.core import dashboard from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import ch_client +from chalicelib.utils import ch_client, metrics_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -424,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if time_constraint: ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"datetime < toDateTime(%({endTime_arg_name})s/1000)"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -438,20 +438,213 @@ def __get_step_size(startTimestamp, endTimestamp, density): def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') -def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate", + endTime_arg_name="endDate", chart=False, step_size_name="step_size", + project_key="project_id"): + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] + if time_constraint: + ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", + f"timestamp < %({endTime_arg_name})s"] + if chart: + ch_sub_query += [f"timestamp >= generated_timestamp", + f"timestamp < generated_timestamp + %({step_size_name})s"] + if platform == schemas.PlatformType.mobile: + ch_sub_query.append("user_device_type = 'mobile'") + elif platform == schemas.PlatformType.desktop: + ch_sub_query.append("user_device_type = 'desktop'") + return ch_sub_query + + +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} + + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints_pg(platform, 
project_key="sessions.project_id") + pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", + "pe.project_id=%(project_id)s"] + pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart.append("errors.error_id =details.error_id") + statuses = [] + error_ids = None + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") + # if favorite_only=True search for sessions associated with favorite_error + statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, + error_status=data.status) + if len(statuses) == 0: + return empty_response + error_ids = [e["errorId"] for e in statuses] + with pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1) + sort = __get_sort_key('datetime') + if data.sort is not None: + sort = __get_sort_key(data.sort) + order = "DESC" + if data.order is not None: + order = data.order + extra_join = "" + + params = { + "startDate": data.startDate, + "endDate": data.endDate, + "project_id": project_id, + "userId": user_id, + "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + + if error_ids is not None: + params["error_ids"] = tuple(error_ids) + pg_sub_query.append("error_id IN %(error_ids)s") + if data.bookmarked: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, + error_id, + name, + message, + users, + sessions, + last_occurrence, + first_occurrence, + chart + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s + ) AS details + INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, + MIN(timestamp) AS first_occurrence + FROM events.errors + WHERE errors.error_id = details.error_id) AS time_details ON (TRUE) + INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate)s, 
%(endDate)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY timestamp + ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);""" + + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + + cur.execute(cur.mogrify(main_pg_query, params)) + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] + if flows: + return {"data": {"count": total}} + + if total == 0: + rows = [] + else: + if len(statuses) == 0: + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), + "user_id": user_id}) + cur.execute(query=query) + statuses = helper.list_to_camel_case(cur.fetchall()) + statuses = { + s["errorId"]: s for s in statuses + } + + for r in rows: + r.pop("full_count") + if r["error_id"] in statuses: + r["status"] = statuses[r["error_id"]]["status"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] + r["favorite"] = statuses[r["error_id"]]["favorite"] + r["viewed"] = statuses[r["error_id"]]["viewed"] + r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] + else: + r["status"] = "untracked" + r["parent_error_id"] = None + r["favorite"] = False + r["viewed"] = False + r["stack"] = None + + offset = len(rows) + rows = [r for r in rows if r["stack"] is None + or (len(r["stack"]) == 0 or len(r["stack"]) > 1 + or len(r["stack"]) > 0 + and (r["message"].lower() != "script error." 
or len(r["stack"][0]["absPath"]) > 0))] + offset -= len(rows) + return { + "data": { + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) + } + } + + +# refactor this function after clickhouse structure changes (missing search by query) +def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: @@ -460,17 +653,19 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s ch_sub_query.append("source ='js_exception'") statuses = [] error_ids = None - if data.startDate is None: + # Clickhouse keeps data for the past month only, so no need to search beyond that + if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31): data.startDate = TimeUTC.now(-30) if data.endDate is None: data.endDate = TimeUTC.now(1) - if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all: + print("-- searching for sessions before errors") # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status, favorite_only=favorite_only) + error_status=data.status) if len(statuses) == 0: return empty_response - error_ids = [e["error_id"] for e in statuses] + error_ids = [e["errorId"] for e in statuses] with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur: if data.startDate is None: data.startDate = TimeUTC.now(-7) @@ -495,7 +690,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s else: params["errors_offset"] = 0 params["errors_limit"] = 200 - if favorite_only: + if data.bookmarked: cur.execute(cur.mogrify(f"""SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s @@ -571,15 +766,15 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), "userId": user_id}) cur.execute(query=query) - statuses = cur.fetchall() + statuses = helper.list_to_camel_case(cur.fetchall()) statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] r["viewed"] = statuses[r["error_id"]]["viewed"] r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py index 557135804..332d3709a 100644 --- a/ee/api/chalicelib/core/resources.py +++ b/ee/api/chalicelib/core/resources.py @@ -7,7 +7,7 @@ def get_by_session_id(session_id): with ch_client.ClickHouseClient() as ch: ch_query = """\ SELECT - datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success + datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status FROM resources WHERE session_id = 
toUInt64(%(session_id)s);""" params = {"session_id": session_id} diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index cf5e7378f..52fc737e4 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -41,6 +41,11 @@ def login(data: schemas.UserLoginSchema = Body(...)): status_code=status.HTTP_401_UNAUTHORIZED, detail="You’ve entered invalid Email or Password." ) + if "errors" in r: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=r["errors"][0] + ) tenant_id = r.pop("tenantId") diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go index 1483c2ccf..ca0544923 100644 --- a/ee/backend/pkg/kafka/consumer.go +++ b/ee/backend/pkg/kafka/consumer.go @@ -9,9 +9,9 @@ import ( "github.com/pkg/errors" + "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka" "openreplay/backend/pkg/env" "openreplay/backend/pkg/queue/types" - "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka" ) type Message = kafka.Message @@ -19,7 +19,7 @@ type Message = kafka.Message type Consumer struct { c *kafka.Consumer messageHandler types.MessageHandler - commitTicker *time.Ticker + commitTicker *time.Ticker pollTimeout uint lastKafkaEventTs int64 @@ -56,7 +56,7 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand return &Consumer{ c: c, messageHandler: messageHandler, - commitTicker: time.NewTicker(2 * time.Minute), + commitTicker: time.NewTicker(2 * time.Minute), pollTimeout: 200, } } @@ -65,13 +65,12 @@ func (consumer *Consumer) DisableAutoCommit() { consumer.commitTicker.Stop() } - func (consumer *Consumer) Commit() error { consumer.c.Commit() // TODO: return error if it is not "No offset stored" return nil } -func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error { +func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error { assigned, err := consumer.c.Assignment() if err != nil { return err @@ -84,37 +83,38 @@ func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error { timestamps = append(timestamps, p) } offsets, err := consumer.c.OffsetsForTimes(timestamps, 2000) - if err != nil { + if err != nil { return errors.Wrap(err, "Kafka Consumer back commit error") } // Limiting to already committed committed, err := consumer.c.Committed(assigned, 2000) // memorise? 
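`CommitAtTimestamp` asks the broker, per assigned partition, for the first offset whose record timestamp is at or after `commitTs` (`OffsetsForTimes`), then clamps each result so it never falls below an already-committed offset before committing. The same flow can be expressed with the confluent-kafka Python client; a rough sketch under the assumption of a consumer that already has an assignment, not code from this diff:

```python
from confluent_kafka import Consumer, TopicPartition  # confluent-kafka client


def commit_at_timestamp(consumer: Consumer, commit_ts_ms: int) -> None:
    assigned = consumer.assignment()
    if not assigned:
        return
    # offsets_for_times() takes the target timestamp in the offset field.
    queries = [TopicPartition(p.topic, p.partition, commit_ts_ms) for p in assigned]
    offsets = consumer.offsets_for_times(queries, timeout=2.0)
    committed = consumer.committed(assigned, timeout=2.0)
    already = {(c.topic, c.partition): c.offset for c in committed}
    for o in offsets:
        prev = already.get((o.topic, o.partition), -1)  # negative = no valid offset
        # As in the Go loop: never commit below an already-committed offset.
        if prev > o.offset >= 0:
            o.offset = prev
    consumer.commit(offsets=offsets, asynchronous=False)
```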
- logPartitions("Actually committed:",committed) + logPartitions("Actually committed:", committed) if err != nil { return errors.Wrap(err, "Kafka Consumer retrieving committed error") } for _, offs := range offsets { for _, comm := range committed { - if comm.Offset == kafka.OffsetStored || + if comm.Offset == kafka.OffsetStored || comm.Offset == kafka.OffsetInvalid || - comm.Offset == kafka.OffsetBeginning || - comm.Offset == kafka.OffsetEnd { continue } - if comm.Partition == offs.Partition && + comm.Offset == kafka.OffsetBeginning || + comm.Offset == kafka.OffsetEnd { + continue + } + if comm.Partition == offs.Partition && (comm.Topic != nil && offs.Topic != nil && *comm.Topic == *offs.Topic) && - comm.Offset > offs.Offset { + comm.Offset > offs.Offset { offs.Offset = comm.Offset } } } - // TODO: check per-partition errors: offsets[i].Error + // TODO: check per-partition errors: offsets[i].Error _, err = consumer.c.CommitOffsets(offsets) return errors.Wrap(err, "Kafka Consumer back commit error") } - -func (consumer *Consumer) CommitBack(gap int64) error { +func (consumer *Consumer) CommitBack(gap int64) error { if consumer.lastKafkaEventTs == 0 { return nil } @@ -135,31 +135,31 @@ func (consumer *Consumer) ConsumeNext() error { } switch e := ev.(type) { - case *kafka.Message: - if e.TopicPartition.Error != nil { - return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error") - } - ts := e.Timestamp.UnixNano()/ 1e6 - consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{ - Topic: *(e.TopicPartition.Topic), - ID: uint64(e.TopicPartition.Offset), - Timestamp: ts, - }) - consumer.lastKafkaEventTs = ts - // case kafka.AssignedPartitions: - // logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions) - // consumer.partitions = e.Partitions - // consumer.c.Assign(e.Partitions) - // log.Printf("Actually partitions assigned!") - // case kafka.RevokedPartitions: - // log.Println("Kafka Cosumer: Partitions Revoked") - // consumer.partitions = nil - // consumer.c.Unassign() - case kafka.Error: - if e.Code() == kafka.ErrAllBrokersDown { - os.Exit(1) - } - log.Printf("Consumer error: %v\n", e) + case *kafka.Message: + if e.TopicPartition.Error != nil { + return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error") + } + ts := e.Timestamp.UnixNano() / 1e6 + consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{ + Topic: *(e.TopicPartition.Topic), + ID: uint64(e.TopicPartition.Offset), + Timestamp: ts, + }) + consumer.lastKafkaEventTs = ts + // case kafka.AssignedPartitions: + // logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions) + // consumer.partitions = e.Partitions + // consumer.c.Assign(e.Partitions) + // log.Printf("Actually partitions assigned!") + // case kafka.RevokedPartitions: + // log.Println("Kafka Cosumer: Partitions Revoked") + // consumer.partitions = nil + // consumer.c.Unassign() + case kafka.Error: + if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded { + os.Exit(1) + } + log.Printf("Consumer error: %v\n", e) } return nil } @@ -173,8 +173,6 @@ func (consumer *Consumer) Close() { } } - - // func (consumer *Consumer) consume( // message func(m *kafka.Message) error, // commit func(c *kafka.Consumer) error, @@ -230,7 +228,6 @@ func (consumer *Consumer) Close() { // } // } - // func (consumer *Consumer) Consume( // message func(key uint64, value []byte) error, // ) error { diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql new 
file mode 100644 index 000000000..1a640b4be --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -0,0 +1,91 @@ +\set ON_ERROR_STOP true +SET client_min_messages TO NOTICE; +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.5.4-ee' +$$ LANGUAGE sql IMMUTABLE; + + +-- to detect duplicate users and delete them if possible +DO +$$ + DECLARE + duplicate RECORD; + BEGIN + IF EXISTS(SELECT user_id + FROM users + WHERE lower(email) = + (SELECT LOWER(email) + FROM users AS su + WHERE LOWER(su.email) = LOWER(users.email) + AND su.user_id != users.user_id + LIMIT 1) + ORDER BY LOWER(email)) THEN + raise notice 'duplicate users detected'; + FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat + FROM users + WHERE lower(email) = + (SELECT LOWER(email) + FROM users AS su + WHERE LOWER(su.email) = LOWER(users.email) + AND su.user_id != users.user_id + LIMIT 1) + ORDER BY LOWER(email) + LOOP + IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN + raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email; + DELETE FROM users WHERE user_id = duplicate.user_id; + END IF; + END LOOP; + IF EXISTS(SELECT user_id + FROM users + WHERE lower(email) = + (SELECT LOWER(email) + FROM users AS su + WHERE LOWER(su.email) = LOWER(users.email) + AND su.user_id != users.user_id + LIMIT 1) + ORDER BY LOWER(email)) THEN + raise notice 'remaining duplicates, please fix (delete) before finishing update'; + FOR duplicate IN SELECT user_id, email + FROM users + WHERE lower(email) = + (SELECT LOWER(email) + FROM users AS su + WHERE LOWER(su.email) = LOWER(users.email) + AND su.user_id != users.user_id + LIMIT 1) + ORDER BY LOWER(email) + LOOP + raise notice 'user: % %',duplicate.user_id,duplicate.email; + END LOOP; + RAISE 'Duplicate users' USING ERRCODE = '42710'; + END IF; + END IF; + END; +$$ +LANGUAGE plpgsql; + +UPDATE users +SET email=LOWER(email); + +DROP INDEX IF EXISTS autocomplete_value_gin_idx; +COMMIT; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete 
USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 633e64caa..9adab50e0 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.5.3-ee' +SELECT 'v1.5.4-ee' $$ LANGUAGE sql IMMUTABLE; @@ -721,7 +721,22 @@ $$ CREATE unique index IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type); CREATE index IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id); CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type); - CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + + CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; + CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; + CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; + CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; + CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; + CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; + CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; + CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; + CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; + CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; + CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; + CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; + CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; + CREATE INDEX autocomplete_value_useridonly_gin_idx ON 
public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; + CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; BEGIN IF NOT EXISTS(SELECT * @@ -1018,7 +1033,7 @@ $$ CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL; CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL; CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL; - + CREATE TABLE IF NOT EXISTS events.state_actions ( session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, diff --git a/ee/utilities/server.js b/ee/utilities/server.js index f1209c9ff..d049faa19 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers var express = require('express'); const {ExpressPeerServer} = require('peer'); var socket; -if (process.env.cluster === "true") { +if (process.env.redis === "true") { console.log("Using Redis"); socket = require("./servers/websocket-cluster"); } else { diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 940f83879..c044043a5 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader; const {extractPeerId} = require('./peerjs-server'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); - -var wsRouter = express.Router(); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; +const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; +const pubClient = createClient({url: REDIS_URL}); +const subClient = pubClient.duplicate(); let io; const debug = process.env.debug === "1" || false; -const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; -const pubClient = createClient({url: REDIS_URL}); -const subClient = pubClient.duplicate(); +const createSocketIOServer = function (server, prefix) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? 
prefix : '') + '/socket' + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); + } +} const uniqueSessions = function (data) { let resArr = []; @@ -36,18 +58,40 @@ const uniqueSessions = function (data) { return resArr; } -const socketsList = async function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); - for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); +const extractUserIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); } + } else if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; } - let result = {"data": liveSessions}; + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + let rooms = await io.of('/').adapter.allRooms(); + return rooms; +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); @@ -56,37 +100,64 @@ const socketsList = async function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } + +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey !== undefined) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } + } + respond(res, liveSessions); +} wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { liveSessions[projectKey] = 
liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { @@ -94,51 +165,48 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); } } - let result = {"data": liveSessions}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + 
liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -219,35 +287,13 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } - + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; socket.identity = socket.handshake.query.identity; - let {projectKey, sessionId} = extractPeerId(socket.peerId); + const {projectKey, sessionId} = extractPeerId(socket.peerId); socket.sessionId = sessionId; socket.projectKey = projectKey; socket.lastMessageReceivedAt = Date.now(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index e087dba31..0bd397d96 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -2,8 +2,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); const geoip2Reader = require('@maxmind/geoip2-node').Reader; -var {extractPeerId} = require('./peerjs-server'); -var wsRouter = express.Router(); +const {extractPeerId} = require('./peerjs-server'); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; let io; -let debug = process.env.debug === "1" || false; +const debug = process.env.debug === "1" || false; -const socketsList = function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); - } +const createSocketIOServer = function (server, prefix) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: 
(parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); } - let result = {"data": liveSessions}; +} + +const extractUserIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); + } + } else if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; + } + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + return io.sockets.adapter.rooms.keys(); +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); @@ -36,84 +82,111 @@ const socketsList = function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); -const socketsListByProject = function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); + respond(res, liveSessions); +} +wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); + +const socketsListByProject = async function (req, res) { + debug && console.log("[WS]looking for available sessions"); + let _projectKey = 
extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if 
(process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -192,29 +265,8 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; @@ -285,10 +337,10 @@ module.exports = { socket.onAny(async (eventName, ...args) => { socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); } else { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`); let socketId = await findSessionSocketId(io, socket.peerId); if (socketId === null) { debug && console.log(`session not found for:${socket.peerId}`); @@ -302,7 +354,7 @@ module.exports = { }); console.log("WS server started") - setInterval((io) => { + setInterval(async (io) => { try { let count = 0; console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `); diff --git a/frontend/.gitignore b/frontend/.gitignore index 92150a232..dfcb0fd79 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -8,3 +8,4 @@ app/components/ui/SVG.js *.DS_Store .env *css.d.ts +*.cache diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index 27324e686..8ec60a884 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -22,6 +22,7 @@ var timeoutId; metaList: state.getIn(['customFields', 'list']).map(i => i.key), currentPage: state.getIn([ 'search', 'currentPage' ]), scrollY: state.getIn([ 'search', 'scrollY' ]), + lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]), }), { applyFilter, addAttribute, @@ -87,6 +88,7 @@ export default class SessionList extends React.PureComponent { metaList, currentPage, total, + lastPlayedSessionId, } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = 
_filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); @@ -122,6 +124,7 @@ export default class SessionList extends React.PureComponent { hasUserFilter={hasUserFilter} onUserClick={this.onUserClick} metaList={metaList} + lastPlayedSessionId={lastPlayedSessionId} /> ))} diff --git a/frontend/app/components/Onboarding/components/OnboardingTabs/OnboardingTabs.js b/frontend/app/components/Onboarding/components/OnboardingTabs/OnboardingTabs.js index fd05da5b6..398b7d240 100644 --- a/frontend/app/components/Onboarding/components/OnboardingTabs/OnboardingTabs.js +++ b/frontend/app/components/Onboarding/components/OnboardingTabs/OnboardingTabs.js @@ -8,14 +8,14 @@ const DOCUMENTATION = 'NPM'; // const SEGMENT = 'SEGMENT'; // const GOOGLE_TAG = 'GOOGLE TAG'; const TABS = [ - { key: PROJECT, text: PROJECT }, { key: DOCUMENTATION, text: DOCUMENTATION }, + { key: PROJECT, text: PROJECT }, // { key: SEGMENT, text: SEGMENT }, // { key: GOOGLE_TAG, text: GOOGLE_TAG } ]; class TrackingCodeModal extends React.PureComponent { - state = { copied: false, changed: false, activeTab: PROJECT }; + state = { copied: false, changed: false, activeTab: DOCUMENTATION }; setActiveTab = (tab) => { this.setState({ activeTab: tab }); diff --git a/frontend/app/components/Session_/Player/Player.js b/frontend/app/components/Session_/Player/Player.js index 01f491665..27875df52 100644 --- a/frontend/app/components/Session_/Player/Player.js +++ b/frontend/app/components/Session_/Player/Player.js @@ -9,6 +9,7 @@ import Controls from './Controls'; import Overlay from './Overlay'; import stl from './player.css'; import EventsToggleButton from '../../Session/EventsToggleButton'; +import { updateLastPlayedSession } from 'Duck/sessions'; @connectPlayer(state => ({ live: state.live, @@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton'; return { fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), nextId: state.getIn([ 'sessions', 'nextId' ]), + sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]), closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])), } }, { hideTargetDefiner, fullscreenOff, + updateLastPlayedSession, }) export default class Player extends React.PureComponent { screenWrapper = React.createRef(); componentDidMount() { + this.props.updateLastPlayedSession(this.props.sessionId); if (this.props.closedLive) return; const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx index c86f626ac..8f8aca480 100644 --- a/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx +++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx @@ -36,7 +36,7 @@ function FilterSeries(props: Props) { const onAddFilter = (filter) => { filter.value = [""] - if (filter.hasOwnProperty('filters')) { + if (filter.hasOwnProperty('filters') && Array.isArray(filter.filters)) { filter.filters = filter.filters.map(i => ({ ...i, value: [""] })) } props.addSeriesFilterFilter(seriesIndex, filter); diff --git a/frontend/app/components/shared/SessionItem/SessionItem.js b/frontend/app/components/shared/SessionItem/SessionItem.js index 0b7551760..64e4199ba 100644 --- a/frontend/app/components/shared/SessionItem/SessionItem.js +++ 
b/frontend/app/components/shared/SessionItem/SessionItem.js @@ -3,29 +3,25 @@ import cn from 'classnames'; import { Link, Icon, - OsIcon, - BrowserIcon, CountryFlag, Avatar, TextEllipsis, Label, } from 'UI'; -import { deviceTypeIcon } from 'App/iconNames'; import { toggleFavorite, setSessionPath } from 'Duck/sessions'; import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes'; import { durationFormatted, formatTimeOrDate } from 'App/date'; import stl from './sessionItem.css'; -import LiveTag from 'Shared/LiveTag'; -import Bookmark from 'Shared/Bookmark'; import Counter from './Counter' import { withRouter } from 'react-router-dom'; import SessionMetaList from './SessionMetaList'; import ErrorBars from './ErrorBars'; -import { assist as assistRoute, liveSession, isRoute } from "App/routes"; +import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes"; import { capitalize } from 'App/utils'; const ASSIST_ROUTE = assistRoute(); const ASSIST_LIVE_SESSION = liveSession() +const SESSIONS_ROUTE = sessionsRoute(); // const Label = ({ label = '', color = 'color-gray-medium'}) => ( //
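The thread running through the `/assist/sessions` route change and both websocket servers in this diff is a new optional `userId` filter for live sessions. Assuming a reachable peer server, the new query parameter can be exercised directly over HTTP; every value below (host, key, IDs) is a placeholder, not something taken from this diff:

```python
import requests

PEERS_BASE = "http://localhost:9001"  # placeholder peer-server address
S3_KEY = "changeme"                   # placeholder for process.env.S3_KEY
PROJECT_KEY = "my-project-key"        # placeholder project key

# Without userId: every live session for the project.
all_live = requests.get(f"{PEERS_BASE}/{S3_KEY}/sockets-live/{PROJECT_KEY}")

# With userId: only sessions whose sessionInfo.userID matches, per the
# filtering added to socketsLiveByProject.
one_user = requests.get(
    f"{PEERS_BASE}/{S3_KEY}/sockets-live/{PROJECT_KEY}",
    params={"userId": "jane.doe@example.com"},
)
print(one_user.json()["data"])
```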