diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 4588ccb09..a61d75160 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -47,7 +47,7 @@ jobs: # # Getting the images to build # - git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt + git diff --name-only HEAD HEAD~1 | grep backend/services | cut -d '/' -f3 | uniq > backend/images_to_build.txt [[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index 21249773c..56fde11da 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -102,11 +102,9 @@ def Build(a): a["filter"]["order"] = "DESC" a["filter"]["startDate"] = -1 a["filter"]["endDate"] = TimeUTC.now() - full_args, query_part, sort = sessions.search_query_parts( - data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), - error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=a["projectId"], - user_id=None) + full_args, query_part= sessions.search_query_parts( + data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, + issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value {query_part}""" else: diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 70f563ec8..e4b1d4a7e 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -64,9 +64,12 @@ def get_live_sessions(project_id, filters=None): return helper.list_to_camel_case(results) -def get_live_sessions_ws(project_id): +def get_live_sessions_ws(project_id, user_id=None): project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") + params = {} + if user_id and len(user_id) > 0: + params["userId"] = user_id + connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 48e6b7bd9..a7f863e79 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -1,7 +1,8 @@ import json +import schemas from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.metrics_helper import __get_step_size @@ -398,79 +399,104 @@ def get_details_chart(project_id, error_id, user_id, **data): def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", endTime_arg_name="endDate", chart=False, step_size_name="step_size", project_key="project_id"): - ch_sub_query = [f"{project_key} =%(project_id)s"] + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] if time_constraint: ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", f"timestamp < %({endTime_arg_name})s"] if chart: ch_sub_query += [f"timestamp >= generated_timestamp", f"timestamp < generated_timestamp + %({step_size_name})s"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') -@dev.timed -def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False): - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} - pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id") +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} + + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", "pe.project_id=%(project_id)s"] - pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True) - pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-30) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now(1) - if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only: + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") + # if 
favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status, favorite_only=favorite_only) + error_status=data.status) if len(statuses) == 0: - return {"data": { - 'total': 0, - 'errors': [] - }} - error_ids = [e["error_id"] for e in statuses] + return empty_response + error_ids = [e["errorId"] for e in statuses] with pg_client.PostgresClient() as cur: - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-7) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now() - density = data.get("density", 7) - step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1) + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1) sort = __get_sort_key('datetime') - if data.get("sort") is not None: - sort = __get_sort_key(data["sort"]) + if data.sort is not None: + sort = __get_sort_key(data.sort) order = "DESC" - if data.get("order") is not None: - order = data["order"] + if data.order is not None: + order = data.order + extra_join = "" params = { - "startDate": data['startDate'], - "endDate": data['endDate'], + "startDate": data.startDate, + "endDate": data.endDate, "project_id": project_id, "userId": user_id, "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + if error_ids is not None: params["error_ids"] = tuple(error_ids) pg_sub_query.append("error_id IN %(error_ids)s") - main_pg_query = f"""\ - SELECT error_id, + if data.bookmarked: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, + error_id, name, message, users, @@ -478,19 +504,23 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F last_occurrence, first_occurrence, chart - FROM (SELECT error_id, - name, - message, - COUNT(DISTINCT user_uuid) AS users, - COUNT(DISTINCT session_id) AS sessions, - MAX(timestamp) AS max_datetime, - MIN(timestamp) AS min_datetime - FROM events.errors - INNER JOIN public.errors AS pe USING (error_id) - INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY error_id, name, message - ORDER BY {sort} {order}) AS details + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT 
%(errors_limit)s OFFSET %(errors_offset)s + ) AS details INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, MIN(timestamp) AS first_occurrence FROM events.errors @@ -500,7 +530,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F COUNT(session_id) AS count FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id) + FROM events.errors WHERE {" AND ".join(pg_sub_query_chart)} ) AS sessions ON (TRUE) GROUP BY timestamp @@ -508,16 +538,14 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F # print("--------------------") # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + cur.execute(cur.mogrify(main_pg_query, params)) - total = cur.rowcount + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: return {"data": {"count": total}} - row = cur.fetchone() - rows = [] - limit = 200 - while row is not None and len(rows) < limit: - rows.append(row) - row = cur.fetchone() + if total == 0: rows = [] else: @@ -537,15 +565,16 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), "user_id": user_id}) cur.execute(query=query) - statuses = cur.fetchall() + statuses = helper.list_to_camel_case(cur.fetchall()) statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: + r.pop("full_count") if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] r["viewed"] = statuses[r["error_id"]]["viewed"] r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] @@ -581,7 +610,7 @@ def __save_stacktrace(error_id, data): def get_trace(project_id, error_id): - error = get(error_id=error_id) + error = get(error_id=error_id, family=False) if error is None: return {"errors": ["error not found"]} if error.get("source", "") != "js_exception": diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 7abaa4fe9..db515d995 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -97,7 +97,55 @@ def __get_data_for_extend(data): return data["data"] -def __pg_errors_query(source=None): +def __pg_errors_query(source=None, value_length=None): + if value_length is None or value_length > 2: + return f"""((SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION + (SELECT 
DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" return f"""((SELECT DISTINCT ON(lg.message) lg.message AS value, source, @@ -120,30 +168,6 @@ def __pg_errors_query(source=None): AND lg.name ILIKE %(svalue)s AND lg.project_id = %(project_id)s {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION - (SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} LIMIT 5));""" @@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value), - "source": source})) + cur.mogrify(__pg_errors_query(source, + value_length=len(value) \ + if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "source": source})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None): def __search_pg_errors_ios(project_id, value, key=None, source=None): now = TimeUTC.now() + if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + 
WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(value)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + LIMIT 5);""" + else: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5);""" with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.reason ILIKE %(value)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - LIMIT 5);""", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None): for k in meta_keys.keys(): colname = metadata.index_to_colname(meta_keys[k]) - sub_from.append( - f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)") + if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2: + sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5) + UNION + (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(value)s LIMIT 5)) + """) + else: + sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, 
'{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5)""") with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ SELECT key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) return results -def __generic_query(typename): - return f"""\ - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(svalue)s - LIMIT 5) - UNION - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(value)s - LIMIT 5)""" +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f"""(SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 5) + UNION + (SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(value)s + LIMIT 5);""" + return f"""SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 10;""" def __generic_autocomplete(event: Event): def f(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(__generic_query(event.ui_type), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + cur.execute( + cur.mogrify( + __generic_query(event.ui_type, + value_length=len(value) \ + if SUPPORTED_TYPES[event.ui_type].change_by_length \ + else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) return helper.list_to_camel_case(cur.fetchall()) return f @@ -263,142 +360,96 @@ class event_type: SUPPORTED_TYPES = { event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK), query=__generic_query(typename=event_type.CLICK.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT), query=__generic_query(typename=event_type.INPUT.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION), query=__generic_query(typename=event_type.LOCATION.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM), query=__generic_query(typename=event_type.CUSTOM.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST), query=__generic_query(typename=event_type.REQUEST.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), + 
change_by_length=True), event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL), query=__generic_query(typename=event_type.GRAPHQL.ui_type), - value_limit=3, - starts_with="/", - starts_limit=4, - ignore_if_starts_with=[]), + change_by_length=True), event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION), query=__generic_query(typename=event_type.STATEACTION.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata, - query=None, - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), # IOS event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS), query=__generic_query(typename=event_type.CLICK_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS), query=__generic_query(typename=event_type.INPUT_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS), query=__generic_query(typename=event_type.VIEW_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS), query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS), query=__generic_query(typename=event_type.REQUEST_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), - event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + change_by_length=True), + event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios, + query=None, change_by_length=True), } -def __get_merged_queries(queries, value, project_id): - if len(queries) == 0: - return [] - now = TimeUTC.now() - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}") - return results - - def __get_autocomplete_table(value, project_id): + autocomplete_events = [schemas.FilterType.rev_id, + schemas.EventType.click, + schemas.FilterType.user_device, + schemas.FilterType.user_id, + schemas.FilterType.user_browser, + schemas.FilterType.user_os, + schemas.EventType.custom, + schemas.FilterType.user_country, + schemas.EventType.location, + schemas.EventType.input] + autocomplete_events.sort() + sub_queries = [] + for e in autocomplete_events: + sub_queries.append(f"""(SELECT type, 
value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(svalue)s + LIMIT 5)""") + if len(value) > 2: + sub_queries.append(f"""(SELECT type, value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(value)s + LIMIT 5)""") with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT project_id, type, value - FROM (SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(svalue)s - UNION - SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(value)s) AS u - WHERE Row_ID <= 5) AS sfa - ORDER BY sfa.type;""", - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + query = cur.mogrify("UNION ALL".join(sub_queries) + ";", + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)}) + cur.execute(query) results = helper.list_to_camel_case(cur.fetchall()) return results -def search_pg2(text, event_type, project_id, source, key): +def search(text, event_type, project_id, source, key): if not event_type: return {"data": __get_autocomplete_table(text, project_id)} if event_type in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) - if event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - source=source) + # for IOS events autocomplete + # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): + # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, + # source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index cdd6cec20..e63ea6efc 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -1,4 +1,5 @@ import json +from typing import List import chalicelib.utils.helper import schemas @@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"] ALLOW_UPDATE_FOR = ["name", "filter"] -# def filter_stages(stages): -# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type, -# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type, -# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type, -# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ] -# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None] +def filter_stages(stages: List[schemas._SessionSearchEventSchema]): + ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input, + schemas.EventType.location, schemas.EventType.custom, + schemas.EventType.click_ios, schemas.EventType.input_ios, + schemas.EventType.view_ios, schemas.EventType.custom_ios, ] + return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None] + + +def __parse_events(f_events: List[dict]): + return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events] + + +def __unparse_events(f_events: 
List[schemas._SessionSearchEventSchema]): + return [e.dict() for e in f_events] + + +def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): + if f_events is None: + return + events = [] + for e in f_events: + if e.operator is None: + e.operator = schemas.SearchEventOperator._is + + if not isinstance(e.value, list): + e.value = [e.value] + is_any = sessions._isAny_opreator(e.operator) + if not is_any and isinstance(e.value, list) and len(e.value) == 0: + continue + events.append(e) + return events + def __transform_old_funnels(events): for e in events: @@ -28,7 +55,7 @@ def __transform_old_funnels(events): def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public): helper.delete_keys_from_dict(filter, REMOVE_KEYS) - # filter.events = filter_stages(stages=filter.events) + filter.events = filter_stages(stages=filter.events) with pg_client.PostgresClient() as cur: query = cur.mogrify("""\ INSERT INTO public.funnels (project_id, user_id, name, filter,is_public) @@ -76,9 +103,12 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non query ) r = cur.fetchone() + if r is None: + return {"errors": ["funnel not found"]} r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r = helper.dict_to_camel_case(r) r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) + r["filter"] = helper.old_search_payload_to_flat(r["filter"]) return {"data": r} @@ -102,9 +132,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date for row in rows: row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) if details: - # row["filter"]["events"] = filter_stages(row["filter"]["events"]) + row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"])) if row.get("filter") is not None and row["filter"].get("events") is not None: - row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"]) + row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"])) get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date, end_date=end_date) @@ -168,7 +198,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema): - # data.events = filter_stages(data.events) + data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: @@ -192,17 +223,18 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat "totalDropDueToIssues": total_drop_due_to_issues}} -def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data): - # data["events"] = filter_stages(data.get("events", [])) - if len(data["events"]) == 0: +def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema): + data.events = filter_stages(__parse_events(data.events)) + if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None), - start_date=data.get('startDate', None), - end_date=data.get('endDate', None)) - data = f["filter"] - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, 
project_id=project_id) + get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, + start_date=data.startDate, + end_date=data.endDate) + data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"]) + data.events = __fix_stages(data.events) + insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) if len(insights) > 0: insights[-1]["dropDueToIssues"] = total_drop_due_to_issues return {"data": {"stages": helper.list_to_camel_case(insights), @@ -220,25 +252,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None @dev.timed -def get_issues_on_the_fly(funnel_id, user_id, project_id, data): - first_stage = data.get("firstStage") - last_stage = data.get("lastStage") - # data["events"] = filter_stages(data.get("events", [])) - if len(data["events"]) == 0: +def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): + data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) + if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None), - start_date=data.get('startDate', None), - end_date=data.get('endDate', None)) - data = f["filter"] + get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, + start_date=data.startDate, + end_date=data.endDate) + data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) + if len(data.events) < 2: + return {"issues": []} return { "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage, - last_stage=last_stage))} + significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, + last_stage=len(data.events)))} -def get(funnel_id, project_id, user_id, flatten=True): +def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( @@ -260,7 +293,11 @@ def get(funnel_id, project_id, user_id, flatten=True): if f.get("filter") is not None and f["filter"].get("events") is not None: f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"]) f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"]) - # f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) + f["filter"]["events"] = __parse_events(f["filter"]["events"]) + f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) + if fix_stages: + f["filter"]["events"] = __fix_stages(f["filter"]["events"]) + f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]] if flatten: f["filter"] = helper.old_search_payload_to_flat(f["filter"]) return f @@ -279,7 +316,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn end_date=data.endDate) data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \ + issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \ .get("issues", {}) issues = issues.get("significant", []) + issues.get("insignificant", []) issue = None diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index b67df2a1e..8a191c981 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -168,10 
+168,11 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed -def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False, - error_status="ALL", count_only=False, issue=None): - full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, - user_id) +def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, + error_status=schemas.ErrorStatus.all, count_only=False, issue=None): + full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, + favorite_only=data.bookmarked, issue=issue, project_id=project_id, + user_id=user_id) if data.limit is not None and data.page is not None: full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit @@ -198,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f COUNT(DISTINCT s.user_uuid) AS count_users {query_part};""", full_args) elif data.group_by_user: + g_sort = "count(full_sessions)" + if data.order is None: + data.order = "DESC" + else: + data.order = data.order.upper() + if data.sort is not None and data.sort != 'sessionsCount': + sort = helper.key_to_snake_case(data.sort) + g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" + else: + sort = 'start_ts' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) @@ -206,51 +218,51 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions - GROUP BY user_id - ORDER BY user_sessions_count DESC) AS users_sessions;""", + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ) AS filtred_sessions + ) AS full_sessions + GROUP BY user_id + ) AS users_sessions;""", full_args) else: + if data.order is None: + data.order = "DESC" + sort = 'session_id' + if data.sort is not None and data.sort != "session_id": + sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) + else: + sort = 'session_id' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn FROM (SELECT DISTINCT 
ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""", + ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""", full_args) - # print("--------------------") # print(main_query) # print("--------------------") - cur.execute(main_query) - if count_only: - return helper.dict_to_camel_case(cur.fetchone()) + cur.execute(main_query) + if errors_only: + return helper.list_to_camel_case(cur.fetchall()) + sessions = cur.fetchone() + if count_only: + return helper.dict_to_camel_case(sessions) + total = sessions["count"] sessions = sessions["sessions"] - # sessions = [] - # total = cur.rowcount - # row = cur.fetchone() - # limit = 200 - # while row is not None and len(sessions) < limit: - # if row.get("favorite"): - # limit += 1 - # sessions.append(row) - # row = cur.fetchone() - if errors_only: - return sessions if data.group_by_user: for i, s in enumerate(sessions): sessions[i] = {**s.pop("last_session")[0], **s} @@ -281,9 +293,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0: data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, operator=schemas.SearchEventOperator._is)) - full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) + full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, + favorite_only=False, issue=None, project_id=project_id, + user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] with pg_client.PostgresClient() as cur: @@ -378,8 +390,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr fav_only_join = "" if favorite_only and not errors_only: fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id" - extra_constraints.append("fs.user_id = %(userId)s") - full_args["userId"] = user_id + # extra_constraints.append("fs.user_id = %(userId)s") events_query_part = "" if len(data.filters) > 0: meta_keys = None @@ -958,24 +969,24 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # elif data.platform == schemas.PlatformType.desktop: # extra_constraints.append( # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - if data.order is None: - data.order = "DESC" - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - else: - sort = 'session_id' + if errors_only: extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") - if error_status != "ALL": + extra_constraints.append("ser.project_id = %(project_id)s") + if error_status != schemas.ErrorStatus.all: extra_constraints.append("ser.status = %(error_status)s") - full_args["status"] = error_status.lower() + full_args["error_status"] = error_status if favorite_only: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - 
extra_constraints.append("ufe.user_id = %(user_id)s") + extra_constraints.append("ufe.user_id = %(userId)s") # extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints] - if not favorite_only and not errors_only and user_id is not None: + if favorite_only and not errors_only and user_id is not None: + extra_from += """INNER JOIN (SELECT user_id, session_id + FROM public.user_favorite_sessions + WHERE user_id = %(userId)s) AS favorite_sessions + USING (session_id)""" + elif not favorite_only and not errors_only and user_id is not None: extra_from += """LEFT JOIN (SELECT user_id, session_id FROM public.user_favorite_sessions WHERE user_id = %(userId)s) AS favorite_sessions @@ -1003,7 +1014,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr {extra_from} WHERE {" AND ".join(extra_constraints)}""" - return full_args, query_part, sort + return full_args, query_part def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): @@ -1102,48 +1113,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date): return helper.list_to_camel_case(rows) -def get_favorite_sessions(project_id, user_id, include_viewed=False): - with pg_client.PostgresClient() as cur: - query_part = cur.mogrify(f"""\ - FROM public.sessions AS s - LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id - WHERE fs.user_id = %(userId)s""", - {"projectId": project_id, "userId": user_id} - ) - - extra_query = b"" - if include_viewed: - extra_query = cur.mogrify(""",\ - COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""", - {"projectId": project_id, "userId": user_id}) - - cur.execute(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count, - TRUE AS favorite - {extra_query.decode('UTF-8')} - {query_part.decode('UTF-8')} - ORDER BY s.session_id - LIMIT 50;""") - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - def get_user_sessions(project_id, user_id, start_date, end_date): with pg_client.PostgresClient() as cur: constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] @@ -1196,11 +1165,11 @@ def get_session_user(project_id, user_id): "public".sessions WHERE project_id = %(project_id)s - AND user_id = %(user_id)s + AND user_id = %(userId)s AND duration is not null GROUP BY user_id; """, - {"project_id": project_id, "user_id": user_id} + {"project_id": project_id, "userId": user_id} ) cur.execute(query=query) data = cur.fetchone() @@ -1213,8 +1182,8 @@ def get_session_ids_by_user_ids(project_id, user_ids): """\ SELECT session_id FROM public.sessions WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} ) ids = cur.execute(query=query) return ids @@ -1240,8 +1209,8 @@ def delete_sessions_by_user_ids(project_id, user_ids): """\ DELETE FROM public.sessions WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} ) cur.execute(query=query) 
diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index 1d342d03f..07aad2ee4 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -80,32 +80,41 @@ def get_top_key_values(project_id): return helper.dict_to_CAPITAL_keys(row) -def __generic_query(typename): - return f"""\ - SELECT value, type - FROM ((SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type ='{typename}' - AND value ILIKE %(svalue)s - ORDER BY value - LIMIT 5) +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f""" (SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value ILIKE %(svalue)s + ORDER BY value + LIMIT 5) UNION - (SELECT value, type + (SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s AND type ='{typename}' AND value ILIKE %(value)s ORDER BY value - LIMIT 5)) AS met""" + LIMIT 5);""" + return f""" SELECT DISTINCT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value ILIKE %(svalue)s + ORDER BY value + LIMIT 10;""" def __generic_autocomplete(typename): def f(project_id, text): with pg_client.PostgresClient() as cur: - query = cur.mogrify(__generic_query(typename), + query = cur.mogrify(__generic_query(typename, + value_length=len(text) \ + if SUPPORTED_TYPES[typename].change_by_length else None), {"project_id": project_id, "value": helper.string_to_sql_like(text), "svalue": helper.string_to_sql_like("^" + text)}) @@ -120,124 +129,73 @@ SUPPORTED_TYPES = { schemas.FilterType.user_os: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os), query=__generic_query(typename=schemas.FilterType.user_os), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_browser: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_browser), query=__generic_query(typename=schemas.FilterType.user_browser), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_device), query=__generic_query(typename=schemas.FilterType.user_device), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country), query=__generic_query(typename=schemas.FilterType.user_country), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id), query=__generic_query(typename=schemas.FilterType.user_id), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id), query=__generic_query(typename=schemas.FilterType.user_anonymous_id), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id: SupportedFilter( 
get=__generic_autocomplete(typename=schemas.FilterType.rev_id), query=__generic_query(typename=schemas.FilterType.rev_id), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.referrer: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.referrer), query=__generic_query(typename=schemas.FilterType.referrer), - value_limit=5, - starts_with="/", - starts_limit=5, - ignore_if_starts_with=[]), + change_by_length=True), schemas.FilterType.utm_campaign: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign), query=__generic_query(typename=schemas.FilterType.utm_campaign), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_medium: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_medium), query=__generic_query(typename=schemas.FilterType.utm_medium), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_source: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_source), query=__generic_query(typename=schemas.FilterType.utm_source), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), # IOS schemas.FilterType.user_os_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios), query=__generic_query(typename=schemas.FilterType.user_os_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device_ios: SupportedFilter( get=__generic_autocomplete( typename=schemas.FilterType.user_device_ios), query=__generic_query(typename=schemas.FilterType.user_device_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios), query=__generic_query(typename=schemas.FilterType.user_country_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios), query=__generic_query(typename=schemas.FilterType.user_id_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios), query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios), query=__generic_query(typename=schemas.FilterType.rev_id_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), } @@ -247,6 +205,7 @@ def search(text, meta_type, project_id): if meta_type not in list(SUPPORTED_TYPES.keys()): return {"errors": ["unsupported type"]} rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) - if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): - rows += SUPPORTED_TYPES[meta_type + 
"_IOS"].get(project_id=project_id, text=text) + # for IOS events autocomplete + # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): + # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) return {"data": rows} diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index ab242d7e8..035890e2f 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: first_stage_extra_constraints.append( sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) + i = -1 + for s in stages: - for i, s in enumerate(stages): - if i == 0: - extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] - else: - extra_from = [] if s.get("operator") is None: s["operator"] = "is" @@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: is_any = sessions._isAny_opreator(s["operator"]) if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0: continue + i += 1 + if i == 0: + extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] + else: + extra_from = [] op = sessions.__get_sql_operator(s["operator"]) event_type = s["type"].upper() if event_type == events.event_type.CLICK.ui_type: @@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: ISS.issue_id as issue_id FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id) WHERE ISE.timestamp >= stages_t.stage1_timestamp - AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp + AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp AND ISS.project_id=%(project_id)s {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t ) AS issues_t diff --git a/api/chalicelib/utils/event_filter_definition.py b/api/chalicelib/utils/event_filter_definition.py index 4c132cb13..b21d49b9c 100644 --- a/api/chalicelib/utils/event_filter_definition.py +++ b/api/chalicelib/utils/event_filter_definition.py @@ -6,10 +6,7 @@ class Event: class SupportedFilter: - def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with): + def __init__(self, get, query, change_by_length): self.get = get self.query = query - self.valueLimit = value_limit - self.startsWith = starts_with - self.startsLimit = starts_limit - self.ignoreIfStartsWith = ignore_if_starts_with + self.change_by_length = change_by_length diff --git a/api/routers/core.py b/api/routers/core.py index 73ae5fc20..97a749429 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -21,13 +21,6 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() -@app.get('/{projectId}/sessions2/favorite', tags=["sessions"]) -def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return { - 'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True) - } - - @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): @@ -126,14 +119,14 @@ def events_search(projectId: int, q: str, else: return {"data": []} - result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, 
key=key) + result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key) return result @app.post('/{projectId}/sessions/search2', tags=["sessions"]) def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = sessions.search2_pg(data, projectId, user_id=context.user_id) + data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id) return {'data': data} @@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext = return {'data': sessions_metas.get_top_key_values(projectId)} -@app.get('/{projectId}/sessions/filters/search', tags=["sessions"]) -def get_session_filters_meta(projectId: int, q: str, type: str, - context: schemas.CurrentContext = Depends(OR_context)): - meta_type = type - if len(meta_type) == 0: - return {"data": []} - if len(q) == 0: - return {"data": []} - return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q) - - @app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) @app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str, @@ -716,7 +698,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data.dict()) + data=data) @app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) @@ -731,7 +713,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data.dict())} + data=data)} @app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) @@ -755,10 +737,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. 
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) + if issue is None: + return {"errors": ["issue not found"]} return { "data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue, - start_date=startDate, - end_date=endDate), + start_date=startDate, end_date=endDate), "issue": issue}} @@ -837,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) - return {'data': data} - - -@app.post('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) +def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions_ws(projectId, user_id=userId) return {'data': data} @@ -901,13 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), @app.post('/{projectId}/errors/search', tags=['errors']) -def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False, - data: schemas.SearchErrorsSchema = Body(...), +def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(favorite, str): - favorite = True if len(favorite) == 0 else False - return errors.search(data.dict(), projectId, user_id=context.user_id, status=status, - favorite_only=favorite) + return errors.search(data, projectId, user_id=context.user_id) @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/schemas.py b/api/schemas.py index cf4ae6cd3..3b4fefbd6 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -83,15 +83,6 @@ class EditSlackSchema(BaseModel): url: HttpUrl = Field(...) -class SearchErrorsSchema(BaseModel): - platform: Optional[str] = Field(None) - startDate: Optional[int] = Field(TimeUTC.now(-7)) - endDate: Optional[int] = Field(TimeUTC.now()) - density: Optional[int] = Field(7) - sort: Optional[str] = Field(None) - order: Optional[str] = Field(None) - - class CreateNotificationSchema(BaseModel): token: str = Field(...) notifications: List = Field(...) 
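Note: the SearchErrorsSchema removed above is re-introduced later in this file as a subclass of SessionsSearchPayloadSchema (see the hunk below), so error search now accepts the full session-search payload plus error-specific fields. A minimal sketch of a request body the reworked POST /{projectId}/errors/search endpoint would accept under the new schema, with purely hypothetical values:

    payload = {
        "startDate": 1651363200000,  # ms epoch; the backend falls back to the last 30 days when omitted
        "endDate": 1651449600000,
        "sort": "occurrence",        # ErrorSort: "occurrence" | "users" | "sessions"
        "order": "desc",
        "density": 7,
        "status": "unresolved",      # ErrorStatus: "all" | "unresolved" | "resolved" | "ignored"
        "query": "TypeError",        # ILIKE match against error name/message
        "bookmarked": False,         # restrict results to the caller's favorite errors
        "events": [],                # inherited from SessionsSearchPayloadSchema
        "filters": [],
    }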
@@ -609,11 +600,12 @@ class SessionsSearchPayloadSchema(BaseModel): startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: str = Field(default="DESC") + order: Literal["asc", "desc"] = Field(default="desc") events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) limit: int = Field(default=200, gt=0, le=200) page: int = Field(default=1, gt=0) + bookmarked: bool = Field(default=False) class Config: alias_generator = attribute_to_camel_case @@ -662,6 +654,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema): order: Optional[str] = Field(None) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) group_by_user: Optional[bool] = Field(default=False, const=True) + rangeValue: Optional[str] = Field(None) @root_validator(pre=True) def enforce_default_values(cls, values): @@ -694,6 +687,27 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): order: Optional[str] = Field(None) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) group_by_user: Optional[bool] = Field(default=False, const=True) + rangeValue: Optional[str] = Field(None) + + +class ErrorStatus(str, Enum): + all = 'all' + unresolved = 'unresolved' + resolved = 'resolved' + ignored = 'ignored' + + +class ErrorSort(str, Enum): + occurrence = 'occurrence' + users_count = 'users' + sessions_count = 'sessions' + + +class SearchErrorsSchema(SessionsSearchPayloadSchema): + sort: ErrorSort = Field(default=ErrorSort.occurrence) + density: Optional[int] = Field(7) + status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) + query: Optional[str] = Field(default=None) class MetricPayloadSchema(BaseModel): diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go index b087878b9..1fe717531 100644 --- a/backend/pkg/url/assets/url.go +++ b/backend/pkg/url/assets/url.go @@ -9,16 +9,17 @@ import ( func getSessionKey(sessionID uint64) string { // Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID - return strconv.FormatUint(sessionID>>50, 10) + return strconv.FormatUint(sessionID>>50, 10) } func ResolveURL(baseurl string, rawurl string) string { + rawurl = strings.Trim(rawurl, " ") if !isRelativeCachable(rawurl) { return rawurl } base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls - u, _ := url.Parse(rawurl) // TODO: handle errors ? - if base == nil || u == nil { + u, _ := url.Parse(rawurl) // TODO: handle errors ? 
+ if base == nil || u == nil { return rawurl } return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl) @@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string { } func GetCachePathForAssets(sessionID uint64, rawurl string) string { - return getCachePathWithKey(sessionID, rawurl) + return getCachePathWithKey(sessionID, rawurl) } - func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string { fullURL, cachable := GetFullCachableURL(baseURL, relativeURL) if !cachable { return fullURL } - u := url.URL{ - Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), - Host: r.assetsURL.Host, - Scheme: r.assetsURL.Scheme, + u := url.URL{ + Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), + Host: r.assetsURL.Host, + Scheme: r.assetsURL.Scheme, } return u.String() } - diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 04efdbb32..8531d89a3 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -1,8 +1,9 @@ import json +import schemas from chalicelib.core import dashboard from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import ch_client +from chalicelib.utils import ch_client, metrics_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data): COALESCE((SELECT TRUE FROM public.user_favorite_errors AS fe WHERE pe.error_id = fe.error_id - AND fe.user_id = %(user_id)s), FALSE) AS favorite, + AND fe.user_id = %(userId)s), FALSE) AS favorite, True AS viewed FROM public.errors AS pe INNER JOIN events.errors AS ee USING (error_id) @@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data): AND error_id = %(error_id)s ORDER BY start_ts DESC LIMIT 1;""", - {"project_id": project_id, "error_id": error_id, "user_id": user_id}) + {"project_id": project_id, "error_id": error_id, "userId": user_id}) cur.execute(query=query) status = cur.fetchone() @@ -423,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if time_constraint: ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"datetime < toDateTime(%({endTime_arg_name})s/1000)"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -437,60 +438,280 @@ def __get_step_size(startTimestamp, endTimestamp, density): def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') -def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False): - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} - ch_sub_query = __get_basic_constraints(data.get('platform')) - ch_sub_query.append("source ='js_exception'") +def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate", + endTime_arg_name="endDate", chart=False, step_size_name="step_size", + project_key="project_id"): + if project_key is None: + 
ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] + if time_constraint: + ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", + f"timestamp < %({endTime_arg_name})s"] + if chart: + ch_sub_query += [f"timestamp >= generated_timestamp", + f"timestamp < generated_timestamp + %({step_size_name})s"] + if platform == schemas.PlatformType.mobile: + ch_sub_query.append("user_device_type = 'mobile'") + elif platform == schemas.PlatformType.desktop: + ch_sub_query.append("user_device_type = 'desktop'") + return ch_sub_query + + +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} + + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id") + pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", + "pe.project_id=%(project_id)s"] + pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-30) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now(1) - if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only: + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") + # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status, favorite_only=favorite_only) - error_ids = [e["error_id"] for e in statuses] + error_status=data.status) if len(statuses) == 0: - return {"data": { - 'total': 0, - 'errors': [] - }} - with ch_client.ClickHouseClient() as ch: - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-7) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now() - density = data.get("density", 7) - step_size = __get_step_size(data["startDate"], data["endDate"], density) + return empty_response + error_ids = [e["errorId"] for e in statuses] + with pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1) sort = __get_sort_key('datetime') - if data.get("sort") is not None: - sort = __get_sort_key(data["sort"]) + if data.sort is not None: + sort = __get_sort_key(data.sort) order = "DESC" - if data.get("order") is not None: - order = data["order"] + if data.order is not None: + order = data.order + extra_join = "" params = { - "startDate": data['startDate'], - "endDate": data['endDate'], + "startDate": data.startDate, + "endDate": data.endDate, "project_id": project_id, "userId": user_id, "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status + if data.limit is not None and data.page is not None: + 
params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + + if error_ids is not None: + params["error_ids"] = tuple(error_ids) + pg_sub_query.append("error_id IN %(error_ids)s") + if data.bookmarked: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, + error_id, + name, + message, + users, + sessions, + last_occurrence, + first_occurrence, + chart + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s + ) AS details + INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, + MIN(timestamp) AS first_occurrence + FROM events.errors + WHERE errors.error_id = details.error_id) AS time_details ON (TRUE) + INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY timestamp + ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);""" + + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + + cur.execute(cur.mogrify(main_pg_query, params)) + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] + if flows: + return {"data": {"count": total}} + + if total == 0: + rows = [] + else: + if len(statuses) == 0: + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), + "user_id": user_id}) + cur.execute(query=query) + statuses = helper.list_to_camel_case(cur.fetchall()) + statuses = { + s["errorId"]: s for s in statuses + } + + for r in rows: + r.pop("full_count") + if r["error_id"] in statuses: + r["status"] = statuses[r["error_id"]]["status"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] + r["favorite"] = statuses[r["error_id"]]["favorite"] + r["viewed"] = statuses[r["error_id"]]["viewed"] + r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] + else: + r["status"] = "untracked" 
+ r["parent_error_id"] = None + r["favorite"] = False + r["viewed"] = False + r["stack"] = None + + offset = len(rows) + rows = [r for r in rows if r["stack"] is None + or (len(r["stack"]) == 0 or len(r["stack"]) > 1 + or len(r["stack"]) > 0 + and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] + offset -= len(rows) + return { + "data": { + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) + } + } + + +# refactor this function after clickhouse structure changes (missing search by query) +def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + ch_sub_query = __get_basic_constraints(platform) + ch_sub_query.append("source ='js_exception'") + statuses = [] + error_ids = None + # Clickhouse keeps data for the past month only, so no need to search beyond that + if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31): + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all: + print("-- searching for sessions before errors") + # if favorite_only=True search for sessions associated with favorite_error + statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, + error_status=data.status) + if len(statuses) == 0: + return empty_response + error_ids = [e["errorId"] for e in statuses] + with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = __get_step_size(data.startDate, data.endDate, data.density) + sort = __get_sort_key('datetime') + if data.sort is not None: + sort = __get_sort_key(data.sort) + order = "DESC" + if data.order is not None: + order = data.order + params = { + "startDate": data.startDate, + "endDate": data.endDate, + "project_id": project_id, + "userId": user_id, + "step_size": step_size} + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + if data.bookmarked: + cur.execute(cur.mogrify(f"""SELECT error_id + FROM public.user_favorite_errors + WHERE user_id = %(userId)s + {"" if error_ids is None else "AND error_id IN %(error_ids)s"}""", + {"userId": user_id, "error_ids": tuple(error_ids or [])})) + error_ids = cur.fetchall() + if len(error_ids) == 0: + return empty_response + error_ids = [e["error_id"] for e in error_ids] + if error_ids is not None: params["error_ids"] = tuple(error_ids) ch_sub_query.append("error_id IN %(error_ids)s") + main_ch_query = f"""\ SELECT COUNT(DISTINCT error_id) AS count FROM errors WHERE {" AND ".join(ch_sub_query)};""" + # print("------------") + # print(ch.client().substitute_params(main_ch_query, params)) + # print("------------") total = ch.execute(query=main_ch_query, params=params)[0]["count"] if flows: return {"data": {"count": total}} @@ -510,9 +731,10 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F WHERE {" AND ".join(ch_sub_query)} GROUP BY error_id, name, message ORDER BY {sort} {order} - LIMIT 1001) AS details INNER JOIN 
(SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence - FROM errors - GROUP BY error_id) AS time_details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details + INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence + FROM errors + GROUP BY error_id) AS time_details ON details.error_id=time_details.error_id INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, @@ -523,35 +745,36 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F ORDER BY timestamp) AS sub_table GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;""" - # print("--------------------") - # print(main_ch_query % params) + # print("------------") + # print(ch.client().substitute_params(main_ch_query, params)) + # print("------------") + rows = ch.execute(query=main_ch_query, params=params) if len(statuses) == 0: - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """SELECT error_id, status, parent_error_id, payload, - COALESCE((SELECT TRUE - FROM public.user_favorite_errors AS fe - WHERE errors.error_id = fe.error_id - AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE errors.error_id = ve.error_id - AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed - FROM public.errors - WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", - {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), - "user_id": user_id}) - cur.execute(query=query) - statuses = cur.fetchall() + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), + "userId": user_id}) + cur.execute(query=query) + statuses = helper.list_to_camel_case(cur.fetchall()) statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] r["viewed"] = statuses[r["error_id"]]["viewed"] r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] @@ -565,9 +788,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F r["chart"] = list(r["chart"]) for i in range(len(r["chart"])): r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]} - r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"], - end_time=data["endDate"], - density=density, neutral={"count": 0}) + r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, + end_time=data.endDate, 
+ density=data.density, neutral={"count": 0}) offset = len(rows) rows = [r for r in rows if r["stack"] is None or (len(r["stack"]) == 0 or len(r["stack"]) > 1 @@ -593,7 +816,7 @@ def __save_stacktrace(error_id, data): def get_trace(project_id, error_id): - error = get(error_id=error_id) + error = get(error_id=error_id, family=False) if error is None: return {"errors": ["error not found"]} if error.get("source", "") != "js_exception": @@ -766,7 +989,7 @@ def format_first_stack_frame(error): def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()): with pg_client.PostgresClient() as cur: query = cur.mogrify( - """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s) + """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s) SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed FROM (SELECT root_error.error_id FROM events.errors @@ -780,7 +1003,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim AND user_viewed.error_id ISNULL LIMIT 1 ) AS timed_errors;""", - {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp, + {"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp}) cur.execute(query=query) row = cur.fetchone() diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql new file mode 100644 index 000000000..d043cedcb --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -0,0 +1,25 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.5.4-ee' +$$ LANGUAGE sql IMMUTABLE; + + +COMMIT; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; +CREATE 
INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 633e64caa..f5a26c04b 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -723,6 +723,22 @@ $$ CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type); CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; + CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; + CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; + CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; + CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; + CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; + CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; + CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; + CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; + CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; + CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; + CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; + CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; + CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; + CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; + BEGIN IF NOT EXISTS(SELECT * FROM pg_type typ @@ -1018,7 +1034,7 @@ $$ CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL; CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL; CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON 
events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL; - + CREATE TABLE IF NOT EXISTS events.state_actions ( session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, diff --git a/ee/utilities/server.js b/ee/utilities/server.js index f1209c9ff..d049faa19 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers var express = require('express'); const {ExpressPeerServer} = require('peer'); var socket; -if (process.env.cluster === "true") { +if (process.env.redis === "true") { console.log("Using Redis"); socket = require("./servers/websocket-cluster"); } else { diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 940f83879..c044043a5 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader; const {extractPeerId} = require('./peerjs-server'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); - -var wsRouter = express.Router(); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; +const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; +const pubClient = createClient({url: REDIS_URL}); +const subClient = pubClient.duplicate(); let io; const debug = process.env.debug === "1" || false; -const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; -const pubClient = createClient({url: REDIS_URL}); -const subClient = pubClient.duplicate(); +const createSocketIOServer = function (server, prefix) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? 
prefix : '') + '/socket' + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); + } +} const uniqueSessions = function (data) { let resArr = []; @@ -36,18 +58,40 @@ const uniqueSessions = function (data) { return resArr; } -const socketsList = async function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); - for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); +const extractUserIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); } + } else if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; } - let result = {"data": liveSessions}; + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + let rooms = await io.of('/').adapter.allRooms(); + return rooms; +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); @@ -56,37 +100,64 @@ const socketsList = async function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } + +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey !== undefined) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } + } + respond(res, liveSessions); +} wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { liveSessions[projectKey] = 
liveSessions[projectKey] || [];
-            liveSessions[projectKey].push(sessionId);
+            if (userId) {
+                const connected_sockets = await io.in(peerId).fetchSockets();
+                for (let item of connected_sockets) {
+                    if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                        liveSessions[projectKey].push(sessionId);
+                    }
+                }
+            } else {
+                liveSessions[projectKey].push(sessionId);
+            }
        }
    }
-    let result = {"data": liveSessions[req.params.projectKey] || []};
-    if (process.env.uws !== "true") {
-        res.statusCode = 200;
-        res.setHeader('Content-Type', 'application/json');
-        res.end(JSON.stringify(result));
-    } else {
-        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
-    }
+    respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
    debug && console.log("[WS]looking for all available LIVE sessions");
+    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
-    let rooms = await io.of('/').adapter.allRooms();
+    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
        if (projectKey !== undefined) {
@@ -94,51 +165,48 @@ const socketsLive = async function (req, res) {
            for (let item of connected_sockets) {
                if (item.handshake.query.identity === IDENTITIES.session) {
                    liveSessions[projectKey] = liveSessions[projectKey] || [];
-                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    if (userId) {
+                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                            liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                        }
+                    } else {
+                        liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    }
                }
            }
            liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
        }
    }
-    let result = {"data": liveSessions};
-    if (process.env.uws !== "true") {
-        res.statusCode = 200;
-        res.setHeader('Content-Type', 'application/json');
-        res.end(JSON.stringify(result));
-    } else {
-        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
-    }
+    respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
-    if (process.env.uws === "true") {
-        req.params = {projectKey: req.getParameter(0)};
-    }
-    debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
+    debug && console.log("[WS]looking for available LIVE sessions");
+    let _projectKey = extractProjectKeyFromRequest(req);
+    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
-    let rooms = await io.of('/').adapter.allRooms();
+    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
-        if (projectKey === req.params.projectKey) {
+        if (projectKey === _projectKey) {
            let connected_sockets = await io.in(peerId).fetchSockets();
            for (let item of connected_sockets) {
                if (item.handshake.query.identity === IDENTITIES.session) {
                    liveSessions[projectKey] = liveSessions[projectKey] || [];
-                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    if (userId) {
+                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                            
liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -219,35 +287,13 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } - + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; socket.identity = socket.handshake.query.identity; - let {projectKey, sessionId} = extractPeerId(socket.peerId); + const {projectKey, sessionId} = extractPeerId(socket.peerId); socket.sessionId = sessionId; socket.projectKey = projectKey; socket.lastMessageReceivedAt = Date.now(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index e087dba31..0bd397d96 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -2,8 +2,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); const geoip2Reader = require('@maxmind/geoip2-node').Reader; -var {extractPeerId} = require('./peerjs-server'); -var wsRouter = express.Router(); +const {extractPeerId} = require('./peerjs-server'); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; let io; -let debug = process.env.debug === "1" || false; +const debug = process.env.debug === "1" || false; -const socketsList = function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); - } +const createSocketIOServer = function (server, prefix) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: 
(parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: (prefix ? prefix : '') + '/socket' + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); } - let result = {"data": liveSessions}; +} + +const extractUserIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); + } + } else if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; + } + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + return io.sockets.adapter.rooms.keys(); +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); @@ -36,84 +82,111 @@ const socketsList = function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); -const socketsListByProject = function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); + respond(res, liveSessions); +} +wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); + +const socketsListByProject = async function (req, res) { + debug && console.log("[WS]looking for available sessions"); + let _projectKey = 
extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if 
(process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -192,29 +265,8 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; @@ -285,10 +337,10 @@ module.exports = { socket.onAny(async (eventName, ...args) => { socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); } else { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`); let socketId = await findSessionSocketId(io, socket.peerId); if (socketId === null) { debug && console.log(`session not found for:${socket.peerId}`); @@ -302,7 +354,7 @@ module.exports = { }); console.log("WS server started") - setInterval((io) => { + setInterval(async (io) => { try { let count = 0; console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `); diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js index 6d30359f1..326a1e78e 100644 --- a/frontend/app/components/BugFinder/BugFinder.js +++ b/frontend/app/components/BugFinder/BugFinder.js @@ -13,7 +13,8 @@ import withLocationHandlers from "HOCs/withLocationHandlers"; import { fetch as fetchFilterVariables } from 'Duck/sources'; import { fetchSources } from 'Duck/customField'; import { RehydrateSlidePanel } from './WatchDogs/components'; -import { setActiveTab, setFunnelPage } from 'Duck/sessions'; +import { setFunnelPage } from 'Duck/sessions'; +import { setActiveTab } from 'Duck/search'; import SessionsMenu from './SessionsMenu/SessionsMenu'; import { LAST_7_DAYS } from 'Types/app/period'; import { resetFunnel } from 'Duck/funnels'; @@ -51,12 +52,12 @@ const allowedQueryKeys = [ variables: state.getIn([ 'customFields', 'list' ]), sources: state.getIn([ 'customFields', 'sources' ]), filterValues: state.get('filterValues'), - activeTab: state.getIn([ 'sessions', 
'activeTab' ]), favoriteList: state.getIn([ 'sessions', 'favoriteList' ]), currentProjectId: state.getIn([ 'user', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), watchdogs: state.getIn(['watchdogs', 'list']), activeFlow: state.getIn([ 'filters', 'activeFlow' ]), + sessions: state.getIn([ 'sessions', 'list' ]), }), { fetchFavoriteSessionList, applyFilter, @@ -91,7 +92,9 @@ export default class BugFinder extends React.PureComponent { // keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS() // }; // }); - props.fetchSessions(); + if (props.sessions.size === 0) { + props.fetchSessions(); + } props.resetFunnel(); props.resetFunnelFilters(); props.fetchFunnelsList(LAST_7_DAYS) @@ -115,7 +118,6 @@ export default class BugFinder extends React.PureComponent { } render() { - const { activeFlow, activeTab } = this.props; const { showRehydratePanel } = this.state; return ( diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index 10db59c5b..f5152222a 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -1,7 +1,7 @@ import { connect } from 'react-redux'; -import { Loader, NoContent, Button, LoadMoreButton } from 'UI'; +import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI'; import { applyFilter, addAttribute, addEvent } from 'Duck/filters'; -import { fetchSessions, addFilterByKeyAndValue } from 'Duck/search'; +import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search'; import SessionItem from 'Shared/SessionItem'; import SessionListHeader from './SessionListHeader'; import { FilterKey } from 'Types/filter/filterType'; @@ -15,17 +15,20 @@ var timeoutId; shouldAutorefresh: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 0, savedFilters: state.getIn([ 'filters', 'list' ]), loading: state.getIn([ 'sessions', 'loading' ]), - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), allList: state.getIn([ 'sessions', 'list' ]), total: state.getIn([ 'sessions', 'total' ]), filters: state.getIn([ 'search', 'instance', 'filters' ]), metaList: state.getIn(['customFields', 'list']).map(i => i.key), + currentPage: state.getIn([ 'search', 'currentPage' ]), + lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]), }), { applyFilter, addAttribute, addEvent, fetchSessions, addFilterByKeyAndValue, + updateCurrentPage, }) export default class SessionList extends React.PureComponent { state = { @@ -76,6 +79,8 @@ export default class SessionList extends React.PureComponent { clearTimeout(timeoutId) } + + renderActiveTabContent(list) { const { loading, @@ -84,6 +89,9 @@ export default class SessionList extends React.PureComponent { allList, activeTab, metaList, + currentPage, + total, + lastPlayedSessionId, } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); @@ -93,49 +101,47 @@ export default class SessionList extends React.PureComponent { return ( -
-            Please try changing your search parameters.</div>
-            {allList.size > 0 && (
-              However, we found other sessions based on your search parameters.
-            )}
+            Please try changing your search parameters.</div>
+            {allList.size > 0 && (
+              However, we found other sessions based on your search parameters.
+            )}
           }
         >
-        { list.take(displayedCount).map(session => (
+        { list.map(session => (
         ))}
-        Haven't found the session in the above list?
-        Try being a bit more specific by setting a specific time frame or simply use different filters
-        }
-        />
+        <Pagination
+          page={currentPage}
+          totalPages={Math.ceil(total / PER_PAGE)}
+          onPageChange={(page) => this.props.updateCurrentPage(page)}
+          limit={PER_PAGE}
+          debounceRequest={1000}
+        />
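+        {/* assumptions: `total` is the sessions total from the store and the
+            server pages by PER_PAGE, so totalPages is Math.ceil(total / PER_PAGE);
+            debounceRequest={1000} coalesces rapid page clicks into one fetch */}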
); } diff --git a/frontend/app/components/BugFinder/SessionList/SessionListHeader.js b/frontend/app/components/BugFinder/SessionList/SessionListHeader.js index 67d1c4aaf..e4f949473 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionListHeader.js +++ b/frontend/app/components/BugFinder/SessionList/SessionListHeader.js @@ -64,5 +64,5 @@ function SessionListHeader({ }; export default connect(state => ({ - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), }), { applyFilter })(SessionListHeader); diff --git a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js index be98a28cd..fa0594316 100644 --- a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js +++ b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js @@ -1,30 +1,20 @@ -import React, { useEffect } from 'react' +import React from 'react' import { connect } from 'react-redux'; import cn from 'classnames'; -import { SideMenuitem, SavedSearchList, Progress, Popup, Icon, CircularLoader } from 'UI' +import { SideMenuitem, SavedSearchList, Progress, Popup } from 'UI' import stl from './sessionMenu.css'; import { fetchWatchdogStatus } from 'Duck/watchdogs'; -import { setActiveFlow, clearEvents } from 'Duck/filters'; -import { setActiveTab } from 'Duck/sessions'; +import { clearEvents } from 'Duck/filters'; import { issues_types } from 'Types/session/issue' import { fetchList as fetchSessionList } from 'Duck/sessions'; function SessionsMenu(props) { - const { - activeFlow, activeTab, watchdogs = [], keyMap, wdTypeCount, - fetchWatchdogStatus, toggleRehydratePanel, filters, sessionsLoading } = props; + const { activeTab, keyMap, wdTypeCount, toggleRehydratePanel } = props; const onMenuItemClick = (filter) => { props.onMenuItemClick(filter) - - if (activeFlow && activeFlow.type === 'flows') { - props.setActiveFlow(null) - } } - - // useEffect(() => { - // fetchWatchdogStatus() - // }, []) + const capturingAll = props.captureRate && props.captureRate.get('captureAll'); @@ -66,36 +56,13 @@ function SessionsMenu(props) { { issues_types.filter(item => item.visible).map(item => ( onMenuItemClick(item)} /> ))} - {/*
-
- -
Assist
- { activeTab.type === 'live' && ( -
!sessionsLoading && props.fetchSessionList(filters.toJS())} - > - { sessionsLoading ? : } -
- )} -
- } - iconName="person" - active={activeTab.type === 'live'} - onClick={() => onMenuItemClick({ name: 'Assist', type: 'live' })} - /> - -
*/} -
({ - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), keyMap: state.getIn([ 'sessions', 'keyMap' ]), wdTypeCount: state.getIn([ 'sessions', 'wdTypeCount' ]), - activeFlow: state.getIn([ 'filters', 'activeFlow' ]), captureRate: state.getIn(['watchdogs', 'captureRate']), filters: state.getIn([ 'filters', 'appliedFilter' ]), sessionsLoading: state.getIn([ 'sessions', 'fetchLiveListRequest', 'loading' ]), }), { - fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab, fetchSessionList + fetchWatchdogStatus, clearEvents, fetchSessionList })(SessionsMenu); diff --git a/frontend/app/components/Errors/Errors.js b/frontend/app/components/Errors/Errors.js index 4eb671cf5..10812558d 100644 --- a/frontend/app/components/Errors/Errors.js +++ b/frontend/app/components/Errors/Errors.js @@ -1,23 +1,19 @@ import { connect } from 'react-redux'; import withSiteIdRouter from 'HOCs/withSiteIdRouter'; import withPermissions from 'HOCs/withPermissions' -import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo"; -import { getRE } from 'App/utils'; -import { fetchBookmarks } from "Duck/errors"; +import { UNRESOLVED, RESOLVED, IGNORED, BOOKMARK } from "Types/errorInfo"; +import { fetchBookmarks, editOptions } from "Duck/errors"; import { applyFilter } from 'Duck/filters'; import { fetchList as fetchSlackList } from 'Duck/integrations/slack'; import { errors as errorsRoute, isRoute } from "App/routes"; -import EventFilter from 'Components/BugFinder/EventFilter'; import DateRange from 'Components/BugFinder/DateRange'; import withPageTitle from 'HOCs/withPageTitle'; - -import { SavedSearchList } from 'UI'; +import cn from 'classnames'; import List from './List/List'; import ErrorInfo from './Error/ErrorInfo'; import Header from './Header'; import SideMenuSection from './SideMenu/SideMenuSection'; -import SideMenuHeader from './SideMenu/SideMenuHeader'; import SideMenuDividedItem from './SideMenu/SideMenuDividedItem'; const ERRORS_ROUTE = errorsRoute(); @@ -39,44 +35,26 @@ function getStatusLabel(status) { @withSiteIdRouter @connect(state => ({ list: state.getIn([ "errors", "list" ]), + status: state.getIn([ "errors", "options", "status" ]), }), { fetchBookmarks, applyFilter, fetchSlackList, + editOptions, }) @withPageTitle("Errors - OpenReplay") export default class Errors extends React.PureComponent { - state = { - status: UNRESOLVED, - bookmarksActive: false, - currentList: this.props.list.filter(e => e.status === UNRESOLVED), - filter: '', + constructor(props) { + super(props) + this.state = { + filter: '', + } } componentDidMount() { this.props.fetchSlackList(); // Delete after implementing cache } - onFilterChange = ({ target: { value } }) => this.setState({ filter: value }) - - componentDidUpdate(prevProps, prevState) { - const { bookmarksActive, status, filter } = this.state; - const { list } = this.props; - if (prevProps.list !== list - || prevState.status !== status - || prevState.bookmarksActive !== bookmarksActive - || prevState.filter !== filter) { - const unfiltered = bookmarksActive - ? 
list - : list.filter(e => e.status === status); - const filterRE = getRE(filter); - this.setState({ - currentList: unfiltered - .filter(e => filterRE.test(e.name) || filterRE.test(e.message)), - }) - } - } - ensureErrorsPage() { const { history } = this.props; if (!isRoute(ERRORS_ROUTE, history.location.pathname)) { @@ -85,22 +63,11 @@ export default class Errors extends React.PureComponent { } onStatusItemClick = ({ key }) => { - if (this.state.bookmarksActive) { - this.props.applyFilter(); - } - this.setState({ - status: key, - bookmarksActive: false, - }); - this.ensureErrorsPage(); + this.props.editOptions({ status: key }); } onBookmarksClick = () => { - this.setState({ - bookmarksActive: true, - }); - this.props.fetchBookmarks(); - this.ensureErrorsPage(); + this.props.editOptions({ status: BOOKMARK }); } @@ -110,12 +77,14 @@ export default class Errors extends React.PureComponent { match: { params: { errorId } }, + status, + list, + history, } = this.props; - const { status, bookmarksActive, currentList } = this.state; return (
-
+
@@ -154,8 +123,8 @@ export default class Errors extends React.PureComponent { <>
Seen in @@ -164,12 +133,11 @@ export default class Errors extends React.PureComponent {
: - + }
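A note inferred from the duck changes further below: the Errors screen now sends its list options to the server instead of filtering client-side. A hedged sketch of the payload posted to /errors/search, reconstructed from duck/errors.js in this same patch; anything not visible in that file is an assumption, not the backend contract:

// TypeScript sketch, inferred from duck/errors.js below, not from the backend.
interface ErrorsSearchRequest {
  page: number;           // errors.currentPage in the store
  limit: number;          // PER_PAGE = 10
  sort: string;           // 'occurrence' | 'sessions' | 'users'
  order: 'asc' | 'desc';  // defaults: sort 'occurrence', order 'desc'
  status: string;         // 'unresolved' by default; BOOKMARK maps to 'all'
  query: string;          // name/message filter, debounced client-side
  bookmarked?: boolean;   // true when the bookmark tab is selected
}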
diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js
index cb0ffd55a..2fa91c5e5 100644
--- a/frontend/app/components/Errors/List/List.js
+++ b/frontend/app/components/Errors/List/List.js
@@ -1,53 +1,62 @@
 import cn from 'classnames';
 import { connect } from 'react-redux';
 import { Set, List as ImmutableList } from "immutable";
-import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain } from 'UI';
-import { merge, resolve, unresolve, ignore, updateCurrentPage } from "Duck/errors";
+import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain, Pagination } from 'UI';
+import { merge, resolve, unresolve, ignore, updateCurrentPage, editOptions } from "Duck/errors";
 import { applyFilter } from 'Duck/filters';
 import { IGNORED, RESOLVED, UNRESOLVED } from 'Types/errorInfo';
 import SortDropdown from 'Components/BugFinder/Filters/SortDropdown';
 import Divider from 'Components/Errors/ui/Divider';
 import ListItem from './ListItem/ListItem';
+import { debounce } from 'App/utils';

-const PER_PAGE = 5;
-const DEFAULT_SORT = 'lastOccurrence';
-const DEFAULT_ORDER = 'desc';
+const PER_PAGE = 10;

 const sortOptionsMap = {
-  'lastOccurrence-desc': 'Last Occurrence',
-  'firstOccurrence-desc': 'First Occurrence',
-  'sessions-asc': 'Sessions Ascending',
-  'sessions-desc': 'Sessions Descending',
-  'users-asc': 'Users Ascending',
-  'users-desc': 'Users Descending',
+  'occurrence-desc': 'Last Occurrence',
+  'occurrence-asc': 'First Occurrence',
+  'sessions-asc': 'Sessions Ascending',
+  'sessions-desc': 'Sessions Descending',
+  'users-asc': 'Users Ascending',
+  'users-desc': 'Users Descending',
 };
 const sortOptions = Object.entries(sortOptionsMap)
   .map(([ value, text ]) => ({ value, text }));
-
 @connect(state => ({
   loading: state.getIn([ "errors", "loading" ]),
   resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) || state.getIn(["errors", "unresolve", "loading"]),
   ignoreLoading: state.getIn([ "errors", "ignore", "loading" ]),
   mergeLoading: state.getIn([ "errors", "merge", "loading" ]),
-  currentPage: state.getIn(["errors", "currentPage"]),
+  currentPage: state.getIn(["errors", "currentPage"]),
+  total: state.getIn([ 'errors', 'totalCount' ]),
+  sort: state.getIn([ 'errors', 'options', 'sort' ]),
+  order: state.getIn([ 'errors', 'options', 'order' ]),
+  query: state.getIn([ "errors", "options", "query" ]),
 }), {
   merge,
   resolve,
   unresolve,
   ignore,
   applyFilter,
-  updateCurrentPage,
+  updateCurrentPage,
+  editOptions,
 })
 export default class List extends React.PureComponent {
-  state = {
-    checkedAll: false,
-    checkedIds: Set(),
-    sort: {}
+  constructor(props) {
+    super(props)
+    this.state = {
+      checkedAll: false,
+      checkedIds: Set(),
+      query: props.query,
+    }
+    this.debounceFetch = debounce(this.props.editOptions, 1000);
   }
-
+
   componentDidMount() {
-    this.props.applyFilter({ sort: DEFAULT_SORT, order: DEFAULT_ORDER, events: ImmutableList(), filters: ImmutableList() });
+    if (this.props.list.size === 0) {
+      this.props.applyFilter({ });
+    }
   }

   check = ({ errorId }) => {
@@ -111,8 +120,14 @@ export default class List extends React.PureComponent {

   writeOption = (e, { name, value }) => {
     const [ sort, order ] = value.split('-');
-    const sign = order === 'desc' ? -1 : 1;
-    this.setState({ sort: { sort, order }})
+    if (name === 'sort') {
+      this.props.editOptions({ sort, order });
+    }
+  }
+
+  onQueryChange = (e, { value }) => {
+    this.setState({ query: value });
+    this.debounceFetch({ query: value });
+  }

   render() {
     const {
@@ -123,19 +138,18 @@
       ignoreLoading,
       resolveToggleLoading,
       mergeLoading,
-      onFilterChange,
-      currentPage,
+      currentPage,
+      total,
+      sort,
+      order,
     } = this.props;
     const {
       checkedAll,
       checkedIds,
-      sort
+      query,
     } = this.state;
     const someLoading = loading || ignoreLoading || resolveToggleLoading || mergeLoading;
     const currentCheckedIds = this.currentCheckedIds();
-    const displayedCount = Math.min(currentPage * PER_PAGE, list.size);
-    let _list = sort.sort ? list.sortBy(i => i[sort.sort]) : list;
-    _list = sort.order === 'desc' ? _list.reverse() : _list;
     return (
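The constructor above wires this.debounceFetch = debounce(this.props.editOptions, 1000), so a burst of keystrokes in the filter box produces a single search request. A minimal sketch of a trailing-edge debounce with that call shape; the real helper lives in App/utils and may differ in detail:

// Illustrative TypeScript only; the project's App/utils debounce is assumed equivalent.
function debounce<A extends unknown[]>(fn: (...args: A) => void, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A): void => {
    if (timer !== undefined) clearTimeout(timer); // cancel the pending call
    timer = setTimeout(() => fn(...args), wait);  // fire after `wait` ms of quiet
  };
}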
@@ -182,33 +196,35 @@ export default class List extends React.PureComponent { }
-          Sort By
+          Sort By
-          { _list.take(displayedCount).map(e =>
-            <>
+          <Input
+            className="input-small ml-3"
+            placeholder="Filter by Name or Message"
+            icon="search"
+            iconPosition="left"
+            name="filter"
+            onChange={ this.onQueryChange }
+            value={query}
+          />
+          { list.map(e =>
           )}
+          <Pagination
+            page={currentPage}
+            totalPages={Math.ceil(total / PER_PAGE)}
+            onPageChange={(page) => this.props.updateCurrentPage(page)}
+            limit={PER_PAGE}
+            debounceRequest={500}
+          />
+ + ); } diff --git a/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js b/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js index 906843394..c110d16ab 100644 --- a/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js +++ b/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js @@ -6,9 +6,19 @@ import { connect } from 'react-redux'; import { setActiveStages } from 'Duck/funnels'; import { Styles } from '../../Dashboard/Widgets/common'; import { numberWithCommas } from 'App/utils' +import { truncate } from 'App/utils' const MIN_BAR_HEIGHT = 20; +function CustomTick(props) { + const { x, y, payload } = props; + return ( + + {payload.value} + + ); +} + function FunnelGraph(props) { const { data, activeStages, funnelId, liveFilters } = props; const [activeIndex, setActiveIndex] = useState(activeStages) @@ -118,13 +128,29 @@ function FunnelGraph(props) { ) } - const CustomTooltip = ({ active, payload, msg = '' }) => { + const CustomTooltip = (props) => { + const { payload } = props; + if (payload.length === 0) return null; + const { value, headerText } = payload[0].payload; + + // const value = payload[0].payload.value; + if (!value) return null; return ( -
-        {msg}
+        {headerText}
+        {value.map(i => (
+          {truncate(i, 30)}
+        ))}
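+        {/* truncate is imported above from App/utils; capping entries at 30
+            characters keeps long event values from overflowing the tooltip */}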
- ); + ) }; + // const CustomTooltip = ({ active, payload, msg = '' }) => { + // return ( + //
+ //

{msg}

+ //
+ // ); + // }; const TEMP = {} @@ -152,7 +178,9 @@ function FunnelGraph(props) { background={'transparent'} > - {activeStages.length < 2 && 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} + {/* {activeStages.length < 2 && 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} */} + + } xAxisId={0} /> {/* { const FunnelHeader = (props) => { const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props; - const [showSaveModal, setShowSaveModal] = useState(false) const writeOption = (e, { name, value }) => { - props.fetch(value).then(() => { - props.fetchInsights(value, {}) - props.fetchIssuesFiltered(value, {}) - props.fetchSessionsFiltered(value, {}) - props.redirect(value) - }) + props.redirect(value) + props.fetch(value).then(() => props.refresh(value)) } const deleteFunnel = async (e, funnel) => { @@ -45,11 +40,12 @@ const FunnelHeader = (props) => { } const onDateChange = (e) => { - props.editFilter(e, funnel.funnelId); + props.editFilter(e, funnelId); } const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS(); const selectedFunnel = funnels.filter(i => i.funnelId === parseInt(funnelId)).first() || {}; + const eventsCount = funnel.filter.filters.filter(i => i.isEvent).size; return (
@@ -76,7 +72,7 @@ const FunnelHeader = (props) => { selectOnBlur={false} icon={ } /> - + - @@ -114,4 +110,4 @@ const FunnelHeader = (props) => { export default connect(state => ({ funnel: state.getIn([ 'funnels', 'instance' ]), -}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered })(FunnelHeader) +}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader) diff --git a/frontend/app/components/Session_/Player/Player.js b/frontend/app/components/Session_/Player/Player.js index 0f0b51786..7391c8992 100644 --- a/frontend/app/components/Session_/Player/Player.js +++ b/frontend/app/components/Session_/Player/Player.js @@ -9,6 +9,7 @@ import Controls from './Controls'; import Overlay from './Overlay'; import stl from './player.css'; import EventsToggleButton from '../../Session/EventsToggleButton'; +import { updateLastPlayedSession } from 'Duck/sessions'; @connectPlayer(state => ({ live: state.live, @@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton'; return { fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), nextId: state.getIn([ 'sessions', 'nextId' ]), + sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]), closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])), } }, { hideTargetDefiner, fullscreenOff, + updateLastPlayedSession, }) export default class Player extends React.PureComponent { screenWrapper = React.createRef(); componentDidMount() { + this.props.updateLastPlayedSession(this.props.sessionId); if (this.props.closedLive) return; const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index cb503a745..a1617e47d 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -1,7 +1,7 @@ import React, { useEffect } from 'react'; import { fetchLiveList } from 'Duck/sessions'; import { connect } from 'react-redux'; -import { NoContent, Loader, LoadMoreButton } from 'UI'; +import { NoContent, Loader, LoadMoreButton, Pagination } from 'UI'; import { List, Map } from 'immutable'; import SessionItem from 'Shared/SessionItem'; import withPermissions from 'HOCs/withPermissions' @@ -12,11 +12,11 @@ import { addFilterByKeyAndValue, updateCurrentPage, updateSort } from 'Duck/live import DropdownPlain from 'Shared/DropdownPlain'; import SortOrderButton from 'Shared/SortOrderButton'; import { TimezoneDropdown } from 'UI'; -import { capitalize } from 'App/utils'; +import { capitalize, sliceListPerPage } from 'App/utils'; import LiveSessionReloadButton from 'Shared/LiveSessionReloadButton'; const AUTOREFRESH_INTERVAL = .5 * 60 * 1000 -const PER_PAGE = 20; +const PER_PAGE = 10; interface Props { loading: Boolean, @@ -42,9 +42,8 @@ function LiveSessionList(props: Props) { text: capitalize(i), value: i })).toJS(); - const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size); - - const addPage = () => props.updateCurrentPage(props.currentPage + 1) + // const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size); + // const addPage = () => props.updateCurrentPage(props.currentPage + 1) useEffect(() => { if (filters.size === 0) { @@ -135,6 +134,7 @@ function LiveSessionList(props: Props) { 
props.updateSort({ order: state })} sortOrder={sort.order} />
+ - {sessions && sessions.sortBy(i => i.metadata[sort.field]).update(list => { + {sessions && sliceListPerPage(sessions.sortBy(i => i.metadata[sort.field]).update(list => { return sort.order === 'desc' ? list.reverse() : list; - }).take(displayedCount).map(session => ( + }), currentPage - 1).map(session => ( ))} - + props.updateCurrentPage(page)} + limit={PER_PAGE} /> + diff --git a/frontend/app/components/shared/SessionItem/SessionItem.js b/frontend/app/components/shared/SessionItem/SessionItem.js index 0b7551760..64e4199ba 100644 --- a/frontend/app/components/shared/SessionItem/SessionItem.js +++ b/frontend/app/components/shared/SessionItem/SessionItem.js @@ -3,29 +3,25 @@ import cn from 'classnames'; import { Link, Icon, - OsIcon, - BrowserIcon, CountryFlag, Avatar, TextEllipsis, Label, } from 'UI'; -import { deviceTypeIcon } from 'App/iconNames'; import { toggleFavorite, setSessionPath } from 'Duck/sessions'; import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes'; import { durationFormatted, formatTimeOrDate } from 'App/date'; import stl from './sessionItem.css'; -import LiveTag from 'Shared/LiveTag'; -import Bookmark from 'Shared/Bookmark'; import Counter from './Counter' import { withRouter } from 'react-router-dom'; import SessionMetaList from './SessionMetaList'; import ErrorBars from './ErrorBars'; -import { assist as assistRoute, liveSession, isRoute } from "App/routes"; +import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes"; import { capitalize } from 'App/utils'; const ASSIST_ROUTE = assistRoute(); const ASSIST_LIVE_SESSION = liveSession() +const SESSIONS_ROUTE = sessionsRoute(); // const Label = ({ label = '', color = 'color-gray-medium'}) => ( //
{label}
@@ -69,10 +65,13 @@ export default class SessionItem extends React.PureComponent { disableUser = false, metaList = [], showActive = false, + lastPlayedSessionId, } = this.props; const formattedDuration = durationFormatted(duration); const hasUserId = userId || userAnonymousId; + const isSessions = isRoute(SESSIONS_ROUTE, this.props.location.pathname); const isAssist = isRoute(ASSIST_ROUTE, this.props.location.pathname) || isRoute(ASSIST_LIVE_SESSION, this.props.location.pathname); + const isLastPlayed = lastPlayedSessionId === sessionId; const _metaList = Object.keys(metadata).filter(i => metaList.includes(i)).map(key => { const value = metadata[key]; @@ -125,7 +124,7 @@ export default class SessionItem extends React.PureComponent { - { !isAssist && ( + { isSessions && (
@@ -139,6 +138,15 @@ export default class SessionItem extends React.PureComponent { )}
+ { isSessions && ( +
+ { isLastPlayed && ( + + )} +
+ )} diff --git a/frontend/app/components/ui/DropdownPlain/DropdownPlain.js b/frontend/app/components/ui/DropdownPlain/DropdownPlain.js index 389b75b93..8f11a14fb 100644 --- a/frontend/app/components/ui/DropdownPlain/DropdownPlain.js +++ b/frontend/app/components/ui/DropdownPlain/DropdownPlain.js @@ -21,7 +21,7 @@ function DropdownPlain({ name, label, options, onChange, defaultValue, wrapperSt options={ options } onChange={ onChange } defaultValue={ defaultValue || options[ 0 ].value } - icon={null} + // icon={null} disabled={disabled} icon={ } /> diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx new file mode 100644 index 000000000..0e552ea69 --- /dev/null +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -0,0 +1,77 @@ +import React from 'react' +import { Icon } from 'UI' +import cn from 'classnames' +import { debounce } from 'App/utils'; +import { Tooltip } from 'react-tippy'; +interface Props { + page: number + totalPages: number + onPageChange: (page: number) => void + limit?: number + debounceRequest?: number +} +export default function Pagination(props: Props) { + const { page, totalPages, onPageChange, limit = 5, debounceRequest = 0 } = props; + const [currentPage, setCurrentPage] = React.useState(page); + React.useMemo( + () => setCurrentPage(page), + [page], + ); + + const debounceChange = React.useCallback(debounce(onPageChange, debounceRequest), []); + + const changePage = (page: number) => { + if (page > 0 && page <= totalPages) { + setCurrentPage(page); + debounceChange(page); + } + } + + const isFirstPage = currentPage === 1; + const isLastPage = currentPage === totalPages; + return ( +
+ + + + Page + changePage(parseInt(e.target.value))} + /> + of + {totalPages} + + + +
+ ) +} diff --git a/frontend/app/components/ui/Pagination/index.ts b/frontend/app/components/ui/Pagination/index.ts new file mode 100644 index 000000000..29c341d81 --- /dev/null +++ b/frontend/app/components/ui/Pagination/index.ts @@ -0,0 +1 @@ +export { default } from './Pagination'; \ No newline at end of file diff --git a/frontend/app/components/ui/index.js b/frontend/app/components/ui/index.js index 1e0088720..1152437cf 100644 --- a/frontend/app/components/ui/index.js +++ b/frontend/app/components/ui/index.js @@ -55,5 +55,6 @@ export { default as HighlightCode } from './HighlightCode'; export { default as NoPermission } from './NoPermission'; export { default as NoSessionPermission } from './NoSessionPermission'; export { default as HelpText } from './HelpText'; +export { default as Pagination } from './Pagination'; export { Input, Modal, Form, Message, Card } from 'semantic-ui-react'; diff --git a/frontend/app/constants/filterOptions.js b/frontend/app/constants/filterOptions.js index 7f6a12e03..861c61faf 100644 --- a/frontend/app/constants/filterOptions.js +++ b/frontend/app/constants/filterOptions.js @@ -1,4 +1,4 @@ -import { FilterKey } from 'Types/filter/filterType'; +import { FilterKey, IssueType } from 'Types/filter/filterType'; export const options = [ { key: 'on', text: 'on', value: 'on' }, @@ -93,18 +93,18 @@ export const methodOptions = [ ] export const issueOptions = [ - { text: 'Click Rage', value: 'click_rage' }, - { text: 'Dead Click', value: 'dead_click' }, - { text: 'Excessive Scrolling', value: 'excessive_scrolling' }, - { text: 'Bad Request', value: 'bad_request' }, - { text: 'Missing Resource', value: 'missing_resource' }, - { text: 'Memory', value: 'memory' }, - { text: 'CPU', value: 'cpu' }, - { text: 'Slow Resource', value: 'slow_resource' }, - { text: 'Slow Page Load', value: 'slow_page_load' }, - { text: 'Crash', value: 'crash' }, - { text: 'Custom', value: 'custom' }, - { text: 'JS Exception', value: 'js_exception' }, + { text: 'Click Rage', value: IssueType.CLICK_RAGE }, + { text: 'Dead Click', value: IssueType.DEAD_CLICK }, + { text: 'Excessive Scrolling', value: IssueType.EXCESSIVE_SCROLLING }, + { text: 'Bad Request', value: IssueType.BAD_REQUEST }, + { text: 'Missing Resource', value: IssueType.MISSING_RESOURCE }, + { text: 'Memory', value: IssueType.MEMORY }, + { text: 'CPU', value: IssueType.CPU }, + { text: 'Slow Resource', value: IssueType.SLOW_RESOURCE }, + { text: 'Slow Page Load', value: IssueType.SLOW_PAGE_LOAD }, + { text: 'Crash', value: IssueType.CRASH }, + { text: 'Custom', value: IssueType.CUSTOM }, + { text: 'Error', value: IssueType.JS_EXCEPTION }, ] export default { diff --git a/frontend/app/duck/errors.js b/frontend/app/duck/errors.js index 9e7b552f2..2d54b3a0a 100644 --- a/frontend/app/duck/errors.js +++ b/frontend/app/duck/errors.js @@ -1,13 +1,18 @@ import { List, Map } from 'immutable'; import { clean as cleanParams } from 'App/api_client'; -import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED } from 'Types/errorInfo'; +import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED, BOOKMARK } from 'Types/errorInfo'; import { createFetch, fetchListType, fetchType } from './funcTools/crud'; import { createRequestReducer, ROOT_KEY } from './funcTools/request'; import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools'; +import { reduceThenFetchResource } from './search' const name = "error"; const idKey = "errorId"; +const PER_PAGE = 10; +const DEFAULT_SORT = 'occurrence'; +const DEFAULT_ORDER = 'desc'; +const 
EDIT_OPTIONS = `${name}/EDIT_OPTIONS`; const FETCH_LIST = fetchListType(name); const FETCH = fetchType(name); const FETCH_NEW_ERRORS_COUNT = fetchType('errors/FETCH_NEW_ERRORS_COUNT'); @@ -18,6 +23,7 @@ const MERGE = "errors/MERGE"; const TOGGLE_FAVORITE = "errors/TOGGLE_FAVORITE"; const FETCH_TRACE = "errors/FETCH_TRACE"; const UPDATE_CURRENT_PAGE = "errors/UPDATE_CURRENT_PAGE"; +const UPDATE_KEY = `${name}/UPDATE_KEY`; function chartWrapper(chart = []) { return chart.map(point => ({ ...point, count: Math.max(point.count, 0) })); @@ -35,13 +41,23 @@ const initialState = Map({ instanceTrace: List(), stats: Map(), sourcemapUploaded: true, - currentPage: 1, + currentPage: 1, + options: Map({ + sort: DEFAULT_SORT, + order: DEFAULT_ORDER, + status: UNRESOLVED, + query: '', + }), + // sort: DEFAULT_SORT, + // order: DEFAULT_ORDER, }); function reducer(state = initialState, action = {}) { let updError; switch (action.type) { + case EDIT_OPTIONS: + return state.mergeIn(["options"], action.instance); case success(FETCH): return state.set("instance", ErrorInfo(action.data)); case success(FETCH_TRACE): @@ -69,8 +85,10 @@ function reducer(state = initialState, action = {}) { return state.update("list", list => list.filter(e => !ids.includes(e.errorId))); case success(FETCH_NEW_ERRORS_COUNT): return state.set('stats', action.data); - case UPDATE_CURRENT_PAGE: - return state.set('currentPage', action.page); + case UPDATE_KEY: + return state.set(action.key, action.value); + case UPDATE_CURRENT_PAGE: + return state.set('currentPage', action.page); } return state; } @@ -106,14 +124,32 @@ export function fetchTrace(id) { } } -export function fetchList(params = {}, clear = false) { - return { - types: array(FETCH_LIST), - call: client => client.post('/errors/search', params), - clear, - params: cleanParams(params), - }; -} +export const fetchList = (params = {}, clear = false) => (dispatch, getState) => { + params.page = getState().getIn(['errors', 'currentPage']); + params.limit = PER_PAGE; + + const options = getState().getIn(['errors', 'options']).toJS(); + if (options.status === BOOKMARK) { + options.bookmarked = true; + options.status = 'all'; + } + + return dispatch({ + types: array(FETCH_LIST), + call: client => client.post('/errors/search', { ...params, ...options }), + clear, + params: cleanParams(params), + }); +}; + +// export function fetchList(params = {}, clear = false) { +// return { +// types: array(FETCH_LIST), +// call: client => client.post('/errors/search', params), +// clear, +// params: cleanParams(params), +// }; +// } export function fetchBookmarks() { return { @@ -169,9 +205,12 @@ export function fetchNewErrorsCount(params = {}) { } } -export function updateCurrentPage(page) { - return { - type: 'errors/UPDATE_CURRENT_PAGE', +export const updateCurrentPage = reduceThenFetchResource((page) => ({ + type: UPDATE_CURRENT_PAGE, page, - }; -} +})); + +export const editOptions = reduceThenFetchResource((instance) => ({ + type: EDIT_OPTIONS, + instance +})); \ No newline at end of file diff --git a/frontend/app/duck/search.js b/frontend/app/duck/search.js index 3d15ae950..9106227bb 100644 --- a/frontend/app/duck/search.js +++ b/frontend/app/duck/search.js @@ -7,7 +7,7 @@ import SavedFilter from 'Types/filter/savedFilter'; import { errors as errorsRoute, isRoute } from "App/routes"; import { fetchList as fetchSessionList } from './sessions'; import { fetchList as fetchErrorsList } from './errors'; -import { FilterCategory, FilterKey } from 'Types/filter/filterType'; +import { FilterCategory, 
FilterKey, IssueType } from 'Types/filter/filterType'; import { filtersMap, liveFiltersMap, generateFilterOptions, generateLiveFilterOptions } from 'Types/filter/newFilter'; const ERRORS_ROUTE = errorsRoute(); @@ -28,6 +28,8 @@ const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`; const UPDATE = `${name}/UPDATE`; const APPLY = `${name}/APPLY`; const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`; +const UPDATE_CURRENT_PAGE = `${name}/UPDATE_CURRENT_PAGE`; +const SET_ACTIVE_TAB = `${name}/SET_ACTIVE_TAB`; const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS'; @@ -49,6 +51,8 @@ const initialState = Map({ instance: new Filter({ filters: [] }), savedSearch: new SavedFilter({}), filterSearchList: {}, + currentPage: 1, + activeTab: {name: 'All', type: 'all' }, }); // Metric - Series - [] - filters @@ -62,7 +66,7 @@ function reducer(state = initialState, action = {}) { case APPLY: return action.fromUrl ? state.set('instance', Filter(action.filter)) - : state.mergeIn(['instance'], action.filter); + : state.mergeIn(['instance'], action.filter).set('currentPage', 1); case success(FETCH): return state.set("instance", action.data); case success(FETCH_LIST): @@ -83,6 +87,10 @@ function reducer(state = initialState, action = {}) { return state.set('savedSearch', action.filter); case EDIT_SAVED_SEARCH: return state.mergeIn([ 'savedSearch' ], action.instance); + case UPDATE_CURRENT_PAGE: + return state.set('currentPage', action.page); + case SET_ACTIVE_TAB: + return state.set('activeTab', action.tab).set('currentPage', 1); } return state; } @@ -118,10 +126,24 @@ export const filterMap = ({category, value, key, operator, sourceOperator, sourc filters: filters ? filters.map(filterMap) : [], }); -const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => { +export const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => { dispatch(actionCreator(...args)); const filter = getState().getIn([ 'search', 'instance']).toData(); + + const activeTab = getState().getIn([ 'search', 'activeTab']); + if (activeTab.type !== 'all' && activeTab.type !== 'bookmark') { + const tmpFilter = filtersMap[FilterKey.ISSUE]; + tmpFilter.value = [activeTab.type] + filter.filters = filter.filters.concat(tmpFilter) + } + + if (activeTab.type === 'bookmark') { + filter.bookmarked = true + } + filter.filters = filter.filters.map(filterMap); + filter.limit = 10; + filter.page = getState().getIn([ 'search', 'currentPage']); return isRoute(ERRORS_ROUTE, window.location.pathname) ? dispatch(fetchErrorsList(filter)) @@ -133,6 +155,11 @@ export const edit = reduceThenFetchResource((instance) => ({ instance, })); +export const setActiveTab = reduceThenFetchResource((tab) => ({ + type: SET_ACTIVE_TAB, + tab +})); + export const remove = (id) => (dispatch, getState) => { return dispatch({ types: REMOVE.array, @@ -152,6 +179,11 @@ export const applyFilter = reduceThenFetchResource((filter, fromUrl=false) => ({ fromUrl, })); +export const updateCurrentPage = reduceThenFetchResource((page) => ({ + type: UPDATE_CURRENT_PAGE, + page, +})); + export const applySavedSearch = (filter) => (dispatch, getState) => { dispatch(edit({ filters: filter ? 
filter.filter.filters : [] })); return dispatch({ diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index f3df333c7..34109ada3 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -7,9 +7,9 @@ import withRequestState, { RequestTypes } from './requestStateCreator'; import { getRE } from 'App/utils'; import { LAST_7_DAYS } from 'Types/app/period'; import { getDateRangeFromValue } from 'App/dateRange'; +const name = 'sessions'; const INIT = 'sessions/INIT'; - const FETCH_LIST = new RequestTypes('sessions/FETCH_LIST'); const FETCH = new RequestTypes('sessions/FETCH'); const FETCH_FAVORITE_LIST = new RequestTypes('sessions/FETCH_FAVORITE_LIST'); @@ -26,6 +26,7 @@ const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW'; const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG'; const SET_TIMELINE_POINTER = 'sessions/SET_TIMELINE_POINTER'; const SET_SESSION_PATH = 'sessions/SET_SESSION_PATH'; +const LAST_PLAYED_SESSION_ID = `${name}/LAST_PLAYED_SESSION_ID`; const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB'; @@ -60,6 +61,7 @@ const initialState = Map({ funnelPage: Map(), timelinePointer: null, sessionPath: '', + lastPlayedSessionId: null, }); const reducer = (state = initialState, action = {}) => { @@ -248,11 +250,21 @@ const reducer = (state = initialState, action = {}) => { return state.set('timelinePointer', action.pointer); case SET_SESSION_PATH: return state.set('sessionPath', action.path); + case LAST_PLAYED_SESSION_ID: + return updateListItem(state, action.sessionId, { viewed: true }).set('lastPlayedSessionId', action.sessionId); default: return state; } }; +function updateListItem(state, sourceSessionId, instance) { + const list = state.get('list'); + const index = list.findIndex(({ sessionId }) => sessionId === sourceSessionId); + if (index === -1) return state; + + return state.updateIn([ 'list', index ], session => session.merge(instance)); +} + export default withRequestState({ _: [ FETCH, FETCH_LIST ], fetchLiveListRequest: FETCH_LIVE_LIST, @@ -390,4 +402,11 @@ export function setSessionPath(path) { type: SET_SESSION_PATH, path } +} + +export function updateLastPlayedSession(sessionId) { + return { + type: LAST_PLAYED_SESSION_ID, + sessionId, + }; } \ No newline at end of file diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index 7c40a4668..685a34f2c 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -113,8 +113,15 @@ export default class DOMManager extends ListWalker { logger.error("Node has no childNodes", this.nl[ parentID ]); return; } + + if (this.nl[ id ] instanceof HTMLHtmlElement) { + // What if some exotic cases? 
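+          // (clarifying note, assumption: a Document permits only one <html>
+          // element child, so insertBefore would throw a HierarchyRequestError
+          // here; replacing the last child swaps the documentElement in place)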
+ this.nl[ parentID ].replaceChild(this.nl[ id ], childNodes[childNodes.length-1]) + return + } + this.nl[ parentID ] - .insertBefore(this.nl[ id ], childNodes[ index ]); + .insertBefore(this.nl[ id ], childNodes[ index ]) } private applyMessage = (msg: Message): void => { @@ -257,14 +264,14 @@ export default class DOMManager extends ListWalker { case "create_i_frame_document": node = this.nl[ msg.frameID ]; // console.log('ifr', msg, node) - + if (node instanceof HTMLIFrameElement) { doc = node.contentDocument; if (!doc) { logger.warn("No iframe doc", msg, node, node.contentDocument); return; } - this.nl[ msg.id ] = doc.documentElement + this.nl[ msg.id ] = doc return; } else if (node instanceof Element) { // shadow DOM try { diff --git a/frontend/app/styles/colors-autogen.css b/frontend/app/styles/colors-autogen.css index c7b3a6ce1..1d53dea64 100644 --- a/frontend/app/styles/colors-autogen.css +++ b/frontend/app/styles/colors-autogen.css @@ -62,7 +62,7 @@ .color-white { color: $white } .color-borderColor { color: $borderColor } -/* color */ +/* hover color */ .hover-main:hover { color: $main } .hover-gray-light-shade:hover { color: $gray-light-shade } .hover-gray-lightest:hover { color: $gray-lightest } @@ -92,3 +92,33 @@ .hover-pink:hover { color: $pink } .hover-white:hover { color: $white } .hover-borderColor:hover { color: $borderColor } + +.border-main { border-color: $main } +.border-gray-light-shade { border-color: $gray-light-shade } +.border-gray-lightest { border-color: $gray-lightest } +.border-gray-light { border-color: $gray-light } +.border-gray-medium { border-color: $gray-medium } +.border-gray-dark { border-color: $gray-dark } +.border-gray-darkest { border-color: $gray-darkest } +.border-teal { border-color: $teal } +.border-teal-dark { border-color: $teal-dark } +.border-teal-light { border-color: $teal-light } +.border-tealx { border-color: $tealx } +.border-tealx-light { border-color: $tealx-light } +.border-tealx-light-border { border-color: $tealx-light-border } +.border-orange { border-color: $orange } +.border-yellow { border-color: $yellow } +.border-yellow2 { border-color: $yellow2 } +.border-orange-dark { border-color: $orange-dark } +.border-green { border-color: $green } +.border-green2 { border-color: $green2 } +.border-green-dark { border-color: $green-dark } +.border-red { border-color: $red } +.border-red2 { border-color: $red2 } +.border-blue { border-color: $blue } +.border-blue2 { border-color: $blue2 } +.border-active-blue { border-color: $active-blue } +.border-active-blue-border { border-color: $active-blue-border } +.border-pink { border-color: $pink } +.border-white { border-color: $white } +.border-borderColor { border-color: $borderColor } diff --git a/frontend/app/svg/icons/chevron-double-left.svg b/frontend/app/svg/icons/chevron-double-left.svg index 7181fd111..8f30320c6 100644 --- a/frontend/app/svg/icons/chevron-double-left.svg +++ b/frontend/app/svg/icons/chevron-double-left.svg @@ -1,4 +1,4 @@ - + \ No newline at end of file diff --git a/frontend/app/svg/icons/chevron-left.svg b/frontend/app/svg/icons/chevron-left.svg new file mode 100644 index 000000000..919d877d2 --- /dev/null +++ b/frontend/app/svg/icons/chevron-left.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/chevron-right.svg b/frontend/app/svg/icons/chevron-right.svg new file mode 100644 index 000000000..67cb89d1a --- /dev/null +++ b/frontend/app/svg/icons/chevron-right.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git 
a/frontend/app/types/errorInfo.js b/frontend/app/types/errorInfo.js index efcb5154e..364fa8e65 100644 --- a/frontend/app/types/errorInfo.js +++ b/frontend/app/types/errorInfo.js @@ -5,6 +5,7 @@ import Session from './session'; export const RESOLVED = "resolved"; export const UNRESOLVED = "unresolved"; export const IGNORED = "ignored"; +export const BOOKMARK = "bookmark"; function getStck0InfoString(stack) { diff --git a/frontend/app/types/filter/filterType.ts b/frontend/app/types/filter/filterType.ts index 2958f10e2..83511e20a 100644 --- a/frontend/app/types/filter/filterType.ts +++ b/frontend/app/types/filter/filterType.ts @@ -8,6 +8,21 @@ export enum FilterCategory { PERFORMANCE = "Performance", }; +export enum IssueType { + CLICK_RAGE = "click_rage", + DEAD_CLICK = "dead_click", + EXCESSIVE_SCROLLING = "excessive_scrolling", + BAD_REQUEST = "bad_request", + MISSING_RESOURCE = "missing_resource", + MEMORY = "memory", + CPU = "cpu", + SLOW_RESOURCE = "slow_resource", + SLOW_PAGE_LOAD = "slow_page_load", + CRASH = "crash", + CUSTOM = "custom", + JS_EXCEPTION = "js_exception", +} + export enum FilterType { STRING = "STRING", ISSUE = "ISSUE", diff --git a/frontend/app/types/funnel.js b/frontend/app/types/funnel.js index d9a27e44d..d06e518b5 100644 --- a/frontend/app/types/funnel.js +++ b/frontend/app/types/funnel.js @@ -14,7 +14,10 @@ const getRedableName = ({ type, value, operator }) => { break; case "INPUT": str = 'Entered'; - break; + break; + case "CUSTOM": + str = 'Custom Event'; + break; } return `${str} ${operator}`; @@ -52,7 +55,7 @@ export default Record({ }, fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => { let _stages = stages.map((stage, index) => { - // stage.label = getRedableName(stage.type, stage.value); + stage.headerText = getRedableName(stage.type, stage.value); stage.label = `Step ${index + 1}`; return stage; }); @@ -73,7 +76,7 @@ export default Record({ ...rest, stages: _stages.length > 0 ? 
_stages.map((stage, index) => { if (!stage) return; - // stage.label = getRedableName(stage); + stage.headerText = getRedableName(stage); stage.label = `Step ${index + 1}`; return stage; }) : [], diff --git a/frontend/app/utils.js b/frontend/app/utils.js index ca7c19b4f..5ea05633c 100644 --- a/frontend/app/utils.js +++ b/frontend/app/utils.js @@ -232,4 +232,10 @@ export const isGreaterOrEqualVersion = (version, compareTo) => { const [major, minor, patch] = version.split("-")[0].split('.'); const [majorC, minorC, patchC] = compareTo.split("-")[0].split('.'); return (major > majorC) || (major === majorC && minor > minorC) || (major === majorC && minor === minorC && patch >= patchC); +} + +export const sliceListPerPage = (list, page, perPage = 10) => { + const start = page * perPage; + const end = start + perPage; + return list.slice(start, end); } \ No newline at end of file diff --git a/frontend/scripts/colors.js b/frontend/scripts/colors.js index ac8eb69be..928fd2275 100644 --- a/frontend/scripts/colors.js +++ b/frontend/scripts/colors.js @@ -12,7 +12,9 @@ ${ colors.map(color => `.fill-${ color } { fill: $${ color } }`).join('\n') } /* color */ ${ colors.map(color => `.color-${ color } { color: $${ color } }`).join('\n') } -/* color */ +/* hover color */ ${ colors.map(color => `.hover-${ color }:hover { color: $${ color } }`).join('\n') } + +${ colors.map(color => `.border-${ color } { border-color: $${ color } }`).join('\n') } `) diff --git a/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql new file mode 100644 index 000000000..e03c8dfc7 --- /dev/null +++ b/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -0,0 +1,25 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.5.4' +$$ LANGUAGE sql IMMUTABLE; + + +COMMIT; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete 
USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index f1d141861..5b8193bd5 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -900,6 +900,23 @@ $$ CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; + CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; + CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; + CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; + CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; + CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; + CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; + CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; + CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; + CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; + CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; + CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; + CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; + CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; + CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; + + CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed'); CREATE TYPE job_action AS ENUM ('delete_user_data'); CREATE TABLE jobs diff --git a/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml b/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml index 6c978f7f6..f20d4fc38 100644 --- 
a/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml +++ b/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml @@ -19,7 +19,7 @@ spec: - port: {{ .port }} targetPort: {{ .targetPort }} protocol: TCP - name: {{ .targetPort }} + name: {{ .name }} {{- end }} selector: {{- include "nginx-ingress.selectorLabels" . | nindent 4 }} diff --git a/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml b/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml index 6984c1938..1f7169c0d 100644 --- a/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml +++ b/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml @@ -42,8 +42,10 @@ service: ports: - port: 80 targetPort: http + name: http - port: 443 targetPort: https + name: https ingress: enabled: false diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index 25d6a67d6..f5cc11067 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -100,7 +100,7 @@ utilities: env: debug: 0 uws: false - cluster: false + redis: false # If you want to override something # chartname: diff --git a/tracker/tracker-profiler/.gitignore b/tracker/tracker-profiler/.gitignore index 1736ff4de..1f2395a10 100644 --- a/tracker/tracker-profiler/.gitignore +++ b/tracker/tracker-profiler/.gitignore @@ -1,4 +1,5 @@ node_modules npm-debug.log lib +cjs .cache diff --git a/tracker/tracker-profiler/cjs/index.js b/tracker/tracker-profiler/cjs/index.js deleted file mode 100644 index 6a83f0e35..000000000 --- a/tracker/tracker-profiler/cjs/index.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const tracker_1 = require("@openreplay/tracker/cjs"); -function default_1() { - return (app) => { - if (app === null) { - return (name) => (fn, thisArg) => thisArg === undefined ? fn : fn.bind(thisArg); - } - return (name) => (fn, thisArg) => (...args) => { - const startTime = performance.now(); - const result = thisArg === undefined ? 
fn.apply(this, args) : fn.apply(thisArg, args); - const duration = performance.now() - startTime; - app.send(tracker_1.Messages.Profiler(name, duration, args.map(String).join(', '), String(result))); - return result; - }; - }; -} -exports.default = default_1; diff --git a/tracker/tracker-profiler/cjs/package.json b/tracker/tracker-profiler/cjs/package.json deleted file mode 100644 index a3c15a7a6..000000000 --- a/tracker/tracker-profiler/cjs/package.json +++ /dev/null @@ -1 +0,0 @@ -{ "type": "commonjs" } diff --git a/tracker/tracker-redux/package-lock.json b/tracker/tracker-redux/package-lock.json index 3cb97282f..00aecf7ec 100644 --- a/tracker/tracker-redux/package-lock.json +++ b/tracker/tracker-redux/package-lock.json @@ -1,8 +1,1121 @@ { "name": "@openreplay/tracker-redux", - "version": "3.0.0", - "lockfileVersion": 1, + "version": "3.5.0", + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "@openreplay/tracker-redux", + "version": "3.5.0", + "license": "MIT", + "devDependencies": { + "@openreplay/tracker": "^3.5.0", + "prettier": "^1.18.2", + "replace-in-files-cli": "^1.0.0", + "typescript": "^4.6.0-dev.20211126" + }, + "peerDependencies": { + "@openreplay/tracker": "^3.5.0", + "redux": "^4.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.12.13" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", + "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "dev": true + }, + "node_modules/@babel/highlight": { + "version": "7.13.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz", + "integrity": "sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.12.11", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.17.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", + "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "peer": true, + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz", + "integrity": "sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.4", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz", + "integrity": "sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz", + 
"integrity": "sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.4", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@openreplay/tracker": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.5.2.tgz", + "integrity": "sha512-b0/BCFRQW4afh/k1cYhudbszmdkTQu7GBob8MYzd0vuWLMx6muXv2oSXHsyc3cro9fWrymQPeRZV3zrpNb5ioA==", + "dev": true, + "dependencies": { + "error-stack-parser": "^2.0.6" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.1.tgz", + "integrity": "sha512-fZQQafSREFyuZcdWFAExYjBiCL7AUCdgsk80iO0q4yihYYdcIiH28CcuPTGFgLOCC8RlW49GSQxdHwZP+I7CNg==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz", + "integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==", + "dev": true + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": 
"^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", + "integrity": "sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=", + "dev": true, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/error-stack-parser": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.6.tgz", + "integrity": "sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==", + "dev": true, + "dependencies": { + "stackframe": "^1.1.1" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fast-glob": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.5.tgz", + "integrity": "sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.0", + "merge2": "^1.3.0", 
+ "micromatch": "^4.0.2", + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fastq": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.11.0.tgz", + "integrity": "sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globby": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.3.tgz", + "integrity": "sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.1.1", + "ignore": "^5.1.4", + "merge2": "^1.3.0", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "dev": true + }, + "node_modules/ignore": { + "version": "5.1.8", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", + 
"integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/lines-and-columns": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/map-obj": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.2.0.tgz", + "integrity": "sha512-NAq0fCmZYGz9UFEQyndp7sisrow4GroyGeKluyKC/chuITZsPyOyC1UJZPJlVFImhXdROIP5xqouRLThT3BbpQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/meow/-/meow-7.1.1.tgz", + "integrity": "sha512-GWHvA5QOcS412WCo8vwKDlTelGLsCGBVevQB5Kva961rmNfun0PCbv5+xta2kUMFJyR8/oWnn7ddeKdosbAPbA==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^2.5.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.13.1", + "yargs-parser": "^18.1.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", + "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.0.5" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/minimist-options/node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": 
"sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": 
"sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redux": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", + "integrity": "sha512-SH8PglcebESbd/shgf6mii6EIoRM0zrQyjcuQ+ojmfxjTtE0z9Y8pa62iA/OJ58qjP6j27uyW4kUF4jl/jd6sw==", + "peer": true, + "dependencies": { + "@babel/runtime": "^7.9.2" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "peer": true + }, + 
"node_modules/replace-in-files-cli": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/replace-in-files-cli/-/replace-in-files-cli-1.0.0.tgz", + "integrity": "sha512-/HMPLZeCA24CBUQ59ymHji6LyMKM+gEgDZlYsiPvXW6+3PdfOw6SsMCVd9KC2B+KlAEe/8vkJA6gfnexVdF15A==", + "dev": true, + "dependencies": { + "arrify": "^2.0.1", + "escape-string-regexp": "^4.0.0", + "globby": "^11.0.1", + "meow": "^7.1.1", + "normalize-path": "^3.0.0", + "write-file-atomic": "^3.0.0" + }, + "bin": { + "replace-in-files": "cli.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "dependencies": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/signal-exit": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", + "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==", + "dev": true + }, + "node_modules/stackframe": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.2.0.tgz", + "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==", + "dev": true + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/trim-newlines": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.0.tgz", + "integrity": "sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/type-fest": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", + "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typescript": { + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": 
"sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + } + }, "dependencies": { "@babel/code-frame": { "version": "7.12.13", @@ -30,6 +1143,15 @@ "js-tokens": "^4.0.0" } }, + "@babel/runtime": { + "version": "7.17.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", + "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "peer": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, "@nodelib/fs.scandir": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz", @@ -57,9 +1179,9 @@ } }, "@openreplay/tracker": { - "version": "3.4.8", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", - "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.5.2.tgz", + "integrity": "sha512-b0/BCFRQW4afh/k1cYhudbszmdkTQu7GBob8MYzd0vuWLMx6muXv2oSXHsyc3cro9fWrymQPeRZV3zrpNb5ioA==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -623,6 +1745,21 @@ "strip-indent": "^3.0.0" } }, + "redux": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", + "integrity": "sha512-SH8PglcebESbd/shgf6mii6EIoRM0zrQyjcuQ+ojmfxjTtE0z9Y8pa62iA/OJ58qjP6j27uyW4kUF4jl/jd6sw==", + "peer": true, + "requires": { + "@babel/runtime": "^7.9.2" + } + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "peer": true + }, "replace-in-files-cli": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/replace-in-files-cli/-/replace-in-files-cli-1.0.0.tgz", diff --git a/tracker/tracker-redux/package.json b/tracker/tracker-redux/package.json index 87a365754..862d5b829 100644 --- a/tracker/tracker-redux/package.json +++ b/tracker/tracker-redux/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-redux", "description": "Tracker plugin for Redux state recording", - "version": "3.4.8", + "version": "3.5.0", "keywords": [ "redux", "logging", @@ -23,11 +23,11 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.4.8", + "@openreplay/tracker": "^3.5.0", "redux": "^4.0.0" }, "devDependencies": { - "@openreplay/tracker": "^3.4.8", + "@openreplay/tracker": "^3.5.0", "prettier": "^1.18.2", 
"replace-in-files-cli": "^1.0.0", "typescript": "^4.6.0-dev.20211126" diff --git a/tracker/tracker-redux/src/index.ts b/tracker/tracker-redux/src/index.ts index 5a4749e71..dfc092d9c 100644 --- a/tracker/tracker-redux/src/index.ts +++ b/tracker/tracker-redux/src/index.ts @@ -23,8 +23,11 @@ export default function(opts: Partial = {}) { return () => next => action => next(action); } const encoder = new Encoder(sha1, 50); + app.attachStopCallback(() => { + encoder.clear() + }) return ({ getState }) => next => action => { - if (!options.actionFilter(action)) { + if (!app.active() || !options.actionFilter(action)) { return next(action); } const startTime = performance.now(); diff --git a/tracker/tracker-redux/tsconfig.json b/tracker/tracker-redux/tsconfig.json index 0c5b8d1b3..ce07a685b 100644 --- a/tracker/tracker-redux/tsconfig.json +++ b/tracker/tracker-redux/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "nodenext", + "moduleResolution": "node", "declaration": true, "outDir": "./lib" } diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index e485faf11..6df50033b 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.5.3", + "version": "3.5.4", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/context.ts b/tracker/tracker/src/main/app/context.ts index aa9a5dfb3..781f91ea8 100644 --- a/tracker/tracker/src/main/app/context.ts +++ b/tracker/tracker/src/main/app/context.ts @@ -41,32 +41,57 @@ export function isInstance(node: Node, constr: Cons // @ts-ignore (for EI, Safary) doc.parentWindow || doc.defaultView; // TODO: smart global typing for Window object - while(context.parent && context.parent !== context) { + while((context.parent || context.top) && context.parent !== context) { // @ts-ignore if (node instanceof context[constr.name]) { return true } // @ts-ignore - context = context.parent + context = context.parent || context.top } // @ts-ignore return node instanceof context[constr.name] } -export function inDocument(node: Node): boolean { +// TODO: ensure 1. it works in every cases (iframes/detached nodes) and 2. 
diff --git a/tracker/tracker-redux/tsconfig.json b/tracker/tracker-redux/tsconfig.json
index 0c5b8d1b3..ce07a685b 100644
--- a/tracker/tracker-redux/tsconfig.json
+++ b/tracker/tracker-redux/tsconfig.json
@@ -5,7 +5,7 @@
     "alwaysStrict": true,
     "target": "es6",
     "module": "es6",
-    "moduleResolution": "nodenext",
+    "moduleResolution": "node",
     "declaration": true,
     "outDir": "./lib"
   }
diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json
index e485faf11..6df50033b 100644
--- a/tracker/tracker/package.json
+++ b/tracker/tracker/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker",
   "description": "The OpenReplay tracker main package",
-  "version": "3.5.3",
+  "version": "3.5.4",
   "keywords": [
     "logging",
     "replay"
diff --git a/tracker/tracker/src/main/app/context.ts b/tracker/tracker/src/main/app/context.ts
index aa9a5dfb3..781f91ea8 100644
--- a/tracker/tracker/src/main/app/context.ts
+++ b/tracker/tracker/src/main/app/context.ts
@@ -41,32 +41,57 @@ export function isInstance(node: Node, constr: Cons
     // @ts-ignore (for IE, Safari)
     doc.parentWindow ||
     doc.defaultView; // TODO: smart global typing for Window object
-  while(context.parent && context.parent !== context) {
+  while((context.parent || context.top) && context.parent !== context) {
     // @ts-ignore
     if (node instanceof context[constr.name]) {
       return true
     }
     // @ts-ignore
-    context = context.parent
+    context = context.parent || context.top
   }
   // @ts-ignore
   return node instanceof context[constr.name]
 }
 
-export function inDocument(node: Node): boolean {
+// TODO: ensure 1. it works in every case (iframes/detached nodes) and 2. it is the most efficient approach
+export function inDocument(node: Node) {
   const doc = node.ownerDocument
-  if (!doc) { return false }
-  if (doc.contains(node)) { return true }
-  let context: Window =
-    // @ts-ignore (for IE, Safari)
-    doc.parentWindow ||
-    doc.defaultView;
-  while(context.parent && context.parent !== context) {
-    if (context.document.contains(node)) {
+  if (!doc) { return true } // Document
+  let current: Node | null = node
+  while(current) {
+    if (current === doc) {
       return true
+    } else if(isInstance(current, ShadowRoot)) {
+      current = current.host
+    } else {
+      current = current.parentNode
     }
-    // @ts-ignore
-    context = context.parent
   }
-  return false;
+  return false
 }
+
+// export function inDocument(node: Node): boolean {
+//   // @ts-ignore compatibility
+//   if (node.getRootNode) {
+//     let root: Node
+//     while ((root = node.getRootNode()) !== node) {
+//       ////
+//     }
+//   }
+
+//   const doc = node.ownerDocument
+//   if (!doc) { return false }
+//   if (doc.contains(node)) { return true }
+//   let context: Window =
+//     // @ts-ignore (for IE, Safari)
+//     doc.parentWindow ||
+//     doc.defaultView;
+//   while(context.parent && context.parent !== context) {
+//     if (context.document.contains(node)) {
+//       return true
+//     }
+//     // @ts-ignore
+//     context = context.parent
+//   }
+//   return false;
+// }
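The rewritten inDocument climbs from the node itself through parentNode links, hopping from a ShadowRoot to its host element, until it reaches the owner document or runs out of ancestors. That matters because document.contains() does not cross shadow boundaries, so nodes inside shadow trees would otherwise be reported as detached. A standalone sketch of the same walk, using only standard DOM types:

// Sketch: shadow-DOM-aware attachment check, same traversal as the new inDocument above.
function isAttached(node: Node): boolean {
  const doc = node.ownerDocument;
  if (!doc) { return true; } // the node is itself a Document
  let current: Node | null = node;
  while (current) {
    if (current === doc) { return true; }
    // Cross shadow boundaries via the host element, otherwise walk up parentNode.
    current = current instanceof ShadowRoot ? current.host : current.parentNode;
  }
  return false;
}

// For a node inside an open shadow root attached to the page:
//   document.contains(shadowChild) === false, yet isAttached(shadowChild) === true.

On engines that support it, node.getRootNode({ composed: true }) === node.ownerDocument gives the same answer in one call, which the commented-out getRootNode draft above appears to be reaching for.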
diff --git a/tracker/tracker/src/main/app/observer/observer.ts b/tracker/tracker/src/main/app/observer/observer.ts
index 0f4ff2994..06823e07c 100644
--- a/tracker/tracker/src/main/app/observer/observer.ts
+++ b/tracker/tracker/src/main/app/observer/observer.ts
@@ -1,4 +1,3 @@
-import { hasOpenreplayAttribute } from "../../utils.js";
 import {
   RemoveNodeAttribute,
   SetNodeAttribute,
@@ -59,9 +58,7 @@ export default abstract class Observer {
   private readonly indexes: Array<number> = [];
   private readonly attributesList: Array<Set<string> | undefined> = [];
   private readonly textSet: Set<number> = new Set();
-  private readonly inUpperContext: boolean;
-  constructor(protected readonly app: App, protected readonly context: Window = window) {
-    this.inUpperContext = context.parent === context //TODO: get rid of context here
+  constructor(protected readonly app: App, protected readonly isTopContext = false) {
     this.observer = new MutationObserver(
       this.app.safe((mutations) => {
         for (const mutation of mutations) {
@@ -226,7 +223,7 @@
     // Disable parent check for the upper context HTMLHtmlElement, because it is root there... (before)
     // TODO: get rid of "special" cases (there is an issue with CreateDocument altered behaviour though)
     // TODO: Clean the logic (though now it works fine)
-    if (!isInstance(node, HTMLHtmlElement) || !this.inUpperContext) {
+    if (!isInstance(node, HTMLHtmlElement) || !this.isTopContext) {
       if (parent === null) {
         this.unbindNode(node);
         return false;
@@ -321,6 +318,8 @@
     for (let id = 0; id < this.recents.length; id++) {
       // TODO: make things/logic nice here.
       // commit required in any case if recents[id] true or false (in case of unbinding) or undefined (in case of attr change).
+      // Possible solution: separate new node commit (recents) and new attribute/move node commit
+      // Otherwise commitNode is called on each node, which might be a lot
      if (!this.myNodes[id]) { continue }
      this.commitNode(id);
      if (this.recents[id] === true && (node = this.app.nodes.getNode(id))) {
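The comment added above sketches a way out of the heavy commit pass: keep genuinely new nodes apart from nodes that only changed attributes or moved, so a full commit-and-send runs once per new node while updates take a cheaper path. One possible shape of that split; addedIds, changedIds and both callbacks are hypothetical names, not from the source:

// Hypothetical two-queue commit pass, following the "Possible solution" comment above.
const addedIds = new Set<number>();   // nodes seen for the first time in this batch
const changedIds = new Set<number>(); // attribute/move updates on already-known nodes

function commitAll(commitNode: (id: number) => void, sendNewNode: (id: number) => void) {
  for (const id of addedIds) {
    commitNode(id);
    sendNewNode(id); // full serialization only for genuinely new nodes
  }
  for (const id of changedIds) {
    if (!addedIds.has(id)) {
      commitNode(id); // lighter path: no subtree re-send
    }
  }
  addedIds.clear();
  changedIds.clear();
}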
diff --git a/tracker/tracker/src/main/app/observer/top_observer.ts b/tracker/tracker/src/main/app/observer/top_observer.ts
index b35f5d901..14bed9768 100644
--- a/tracker/tracker/src/main/app/observer/top_observer.ts
+++ b/tracker/tracker/src/main/app/observer/top_observer.ts
@@ -6,7 +6,7 @@ import ShadowRootObserver from "./shadow_root_observer.js";
 import { CreateDocument } from "../../../messages/index.js";
 import App from "../index.js";
 
-import { IN_BROWSER } from '../../utils.js'
+import { IN_BROWSER, hasOpenreplayAttribute } from '../../utils.js'
 
 export interface Options {
   captureIFrames: boolean
@@ -17,15 +17,16 @@ const attachShadowNativeFn = IN_BROWSER ? Element.prototype.attachShadow : ()=>n
 export default class TopObserver extends Observer {
   private readonly options: Options;
   constructor(app: App, options: Partial<Options>) {
-    super(app);
+    super(app, true);
     this.options = Object.assign({
-      captureIFrames: false
+      captureIFrames: true
     }, options);
 
     // IFrames
     this.app.nodes.attachNodeCallback(node => {
       if (isInstance(node, HTMLIFrameElement) &&
-        (this.options.captureIFrames || node.getAttribute("data-openreplay-capture"))
+        ((this.options.captureIFrames && !hasOpenreplayAttribute(node, "obscured"))
+          || hasOpenreplayAttribute(node, "capture"))
       ) {
         this.handleIframe(node)
       }
@@ -42,26 +43,25 @@ export default class TopObserver extends Observer {
   private iframeObservers: IFrameObserver[] = [];
   private handleIframe(iframe: HTMLIFrameElement): void {
-    let context: Window | null = null
+    let doc: Document | null = null
     const handle = this.app.safe(() => {
       const id = this.app.nodes.getID(iframe)
       if (id === undefined) { return } //log
-      if (iframe.contentWindow === context) { return } //Does this happen frequently?
-      context = iframe.contentWindow as Window | null;
-      if (!context) { return }
-      const observer = new IFrameObserver(this.app, context)
+      if (iframe.contentDocument === doc) { return } // How frequently can it happen?
+      doc = iframe.contentDocument
+      if (!doc || !iframe.contentWindow) { return }
+      const observer = new IFrameObserver(this.app)
       this.iframeObservers.push(observer)
       observer.observe(iframe)
     })
-    this.app.attachEventListener(iframe, "load", handle)
+    iframe.addEventListener("load", handle) // why is app.attachEventListener not working?
     handle()
   }
 
   private shadowRootObservers: ShadowRootObserver[] = []
   private handleShadowRoot(shRoot: ShadowRoot) {
-    const observer = new ShadowRootObserver(this.app, this.context)
-
+    const observer = new ShadowRootObserver(this.app)
     this.shadowRootObservers.push(observer)
     observer.observe(shRoot.host)
   }
@@ -81,9 +81,9 @@ export default class TopObserver extends Observer {
     // the change in the re-player behaviour caused by CreateDocument message:
     // the 0-node ("fRoot") will become #document rather than documentElement as it is now.
     // Alternatively - observe(#document) then bindNode(documentElement)
-    this.observeRoot(this.context.document, () => {
+    this.observeRoot(window.document, () => {
       this.app.send(new CreateDocument())
-    }, this.context.document.documentElement);
+    }, window.document.documentElement);
   }
 
   disconnect() {
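With captureIFrames now defaulting to true, the attribute check above turns into a per-frame override: data-openreplay-obscured opts a frame out of recording, while data-openreplay-capture forces recording even when the global flag is switched off. Assuming hasOpenreplayAttribute(node, x) simply tests for the corresponding data-openreplay-x attribute, the decision reduces to:

// Sketch of the per-iframe capture decision from the hunk above
// (assumes hasOpenreplayAttribute(node, x) checks the data-openreplay-x attribute).
function shouldCaptureIframe(node: HTMLIFrameElement, captureIFrames: boolean): boolean {
  const has = (name: string) => node.hasAttribute(`data-openreplay-${name}`);
  return (captureIFrames && !has("obscured")) || has("capture");
}

// In markup:
//   <iframe src="/widget" data-openreplay-obscured></iframe>   -> skipped
//   <iframe src="/legacy" data-openreplay-capture></iframe>    -> always recorded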
diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js
index b772228be..772bd7315 100644
--- a/utilities/servers/websocket.js
+++ b/utilities/servers/websocket.js
@@ -2,8 +2,8 @@ const _io = require('socket.io');
 const express = require('express');
 const uaParser = require('ua-parser-js');
 const geoip2Reader = require('@maxmind/geoip2-node').Reader;
-var {extractPeerId} = require('./peerjs-server');
-var wsRouter = express.Router();
+const {extractPeerId} = require('./peerjs-server');
+const wsRouter = express.Router();
 const UPDATE_EVENT = "UPDATE_SESSION";
 const IDENTITIES = {agent: 'agent', session: 'session'};
 const NEW_AGENT = "NEW_AGENT";
@@ -12,83 +12,152 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
 const AGENTS_CONNECTED = "AGENTS_CONNECTED";
 const NO_SESSIONS = "SESSION_DISCONNECTED";
 const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
-// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
 
 let io;
-let debug = process.env.debug === "1" || false;
+const debug = process.env.debug === "1" || false;
 
-const socketsList = function (req, res) {
+const createSocketIOServer = function (server, prefix) {
+    io = _io(server, {
+        maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+        cors: {
+            origin: "*",
+            methods: ["GET", "POST", "PUT"]
+        },
+        path: (prefix ? prefix : '') + '/socket'
+    });
+}
+
+const extractUserIdFromRequest = function (req) {
+    if (req.query.userId) {
+        debug && console.log(`[WS]where userId=${req.query.userId}`);
+        return req.query.userId;
+    }
+    return undefined;
+}
+
+const extractProjectKeyFromRequest = function (req) {
+    if (req.params.projectKey) {
+        debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
+        return req.params.projectKey;
+    }
+    return undefined;
+}
+
+
+const getAvailableRooms = async function () {
+    return io.sockets.adapter.rooms.keys();
+}
+
+const respond = function (res, data) {
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": data}));
+}
+
+const socketsList = async function (req, res) {
     debug && console.log("[WS]looking for all available sessions");
+    let userId = extractUserIdFromRequest(req);
+
     let liveSessions = {};
-    for (let peerId of io.sockets.adapter.rooms.keys()) {
+    let rooms = await getAvailableRooms();
+    for (let peerId of rooms) {
         let {projectKey, sessionId} = extractPeerId(peerId);
         if (projectKey !== undefined) {
             liveSessions[projectKey] = liveSessions[projectKey] || [];
-            liveSessions[projectKey].push(sessionId);
+            if (userId) {
+                const connected_sockets = await io.in(peerId).fetchSockets();
+                for (let item of connected_sockets) {
+                    if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                        liveSessions[projectKey].push(sessionId);
+                    }
+                }
+            } else {
+                liveSessions[projectKey].push(sessionId);
+            }
         }
     }
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": liveSessions}));
+    respond(res, liveSessions);
 }
 wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
 
-const socketsListByProject = function (req, res) {
-    debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
+const socketsListByProject = async function (req, res) {
+    debug && console.log("[WS]looking for available sessions");
+    let _projectKey = extractProjectKeyFromRequest(req);
+    let userId = extractUserIdFromRequest(req);
     let liveSessions = {};
-    for (let peerId of io.sockets.adapter.rooms.keys()) {
+    let rooms = await getAvailableRooms();
+    for (let peerId of rooms) {
         let {projectKey, sessionId} = extractPeerId(peerId);
-        if (projectKey === req.params.projectKey) {
+        if (projectKey === _projectKey) {
            liveSessions[projectKey] = liveSessions[projectKey] || [];
-            liveSessions[projectKey].push(sessionId);
+            if (userId) {
+                const connected_sockets = await io.in(peerId).fetchSockets();
+                for (let item of connected_sockets) {
+                    if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                        liveSessions[projectKey].push(sessionId);
+                    }
+                }
+            } else {
+                liveSessions[projectKey].push(sessionId);
+            }
         }
     }
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
+    respond(res, liveSessions[_projectKey] || []);
 }
 wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
 
 const socketsLive = async function (req, res) {
     debug && console.log("[WS]looking for all available LIVE sessions");
+    let userId = extractUserIdFromRequest(req);
     let liveSessions = {};
-    for (let peerId of io.sockets.adapter.rooms.keys()) {
+    let rooms = await getAvailableRooms();
+    for (let peerId of rooms) {
         let {projectKey, sessionId} = extractPeerId(peerId);
         if (projectKey !== undefined) {
             let connected_sockets = await io.in(peerId).fetchSockets();
             for (let item of connected_sockets) {
                 if (item.handshake.query.identity === IDENTITIES.session) {
                     liveSessions[projectKey] = liveSessions[projectKey] || [];
-                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    if (userId) {
+                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                            liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                        }
+                    } else {
+                        liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    }
                 }
             }
         }
     }
-
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": liveSessions}));
+    respond(res, liveSessions);
 }
 wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
 
 const socketsLiveByProject = async function (req, res) {
-    debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
+    debug && console.log("[WS]looking for available LIVE sessions");
+    let _projectKey = extractProjectKeyFromRequest(req);
+    let userId = extractUserIdFromRequest(req);
     let liveSessions = {};
-    for (let peerId of io.sockets.adapter.rooms.keys()) {
+    let rooms = await getAvailableRooms();
+    for (let peerId of rooms) {
         let {projectKey, sessionId} = extractPeerId(peerId);
-        if (projectKey === req.params.projectKey) {
+        if (projectKey === _projectKey) {
            let connected_sockets = await io.in(peerId).fetchSockets();
            for (let item of connected_sockets) {
                if (item.handshake.query.identity === IDENTITIES.session) {
                    liveSessions[projectKey] = liveSessions[projectKey] || [];
-                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    if (userId) {
+                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+                            liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                        }
+                    } else {
+                        liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+                    }
                }
            }
        }
    }
-    res.statusCode = 200;
-    res.setHeader('Content-Type', 'application/json');
-    res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
+    respond(res, liveSessions[_projectKey] || []);
 }
 wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@@ -167,16 +236,8 @@ function extractSessionInfo(socket) {
 
 module.exports = {
     wsRouter,
-    start: (server) => {
-        io = _io(server, {
-            maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
-            cors: {
-                origin: "*",
-                methods: ["GET", "POST", "PUT"]
-            },
-            path: '/socket'
-        });
-
+    start: (server, prefix) => {
+        createSocketIOServer(server, prefix);
         io.on('connection', async (socket) => {
             debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
             socket.peerId = socket.handshake.query.peerId;
@@ -247,10 +308,10 @@ module.exports = {
         socket.onAny(async (eventName, ...args) => {
             socket.lastMessageReceivedAt = Date.now();
             if (socket.identity === IDENTITIES.session) {
-                debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
+                debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
                 socket.to(socket.peerId).emit(eventName, args[0]);
             } else {
-                debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
+                debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
                 let socketId = await findSessionSocketId(io, socket.peerId);
                 if (socketId === null) {
                     debug && console.log(`session not found for:${socket.peerId}`);
@@ -264,7 +325,7 @@ module.exports = {
         });
 
         console.log("WS server started")
-        setInterval((io) => {
+        setInterval(async (io) => {
             try {
                 let count = 0;
                 console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);
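Two things change the service's surface here: every sockets-* route now honors an optional userId query parameter, matched against the sessionInfo carried in each socket's handshake, and start() threads an optional path prefix into createSocketIOServer. Caller-side sketches of both follow; the host, port, S3_KEY value and file paths are placeholders, and only the route shapes and the start(server, prefix) signature come from the diff:

// Sketch 1: fetch live sessions for one project, optionally filtered by user.
async function fetchLiveSessions(projectKey: string, userId?: string) {
  const base = `https://openreplay.example.com/S3_KEY/sockets-live/${projectKey}`;
  const url = userId ? `${base}?userId=${encodeURIComponent(userId)}` : base;
  const res = await fetch(url);
  if (!res.ok) { throw new Error(`peer server responded ${res.status}`); }
  const { data } = await res.json(); // respond() wraps payloads as { data: ... }
  return data; // sessionInfo objects (session ids for the sockets-list routes)
}

// Sketch 2: mounting the module behind a path prefix (Express/http wiring is illustrative).
const http = require("http");
const express = require("express");
const { wsRouter, start } = require("./servers/websocket");

const app = express();
app.use("/ws", wsRouter);      // REST routes move under /ws/...
const server = http.createServer(app);
start(server, "/ws");          // Socket.IO endpoint becomes /ws/socket
server.listen(9000);

Passing the prefix at start() rather than hard-coding '/socket' lets the same module serve both a bare deployment and one behind a reverse proxy that strips or adds a sub-path.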