diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 37d87bfb6..49fd0948b 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -47,7 +47,15 @@ jobs: # # Getting the images to build # - git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt + + { + git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 + + git diff --name-only HEAD HEAD~1 | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do + grep -rl "pkg/$pkg_name" backend/services | cut -d '/' -f3 + done + } | uniq > backend/images_to_build.txt + [[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry diff --git a/api/.chalice/config.json b/api/.chalice/config.json index 79199456c..d1fe6c36c 100644 --- a/api/.chalice/config.json +++ b/api/.chalice/config.json @@ -54,7 +54,8 @@ "S3_SECRET": "", "invitation_link": "/api/users/invitation?token=%s", "change_password_link": "/reset-password?invitation=%s&&pass=%s", - "version_number": "1.3.5" + "iosBucket": "openreplay-ios-images", + "version_number": "1.3.6" }, "lambda_timeout": 150, "lambda_memory_size": 400, diff --git a/api/app.py b/api/app.py index 92224f99e..e67810de5 100644 --- a/api/app.py +++ b/api/app.py @@ -7,7 +7,7 @@ from chalicelib.blueprints import bp_authorizers from chalicelib.blueprints import bp_core, bp_core_crons from chalicelib.blueprints.app import v1_api from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons -from chalicelib.blueprints.subs import bp_dashboard,bp_insights +from chalicelib.blueprints.subs import bp_dashboard from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.helper import environ @@ -106,5 +106,4 @@ app.register_blueprint(bp_core_crons.app) app.register_blueprint(bp_core_dynamic.app) 
app.register_blueprint(bp_core_dynamic_crons.app) app.register_blueprint(bp_dashboard.app) -app.register_blueprint(bp_insights.app) app.register_blueprint(v1_api.app) diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py index 18773f68c..303bca306 100644 --- a/api/chalicelib/blueprints/bp_core.py +++ b/api/chalicelib/blueprints/bp_core.py @@ -1,5 +1,3 @@ -from chalicelib.utils.helper import environ - from chalice import Blueprint from chalice import Response @@ -11,9 +9,10 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig log_tool_stackdriver, reset_password, sessions_favorite_viewed, \ log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \ log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \ - assist, heatmaps + assist, heatmaps, mobile from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import email_helper +from chalicelib.utils.helper import environ app = Blueprint(__name__) _overrides.chalice_app(app) @@ -897,3 +896,14 @@ def sessions_live_search(projectId, context): def get_heatmaps_by_url(projectId, context): data = app.current_request.json_body return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} + + +@app.route('/general_stats', methods=['GET'], authorizer=None) +def get_general_stats(): + return {"data": {"sessions:": sessions.count_all()}} + + +@app.route('/{projectId}/mobile/{sessionId}/urls', methods=['POST']) +def mobile_signe(projectId, sessionId, context): + data = app.current_request.json_body + return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data["keys"])} diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py index 59df33c84..4beb73deb 100644 --- a/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/api/chalicelib/blueprints/bp_core_dynamic.py @@ -11,6 +11,8 @@ from 
chalicelib.core import signup from chalicelib.core import tenants from chalicelib.core import users from chalicelib.core import webhook +from chalicelib.core import license +from chalicelib.core import assist from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import captcha from chalicelib.utils import helper diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py deleted file mode 100644 index 8c79e2663..000000000 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ /dev/null @@ -1,69 +0,0 @@ -from chalice import Blueprint -from chalicelib.utils import helper -from chalicelib import _overrides - -from chalicelib.core import dashboard, insights -from chalicelib.core import metadata - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -# -# @app.route('/{projectId}/dashboard/metadata', methods=['GET']) -# def get_metadata_map(projectId, context): -# metamap = [] -# for m in metadata.get(project_id=projectId): -# metamap.append({"name": m["key"], "key": f"metadata{m['index']}"}) -# return {"data": metamap} -# -# -@app.route('/{projectId}/insights/journey', methods=['GET', 'POST']) -def get_insights_journey(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.get_journey(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) -def get_users_retention(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.get_retention(project_id=projectId, **{**data, **args})} - -# -# -# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) -# def get_dashboard_autocomplete(projectId, widget, context): -# params = 
app.current_request.query_params -# if params is None or params.get('q') is None or len(params.get('q')) == 0: -# return {"data": []} -# params['q'] = '^' + params['q'] -# -# if widget in ['performance']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), performance=True) -# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', -# 'impacted_sessions_by_slow_pages', 'pages_response_time']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), pages_only=True) -# elif widget in ['resources_loading_time']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), performance=False) -# elif widget in ['time_between_events', 'events']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), performance=False, events_only=True) -# elif widget in ['metadata']: -# data = dashboard.search(params.get('q', ''), None, project_id=projectId, -# platform=params.get('platform', None), metadata=True, key=params.get("key")) -# else: -# return {"errors": [f"unsupported widget: {widget}"]} -# return {'data': data} diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 1a89df032..12e24cac9 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,5 +1,6 @@ import requests - +from chalicelib.core import projects, sessions, sessions_metas +from chalicelib.utils import pg_client, helper from chalicelib.core import projects, sessions, sessions_metas from chalicelib.utils import pg_client, helper from chalicelib.utils.helper import environ diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py deleted file mode 100644 index c39fb5cea..000000000 --- 
a/api/chalicelib/core/insights.py +++ /dev/null @@ -1,211 +0,0 @@ -from chalicelib.core import sessions_metas -from chalicelib.utils import args_transformer -from chalicelib.utils import helper, dev -from chalicelib.utils import pg_client -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.metrics_helper import __get_step_size -import math -from chalicelib.core.dashboard import __get_constraints, __get_constraint_values - - -def __transform_journey(rows): - nodes = [] - links = [] - for r in rows: - source = r["source_event"][r["source_event"].index("_"):] - target = r["target_event"][r["target_event"].index("_"):] - if source not in nodes: - nodes.append(source) - if target not in nodes: - nodes.append(target) - links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) - return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} - - -JOURNEY_DEPTH = 5 -JOURNEY_TYPES = { - "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"}, - "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, - "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, - "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} -} - - -@dev.timed -def get_journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - event_start = None - event_table = JOURNEY_TYPES["PAGES"]["table"] - event_column = JOURNEY_TYPES["PAGES"]["column"] - event_table_id = JOURNEY_TYPES["PAGES"]["table_id"] - extra_values = {} - for f in filters: - if f["type"] == "START_POINT": - event_start = f["value"] - elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column 
= JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT source_event, - target_event, - MAX(target_id) max_target_id, - MAX(source_id) max_source_id, - count(*) AS value - - FROM (SELECT event_number || '_' || value as target_event, - message_id AS target_id, - LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event, - LAG(message_id, 1) OVER ( PARTITION BY session_rank ) AS source_id - FROM (SELECT value, - session_rank, - message_id, - ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number - - {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark" - if event_start else ""} - - FROM (SELECT session_id, - message_id, - timestamp, - value, - SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank - FROM (SELECT *, - CASE - WHEN source_timestamp IS NULL THEN 1 - ELSE 0 END AS new_session - FROM (SELECT session_id, - {event_table_id} AS message_id, - timestamp, - {event_column} AS value, - LAG(timestamp) - OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp - FROM {event_table} INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_subset)} - ) AS related_events) AS ranked_events) AS processed - {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} - ) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE source_event IS NOT NULL - and target_event IS NOT NULL - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - 
"endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, - **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - - return __transform_journey(rows) - - -def __compute_retention_percentage(rows): - if rows is None or len(rows) == 0: - return rows - t = -1 - for r in rows: - if r["week"] == 0: - t = r["usersCount"] - r["percentage"] = r["usersCount"] / t - return rows - - -def __complete_retention(rows, start_date, end_date=None): - if rows is None or len(rows) == 0: - return rows - max_week = 10 - week = 0 - delta_date = 0 - while max_week > 0: - start_date += TimeUTC.MS_WEEK - if end_date is not None and start_date >= end_date: - break - delta = 0 - if delta_date + week >= len(rows) \ - or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - rows.insert(delta_date + week + i, neutral) - delta = i - else: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if delta_date + week + i < len(rows) \ - and i != rows[delta_date + week + i]["week"]: - rows.insert(delta_date + week + i, neutral) - elif delta_date + week + i >= len(rows): - rows.append(neutral) - delta = i - week += delta - max_week -= 1 - delta_date += 1 - return rows - - -@dev.timed -def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * 
TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, - FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess - WHERE bsess.start_ts 1: print(f"multiple users found for [{data['email']}] please contact our support") diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 340733d30..aa1ab3d58 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -79,10 +79,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) - data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id, - device=data["userDevice"], - os_version=data["userOsVersion"], - mob_url=data["mobsUrl"]) else: data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, group_clickrage=True) @@ -162,6 +158,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False "projectId": project_id, "userId": user_id} with pg_client.PostgresClient() as cur: + ss_constraints = [] extra_constraints = [ cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}), cur.mogrify("s.duration IS NOT NULL", {}) @@ -173,7 +170,96 @@ def search2_pg(data, 
project_id, user_id, favorite_only=False, errors_only=False extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id})) events_query_part = "" + if "filters" in data: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + for f in data["filters"]: + if not isinstance(f.get("value"), list): + f["value"] = [f.get("value")] + if len(f["value"]) == 0 or f["value"][0] is None: + continue + filter_type = f["type"].upper() + f["value"] = __get_sql_value_multiple(f["value"]) + if filter_type == sessions_metas.meta_type.USERBROWSER: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_browser {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_os {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_device {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_country {op} %(value)s', {"value": f["value"]})) + elif filter_type == "duration".upper(): + if len(f["value"]) > 0 and f["value"][0] is not None: + extra_constraints.append( + 
cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) + ss_constraints.append( + cur.mogrify("ms.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) + if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0: + extra_constraints.append( + cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) + ss_constraints.append( + cur.mogrify("ms.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) + elif filter_type == sessions_metas.meta_type.REFERRER: + # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" + extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append( + cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]})) + elif filter_type == events.event_type.METADATA.ui_type: + op = __get_sql_operator(f["operator"]) + if f.get("key") in meta_keys.keys(): + extra_constraints.append( + cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})) + ss_constraints.append( + cur.mogrify(f"ms.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})) + elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + cur.mogrify(f"s.user_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.user_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID, + sessions_metas.meta_type.USERANONYMOUSID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + 
cur.mogrify(f"s.user_anonymous_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.user_anonymous_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + cur.mogrify(f"s.rev_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.rev_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + + # --------------------------------------------------------------------------- if len(data.get("events", [])) > 0: + ss_constraints = [s.decode('UTF-8') for s in ss_constraints] events_query_from = [] event_index = 0 @@ -268,7 +354,8 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False else: continue - + if event_index == 0: + event_where += ss_constraints if is_not: if event_index == 0: events_query_from.append(cur.mogrify(f"""\ @@ -316,73 +403,6 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False else: data["events"] = [] - # --------------------------------------------------------------------------- - if "filters" in data: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - for f in data["filters"]: - if not isinstance(f.get("value"), list): - f["value"] = [f.get("value")] - if len(f["value"]) == 0 or f["value"][0] is None: - continue - filter_type = f["type"].upper() - f["value"] = __get_sql_value_multiple(f["value"]) - if filter_type == sessions_metas.meta_type.USERBROWSER: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]})) - - elif filter_type in 
[sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]})) - elif filter_type == "duration".upper(): - if len(f["value"]) > 0 and f["value"][0] is not None: - extra_constraints.append( - cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) - if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0: - extra_constraints.append( - cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) - elif filter_type == sessions_metas.meta_type.REFERRER: - # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]})) - elif filter_type == events.event_type.METADATA.ui_type: - op = __get_sql_operator(f["operator"]) - if f.get("key") in meta_keys.keys(): - extra_constraints.append( - cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - 
cur.mogrify(f"s.user_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID, - sessions_metas.meta_type.USERANONYMOUSID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.user_anonymous_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.rev_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - # --------------------------------------------------------------------------- if data.get("startDate") is not None: @@ -741,3 +761,9 @@ def delete_sessions_by_user_ids(project_id, user_ids): cur.execute(query=query) return True + + +def count_all(): + with pg_client.PostgresClient() as cur: + row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") + return row.get("count", 0) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 7cf5c6c34..2af50ce57 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -9,6 +9,9 @@ from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.helper import environ +from chalicelib.core import tenants, assist +import secrets + def __generate_invitation_token(): return secrets.token_urlsafe(64) @@ -438,7 +441,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]: assist.get_ice_servers() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -466,7 +469,7 @@ def set_password_invitation(user_id, new_password): 
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]: assist.get_ice_servers() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 23ff97446..e0e6e0fa5 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -1,4 +1,5 @@ from chalicelib.utils import pg_client, helper +from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.helper import environ from chalicelib.utils.helper import get_issue_title @@ -30,7 +31,11 @@ def edit_config(user_id, weekly_report): def cron(): with pg_client.PostgresClient() as cur: - cur.execute("""\ + params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), + "1_week_ago": TimeUTC.midnight(delta_days=-7), + "2_week_ago": TimeUTC.midnight(delta_days=-14), + "5_week_ago": TimeUTC.midnight(delta_days=-35)} + cur.execute(cur.mogrify("""\ SELECT project_id, name AS project_name, users.emails AS emails, @@ -44,7 +49,7 @@ def cron(): SELECT sessions.project_id FROM public.sessions WHERE sessions.project_id = projects.project_id - AND start_ts >= (EXTRACT(EPOCH FROM now() - INTERVAL '3 days') * 1000)::BIGINT + AND start_ts >= %(3_days_ago)s LIMIT 1) AS recently_active USING (project_id) INNER JOIN LATERAL ( SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails @@ -54,14 +59,14 @@ def cron(): AND users.weekly_report ) AS users ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM 
events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id @@ -69,16 +74,17 @@ def cron(): AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT ) AS month_1_issues ON (TRUE) - WHERE projects.deleted_at ISNULL;""") + WHERE projects.deleted_at ISNULL;"""), params) projects_data = cur.fetchall() for p in projects_data: + params["project_id"] = p["project_id"] print(f"checking {p['project_name']} : {p['project_id']}") if len(p["emails"]) == 0 \ or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0: @@ -104,7 +110,7 @@ def cron(): DATE_TRUNC('day', now()) - INTERVAL '1 day', '1 day'::INTERVAL ) AS timestamp_i - ORDER BY timestamp_i;""", {"project_id": p["project_id"]})) + ORDER BY timestamp_i;""", params)) days_partition = cur.fetchall() max_days_partition = max(x['issues_count'] for x in days_partition) for d in days_partition: @@ -120,7 +126,7 @@ def cron(): AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT GROUP BY type ORDER BY count DESC, type - LIMIT 4;""", {"project_id": p["project_id"]})) + LIMIT 4;""", params)) issues_by_type = cur.fetchall() max_issues_by_type = sum(i["count"] for i in issues_by_type) for i in issues_by_type: @@ -149,7 +155,7 @@ def cron(): '1 day'::INTERVAL ) AS timestamp_i GROUP BY timestamp_i - ORDER BY timestamp_i;""", {"project_id": p["project_id"]})) + ORDER BY timestamp_i;""", params)) 
issues_breakdown_by_day = cur.fetchall() for i in issues_breakdown_by_day: i["sum"] = sum(x["count"] for x in i["partition"]) @@ -195,7 +201,7 @@ def cron(): WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT GROUP BY type - ORDER BY issue_count DESC;""", {"project_id": p["project_id"]})) + ORDER BY issue_count DESC;""", params)) issues_breakdown_list = cur.fetchall() if len(issues_breakdown_list) > 4: others = {"type": "Others", diff --git a/api/chalicelib/utils/TimeUTC.py b/api/chalicelib/utils/TimeUTC.py index 9cd353eb4..c95359a00 100644 --- a/api/chalicelib/utils/TimeUTC.py +++ b/api/chalicelib/utils/TimeUTC.py @@ -115,6 +115,13 @@ class TimeUTC: def get_utc_offset(): return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000) + @staticmethod + def trunc_day(timestamp): + dt = TimeUTC.from_ms_timestamp(timestamp) + return TimeUTC.datetime_to_timestamp(dt + .replace(hour=0, minute=0, second=0, microsecond=0) + .astimezone(pytz.utc)) + @staticmethod def trunc_week(timestamp): dt = TimeUTC.from_ms_timestamp(timestamp) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index ecc2b920a..1a743a57c 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -1,8 +1,8 @@ -import math import random import re import string +import math import requests local_prefix = 'local-' @@ -367,3 +367,7 @@ def get_internal_project_id(project_id64): def has_smtp(): return environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0 + + +def get_edition(): + return "foss" if is_free_open_source_edition() else "ee" diff --git a/api/chalicelib/utils/s3urls.py b/api/chalicelib/utils/s3urls.py new file mode 100644 index 000000000..bc0b39bea --- /dev/null +++ b/api/chalicelib/utils/s3urls.py @@ -0,0 +1,120 @@ +import 
re +from urllib.parse import urlparse + + +def style(url): + """ Determine 'style' of a given S3 url + + >>> style("s3://my-bucket/my-key/") + 's3' + + >>> style("s3://user@my-bucket/my-key/") + 's3-credential' + + >>> style("https://my-bucket.s3.amazonaws.com/my-key/") + 'bucket-in-netloc' + + >>> style("https://s3.amazonaws.com/my-bucket/my-key/") + 'bucket-in-path' + """ + o = urlparse(url) + if o.scheme == 's3': + if '@' in o.netloc: + return 's3-credential' + else: + return 's3' + + if re.search(r'^s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc): + return 'bucket-in-path' + + if re.search(r'\.s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc): + return 'bucket-in-netloc' + + raise ValueError(f'Unknown url style: {url}') + + +def build_url(url_type, bucket, key=None, region=None, credential_name=None): + """ Construct an S3 URL + + Args: + url_type: one of 's3', 's3-credential', 'bucket-in-path', 'bucket-in-netloc' + bucket: S3 bucket name + key: Key within bucket (optional) + region: S3 region name (optional) + credential_name: user/credential name to use in S3 scheme url (optional) + + Returns + (string) S3 URL + """ + if url_type == 's3': + credential = f'{credential_name}@' if credential_name else "" + return f's3://{credential}{bucket}/{key or ""}' + + if url_type == 'bucket-in-path': + return f'https://s3{"-" if region else ""}{region or ""}.amazonaws.com/{bucket}/{key}' + + if url_type == 'bucket-in-netloc': + return f'https://{bucket}.s3.amazonaws.com/{key}' + + raise ValueError(f'Invalid url_type: {url_type}') + + +def parse_s3_credential_url(url): + """ Parse S3 scheme url containing a user/credential name + + >>> parse_s3_url("s3://user@my-bucket/my-key") + {'bucket': 'my-bucket', 'key': 'my-key/', 'credential_name': 'user'} + """ + o = urlparse(url) + cred_name, bucket = o.netloc.split('@') + key = o.path if o.path[0] != '/' else o.path[1:] + return {'bucket': bucket, 'key': key, 'credential_name': cred_name} + + +def parse_s3_url(url): + 
""" Parse S3 scheme url + + >>> parse_s3_url("s3://my-bucket/my-key") + {'bucket': 'my-bucket', 'key': 'my-key/'} + """ + o = urlparse(url) + bucket = o.netloc + key = o.path if o.path[0] != '/' else o.path[1:] + return {'bucket': bucket, 'key': key} + + +def parse_bucket_in_path_url(url): + """ Parse url with bucket name path + + >>> parse_bucket_in_path_url("https://s3-eu-west-1.amazonaws.com/my-bucket/my-key/") + {'bucket': 'my-bucket', 'key': 'my-key/'} + """ + path = urlparse(url).path + bucket = path.split('/')[1] + key = '/'.join(path.split('/')[2:]) + return {'bucket': bucket, 'key': key} + + +def parse_bucket_in_netloc_url(url): + """ Parse url with bucket name in host/netloc + + >>> parse_bucket_in_netloc_url("https://my-bucket.s3.amazonaws.com/my-key/") + {'bucket': 'my-bucket', 'key': 'my-key/'} + """ + o = urlparse(url) + bucket = o.netloc.split('.')[0] + key = o.path if o.path[0] != '/' else o.path[1:] + return {'bucket': bucket, 'key': key} + + +def parse_url(url): + url_style = style(url) + + if url_style == 's3-credential': + return parse_s3_credential_url(url) + if url_style == 's3': + return parse_s3_url(url) + if url_style == 'bucket-in-path': + return parse_bucket_in_path_url(url) + if url_style == 'bucket-in-netloc': + return parse_bucket_in_netloc_url(url) diff --git a/backend/Dockerfile b/backend/Dockerfile index 3e60e0e89..6ca305ca1 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -28,11 +28,11 @@ ENV TZ=UTC \ BEACON_SIZE_LIMIT=7000000 \ KAFKA_USE_SSL=true \ REDIS_STREAMS_MAX_LEN=3000 \ - TOPIC_RAW=raw \ + TOPIC_RAW_WEB=raw \ + TOPIC_RAW_IOS=raw-ios \ TOPIC_CACHE=cache \ TOPIC_ANALYTICS=analytics \ TOPIC_TRIGGER=trigger \ - TOPIC_EVENTS=events \ GROUP_SINK=sink \ GROUP_STORAGE=storage \ GROUP_DB=db \ @@ -41,7 +41,7 @@ ENV TZ=UTC \ AWS_REGION_WEB=eu-central-1 \ AWS_REGION_IOS=eu-west-1 \ AWS_REGION_ASSETS=eu-central-1 \ - CACHE_ASSETS=false \ + CACHE_ASSETS=true \ ASSETS_SIZE_LIMIT=6291456 \ FS_CLEAN_HRS=72 diff --git 
a/backend/Dockerfile.bundle b/backend/Dockerfile.bundle index 904bb45f4..efbcb2684 100644 --- a/backend/Dockerfile.bundle +++ b/backend/Dockerfile.bundle @@ -29,11 +29,11 @@ ENV TZ=UTC \ BEACON_SIZE_LIMIT=1000000 \ KAFKA_USE_SSL=true \ REDIS_STREAMS_MAX_LEN=3000 \ - TOPIC_RAW=raw \ + TOPIC_RAW_WEB=raw \ + TOPIC_RAW_IOS=raw-ios \ TOPIC_CACHE=cache \ TOPIC_ANALYTICS=analytics \ TOPIC_TRIGGER=trigger \ - TOPIC_EVENTS=events \ GROUP_SINK=sink \ GROUP_STORAGE=storage \ GROUP_DB=db \ diff --git a/backend/go.mod b/backend/go.mod index 8bd0386af..ab98ca444 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -8,7 +8,7 @@ require ( github.com/Masterminds/squirrel v1.5.0 github.com/aws/aws-sdk-go v1.35.23 github.com/btcsuite/btcutil v1.0.2 - github.com/confluentinc/confluent-kafka-go v1.5.2 // indirect + github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect github.com/elastic/go-elasticsearch/v7 v7.13.1 github.com/go-redis/redis v6.15.9+incompatible github.com/google/uuid v1.1.2 @@ -24,6 +24,6 @@ require ( github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe google.golang.org/api v0.50.0 - gopkg.in/confluentinc/confluent-kafka-go.v1 v1.5.2 + gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0 ) diff --git a/backend/pkg/db/cache/messages_common.go b/backend/pkg/db/cache/messages_common.go index 0b7d9a885..c05422cb2 100644 --- a/backend/pkg/db/cache/messages_common.go +++ b/backend/pkg/db/cache/messages_common.go @@ -28,3 +28,52 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { } return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash) } + + +func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error { + if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil { + return err + } + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + session.UserID = &userID.Value + return nil +} + +func (c *PGCache) 
InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error { + if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil { + return err + } + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + session.UserAnonymousID = &userAnonymousID.Value + return nil +} + +func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + project, err := c.GetProject(session.ProjectID) + if err != nil { + return err + } + + keyNo := project.GetMetadataNo(metadata.Key) + + if keyNo == 0 { + // insert project metadata + } + + if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil { + return err + } + + session.SetMetadata(keyNo, metadata.Value) + return nil +} diff --git a/backend/pkg/db/cache/messages_ios.go b/backend/pkg/db/cache/messages_ios.go index 151ffe58e..0cced5472 100644 --- a/backend/pkg/db/cache/messages_ios.go +++ b/backend/pkg/db/cache/messages_ios.go @@ -22,6 +22,7 @@ func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) er UserOSVersion: s.UserOSVersion, UserDevice: s.UserDevice, UserCountry: s.UserCountry, + UserDeviceType: s.UserDeviceType, } if err := c.Conn.InsertSessionStart(sessionID, c.sessions[ sessionID ]); err != nil { c.sessions[ sessionID ] = nil @@ -95,46 +96,3 @@ func (c *PGCache) InsertIOSIssueEvent(sessionID uint64, issueEvent *IOSIssueEven return nil } -func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error { - if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil { - return err - } - session, err := c.GetSession(sessionID) - if err != nil { - return err - } - session.UserID = &userID.Value - return nil -} - -func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error { - if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil { - return err - } 
- session, err := c.GetSession(sessionID) - if err != nil { - return err - } - session.UserAnonymousID = &userAnonymousID.Value - return nil -} - -func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { - session, err := c.GetSession(sessionID) - if err != nil { - return err - } - project, err := c.GetProject(session.ProjectID) - if err != nil { - return err - } - - keyNo := project.GetMetadataNo(metadata.Key) - if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil { - return err - } - - session.SetMetadata(keyNo, metadata.Value) - return nil -} - diff --git a/backend/pkg/db/cache/pg_cache.go b/backend/pkg/db/cache/pg_cache.go index 0ca429eb5..9a62354f1 100644 --- a/backend/pkg/db/cache/pg_cache.go +++ b/backend/pkg/db/cache/pg_cache.go @@ -2,6 +2,7 @@ package cache import ( "time" + "sync" "openreplay/backend/pkg/db/postgres" . "openreplay/backend/pkg/db/types" @@ -20,8 +21,8 @@ type ProjectMeta struct { type PGCache struct { *postgres.Conn sessions map[uint64]*Session - projects map[uint32]*ProjectMeta - projectsByKeys map[string]*ProjectMeta + projects map[uint32]*ProjectMeta + projectsByKeys sync.Map // map[string]*ProjectMeta projectExpirationTimeout time.Duration } @@ -31,7 +32,7 @@ func NewPGCache(pgConn *postgres.Conn, projectExpirationTimeoutMs int64) *PGCach Conn: pgConn, sessions: make(map[uint64]*Session), projects: make(map[uint32]*ProjectMeta), - projectsByKeys: make(map[string]*ProjectMeta), + //projectsByKeys: make(map[string]*ProjectMeta), projectExpirationTimeout: time.Duration(1000 * projectExpirationTimeoutMs), } } diff --git a/backend/pkg/db/cache/project.go b/backend/pkg/db/cache/project.go index dacb46633..daf498d32 100644 --- a/backend/pkg/db/cache/project.go +++ b/backend/pkg/db/cache/project.go @@ -6,16 +6,21 @@ import ( ) func (c *PGCache) GetProjectByKey(projectKey string) (*Project, error) { - if c.projectsByKeys[ projectKey ] != nil && - time.Now().Before(c.projectsByKeys[ projectKey 
].expirationTime) { - return c.projectsByKeys[ projectKey ].Project, nil + pmInterface, found := c.projectsByKeys.Load(projectKey) + if found { + if pm, ok := pmInterface.(*ProjectMeta); ok { + if time.Now().Before(pm.expirationTime) { + return pm.Project, nil + } + } } + p, err := c.Conn.GetProjectByKey(projectKey) - if p == nil { + if err != nil { return nil, err } - c.projectsByKeys[ projectKey ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projects[ p.ProjectID ] = c.projectsByKeys[ projectKey ] + //c.projects[ p.ProjectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } + c.projectsByKeys.Store(projectKey, p) return p, nil } @@ -27,11 +32,11 @@ func (c *PGCache) GetProject(projectID uint32) (*Project, error) { return c.projects[ projectID ].Project, nil } p, err := c.Conn.GetProject(projectID) - if p == nil { + if err != nil { return nil, err } c.projects[ projectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projectsByKeys[ p.ProjectKey ] = c.projects[ projectID ] + //c.projectsByKeys.Store(p.ProjectKey, c.projects[ projectID ]) return p, nil } diff --git a/backend/pkg/db/postgres/errors.go b/backend/pkg/db/postgres/errors.go index 9012bfe6b..a83c8f03a 100644 --- a/backend/pkg/db/postgres/errors.go +++ b/backend/pkg/db/postgres/errors.go @@ -2,15 +2,17 @@ package postgres import ( "errors" - + + "github.com/jackc/pgx/v4" "github.com/jackc/pgconn" "github.com/jackc/pgerrcode" ) func IsPkeyViolation(err error) bool { var pgErr *pgconn.PgError - if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation { - return true - } - return false -} \ No newline at end of file + return errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation +} + +func IsNoRowsErr(err error) bool { + return err == pgx.ErrNoRows +} diff --git a/backend/pkg/db/postgres/project.go b/backend/pkg/db/postgres/project.go index 461db66fb..2eea30662 100644 --- a/backend/pkg/db/postgres/project.go +++ 
b/backend/pkg/db/postgres/project.go @@ -1,7 +1,6 @@ package postgres import ( - "github.com/jackc/pgx/v4" . "openreplay/backend/pkg/db/types" ) @@ -14,9 +13,6 @@ func (conn *Conn) GetProjectByKey(projectKey string) (*Project, error) { `, projectKey, ).Scan(&p.MaxSessionDuration, &p.SampleRate, &p.ProjectID); err != nil { - if err == pgx.ErrNoRows { - err = nil - } return nil, err } return p, nil @@ -36,9 +32,6 @@ func (conn *Conn) GetProject(projectID uint32) (*Project, error) { ).Scan(&p.ProjectKey,&p.MaxSessionDuration, &p.Metadata1, &p.Metadata2, &p.Metadata3, &p.Metadata4, &p.Metadata5, &p.Metadata6, &p.Metadata7, &p.Metadata8, &p.Metadata9, &p.Metadata10); err != nil { - if err == pgx.ErrNoRows { - err = nil - } return nil, err } return p, nil diff --git a/backend/pkg/messages/batch.go b/backend/pkg/messages/batch.go index a70d96c98..fa40db7b2 100644 --- a/backend/pkg/messages/batch.go +++ b/backend/pkg/messages/batch.go @@ -30,6 +30,14 @@ func ReadBatch(b []byte, callback func(Message)) error { timestamp = m.Timestamp isBatchMeta = true // continue readLoop + case *IOSBatchMeta: + if index != 0 { // Might be several 0-0 BatchMeta in a row without a error though + return errors.New("Batch Meta found at the end of the batch") + } + index = m.FirstIndex + timestamp = int64(m.Timestamp) + isBatchMeta = true + // continue readLoop case *Timestamp: timestamp = int64(m.Timestamp) // TODO(?): replace timestamp type to int64 everywhere (including encoding part in tracker) // No skipping here for making it easy to encode back the same sequence of message diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index 80525a2cd..f43f40142 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -3,9 +3,9 @@ package messages func IsReplayerType(id uint64) bool { - return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 
== id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 100 == id || 102 == id || 103 == id || 105 == id + return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id } func IsIOSType(id uint64) bool { - return 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id + return 107 == id || 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id } diff --git a/backend/pkg/messages/get_timestamp.go b/backend/pkg/messages/get_timestamp.go new file mode 100644 index 000000000..c8e42f756 --- /dev/null +++ b/backend/pkg/messages/get_timestamp.go @@ -0,0 +1,65 @@ +// Auto-generated, do not edit +package messages + + +func GetTimestamp(message Message) uint64 { + switch msg := message.(type) { + + case *IOSBatchMeta: + return msg.Timestamp + + case *IOSSessionStart: + return msg.Timestamp + + case *IOSSessionEnd: + return msg.Timestamp + + case *IOSMetadata: + return msg.Timestamp + + case *IOSCustomEvent: + return msg.Timestamp + + case *IOSUserID: + return msg.Timestamp + + case 
*IOSUserAnonymousID: + return msg.Timestamp + + case *IOSScreenChanges: + return msg.Timestamp + + case *IOSCrash: + return msg.Timestamp + + case *IOSScreenEnter: + return msg.Timestamp + + case *IOSScreenLeave: + return msg.Timestamp + + case *IOSClickEvent: + return msg.Timestamp + + case *IOSInputEvent: + return msg.Timestamp + + case *IOSPerformanceEvent: + return msg.Timestamp + + case *IOSLog: + return msg.Timestamp + + case *IOSInternalError: + return msg.Timestamp + + case *IOSNetworkCall: + return msg.Timestamp + + case *IOSIssueEvent: + return msg.Timestamp + + } + return uint64(message.Meta().Timestamp) +} + diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 2f1865884..3d8bae7f6 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -1192,6 +1192,22 @@ p = WriteUint(msg.ID, buf, p) return buf[:p] } +type IOSBatchMeta struct { + *meta + Timestamp uint64 +Length uint64 +FirstIndex uint64 +} +func (msg *IOSBatchMeta) Encode() []byte{ + buf := make([]byte, 31 ) + buf[0] = 107 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) +p = WriteUint(msg.Length, buf, p) +p = WriteUint(msg.FirstIndex, buf, p) + return buf[:p] +} + type IOSSessionStart struct { *meta Timestamp uint64 @@ -1305,14 +1321,22 @@ p = WriteString(msg.Value, buf, p) type IOSScreenChanges struct { *meta Timestamp uint64 -SkipData []byte +Length uint64 +X uint64 +Y uint64 +Width uint64 +Height uint64 } func (msg *IOSScreenChanges) Encode() []byte{ - buf := make([]byte, 21 + len(msg.SkipData)) + buf := make([]byte, 61 ) buf[0] = 96 p := 1 p = WriteUint(msg.Timestamp, buf, p) -p = WriteData(msg.SkipData, buf, p) +p = WriteUint(msg.Length, buf, p) +p = WriteUint(msg.X, buf, p) +p = WriteUint(msg.Y, buf, p) +p = WriteUint(msg.Width, buf, p) +p = WriteUint(msg.Height, buf, p) return buf[:p] } diff --git a/backend/pkg/messages/primitives.go b/backend/pkg/messages/primitives.go index 0c938d2b2..70952eeab 100644 --- 
a/backend/pkg/messages/primitives.go +++ b/backend/pkg/messages/primitives.go @@ -49,7 +49,7 @@ func ReadUint(reader io.Reader) (uint64, error) { } if b < 0x80 { if i > 9 || i == 9 && b > 1 { - return x, errors.New("overflow") + return x, errors.New("uint overflow") } return x | uint64(b)<>50, 10) } diff --git a/backend/services/assets/main.go b/backend/services/assets/main.go index 05c779bbb..450dfc83c 100644 --- a/backend/services/assets/main.go +++ b/backend/services/assets/main.go @@ -15,12 +15,11 @@ import ( "openreplay/backend/services/assets/cacher" ) - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) GROUP_CACHE := env.String("GROUP_CACHE") - TOPIC_TRIGGER := env.String("TOPIC_TRIGGER") + TOPIC_CACHE := env.String("TOPIC_CACHE") cacher := cacher.NewCacher( env.String("AWS_REGION"), @@ -31,7 +30,7 @@ func main() { consumer := queue.NewMessageConsumer( GROUP_CACHE, - []string{ TOPIC_TRIGGER }, + []string{ TOPIC_CACHE }, func(sessionID uint64, message messages.Message, e *types.Meta) { switch msg := message.(type) { case *messages.AssetCache: diff --git a/backend/services/db/main.go b/backend/services/db/main.go index a2cef41b3..a14aa7648 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -17,7 +17,6 @@ import ( "openreplay/backend/services/db/heuristics" ) - var pg *cache.PGCache func main() { @@ -32,13 +31,13 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_DB"), []string{ - //env.String("TOPIC_RAW"), + env.String("TOPIC_RAW_IOS"), env.String("TOPIC_TRIGGER"), }, func(sessionID uint64, msg messages.Message, _ *types.Meta) { if err := insertMessage(sessionID, msg); err != nil { if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, Message %v", err, msg) + log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err,sessionID, msg) } return } @@ -46,13 +45,13 @@ func main() { session, err := pg.GetSession(sessionID) if err != nil { // Might happen due to 
the assets-related message TODO: log only if session is necessary for this kind of message - log.Printf("Error on session retrieving from cache: %v, Message %v, sessionID %v", err, msg, sessionID) + log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg) return; } err = insertStats(session, msg) if err != nil { - log.Printf("Stats Insertion Error %v; Session:%v, Message: %v", err, session, msg) + log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg) } heurFinder.HandleMessage(session, msg) @@ -60,14 +59,14 @@ func main() { // TODO: DRY code (carefully with the return statement logic) if err := insertMessage(sessionID, msg); err != nil { if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, Message %v", err, msg) + log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg) } return } err = insertStats(session, msg) if err != nil { - log.Printf("Stats Insertion Error %v", err) + log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg) } }) }, diff --git a/backend/services/ender/builder/builder.go b/backend/services/ender/builder/builder.go index f72737894..f17116501 100644 --- a/backend/services/ender/builder/builder.go +++ b/backend/services/ender/builder/builder.go @@ -108,11 +108,11 @@ func (b *builder) buildInputEvent() { } func (b *builder) handleMessage(message Message, messageID uint64) { - timestamp := uint64(message.Meta().Timestamp) - if b.timestamp <= timestamp { // unnecessary. TODO: test and remove + timestamp := GetTimestamp(message) + if b.timestamp <= timestamp { // unnecessary? TODO: test and remove b.timestamp = timestamp } - // Before the first timestamp. + // Might happen before the first timestamp. 
switch msg := message.(type) { case *SessionStart, *Metadata, diff --git a/backend/services/ender/main.go b/backend/services/ender/main.go index e99c6866c..9c62d14b0 100644 --- a/backend/services/ender/main.go +++ b/backend/services/ender/main.go @@ -16,7 +16,6 @@ import ( "openreplay/backend/services/ender/builder" ) - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) @@ -30,7 +29,8 @@ func main() { consumer := queue.NewMessageConsumer( GROUP_EVENTS, []string{ - env.String("TOPIC_RAW"), + env.String("TOPIC_RAW_WEB"), + env.String("TOPIC_RAW_IOS"), }, func(sessionID uint64, msg messages.Message, meta *types.Meta) { lastTs = meta.Timestamp diff --git a/backend/services/http/assets.go b/backend/services/http/assets.go index 69fb7f53d..cc055087a 100644 --- a/backend/services/http/assets.go +++ b/backend/services/http/assets.go @@ -7,7 +7,7 @@ import ( func sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) { if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable { - producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(&messages.AssetCache{ + producer.Produce(TOPIC_CACHE, sessionID, messages.Encode(&messages.AssetCache{ URL: fullURL, })) } diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 975abe31b..e45e84e64 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -1,287 +1,41 @@ package main import ( - "encoding/json" - "errors" "io" "io/ioutil" "log" - "math/rand" "net/http" - "strconv" - "time" gzip "github.com/klauspost/pgzip" - - "openreplay/backend/pkg/db/postgres" - . 
"openreplay/backend/pkg/messages" - "openreplay/backend/pkg/token" ) const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb -func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { - type request struct { - Token string `json:"token"` - UserUUID *string `json:"userUUID"` - RevID string `json:"revID"` - Timestamp uint64 `json:"timestamp"` - TrackerVersion string `json:"trackerVersion"` - IsSnippet bool `json:"isSnippet"` - DeviceMemory uint64 `json:"deviceMemory"` - JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"` - ProjectKey *string `json:"projectKey"` - Reset bool `json:"reset"` - } - type response struct { - Timestamp int64 `json:"timestamp"` - Delay int64 `json:"delay"` - Token string `json:"token"` - UserUUID string `json:"userUUID"` - SessionID string `json:"sessionID"` - BeaconSizeLimit int64 `json:"beaconSizeLimit"` - } - - startTime := time.Now() - req := &request{} - body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error? 
- //defer body.Close() - if err := json.NewDecoder(body).Decode(req); err != nil { - responseWithError(w, http.StatusBadRequest, err) - return - } - - if req.ProjectKey == nil { - responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) - return - } - - p, err := pgconn.GetProjectByKey(*req.ProjectKey) - if p == nil { - if err == nil { - responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) - } else { - responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging - } - return - } - - userUUID := getUUID(req.UserUUID) - tokenData, err := tokenizer.Parse(req.Token) - if err != nil || req.Reset { // Starting the new one - dice := byte(rand.Intn(100)) // [0, 100) - if dice >= p.SampleRate { - responseWithError(w, http.StatusForbidden, errors.New("cancel")) - return - } - - ua := uaParser.ParseFromHTTPRequest(r) - if ua == nil { - responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) - return - } - sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) - if err != nil { - responseWithError(w, http.StatusInternalServerError, err) - return - } - // TODO: if EXPIRED => send message for two sessions association - expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) - tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} - - country := geoIP.ExtractISOCodeFromHTTPRequest(r) - producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&SessionStart{ - Timestamp: req.Timestamp, - ProjectID: uint64(p.ProjectID), - TrackerVersion: req.TrackerVersion, - RevID: req.RevID, - UserUUID: userUUID, - UserAgent: r.Header.Get("User-Agent"), - UserOS: ua.OS, - UserOSVersion: ua.OSVersion, - UserBrowser: ua.Browser, - UserBrowserVersion: ua.BrowserVersion, - UserDevice: ua.Device, - UserDeviceType: ua.DeviceType, - UserCountry: country, - UserDeviceMemorySize: req.DeviceMemory, - UserDeviceHeapSize: 
req.JsHeapSizeLimit, - })) - } - - //delayDuration := time.Now().Sub(startTime) - responseWithJSON(w, &response{ - //Timestamp: startTime.UnixNano() / 1e6, - //Delay: delayDuration.Nanoseconds() / 1e6, - Token: tokenizer.Compose(*tokenData), - UserUUID: userUUID, - SessionID: strconv.FormatUint(tokenData.ID, 10), - BeaconSizeLimit: BEACON_SIZE_LIMIT, - }) -} - -func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { +func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) { body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) //defer body.Close() var reader io.ReadCloser + var err error switch r.Header.Get("Content-Encoding") { case "gzip": - reader, err := gzip.NewReader(body) + log.Println("Gzip", reader) + + reader, err = gzip.NewReader(body) if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent responce return } + log.Println("Gzip reader init", reader) defer reader.Close() default: reader = body } + log.Println("Reader after switch:", reader) buf, err := ioutil.ReadAll(reader) if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging return } - producer.Produce(TOPIC_RAW, sessionID, buf) // What if not able to send? 
- w.WriteHeader(http.StatusOK) -} - -func pushMessagesHandler(w http.ResponseWriter, r *http.Request) { - sessionData, err := tokenizer.ParseFromHTTPRequest(r) - if err != nil { - responseWithError(w, http.StatusUnauthorized, err) - return - } - pushMessages(w, r, sessionData.ID) -} - -func pushMessagesSeparatelyHandler(w http.ResponseWriter, r *http.Request) { - sessionData, err := tokenizer.ParseFromHTTPRequest(r) - if err != nil { - responseWithError(w, http.StatusUnauthorized, err) - return - } - body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) - //defer body.Close() - buf, err := ioutil.ReadAll(body) - if err != nil { - responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging - return - } - //log.Printf("Sending batch...") - //startTime := time.Now() - - // analyticsMessages := make([]Message, 0, 200) - - rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message { - switch m := msg.(type) { - case *SetNodeAttributeURLBased: - if m.Name == "src" || m.Name == "href" { - msg = &SetNodeAttribute{ - ID: m.ID, - Name: m.Name, - Value: handleURL(sessionData.ID, m.BaseURL, m.Value), - } - } else if m.Name == "style" { - msg = &SetNodeAttribute{ - ID: m.ID, - Name: m.Name, - Value: handleCSS(sessionData.ID, m.BaseURL, m.Value), - } - } - case *SetCSSDataURLBased: - msg = &SetCSSData{ - ID: m.ID, - Data: handleCSS(sessionData.ID, m.BaseURL, m.Data), - } - case *CSSInsertRuleURLBased: - msg = &CSSInsertRule{ - ID: m.ID, - Index: m.Index, - Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule), - } - } - - // switch msg.(type) { - // case *BatchMeta, // TODO: watchout! 
Meta().Index'es are changed here (though it is still unique for the topic-session pair) - // *SetPageLocation, - // *PageLoadTiming, - // *PageRenderTiming, - // *PerformanceTrack, - // *SetInputTarget, - // *SetInputValue, - // *MouseClick, - // *RawErrorEvent, - // *JSException, - // *ResourceTiming, - // *RawCustomEvent, - // *CustomIssue, - // *Fetch, - // *StateAction, - // *GraphQL, - // *CreateElementNode, - // *CreateTextNode, - // *RemoveNode, - // *CreateDocument, - // *RemoveNodeAttribute, - // *MoveNode, - // *SetCSSData, - // *CSSInsertRule, - // *CSSDeleteRule: - // analyticsMessages = append(analyticsMessages, msg) - //} - - return msg - }) - if err != nil { - responseWithError(w, http.StatusForbidden, err) - return - } - producer.Produce(TOPIC_RAW, sessionData.ID, rewritenBuf) - //producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages)) - //duration := time.Now().Sub(startTime) - //log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf))) - w.WriteHeader(http.StatusOK) -} - -func notStartedHandler(w http.ResponseWriter, r *http.Request) { - type request struct { - ProjectKey *string `json:"projectKey"` - TrackerVersion string `json:"trackerVersion"` - DoNotTrack bool `json:"DoNotTrack"` - // RevID string `json:"revID"` - } - req := &request{} - body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) - defer body.Close() - if err := json.NewDecoder(body).Decode(req); err != nil { - responseWithError(w, http.StatusBadRequest, err) - return - } - if req.ProjectKey == nil { - responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) - return - } - ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway - if ua == nil { - responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) - return - } - country := geoIP.ExtractISOCodeFromHTTPRequest(r) - err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{ - 
ProjectKey: *req.ProjectKey, - TrackerVersion: req.TrackerVersion, - DoNotTrack: req.DoNotTrack, - Platform: "web", - UserAgent: r.Header.Get("User-Agent"), - UserOS: ua.OS, - UserOSVersion: ua.OSVersion, - UserBrowser: ua.Browser, - UserBrowserVersion: ua.BrowserVersion, - UserDevice: ua.Device, - UserDeviceType: ua.DeviceType, - UserCountry: country, - }) - if err != nil { - log.Printf("Unable to insert Unstarted Session: %v\n", err) - } + producer.Produce(topicName, sessionID, buf) // What if not able to send? w.WriteHeader(http.StatusOK) } diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 2c874a312..6c3f945bd 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -1,145 +1,199 @@ package main -// const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb +import ( + "encoding/json" + "net/http" + "errors" + "time" + "math/rand" + "strconv" + "log" -// func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { -// type request struct { -// // SessionID *string -// EncodedProjectID *uint64 `json:"projectID"` -// TrackerVersion string `json:"trackerVersion"` -// RevID string `json:"revID"` -// UserUUID *string `json:"userUUID"` -// //UserOS string `json"userOS"` //hardcoded 'MacOS' -// UserOSVersion string `json:"userOSVersion"` -// UserDevice string `json:"userDevice"` -// Timestamp uint64 `json:"timestamp"` -// // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac -// // “performances”:{ -// // “activeProcessorCount”:8, -// // “isLowPowerModeEnabled”:0, -// // “orientation”:0, -// // “systemUptime”:585430, -// // “batteryState”:0, -// // “thermalState”:0, -// // “batteryLevel”:0, -// // “processorCount”:8, -// // “physicalMemory”:17179869184 -// // }, -// } -// type response struct { -// Token string `json:"token"` -// ImagesHashList []string `json:"imagesHashList"` -// UserUUID string `json:"userUUID"` -// SESSION_ID uint64 `json:"SESSION_ID"` ///TEMP -// } -// startTime 
:= time.Now() -// req := &request{} -// body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) -// //defer body.Close() -// if err := json.NewDecoder(body).Decode(req); err != nil { -// responseWithError(w, http.StatusBadRequest, err) -// return -// } + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/token" + . "openreplay/backend/pkg/messages" +) -// if req.EncodedProjectID == nil { -// responseWithError(w, http.StatusForbidden, errors.New("ProjectID value required")) -// return -// } -// projectID := decodeProjectID(*(req.EncodedProjectID)) -// if projectID == 0 { -// responseWithError(w, http.StatusUnprocessableEntity, errors.New("ProjectID value is invalid")) -// return -// } -// p, err := pgconn.GetProject(uint32(projectID)) -// if p == nil { -// if err == nil { -// responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) -// } else { -// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging -// } -// return -// } -// sessionID, err := flaker.Compose(req.Timestamp) -// if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) -// return -// } -// userUUID := getUUID(req.UserUUID) -// country := geoIP.ExtractISOCodeFromHTTPRequest(r) -// expirationTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) +const FILES_SIZE_LIMIT int64 = 1e7 // 10Mb -// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size -// if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) -// return -// } +func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { + type request struct { + Token string `json:"token"` + ProjectKey *string `json:"projectKey"` + TrackerVersion string `json:"trackerVersion"` + RevID string `json:"revID"` + UserUUID *string `json:"userUUID"` + //UserOS string `json"userOS"` //hardcoded 'MacOS' + UserOSVersion string 
`json:"userOSVersion"` + UserDevice string `json:"userDevice"` + Timestamp uint64 `json:"timestamp"` + // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac + // “performances”:{ + // “activeProcessorCount”:8, + // “isLowPowerModeEnabled”:0, + // “orientation”:0, + // “systemUptime”:585430, + // “batteryState”:0, + // “thermalState”:0, + // “batteryLevel”:0, + // “processorCount”:8, + // “physicalMemory”:17179869184 + // }, + } + type response struct { + Token string `json:"token"` + ImagesHashList []string `json:"imagesHashList"` + UserUUID string `json:"userUUID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` + SessionID string `json:"sessionID"` + } + startTime := time.Now() + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) + //defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } -// responseWithJSON(w, &response{ -// Token: tokenizer.Compose(sessionID, uint64(expirationTime.UnixNano()/1e6)), -// ImagesHashList: imagesHashList, -// UserUUID: userUUID, -// //TEMP: -// SESSION_ID: sessionID, -// }) -// producer.Produce(topicRaw, sessionID, messages.Encode(&messages.IOSSessionStart{ -// Timestamp: req.Timestamp, -// ProjectID: projectID, -// TrackerVersion: req.TrackerVersion, -// RevID: req.RevID, -// UserUUID: userUUID, -// UserOS: "MacOS", -// UserOSVersion: req.UserOSVersion, -// UserDevice: MapIOSDevice(req.UserDevice), -// UserDeviceType: GetIOSDeviceType(req.UserDevice), // string `json:"userDeviceType"` // From UserDevice; ENUM ? 
-// UserCountry: country, -// })) -// } + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + + p, err := pgconn.GetProjectByKey(*req.ProjectKey) + if err != nil { + if postgres.IsNoRowsErr(err) { + responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) + } else { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } + return + } + userUUID := getUUID(req.UserUUID) + tokenData, err := tokenizer.Parse(req.Token) + + if err != nil { // Starting the new one + dice := byte(rand.Intn(100)) // [0, 100) + if dice >= p.SampleRate { + responseWithError(w, http.StatusForbidden, errors.New("cancel")) + return + } + + ua := uaParser.ParseFromHTTPRequest(r) + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + return + } + sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) + return + } + // TODO: if EXPIRED => send message for two sessions association + expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) + tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} + + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + + // The difference with web is mostly here: + producer.Produce(TOPIC_RAW_IOS, tokenData.ID, Encode(&IOSSessionStart{ + Timestamp: req.Timestamp, + ProjectID: uint64(p.ProjectID), + TrackerVersion: req.TrackerVersion, + RevID: req.RevID, + UserUUID: userUUID, + UserOS: "IOS", + UserOSVersion: req.UserOSVersion, + UserDevice: MapIOSDevice(req.UserDevice), + UserDeviceType: GetIOSDeviceType(req.UserDevice), + UserCountry: country, + })) + } + + // imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size + // if err != nil { + // responseWithError(w, 
http.StatusInternalServerError, err) + // return + // } + + responseWithJSON(w, &response{ + // ImagesHashList: imagesHashList, + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), + BeaconSizeLimit: BEACON_SIZE_LIMIT, + }) +} -// func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) { -// sessionData, err := tokenizer.ParseFromHTTPRequest(r) -// if err != nil && err != token.EXPIRED { -// responseWithError(w, http.StatusUnauthorized, err) -// return -// } -// // Check timestamps here? -// pushMessages(w, r, sessionData.ID) -// } +func pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { + responseWithError(w, http.StatusUnauthorized, err) + return + } + pushMessages(w, r, sessionData.ID, TOPIC_RAW_IOS) +} -// func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { -// r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) -// // defer r.Body.Close() -// err := r.ParseMultipartForm(1e5) // 100Kb -// if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { -// responseWithError(w, http.StatusUnsupportedMediaType, err) -// // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB -// } else if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging -// } -// if len(r.MultipartForm.Value["projectID"]) == 0 { -// responseWithError(w, http.StatusBadRequest, errors.New("projectID parameter required")) // status for missing/wrong parameter? 
-// return -// } -// // encodedProjectID, err := strconv.ParseUint(r.MultipartForm.Value["projectID"][0], 10, 64) -// // projectID := decodeProjectID(encodedProjectID) -// // if projectID == 0 || err != nil { -// // responseWithError(w, http.StatusUnprocessableEntity, errors.New("projectID value is incorrect")) -// // return -// // } -// prefix := r.MultipartForm.Value["projectID"][0] + "/" //strconv.FormatUint(uint64(projectID), 10) + "/" +func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil && err != token.EXPIRED { + responseWithError(w, http.StatusUnauthorized, err) + return + } + // Check timestamps here? + pushMessages(w, r, sessionData.ID,TOPIC_RAW_IOS) +} -// for _, fileHeaderList := range r.MultipartForm.File { -// for _, fileHeader := range fileHeaderList { -// file, err := fileHeader.Open() -// if err != nil { -// continue // TODO: send server error or accumulate successful files -// } -// key := prefix + fileHeader.Filename // TODO: Malicious image put: use jwt? -// go s3.Upload(file, key, "image/png", false) -// } -// } -// w.WriteHeader(http.StatusOK) -// } +func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { + log.Printf("recieved imagerequest") + + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { // Should accept expired token? 
+ responseWithError(w, http.StatusUnauthorized, err) + return + } + + r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) + // defer r.Body.Close() + err = r.ParseMultipartForm(1e6) // ~1Mb + if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { + responseWithError(w, http.StatusUnsupportedMediaType, err) + // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB + } else if err != nil { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } + + if (r.MultipartForm == nil) { + responseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed")) + } + + if len(r.MultipartForm.Value["projectKey"]) == 0 { + responseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter? + return + } + + prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/" + + for _, fileHeaderList := range r.MultipartForm.File { + for _, fileHeader := range fileHeaderList { + file, err := fileHeader.Open() + if err != nil { + continue // TODO: send server error or accumulate successful files + } + key := prefix + fileHeader.Filename + log.Printf("Uploading image... %v", key) + go func() { //TODO: mime type from header + if err := s3.Upload(file, key, "image/jpeg", false); err != nil { + log.Printf("Upload ios screen error. %v", err) + } + }() + } + } + + w.WriteHeader(http.StatusOK) +} diff --git a/backend/services/http/handlers_web.go b/backend/services/http/handlers_web.go new file mode 100644 index 000000000..5e144f1cc --- /dev/null +++ b/backend/services/http/handlers_web.go @@ -0,0 +1,249 @@ +package main + +import ( + "encoding/json" + "errors" + "io/ioutil" + "log" + "math/rand" + "net/http" + "strconv" + "time" + + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/token" + . 
"openreplay/backend/pkg/messages" +) + +func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { + type request struct { + Token string `json:"token"` + UserUUID *string `json:"userUUID"` + RevID string `json:"revID"` + Timestamp uint64 `json:"timestamp"` + TrackerVersion string `json:"trackerVersion"` + IsSnippet bool `json:"isSnippet"` + DeviceMemory uint64 `json:"deviceMemory"` + JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"` + ProjectKey *string `json:"projectKey"` + Reset bool `json:"reset"` + } + type response struct { + Timestamp int64 `json:"timestamp"` + Delay int64 `json:"delay"` + Token string `json:"token"` + UserUUID string `json:"userUUID"` + SessionID string `json:"sessionID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` + } + + startTime := time.Now() + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error? + //defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } + + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + + p, err := pgconn.GetProjectByKey(*req.ProjectKey) + if err != nil { + if postgres.IsNoRowsErr(err) { + responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or capture limit has been reached")) + } else { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } + return + } + + userUUID := getUUID(req.UserUUID) + tokenData, err := tokenizer.Parse(req.Token) + if err != nil || req.Reset { // Starting the new one + dice := byte(rand.Intn(100)) // [0, 100) + if dice >= p.SampleRate { + responseWithError(w, http.StatusForbidden, errors.New("cancel")) + return + } + + ua := uaParser.ParseFromHTTPRequest(r) + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not 
recognized")) + return + } + sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) + return + } + // TODO: if EXPIRED => send message for two sessions association + expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) + tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} + + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + producer.Produce(TOPIC_RAW_WEB, tokenData.ID, Encode(&SessionStart{ + Timestamp: req.Timestamp, + ProjectID: uint64(p.ProjectID), + TrackerVersion: req.TrackerVersion, + RevID: req.RevID, + UserUUID: userUUID, + UserAgent: r.Header.Get("User-Agent"), + UserOS: ua.OS, + UserOSVersion: ua.OSVersion, + UserBrowser: ua.Browser, + UserBrowserVersion: ua.BrowserVersion, + UserDevice: ua.Device, + UserDeviceType: ua.DeviceType, + UserCountry: country, + UserDeviceMemorySize: req.DeviceMemory, + UserDeviceHeapSize: req.JsHeapSizeLimit, + })) + } + + //delayDuration := time.Now().Sub(startTime) + responseWithJSON(w, &response{ + //Timestamp: startTime.UnixNano() / 1e6, + //Delay: delayDuration.Nanoseconds() / 1e6, + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), + BeaconSizeLimit: BEACON_SIZE_LIMIT, + }) +} + +func pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { + responseWithError(w, http.StatusUnauthorized, err) + return + } + body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) + //defer body.Close() + buf, err := ioutil.ReadAll(body) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + return + } + //log.Printf("Sending batch...") + //startTime := time.Now() + + // analyticsMessages := make([]Message, 0, 200) + + rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message { + switch m := 
msg.(type) { + case *SetNodeAttributeURLBased: + if m.Name == "src" || m.Name == "href" { + msg = &SetNodeAttribute{ + ID: m.ID, + Name: m.Name, + Value: handleURL(sessionData.ID, m.BaseURL, m.Value), + } + } else if m.Name == "style" { + msg = &SetNodeAttribute{ + ID: m.ID, + Name: m.Name, + Value: handleCSS(sessionData.ID, m.BaseURL, m.Value), + } + } + case *SetCSSDataURLBased: + msg = &SetCSSData{ + ID: m.ID, + Data: handleCSS(sessionData.ID, m.BaseURL, m.Data), + } + case *CSSInsertRuleURLBased: + msg = &CSSInsertRule{ + ID: m.ID, + Index: m.Index, + Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule), + } + } + + // switch msg.(type) { + // case *BatchMeta, // TODO: watchout! Meta().Index'es are changed here (though it is still unique for the topic-session pair) + // *SetPageLocation, + // *PageLoadTiming, + // *PageRenderTiming, + // *PerformanceTrack, + // *SetInputTarget, + // *SetInputValue, + // *MouseClick, + // *RawErrorEvent, + // *JSException, + // *ResourceTiming, + // *RawCustomEvent, + // *CustomIssue, + // *Fetch, + // *StateAction, + // *GraphQL, + // *CreateElementNode, + // *CreateTextNode, + // *RemoveNode, + // *CreateDocument, + // *RemoveNodeAttribute, + // *MoveNode, + // *SetCSSData, + // *CSSInsertRule, + // *CSSDeleteRule: + // analyticsMessages = append(analyticsMessages, msg) + //} + + return msg + }) + if err != nil { + responseWithError(w, http.StatusForbidden, err) + return + } + producer.Produce(TOPIC_RAW_WEB, sessionData.ID, rewritenBuf) + //producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages)) + //duration := time.Now().Sub(startTime) + //log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf))) + w.WriteHeader(http.StatusOK) +} + +func notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { + type request struct { + ProjectKey *string `json:"projectKey"` + TrackerVersion string `json:"trackerVersion"` + DoNotTrack bool 
`json:"DoNotTrack"` + // RevID string `json:"revID"` + } + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) + defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + return + } + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{ + ProjectKey: *req.ProjectKey, + TrackerVersion: req.TrackerVersion, + DoNotTrack: req.DoNotTrack, + Platform: "web", + UserAgent: r.Header.Get("User-Agent"), + UserOS: ua.OS, + UserOSVersion: ua.OSVersion, + UserBrowser: ua.Browser, + UserBrowserVersion: ua.BrowserVersion, + UserDevice: ua.Device, + UserDeviceType: ua.DeviceType, + UserCountry: country, + }) + if err != nil { + log.Printf("Unable to insert Unstarted Session: %v\n", err) + } + w.WriteHeader(http.StatusOK) +} \ No newline at end of file diff --git a/backend/services/http/main.go b/backend/services/http/main.go index 29181718f..eaede2d4b 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -34,10 +34,11 @@ var geoIP *geoip.GeoIP var tokenizer *token.Tokenizer var s3 *storage.S3 -var TOPIC_RAW string +var TOPIC_RAW_WEB string +var TOPIC_RAW_IOS string +var TOPIC_CACHE string var TOPIC_TRIGGER string -var TOPIC_ANALYTICS string -// var kafkaTopicEvents string +//var TOPIC_ANALYTICS string var CACHE_ASSESTS bool var BEACON_SIZE_LIMIT int64 @@ -46,13 +47,15 @@ func main() { producer = queue.NewProducer() defer producer.Close(15000) - TOPIC_RAW = env.String("TOPIC_RAW") + TOPIC_RAW_WEB = env.String("TOPIC_RAW_WEB") + TOPIC_RAW_IOS = env.String("TOPIC_RAW_IOS") + TOPIC_CACHE = 
env.String("TOPIC_CACHE") TOPIC_TRIGGER = env.String("TOPIC_TRIGGER") - TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS") + //TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS") rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN")) pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20) defer pgconn.Close() - //s3 = storage.NewS3(env.String("S3_BUCKET_IMAGES_IOS"), env.String("AWS_REGION")) + s3 = storage.NewS3(env.String("AWS_REGION"), env.String("S3_BUCKET_IOS_IMAGES")) tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET")) uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE")) geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE")) @@ -76,13 +79,16 @@ func main() { return } + log.Printf("Request: %v - %v ", r.Method, r.URL.Path) + + switch r.URL.Path { case "/": w.WriteHeader(http.StatusOK) case "/v1/web/not-started": switch r.Method { case http.MethodPost: - notStartedHandler(w, r) + notStartedHandlerWeb(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } @@ -96,38 +102,38 @@ func main() { case "/v1/web/i": switch r.Method { case http.MethodPost: - pushMessagesSeparatelyHandler(w, r) + pushMessagesHandlerWeb(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/start": + switch r.Method { + case http.MethodPost: + startSessionHandlerIOS(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/i": + switch r.Method { + case http.MethodPost: + pushMessagesHandlerIOS(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/late": + switch r.Method { + case http.MethodPost: + pushLateMessagesHandlerIOS(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/images": + switch r.Method { + case http.MethodPost: + imagesUploadHandlerIOS(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } - // case "/v1/ios/start": - // switch r.Method { - // case http.MethodPost: - // startSessionHandlerIOS(w, r) - // default: 
- // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/append": - // switch r.Method { - // case http.MethodPost: - // pushMessagesHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/late": - // switch r.Method { - // case http.MethodPost: - // pushLateMessagesHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/images": - // switch r.Method { - // case http.MethodPost: - // iosImagesUploadHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } default: w.WriteHeader(http.StatusNotFound) } diff --git a/backend/services/http/response.go b/backend/services/http/response.go index ffd22875e..11d9b328d 100644 --- a/backend/services/http/response.go +++ b/backend/services/http/response.go @@ -11,6 +11,7 @@ func responseWithJSON(w http.ResponseWriter, res interface{}) { if err != nil { log.Println(err) } + w.Header().Set("Content-Type", "application/json") w.Write(body) } diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go index e1ea58ebd..f664fe862 100644 --- a/backend/services/integrations/main.go +++ b/backend/services/integrations/main.go @@ -19,7 +19,7 @@ import ( func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - TOPIC_RAW := env.String("TOPIC_RAW") + TOPIC_RAW_WEB := env.String("TOPIC_RAW_WEB") POSTGRES_STRING := env.String("POSTGRES_STRING") pg := postgres.NewConn(POSTGRES_STRING) @@ -80,7 +80,7 @@ func main() { sessionID = sessData.ID } // TODO: send to ready-events topic. Otherwise it have to go through the events worker. 
- producer.Produce(TOPIC_RAW, sessionID, messages.Encode(event.RawErrorEvent)) + producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent)) case err := <-manager.Errors: log.Printf("Integration error: %v\n", err) case i := <-manager.RequestDataUpdates: diff --git a/backend/services/sink/main.go b/backend/services/sink/main.go index 4a6ac189d..b1bdf8d25 100644 --- a/backend/services/sink/main.go +++ b/backend/services/sink/main.go @@ -10,9 +10,9 @@ import ( "syscall" "openreplay/backend/pkg/env" - "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" + . "openreplay/backend/pkg/messages" ) @@ -27,16 +27,17 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_SINK"), []string{ - env.String("TOPIC_RAW"), + env.String("TOPIC_RAW_WEB"), + env.String("TOPIC_RAW_IOS"), }, - func(sessionID uint64, message messages.Message, _ *types.Meta) { - //typeID, err := messages.GetMessageTypeID(value) + func(sessionID uint64, message Message, _ *types.Meta) { + //typeID, err := GetMessageTypeID(value) // if err != nil { // log.Printf("Message type decoding error: %v", err) // return // } typeID := message.Meta().TypeID - if !messages.IsReplayerType(typeID) { + if !IsReplayerType(typeID) { return } @@ -44,7 +45,7 @@ func main() { value := message.Encode() var data []byte - if messages.IsIOSType(typeID) { + if IsIOSType(typeID) { data = value } else { data = make([]byte, len(value)+8) diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json index 7705f0fbd..db58c76ba 100644 --- a/ee/api/.chalice/config.json +++ b/ee/api/.chalice/config.json @@ -55,15 +55,20 @@ "S3_HOST": "", "S3_KEY": "", "S3_SECRET": "", - "version_number": "1.0.0", "LICENSE_KEY": "", "SAML2_MD_URL": "", "idp_entityId": "", "idp_sso_url": "", "idp_x509cert": "", "idp_sls_url": "", + "idp_name": "", + "sso_exp_delta_seconds": "172800", + "sso_landing": "/login?jwt=%s", "invitation_link": 
"/api/users/invitation?token=%s", - "change_password_link": "/reset-password?invitation=%s&&pass=%s" + "change_password_link": "/reset-password?invitation=%s&&pass=%s", + "iosBucket": "openreplay-ios-images", + "version_number": "1.3.6", + "assist_secret": "" }, "lambda_timeout": 150, "lambda_memory_size": 400, diff --git a/ee/api/.gitignore b/ee/api/.gitignore index cdb20e0f7..41d3d640f 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -204,6 +204,8 @@ Pipfile /chalicelib/core/log_tool_sentry.py /chalicelib/core/log_tool_stackdriver.py /chalicelib/core/log_tool_sumologic.py +/chalicelib/core/mobile.py +/chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_favorite_viewed.py /chalicelib/core/sessions_metas.py @@ -233,3 +235,12 @@ Pipfile /chalicelib/utils/smtp.py /chalicelib/utils/strings.py /chalicelib/utils/TimeUTC.py +/chalicelib/core/heatmaps.py +/entrypoint.bundle.sh +/entrypoint.sh +/env_handler.py +/chalicelib/blueprints/app/v1_api.py +/build.sh +/chalicelib/core/assist.py +/chalicelib/blueprints/app/__init__.py +/Dockerfile.bundle diff --git a/ee/api/_clickhouse_upgrade.sh b/ee/api/_clickhouse_upgrade.sh new file mode 100644 index 000000000..9b656a584 --- /dev/null +++ b/ee/api/_clickhouse_upgrade.sh @@ -0,0 +1,10 @@ +sudo yum update +sudo yum install yum-utils +sudo rpm --import https://repo.clickhouse.com/CLICKHOUSE-KEY.GPG +sudo yum-config-manager --add-repo https://repo.clickhouse.com/rpm/stable/x86_64 +sudo yum update +sudo service clickhouse-server restart + + +#later mus use in clickhouse-client: +#SET allow_experimental_window_functions = 1; \ No newline at end of file diff --git a/ee/api/app.py b/ee/api/app.py index 222e37a39..e12b64e0b 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -6,13 +6,13 @@ from chalicelib import _overrides from chalicelib.blueprints import bp_authorizers from chalicelib.blueprints import bp_core, bp_core_crons from chalicelib.blueprints import bp_core_dynamic, 
bp_core_dynamic_crons +from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml +from chalicelib.blueprints.app import v1_api, v1_api_ee from chalicelib.blueprints.subs import bp_dashboard from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.helper import environ -from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml - app = Chalice(app_name='parrot') app.debug = not helper.is_production() or helper.is_local() @@ -121,7 +121,8 @@ app.register_blueprint(bp_core_crons.app) app.register_blueprint(bp_core_dynamic.app) app.register_blueprint(bp_core_dynamic_crons.app) app.register_blueprint(bp_dashboard.app) - +app.register_blueprint(v1_api.app) +app.register_blueprint(v1_api_ee.app) # Enterprise app.register_blueprint(bp_ee.app) app.register_blueprint(bp_ee_crons.app) diff --git a/ee/api/chalicelib/blueprints/app/v1_api_ee.py b/ee/api/chalicelib/blueprints/app/v1_api_ee.py new file mode 100644 index 000000000..5682bf5b2 --- /dev/null +++ b/ee/api/chalicelib/blueprints/app/v1_api_ee.py @@ -0,0 +1,16 @@ +from chalice import Blueprint + +from chalicelib import _overrides +from chalicelib.blueprints import bp_authorizers +from chalicelib.utils import assist_helper + +app = Blueprint(__name__) +_overrides.chalice_app(app) + + +@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) +def get_assist_credentials(context): + credentials = assist_helper.get_temporary_credentials() + if "errors" in credentials: + return credentials + return {"data": credentials} diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 694f2f5ca..019da951f 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -1,10 +1,11 @@ from chalice import Blueprint, Response from chalicelib import _overrides +from chalicelib.core import assist from chalicelib.core import boarding from 
chalicelib.core import errors from chalicelib.core import license -from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager, assist +from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager from chalicelib.core import notifications from chalicelib.core import projects from chalicelib.core import signup @@ -25,9 +26,7 @@ def login(): data = app.current_request.json_body if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): return {"errors": ["Invalid captcha."]} - r = users.authenticate(data['email'], data['password'], - for_plugin=False - ) + r = users.authenticate(data['email'], data['password'], for_plugin=False) if r is None: return Response(status_code=401, body={ 'errors': ['You’ve entered invalid Email or Password.'] @@ -46,6 +45,9 @@ def login(): c.pop("createdAt") c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True, version=True) + c["smtp"] = helper.has_smtp() + c["iceServers"] = assist.get_ice_servers() + return { 'jwt': r.pop('jwt'), 'data': { @@ -142,7 +144,10 @@ def put_client(context): @app.route('/signup', methods=['GET'], authorizer=None) def get_all_signup(): - return {"data": tenants.tenants_exists()} + return {"data": {"tenants": tenants.tenants_exists(), + "sso": SAML2_helper.is_saml2_available(), + "ssoProvider": SAML2_helper.get_saml2_provider(), + "edition": helper.get_edition()}} @app.route('/signup', methods=['POST', 'PUT'], authorizer=None) @@ -347,8 +352,8 @@ def get_members(context): @app.route('/client/members', methods=['PUT', 'POST']) def add_member(context): - if SAML2_helper.is_saml2_available(): - return {"errors": ["please use your SSO server to add teammates"]} + # if SAML2_helper.is_saml2_available(): + # return {"errors": ["please use your SSO server to add teammates"]} data = app.current_request.json_body return 
users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data) diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index a0fa0aa8c..c71668e36 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -1,9 +1,58 @@ from chalice import Blueprint from chalicelib import _overrides +from chalicelib.core import roles from chalicelib.core import unlock +from chalicelib.utils import assist_helper app = Blueprint(__name__) _overrides.chalice_app(app) unlock.check() + + +@app.route('/client/roles', methods=['GET']) +def get_roles(context): + return { + 'data': roles.get_roles(tenant_id=context["tenantId"]) + } + + +@app.route('/client/roles', methods=['POST', 'PUT']) +def add_role(context): + data = app.current_request.json_body + data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], name=data["name"], + description=data.get("description"), permissions=data["permissions"]) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.route('/client/roles/{roleId}', methods=['POST', 'PUT']) +def edit_role(roleId, context): + data = app.current_request.json_body + data = roles.update(tenant_id=context['tenantId'], user_id=context['userId'], role_id=roleId, changes=data) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.route('/client/roles/{roleId}', methods=['DELETE']) +def delete_role(roleId, context): + data = roles.delete(tenant_id=context['tenantId'], user_id=context["userId"], role_id=roleId) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.route('/assist/credentials', methods=['GET']) +def get_assist_credentials(context): + return {"data": assist_helper.get_full_config()} diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index d5a964211..76e73b3b7 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ 
b/ee/api/chalicelib/blueprints/bp_saml.py @@ -1,6 +1,7 @@ from chalice import Blueprint from chalicelib import _overrides +from chalicelib.utils import SAML2_helper from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth app = Blueprint(__name__) @@ -9,30 +10,27 @@ _overrides.chalice_app(app) from chalicelib.utils.helper import environ from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request -from onelogin.saml2.utils import OneLogin_Saml2_Utils from chalice import Response -from chalicelib.core import users, tenants +from chalicelib.core import users, tenants, roles -@app.route("/saml2", methods=['GET'], authorizer=None) +@app.route('/sso/saml2', methods=['GET'], authorizer=None) def start_sso(): app.current_request.path = '' req = prepare_request(request=app.current_request) auth = init_saml_auth(req) sso_built_url = auth.login() return Response( - # status_code=301, status_code=307, body='', headers={'Location': sso_built_url, 'Content-Type': 'text/plain'}) -@app.route('/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None) +@app.route('/sso/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None) def process_sso_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] - request = req['request'] auth = init_saml_auth(req) request_id = None @@ -46,92 +44,64 @@ def process_sso_assertion(): if 'AuthNRequestID' in session: del session['AuthNRequestID'] user_data = auth.get_attributes() - # session['samlUserdata'] = user_data - # session['samlNameId'] = auth.get_nameid() - # session['samlNameIdFormat'] = auth.get_nameid_format() - # session['samlNameIdNameQualifier'] = auth.get_nameid_nq() - # session['samlNameIdSPNameQualifier'] = auth.get_nameid_spnq() - # session['samlSessionIndex'] = auth.get_session_index() - # session['samlSessionExpiration'] = auth.get_session_expiration() - # print('>>>>') - # print(session) 
- self_url = OneLogin_Saml2_Utils.get_self_url(req) - if 'RelayState' in request.form and self_url != request.form['RelayState']: - print("====>redirect") - return Response( - status_code=307, - body='', - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) elif auth.get_settings().is_debug_active(): error_reason = auth.get_last_error_reason() return {"errors": [error_reason]} email = auth.get_nameid() + print("received nameId:") + print(email) existing = users.get_by_email_only(auth.get_nameid()) internal_id = next(iter(user_data.get("internalId", [])), None) - if len(existing) == 0 or existing[0].get("origin") != 'saml': - tenant_key = user_data.get("tenantKey", []) - if len(tenant_key) == 0: - print("tenantKey not present in assertion") - return Response( - status_code=307, - body={"errors": ["tenantKey not present in assertion"]}, - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) - else: - t = tenants.get_by_tenant_key(tenant_key[0]) - if t is None: - return Response( - status_code=307, - body={"errors": ["Unknown tenantKey"]}, - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) - if len(existing) == 0: - print("== new user ==") - users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, origin='saml', - name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), - internal_id=internal_id) - else: - existing = existing[0] - if existing.get("origin") != 'saml': - print("== migrating user to SAML ==") - users.update(tenant_id=t['tenantId'], user_id=existing["id"], - changes={"origin": 'saml', "internal_id": internal_id}) - - return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) - - -@app.route('/saml2/slo', methods=['GET']) -def process_slo_request(context): - req = prepare_request(request=app.current_request) - session = 
req["cookie"]["session"] - request = req['request'] - auth = init_saml_auth(req) - - name_id = session_index = name_id_format = name_id_nq = name_id_spnq = None - if 'samlNameId' in session: - name_id = session['samlNameId'] - if 'samlSessionIndex' in session: - session_index = session['samlSessionIndex'] - if 'samlNameIdFormat' in session: - name_id_format = session['samlNameIdFormat'] - if 'samlNameIdNameQualifier' in session: - name_id_nq = session['samlNameIdNameQualifier'] - if 'samlNameIdSPNameQualifier' in session: - name_id_spnq = session['samlNameIdSPNameQualifier'] - users.change_jwt_iat(context["userId"]) + tenant_key = user_data.get("tenantKey", []) + if len(tenant_key) == 0: + print("tenantKey not present in assertion, please check your SP-assertion-configuration") + return {"errors": ["tenantKey not present in assertion, please check your SP-assertion-configuration"]} + else: + t = tenants.get_by_tenant_key(tenant_key[0]) + if t is None: + print("invalid tenantKey, please copy the correct value from Preferences > Account") + return {"errors": ["invalid tenantKey, please copy the correct value from Preferences > Account"]} + print(user_data) + role_name = user_data.get("role", []) + if len(role_name) == 0: + print("No role specified, setting role to member") + role_name = ["member"] + role_name = role_name[0] + role = roles.get_role_by_name(tenant_id=t['tenantId'], name=role_name) + if role is None: + return {"errors": [f"role {role_name} not found, please create it in openreplay first"]} + if existing is None: + print("== new user ==") + users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, + origin=SAML2_helper.get_saml2_provider(), + name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), + internal_id=internal_id, role_id=role["roleId"]) + else: + if t['tenantId'] != existing["tenantId"]: + print("user exists for a different tenant") + return {"errors": ["user exists for a different tenant"]} + if 
existing.get("origin") is None: + print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") + users.update(tenant_id=t['tenantId'], user_id=existing["id"], + changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) + expiration = auth.get_session_expiration() + expiration = expiration if expiration is not None and expiration > 10 * 60 \ + else int(environ.get("sso_exp_delta_seconds", 24 * 60 * 60)) + jwt = users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration) + if jwt is None: + return {"errors": ["null JWT"]} return Response( - status_code=307, + status_code=302, body='', - headers={'Location': auth.logout(name_id=name_id, session_index=session_index, nq=name_id_nq, - name_id_format=name_id_format, - spnq=name_id_spnq), 'Content-Type': 'text/plain'}) + headers={'Location': SAML2_helper.get_landing_URL(jwt), 'Content-Type': 'text/plain'}) -@app.route('/saml2/sls', methods=['GET'], authorizer=None) +@app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) def process_sls_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] - request = req['request'] auth = init_saml_auth(req) request_id = None if 'LogoutRequestID' in session: @@ -169,7 +139,7 @@ def process_sls_assertion(): headers={'Location': environ["SITE_URL"], 'Content-Type': 'text/plain'}) -@app.route('/saml2/metadata', methods=['GET'], authorizer=None) +@app.route('/sso/saml2/metadata', methods=['GET'], authorizer=None) def saml2_metadata(): req = prepare_request(request=app.current_request) auth = init_saml_auth(req) diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py index f7f50f52b..ea326c2a1 100644 --- a/ee/api/chalicelib/core/authorizers.py +++ b/ee/api/chalicelib/core/authorizers.py @@ -1,10 +1,10 @@ -from chalicelib.utils.helper import environ import jwt -from chalicelib.utils import helper -from chalicelib.utils.TimeUTC import TimeUTC from 
chalicelib.core import tenants from chalicelib.core import users +from chalicelib.utils import helper +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.helper import environ def jwt_authorizer(token): @@ -44,7 +44,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None): "userId": id, "tenantId": tenant_id, "exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000 \ - if exp is None else exp, + if exp is None else exp + TimeUTC.get_utc_offset() // 1000, "iss": environ["jwt_issuer"], "iat": iat // 1000, "aud": aud diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index a62e900bc..98b5620af 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -321,7 +321,7 @@ def get_details_chart(project_id, error_id, user_id, **data): "error_id": error_id} main_ch_query = f"""\ - SELECT error_id, + SELECT browser_details.error_id AS error_id, browsers_partition, os_partition, device_partition, @@ -516,7 +516,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F FROM errors WHERE {" AND ".join(ch_sub_query)} GROUP BY error_id, timestamp - ORDER BY timestamp) + ORDER BY timestamp) AS sub_table GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;""" # print("--------------------") diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index 3a636c967..e51816e85 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -1,36 +1,26 @@ -from chalicelib.utils import email_helper, captcha, helper from chalicelib.core import users +from chalicelib.utils import email_helper, captcha, helper -def step1(data): - print("====================== reset password 1 ===============") +def reset(data): + print("====================== reset password ===============") print(data) if helper.allow_captcha() and not 
captcha.is_valid(data["g-recaptcha-response"]): print("error: Invalid captcha.") return {"errors": ["Invalid captcha."]} if "email" not in data: return {"errors": ["email not found in body"]} - - a_users = users.get_by_email_only(data["email"]) - if len(a_users) > 1: - print(f"multiple users found for [{data['email']}] please contact our support") - return {"errors": ["multiple users, please contact our support"]} - elif len(a_users) == 1: - a_users = a_users[0] - invitation_link = users.generate_new_invitation(user_id=a_users["id"]) + if not helper.has_smtp(): + return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} + a_user = users.get_by_email_only(data["email"]) + if a_user is not None: + # ---FOR SSO + if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False: + return {"errors": ["Please use your SSO to login"]} + # ---------- + invitation_link = users.generate_new_invitation(user_id=a_user["id"]) email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link) else: print(f"invalid email address [{data['email']}]") return {"errors": ["invalid email address"]} return {"data": {"state": "success"}} - -# def step2(data): -# print("====================== change password 2 ===============") -# user = users.get_by_email_reset(data["email"], data["code"]) -# if not user: -# print("error: wrong email or reset code") -# return {"errors": ["wrong email or reset code"]} -# users.update(tenant_id=user["tenantId"], user_id=user["id"], -# changes={"token": None, "password": data["password"], "generatedPassword": False, -# "verifiedEmail": True}) -# return {"data": {"state": "success"}} diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py new file mode 100644 index 000000000..8ba62091a --- /dev/null +++ b/ee/api/chalicelib/core/roles.py @@ -0,0 +1,122 @@ +from chalicelib.core import users +from chalicelib.utils import pg_client, helper +from 
chalicelib.utils.TimeUTC import TimeUTC + + +def update(tenant_id, user_id, role_id, changes): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + + if len(changes.keys()) == 0: + return None + ALLOW_EDIT = ["name", "description", "permissions"] + sub_query = [] + for key in changes.keys(): + if key in ALLOW_EDIT: + sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.roles + SET {" ,".join(sub_query)} + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND deleted_at ISNULL + AND protected = FALSE + RETURNING *;""", + {"tenant_id": tenant_id, "role_id": role_id, **changes}) + ) + row = cur.fetchone() + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) + + +def create(tenant_id, user_id, name, description, permissions): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions) + VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[]) + RETURNING *;""", + {"tenant_id": tenant_id, "name": name, "description": description, "permissions": permissions}) + ) + row = cur.fetchone() + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) + + +def get_roles(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""SELECT * + FROM public.roles + where tenant_id =%(tenant_id)s + AND deleted_at IS NULL + ORDER BY role_id;""", + {"tenant_id": tenant_id}) + ) + rows = cur.fetchall() + for r in rows: + r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) + return 
helper.list_to_camel_case(rows) + + +def get_role_by_name(tenant_id, name): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""SELECT * + FROM public.roles + where tenant_id =%(tenant_id)s + AND deleted_at IS NULL + AND name ILIKE %(name)s + ;""", + {"tenant_id": tenant_id, "name": name}) + ) + row = cur.fetchone() + if row is not None: + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) + + +def delete(tenant_id, user_id, role_id): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""SELECT 1 + FROM public.roles + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND protected = TRUE + LIMIT 1;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + if cur.fetchone() is not None: + return {"errors": ["this role is protected"]} + cur.execute( + cur.mogrify("""SELECT 1 + FROM public.users + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + LIMIT 1;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + if cur.fetchone() is not None: + return {"errors": ["this role is already attached to other user(s)"]} + cur.execute( + cur.mogrify("""UPDATE public.roles + SET deleted_at = timezone('utc'::text, now()) + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND protected = FALSE;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + return get_roles(tenant_id=tenant_id) diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 652867c25..4650736a5 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -68,10 +68,16 @@ def create_step1(data): VALUES (%(companyName)s, %(versionNumber)s, 'ee') RETURNING tenant_id, api_key ), + r AS ( + INSERT INTO public.roles(tenant_id, name, description, permissions, protected) + 
VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE), + ((SELECT tenant_id FROM t), 'Member', 'Member', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE) + RETURNING * + ), u AS ( - INSERT INTO public.users (tenant_id, email, role, name, data) - VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s) - RETURNING user_id,email,role,name + INSERT INTO public.users (tenant_id, email, role, name, data, role_id) + VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s, (SELECT role_id FROM r WHERE name ='Owner')) + RETURNING user_id,email,role,name,role_id ), au AS ( INSERT INTO public.basic_authentication (user_id, password, generated_password) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 5fb293a66..f82abd191 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -3,7 +3,7 @@ import secrets from chalicelib.core import authorizers, metadata, projects, assist from chalicelib.core import tenants -from chalicelib.utils import dev +from chalicelib.utils import dev, SAML2_helper from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -14,13 +14,13 @@ def __generate_invitation_token(): return secrets.token_urlsafe(64) -def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False): +def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False, role_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ WITH u AS ( - INSERT INTO public.users (tenant_id, email, role, name, data) - VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s) - RETURNING user_id,email,role,name,appearance + INSERT INTO public.users (tenant_id, email, role, name, data, role_id) + VALUES (%(tenantId)s, 
%(email)s, %(role)s, %(name)s, %(data)s, %(role_id)s) + RETURNING user_id,email,role,name,appearance, role_id ), au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) @@ -35,19 +35,20 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - au.invitation_token + au.invitation_token, + u.role_id FROM u,au;""", {"tenantId": tenant_id, "email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), - "invitation_token": invitation_token}) + "invitation_token": invitation_token, "role_id": role_id}) cur.execute( query ) return helper.dict_to_camel_case(cur.fetchone()) -def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False): +def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False, role_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ UPDATE public.users @@ -56,7 +57,8 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own deleted_at= NULL, created_at = timezone('utc'::text, now()), tenant_id= %(tenant_id)s, - api_key= generate_api_key(20) + api_key= generate_api_key(20), + role_id= %(role_id)s WHERE user_id=%(user_id)s RETURNING user_id AS id, email, @@ -65,9 +67,11 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""", + (CASE WHEN role = 
'member' THEN TRUE ELSE FALSE END) AS member, + role_id;""", {"tenant_id": tenant_id, "user_id": user_id, "email": email, - "role": "owner" if owner else "admin" if admin else "member", "name": name}) + "role": "owner" if owner else "admin" if admin else "member", "name": name, + "role_id": role_id}) cur.execute( query ) @@ -157,7 +161,8 @@ def update(tenant_id, user_id, changes): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + users.appearance, + users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) if len(sub_query_bauth) > 0: @@ -177,7 +182,8 @@ def update(tenant_id, user_id, changes): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + users.appearance, + users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -198,14 +204,15 @@ def create_member(tenant_id, user_id, data): return {"errors": ["invalid user name"]} if name is None: name = data["email"] + role_id = data.get("roleId") invitation_token = __generate_invitation_token() user = get_deleted_user_by_email(email=data["email"]) if user is not None: new_member = restore_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name, user_id=user["userId"]) + admin=data.get("admin", False), name=name, user_id=user["userId"], role_id=role_id) else: new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name) + admin=data.get("admin", False), name=name, role_id=role_id) new_member["invitationLink"] = 
__get_invitation_link(new_member.pop("invitationToken")) helper.async_post(environ['email_basic'] % 'member_invitation', { @@ -243,19 +250,25 @@ def get(user_id, tenant_id): users.user_id AS id, email, role, - name, + users.name, basic_authentication.generated_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, appearance, api_key, - origin - FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + origin, + role_id, + roles.name AS role_name, + roles.permissions, + basic_authentication.password IS NOT NULL AS has_password + FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) WHERE users.user_id = %(userId)s - AND tenant_id = %(tenantId)s - AND deleted_at IS NULL + AND users.tenant_id = %(tenantId)s + AND users.deleted_at IS NULL + AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenantId)s) LIMIT 1;""", {"userId": user_id, "tenantId": tenant_id}) ) @@ -280,7 +293,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance"] + ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -324,15 +337,16 @@ def get_by_email_only(email): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - origin + origin, + basic_authentication.password IS NOT NULL AS has_password FROM public.users LEFT JOIN 
public.basic_authentication ON users.user_id=basic_authentication.user_id - WHERE - users.email = %(email)s - AND users.deleted_at IS NULL;""", + WHERE users.email = %(email)s + AND users.deleted_at IS NULL + LIMIT 1;""", {"email": email}) ) - r = cur.fetchall() - return helper.list_to_camel_case(r) + r = cur.fetchone() + return helper.dict_to_camel_case(r) def get_by_email_reset(email, reset_token): @@ -375,9 +389,13 @@ def get_members(tenant_id): (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, DATE_PART('day',timezone('utc'::text, now()) \ - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, - basic_authentication.password IS NOT NULL AS joined, - invitation_token - FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined, + invitation_token, + role_id, + roles.name AS role_name + FROM public.users + LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) WHERE users.tenant_id = %(tenantId)s AND users.deleted_at IS NULL ORDER BY name, id""", {"tenantId": tenant_id}) @@ -428,8 +446,8 @@ def change_password(tenant_id, user_id, email, old_password, new_password): item = get(tenant_id=tenant_id, user_id=user_id) if item is None: return {"errors": ["access denied"]} - if item["origin"] is not None: - return {"errors": ["cannot change your password because you are logged-in form an SSO service"]} + if item["origin"] is not None and item["hasPassword"] is False: + return {"errors": ["cannot change your password because you are logged-in from an SSO service"]} if old_password == new_password: return {"errors": ["old and new password are the same"]} auth = authenticate(email, old_password, for_change_password=True) @@ -597,19 +615,35 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): 
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, users.appearance, - users.origin + users.origin, + users.role_id, + roles.name AS role_name, + roles.permissions FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id) + LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1) + AND (roles.role_id IS NULL OR roles.deleted_at IS NULL) LIMIT 1;""", {"email": email, "password": password}) cur.execute(query) r = cur.fetchone() + if r is None and SAML2_helper.is_saml2_available(): + query = cur.mogrify( + f"""SELECT 1 + FROM public.users + WHERE users.email = %(email)s + AND users.deleted_at IS NULL + AND users.origin IS NOT NULL + LIMIT 1;""", + {"email": email}) + cur.execute(query) + if cur.fetchone() is not None: + return {"errors": ["must sign-in with SSO"]} + if r is not None: - if r["origin"] is not None: - return {"errors": ["must sign-in with SSO"]} if for_change_password: return True r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) @@ -637,7 +671,8 @@ def authenticate_sso(email, internal_id, exp=None): (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, users.appearance, - origin + origin, + role_id FROM public.users AS users WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""", {"email": email, "internal_id": internal_id}) @@ -645,33 +680,26 @@ def authenticate_sso(email, internal_id, exp=None): cur.execute(query) r = cur.fetchone() - if r is not None: - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) - query = cur.mogrify( - f"""UPDATE 
public.users - SET jwt_iat = timezone('utc'::text, now()) - WHERE user_id = %(user_id)s - RETURNING jwt_iat;""", - {"user_id": r["id"]}) - cur.execute(query) - return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], - TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), - aud=f"front:{helper.get_stage_name()}", - exp=exp), - "email": email, - **r - } + if r is not None: + r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) + return authorizers.generate_jwt(r['id'], r['tenantId'], + jwt_iat, aud=f"front:{helper.get_stage_name()}", + exp=(exp + jwt_iat // 1000) if exp is not None else None) return None -def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None): +def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ WITH u AS ( - INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id) - VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s) + INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id, role_id) + VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s, %(role_id)s) RETURNING * + ), + au AS ( + INSERT INTO public.basic_authentication(user_id) + VALUES ((SELECT user_id FROM u)) ) SELECT u.user_id AS id, u.email, @@ -686,7 +714,7 @@ def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None): FROM u;""", {"tenantId": tenant_id, "email": email, "internal_id": internal_id, "role": "admin" if admin else "member", "name": name, "origin": origin, - "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})}) + "role_id": role_id, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})}) cur.execute( query ) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index 
a0d9f28b5..25f279d3a 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -9,13 +9,13 @@ SAML2 = { "strict": True, "debug": True, "sp": { - "entityId": environ["SITE_URL"] + "/api/saml2/metadata/", + "entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/", "assertionConsumerService": { - "url": environ["SITE_URL"] + "/api/saml2/acs", + "url": environ["SITE_URL"] + "/api/sso/saml2/acs", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "singleLogoutService": { - "url": environ["SITE_URL"] + "/api/saml2/sls", + "url": environ["SITE_URL"] + "/api/sso/saml2/sls", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", @@ -26,7 +26,7 @@ SAML2 = { } idp = None # SAML2 config handler -if len(environ.get("SAML2_MD_URL","")) > 0: +if environ.get("SAML2_MD_URL") is not None and len(environ["SAML2_MD_URL"]) > 0: print("SAML2_MD_URL provided, getting IdP metadata config") from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser @@ -90,7 +90,7 @@ def prepare_request(request): 'https': 'on' if request.headers.get('x-forwarded-proto', 'http') == 'https' else 'off', 'http_host': request.headers['host'], 'server_port': url_data.port, - 'script_name': request.path, + 'script_name': "/api" + request.path, 'get_data': request.args.copy(), # Uncomment if using ADFS as IdP, https://github.com/onelogin/python-saml/pull/144 # 'lowercase_urlencoding': True, @@ -102,3 +102,12 @@ def prepare_request(request): def is_saml2_available(): return idp is not None + + +def get_saml2_provider(): + return environ.get("idp_name", "saml2") if is_saml2_available() and len( + environ.get("idp_name", "saml2")) > 0 else None + + +def get_landing_URL(jwt): + return environ["SITE_URL"] + environ.get("sso_landing", "/login?jwt=%s") % jwt diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py new 
file mode 100644 index 000000000..d31cadd1f --- /dev/null +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -0,0 +1,46 @@ +import base64 +import hashlib +import hmac +from time import time + +from chalicelib.core import assist +from chalicelib.utils import helper +from chalicelib.utils.helper import environ + + +def __get_secret(): + return environ["assist_secret"] if environ["assist_secret"] is not None and len( + environ["assist_secret"]) > 0 else None + + +def get_temporary_credentials(): + secret = __get_secret() + if secret is None: + return {"errors": ["secret not defined"]} + user = helper.generate_salt() + ttl = int(environ.get("assist_ttl", 48)) * 3600 + timestamp = int(time()) + ttl + username = str(timestamp) + ':' + user + dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) + dig = dig.digest() + credential = base64.b64encode(dig).decode() + return {'username': username, 'credential': credential} + + +def get_full_config(): + servers = assist.get_ice_servers() + if servers is None: + return None + servers = servers.split("|") + credentials = get_temporary_credentials() + if __get_secret() is not None: + servers = [{"url": s.split(",")[0], **credentials} for s in servers] + else: + for i in range(len(servers)): + s = servers[i].split("|") + if len(s) == 3: + servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + else: + servers[i] = {"url": s[0]} + + return servers diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 8dc6ce340..e241b5edd 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -10,4 +10,4 @@ jira==2.0.0 schedule==1.1.0 croniter==1.0.12 clickhouse-driver==0.1.5 -python3-saml==1.10.1 \ No newline at end of file +python3-saml==1.12.0 \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql new file mode 100644 index 000000000..2d7a14020 --- /dev/null +++ 
b/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql @@ -0,0 +1,13 @@ + +ALTER TABLE sessions_metadata + ADD COLUMN project_id UInt32, + ADD COLUMN tracker_version String, + ADD COLUMN rev_id Nullable(String), + ADD COLUMN user_uuid UUID, + ADD COLUMN user_os String, + ADD COLUMN user_os_version Nullable(String), + ADD COLUMN user_browser String, + ADD COLUMN user_browser_version Nullable(String), + ADD COLUMN user_device Nullable(String), + ADD COLUMN user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + ADD COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 
'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122); \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql new file mode 100644 index 000000000..6d466a7a0 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql @@ -0,0 +1,22 @@ +CREATE TABLE customs +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 
'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 
'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + name Nullable(String), + payload Nullable(String), + level Enum8('info'=0, 'error'=1) DEFAULT 'info' +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime) + TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index 6ad10f8a3..a6d0382e6 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -1,19 +1,31 @@ -CREATE TABLE sessions_metadata ( - session_id UInt64, - user_id Nullable(String), - user_anonymous_id Nullable(String), - metadata_1 Nullable(String), - metadata_2 Nullable(String), - metadata_3 Nullable(String), - metadata_4 Nullable(String), - metadata_5 Nullable(String), - metadata_6 Nullable(String), - metadata_7 Nullable(String), - metadata_8 Nullable(String), - metadata_9 Nullable(String), - metadata_10 Nullable(String), - datetime DateTime +CREATE TABLE sessions_metadata +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 
'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + user_id 
Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) ) ENGINE = MergeTree -PARTITION BY toDate(datetime) -ORDER BY (session_id) -TTL datetime + INTERVAL 1 MONTH; + PARTITION BY toDate(datetime) + ORDER BY (session_id) + TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql new file mode 100644 index 000000000..15ce18803 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -0,0 +1,72 @@ +BEGIN; + +CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; +CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; +CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + +CREATE INDEX users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; +CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues (issue_id, timestamp); +CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp); +CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); + +CREATE TABLE roles +( + role_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + description text DEFAULT NULL, + permissions text[] NOT NULL DEFAULT '{}', + protected bool NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT 
NULL +); + +INSERT INTO roles(tenant_id, name, description, permissions, protected) +SELECT * +FROM (SELECT tenant_id FROM tenants) AS tenants, + (VALUES ('Owner', 'Owner', + '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE), + ('Member', 'Member', + '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE) + ) AS default_roles(name, description, permissions, protected); + + +ALTER TABLE users + ADD COLUMN role_id integer REFERENCES roles (role_id) ON DELETE SET NULL; + +UPDATE users +SET role_id = r.role_id +FROM (SELECT tenant_id, role_id + FROM tenants + INNER JOIN roles USING (tenant_id) + WHERE roles.name = 'Owner') AS r(tenant_id, role_id) +WHERE users.tenant_id = r.tenant_id + AND users.role = 'owner'; + +UPDATE users +SET role_id = r.role_id +FROM (SELECT tenant_id, role_id + FROM tenants + INNER JOIN roles USING (tenant_id) + WHERE roles.name = 'Member') AS r(tenant_id, role_id) +WHERE users.tenant_id = r.tenant_id + AND users.role != 'owner'; + +DO +$$ + BEGIN + IF NOT EXISTS(SELECT 1 FROM pg_type WHERE typname = 'user_origin') THEN + CREATE TYPE user_origin AS ENUM ('saml'); + END IF; + END +$$; +ALTER TABLE public.users + ADD COLUMN IF NOT EXISTS origin user_origin NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS internal_id text NULL DEFAULT NULL; + + + +ALTER TABLE public.users + ALTER COLUMN origin TYPE text; +DROP TYPE IF EXISTS user_origin; +COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a47779a3b..10448c495 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1,10 +1,7 @@ BEGIN; - --- --- public.sql --- - -CREATE EXTENSION IF NOT EXISTS pg_trgm; -CREATE EXTENSION IF NOT EXISTS pgcrypto; - +-- Schemas and functions definitions: +CREATE SCHEMA IF NOT EXISTS 
events_common; +CREATE SCHEMA IF NOT EXISTS events; -- --- accounts.sql --- @@ -26,795 +23,7 @@ begin end; $$ LANGUAGE plpgsql; - -CREATE TABLE tenants -( - tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id text NOT NULL DEFAULT generate_api_key(20), - name text, - api_key text UNIQUE default generate_api_key(20) not null, - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - edition varchar(3) NOT NULL, - version_number text NOT NULL, - license text NULL, - opt_out bool NOT NULL DEFAULT FALSE, - t_projects integer NOT NULL DEFAULT 1, - t_sessions bigint NOT NULL DEFAULT 0, - t_users integer NOT NULL DEFAULT 1, - t_integrations integer NOT NULL DEFAULT 0 -); - -CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); -CREATE TYPE user_origin AS ENUM ('saml'); -CREATE TABLE users -( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": 
true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, - api_key text UNIQUE default generate_api_key(20) not null, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT '{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE, - origin user_origin NULL DEFAULT NULL -); - - -CREATE TABLE basic_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, - changed_at timestamp, - UNIQUE (user_id) -); - - -CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); -CREATE TABLE oauth_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - provider oauth_provider NOT NULL, - provider_user_id text NOT NULL, - token text NOT NULL, - UNIQUE (user_id, provider) -); - - --- --- projects.sql --- - -CREATE TABLE 
projects -( - project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - name text NOT NULL, - active boolean NOT NULL, - sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - max_session_duration integer NOT NULL DEFAULT 7200000, - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL, - gdpr jsonb NOT NULL DEFAULT '{ - "maskEmails": true, - "sampleRate": 33, - "maskNumbers": false, - "defaultInputMode": "plain" - }'::jsonb -- ?????? 
-); - -CREATE INDEX ON public.projects (project_key); -CREATE INDEX projects_tenant_id_idx ON projects (tenant_id); - -CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS -$$ -BEGIN - PERFORM pg_notify('project', row_to_json(NEW)::text); - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update - AFTER INSERT OR UPDATE - ON projects - FOR EACH ROW -EXECUTE PROCEDURE notify_project(); - --- --- alerts.sql --- - -CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); - -CREATE TABLE alerts -( - alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - name text NOT NULL, - description text NULL DEFAULT NULL, - active boolean NOT NULL DEFAULT TRUE, - detection_method alert_detection_method NOT NULL, - query jsonb NOT NULL, - deleted_at timestamp NULL DEFAULT NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{ - "renotifyInterval": 1440 - }'::jsonb -); - - -CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS -$$ -DECLARE - clone jsonb; - tenant_id integer; -BEGIN - clone = to_jsonb(NEW); - clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); - IF NEW.deleted_at NOTNULL THEN - clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); - END IF; - SELECT projects.tenant_id INTO tenant_id FROM public.projects WHERE projects.project_id = NEW.project_id LIMIT 1; - clone = jsonb_set(clone, '{tenant_id}', to_jsonb(tenant_id)); - PERFORM pg_notify('alert', clone::text); - RETURN NEW; -END ; -$$ LANGUAGE plpgsql; - - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON alerts - FOR EACH ROW -EXECUTE PROCEDURE notify_alert(); - - --- --- webhooks.sql --- - -create type webhook_type as enum ('webhook', 'slack', 'email'); - -create table 
webhooks -( - webhook_id integer generated by default as identity - constraint webhooks_pkey - primary key, - tenant_id integer not null - constraint webhooks_tenant_id_fkey - references tenants - on delete cascade, - endpoint text not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - auth_header text, - type webhook_type not null, - index integer default 0 not null, - name varchar(100) -); - -CREATE INDEX webhooks_tenant_id_idx ON webhooks (tenant_id); - --- --- notifications.sql --- - - -CREATE TABLE notifications -( - notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer REFERENCES tenants (tenant_id) ON DELETE CASCADE, - user_id integer REFERENCES users (user_id) ON DELETE CASCADE, - title text NOT NULL, - description text NOT NULL, - button_text varchar(80) NULL, - button_url text NULL, - image_url text NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{}'::jsonb, - CONSTRAINT notification_tenant_xor_user CHECK ( tenant_id NOTNULL AND user_id ISNULL OR - tenant_id ISNULL AND user_id NOTNULL ) -); -CREATE INDEX notifications_user_id_index ON public.notifications (user_id); -CREATE INDEX notifications_tenant_id_index ON public.notifications (tenant_id); -CREATE INDEX notifications_created_at_index ON public.notifications (created_at DESC); -CREATE INDEX notifications_created_at_epoch_idx ON public.notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); - -CREATE TABLE user_viewed_notifications -( - user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, - notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, - constraint user_viewed_notifications_pkey primary key (user_id, notification_id) -); - --- --- funnels.sql --- - -CREATE TABLE funnels -( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL 
REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text not null, - filter jsonb not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False -); - -CREATE INDEX ON public.funnels (user_id, is_public); - --- --- announcements.sql --- - -create type announcement_type as enum ('notification', 'alert'); - -create table announcements -( - announcement_id serial not null - constraint announcements_pk - primary key, - title text not null, - description text not null, - button_text varchar(30), - button_url text, - image_url text, - created_at timestamp default timezone('utc'::text, now()) not null, - type announcement_type default 'notification'::announcement_type not null -); - --- --- integrations.sql --- - -CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); -CREATE TABLE integrations -( - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - provider integration_provider NOT NULL, - options jsonb NOT NULL, - request_data jsonb NOT NULL DEFAULT '{}'::jsonb, - PRIMARY KEY (project_id, provider) -); - -CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS -$$ -BEGIN - IF NEW IS NULL THEN - PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); - ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN - PERFORM pg_notify('integration', row_to_json(NEW)::text); - END IF; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON integrations - FOR EACH ROW -EXECUTE PROCEDURE notify_integration(); - - -create table jira_cloud -( - user_id integer not null - constraint jira_cloud_pk - primary key - constraint 
jira_cloud_users_fkey - references users - on delete cascade, - username text not null, - token text not null, - url text -); - - --- --- issues.sql --- - -CREATE TYPE issue_type AS ENUM ( - 'click_rage', - 'dead_click', - 'excessive_scrolling', - 'bad_request', - 'missing_resource', - 'memory', - 'cpu', - 'slow_resource', - 'slow_page_load', - 'crash', - 'ml_cpu', - 'ml_memory', - 'ml_dead_click', - 'ml_click_rage', - 'ml_mouse_thrashing', - 'ml_excessive_scrolling', - 'ml_slow_resources', - 'custom', - 'js_exception' - ); - -CREATE TABLE issues -( - issue_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - type issue_type NOT NULL, - context_string text NOT NULL, - context jsonb DEFAULT NULL -); -CREATE INDEX ON issues (issue_id, type); -CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); - --- --- errors.sql --- - -CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); -CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); -CREATE TABLE errors -( - error_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - source error_source NOT NULL, - name text DEFAULT NULL, - message text NOT NULL, - payload jsonb NOT NULL, - status error_status NOT NULL DEFAULT 'unresolved', - parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, - stacktrace jsonb, --to save the stacktrace and not query S3 another time - stacktrace_parsed_at timestamp -); -CREATE INDEX ON errors (project_id, source); -CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); -CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); -CREATE INDEX errors_project_id_idx ON public.errors (project_id); -CREATE INDEX errors_project_id_status_idx ON 
public.errors (project_id, status); - -CREATE TABLE user_favorite_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); - -CREATE TABLE user_viewed_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); -CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); -CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - - --- --- sessions.sql --- -CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); -CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 
'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); -CREATE TYPE platform AS ENUM ('web','ios','android'); - -CREATE TABLE sessions -( - session_id bigint PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - tracker_version text NOT NULL, - start_ts bigint NOT NULL, - duration integer NULL, - rev_id text DEFAULT NULL, - platform platform NOT NULL DEFAULT 'web', - is_snippet boolean NOT NULL DEFAULT FALSE, - user_id text DEFAULT NULL, - user_anonymous_id text DEFAULT NULL, - user_uuid uuid NOT NULL, - user_agent text DEFAULT NULL, - user_os text NOT NULL, - user_os_version text DEFAULT NULL, - user_browser text DEFAULT NULL, - user_browser_version text DEFAULT NULL, - user_device text NOT NULL, - user_device_type device_type NOT NULL, - user_device_memory_size integer DEFAULT NULL, - user_device_heap_size bigint DEFAULT NULL, - user_country country NOT NULL, - pages_count integer NOT NULL DEFAULT 0, - events_count integer NOT NULL DEFAULT 0, - errors_count integer NOT NULL DEFAULT 0, - watchdogs_score bigint NOT NULL DEFAULT 0, - issue_score bigint NOT NULL DEFAULT 0, - issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL --- , --- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL -); -CREATE INDEX ON sessions (project_id, start_ts); -CREATE INDEX ON sessions (project_id, user_id); -CREATE INDEX ON sessions (project_id, 
user_anonymous_id); -CREATE INDEX ON sessions (project_id, user_device); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX ON sessions (project_id, metadata_1); -CREATE INDEX ON sessions (project_id, metadata_2); -CREATE INDEX ON sessions (project_id, metadata_3); -CREATE INDEX ON sessions (project_id, metadata_4); -CREATE INDEX ON sessions (project_id, metadata_5); -CREATE INDEX ON sessions (project_id, metadata_6); -CREATE INDEX ON sessions (project_id, metadata_7); -CREATE INDEX ON sessions (project_id, metadata_8); -CREATE INDEX ON sessions (project_id, metadata_9); -CREATE INDEX ON sessions (project_id, metadata_10); --- CREATE INDEX ON sessions (rehydration_id); -CREATE INDEX ON sessions (project_id, watchdogs_score DESC); -CREATE INDEX platform_idx ON public.sessions (platform); - -CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); -CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); -CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); -CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); -CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); -CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); -CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); -CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); -CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); -CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); -CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); -CREATE INDEX sessions_user_browser_gin_idx ON public.sessions 
USING GIN (user_browser gin_trgm_ops); -CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); -CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); -CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); -CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; - - -ALTER TABLE public.sessions - ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_browser ISNULL)); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_agent_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_agent ISNULL)); - - - -CREATE TABLE user_viewed_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - -CREATE TABLE user_favorite_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - - --- --- assignments.sql --- - -create table assigned_sessions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - issue_id text NOT NULL, - provider oauth_provider NOT NULL, - created_by integer NOT NULL, - created_at 
timestamp default timezone('utc'::text, now()) NOT NULL, - provider_data jsonb default '{}'::jsonb NOT NULL -); -CREATE INDEX ON assigned_sessions (session_id); - --- --- events_common.sql --- - -CREATE SCHEMA events_common; - -CREATE TYPE events_common.custom_level AS ENUM ('info','error'); - -CREATE TABLE events_common.customs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - name text NOT NULL, - payload jsonb NOT NULL, - level events_common.custom_level NOT NULL DEFAULT 'info', - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.customs (name); -CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); -CREATE INDEX ON events_common.customs (timestamp); - - -CREATE TABLE events_common.issues -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, - payload jsonb DEFAULT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); - - -CREATE TABLE events_common.requests -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - url text NOT NULL, - duration integer NOT NULL, - success boolean NOT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.requests (url); -CREATE INDEX ON events_common.requests (duration); -CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); -CREATE INDEX ON events_common.requests (timestamp); -CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); - -- --- events.sql --- -CREATE SCHEMA events; - -CREATE TABLE 
events.pages -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - host text NOT NULL, - path text NOT NULL, - base_path text NOT NULL, - referrer text DEFAULT NULL, - base_referrer text DEFAULT NULL, - dom_building_time integer DEFAULT NULL, - dom_content_loaded_time integer DEFAULT NULL, - load_time integer DEFAULT NULL, - first_paint_time integer DEFAULT NULL, - first_contentful_paint_time integer DEFAULT NULL, - speed_index integer DEFAULT NULL, - visually_complete integer DEFAULT NULL, - time_to_interactive integer DEFAULT NULL, - response_time bigint DEFAULT NULL, - response_end bigint DEFAULT NULL, - ttfb integer DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.pages (session_id); -CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); -CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); -CREATE INDEX ON events.pages (timestamp); -CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); -CREATE INDEX pages_base_path_idx ON events.pages (base_path); -CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); -CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); -CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - (CASE - WHEN base_referrer LIKE 'http://%' - THEN 7 - WHEN base_referrer LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); -CREATE INDEX ON events.pages (response_time); -CREATE INDEX ON events.pages (response_end); -CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); -CREATE INDEX pages_path_idx ON events.pages (path); -CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; -CREATE INDEX 
pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; -CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; -CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); - - -CREATE TABLE events.clicks -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, - selector text DEFAULT '' NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.clicks (session_id); -CREATE INDEX ON events.clicks (label); -CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); -CREATE INDEX ON events.clicks (timestamp); -CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); -CREATE INDEX clicks_url_idx ON events.clicks (url); -CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); -CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); - - -CREATE TABLE events.inputs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.inputs (session_id); -CREATE INDEX ON events.inputs (label, value); -CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); -CREATE INDEX inputs_label_idx ON events.inputs (label); -CREATE INDEX ON events.inputs (timestamp); -CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); - -CREATE TABLE events.errors -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - error_id text 
NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.errors (session_id); -CREATE INDEX ON events.errors (timestamp); - - -CREATE TABLE events.graphql -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.graphql (name); -CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.graphql (timestamp); - -CREATE TABLE events.state_actions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.state_actions (name); -CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.state_actions (timestamp); - -CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); -CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); -CREATE TABLE events.resources -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - duration bigint NULL, - type events.resource_type NOT NULL, - url text NOT NULL, - url_host text NOT NULL, - url_hostpath text NOT NULL, - success boolean NOT NULL, - status smallint NULL, - method events.resource_method NULL, - ttfb bigint NULL, - header_size bigint NULL, - encoded_body_size integer NULL, - decoded_body_size integer NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.resources (session_id); -CREATE INDEX ON events.resources (timestamp); -CREATE INDEX ON events.resources (success); -CREATE INDEX ON 
events.resources (status); -CREATE INDEX ON events.resources (type); -CREATE INDEX ON events.resources (duration) WHERE duration > 0; -CREATE INDEX ON events.resources (url_host); - -CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); -CREATE INDEX resources_url_idx ON events.resources (url); -CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); -CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); - - - -CREATE TABLE events.performance -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - message_id bigint NOT NULL, - min_fps smallint NOT NULL, - avg_fps smallint NOT NULL, - max_fps smallint NOT NULL, - min_cpu smallint NOT NULL, - avg_cpu smallint NOT NULL, - max_cpu smallint NOT NULL, - min_total_js_heap_size bigint NOT NULL, - avg_total_js_heap_size bigint NOT NULL, - max_total_js_heap_size bigint NOT NULL, - min_used_js_heap_size bigint NOT NULL, - avg_used_js_heap_size bigint NOT NULL, - max_used_js_heap_size bigint NOT NULL, - PRIMARY KEY (session_id, message_id) -); - CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS $$ @@ -839,38 +48,868 @@ BEGIN END; $$ LANGUAGE plpgsql IMMUTABLE; +-- --- integrations.sql --- + +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +-- --- alerts.sql --- + +CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS +$$ +DECLARE + clone jsonb; +BEGIN + clone = to_jsonb(NEW); + clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); + IF 
NEW.deleted_at NOTNULL THEN + clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); + END IF; + PERFORM pg_notify('alert', clone::text); + RETURN NEW; +END ; +$$ LANGUAGE plpgsql; + +-- --- projects.sql --- + +CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS +$$ +BEGIN + PERFORM pg_notify('project', row_to_json(NEW)::text); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- All tables and types: + +DO +$$ + BEGIN + IF EXISTS(SELECT + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = 'tenants') THEN + raise notice 'DB exists, skipping creation query'; + ELSE + raise notice 'Creating DB'; + +-- --- public.sql --- + + CREATE EXTENSION IF NOT EXISTS pg_trgm; + CREATE EXTENSION IF NOT EXISTS pgcrypto; + + +-- --- accounts.sql --- + + CREATE TABLE tenants + ( + tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + user_id text NOT NULL DEFAULT generate_api_key(20), + name text, + api_key text UNIQUE default generate_api_key(20) not null, + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + edition varchar(3) NOT NULL, + version_number text NOT NULL, + license text NULL, + opt_out bool NOT NULL DEFAULT FALSE, + t_projects integer NOT NULL DEFAULT 1, + t_sessions bigint NOT NULL DEFAULT 0, + t_users integer NOT NULL DEFAULT 1, + t_integrations integer NOT NULL DEFAULT 0 + ); + + CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); + + CREATE TABLE users + ( + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + appearance jsonb NOT NULL 
default '{ + "role": "dev", + "dashboard": { + "cpu": true, + "fps": false, + "avgCpu": true, + "avgFps": true, + "errors": true, + "crashes": true, + "overview": true, + "sessions": true, + "topMetrics": true, + "callsErrors": true, + "pageMetrics": true, + "performance": true, + "timeToRender": false, + "userActivity": false, + "avgFirstPaint": false, + "countSessions": true, + "errorsPerType": true, + "slowestImages": true, + "speedLocation": true, + "slowestDomains": true, + "avgPageLoadTime": true, + "avgTillFirstBit": false, + "avgTimeToRender": true, + "avgVisitedPages": false, + "avgImageLoadTime": true, + "busiestTimeOfDay": true, + "errorsPerDomains": true, + "missingResources": true, + "resourcesByParty": true, + "sessionsFeedback": false, + "slowestResources": true, + "avgUsedJsHeapSize": true, + "domainsErrors_4xx": true, + "domainsErrors_5xx": true, + "memoryConsumption": true, + "pagesDomBuildtime": false, + "pagesResponseTime": true, + "avgRequestLoadTime": true, + "avgSessionDuration": false, + "sessionsPerBrowser": false, + "applicationActivity": true, + "sessionsFrustration": false, + "avgPagesDomBuildtime": true, + "avgPagesResponseTime": false, + "avgTimeToInteractive": true, + "resourcesCountByType": true, + "resourcesLoadingTime": true, + "avgDomContentLoadStart": true, + "avgFirstContentfulPixel": false, + "resourceTypeVsResponseEnd": true, + "impactedSessionsByJsErrors": true, + "impactedSessionsBySlowPages": true, + "resourcesVsVisuallyComplete": true, + "pagesResponseTimeDistribution": true + }, + "sessionsLive": false, + "sessionsDevtools": true + }'::jsonb, + api_key text UNIQUE default generate_api_key(20) not null, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT '{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE, + origin text NULL DEFAULT NULL, + role_id integer REFERENCES roles (role_id) ON DELETE SET NULL, + internal_id text NULL DEFAULT NULL + ); + + + CREATE TABLE 
basic_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + password text DEFAULT NULL, + generated_password boolean NOT NULL DEFAULT false, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + changed_at timestamp, + UNIQUE (user_id) + ); + + + CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); + CREATE TABLE oauth_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + provider oauth_provider NOT NULL, + provider_user_id text NOT NULL, + token text NOT NULL, + UNIQUE (user_id, provider) + ); + + +-- --- projects.sql --- + + CREATE TABLE projects + ( + project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + active boolean NOT NULL, + sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + max_session_duration integer NOT NULL DEFAULT 7200000, + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL, + gdpr jsonb NOT NULL DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "plain" + }'::jsonb -- ?????? 
+ ); + + CREATE INDEX ON public.projects (project_key); + CREATE INDEX projects_tenant_id_idx ON projects (tenant_id); + +-- --- alerts.sql --- + + CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); + + CREATE TABLE alerts + ( + alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + name text NOT NULL, + description text NULL DEFAULT NULL, + active boolean NOT NULL DEFAULT TRUE, + detection_method alert_detection_method NOT NULL, + query jsonb NOT NULL, + deleted_at timestamp NULL DEFAULT NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{ + "renotifyInterval": 1440 + }'::jsonb + ); + + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON alerts + FOR EACH ROW + EXECUTE PROCEDURE notify_alert(); + + +-- --- webhooks.sql --- + + create type webhook_type as enum ('webhook', 'slack', 'email'); + + create table webhooks + ( + webhook_id integer generated by default as identity + constraint webhooks_pkey + primary key, + tenant_id integer not null + constraint webhooks_tenant_id_fkey + references tenants + on delete cascade, + endpoint text not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + auth_header text, + type webhook_type not null, + index integer default 0 not null, + name varchar(100) + ); + +CREATE INDEX webhooks_tenant_id_idx ON webhooks (tenant_id); + +-- --- notifications.sql --- + + + CREATE TABLE notifications + ( + notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer REFERENCES tenants (tenant_id) ON DELETE CASCADE, + user_id integer REFERENCES users (user_id) ON DELETE CASCADE, + title text NOT NULL, + description text NOT NULL, + button_text varchar(80) NULL, + button_url text NULL, + image_url text NULL, + created_at timestamp NOT NULL DEFAULT 
timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{}'::jsonb, + CONSTRAINT notification_tenant_xor_user CHECK ( tenant_id NOTNULL AND user_id ISNULL OR + tenant_id ISNULL AND user_id NOTNULL ) + ); + CREATE INDEX notifications_user_id_index ON public.notifications (user_id); + CREATE INDEX notifications_tenant_id_index ON public.notifications (tenant_id); + CREATE INDEX notifications_created_at_index ON public.notifications (created_at DESC); + CREATE INDEX notifications_created_at_epoch_idx ON public.notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); + + CREATE TABLE user_viewed_notifications + ( + user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, + notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, + constraint user_viewed_notifications_pkey primary key (user_id, notification_id) + ); + +-- --- funnels.sql --- + + CREATE TABLE funnels + ( + funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + + CREATE INDEX ON public.funnels (user_id, is_public); + +-- --- announcements.sql --- + + create type announcement_type as enum ('notification', 'alert'); + + create table announcements + ( + announcement_id serial not null + constraint announcements_pk + primary key, + title text not null, + description text not null, + button_text varchar(30), + button_url text, + image_url text, + created_at timestamp default timezone('utc'::text, now()) not null, + type announcement_type default 'notification'::announcement_type not null + ); + +-- --- integrations.sql --- + + CREATE TYPE integration_provider AS ENUM ('bugsnag', 
'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); + CREATE TABLE integrations + ( + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + provider integration_provider NOT NULL, + options jsonb NOT NULL, + request_data jsonb NOT NULL DEFAULT '{}'::jsonb, + PRIMARY KEY (project_id, provider) + ); + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON integrations + FOR EACH ROW + EXECUTE PROCEDURE notify_integration(); + + + create table jira_cloud + ( + user_id integer not null + constraint jira_cloud_pk + primary key + constraint jira_cloud_users_fkey + references users + on delete cascade, + username text not null, + token text not null, + url text + ); + + +-- --- issues.sql --- + + CREATE TYPE issue_type AS ENUM ( + 'click_rage', + 'dead_click', + 'excessive_scrolling', + 'bad_request', + 'missing_resource', + 'memory', + 'cpu', + 'slow_resource', + 'slow_page_load', + 'crash', + 'ml_cpu', + 'ml_memory', + 'ml_dead_click', + 'ml_click_rage', + 'ml_mouse_thrashing', + 'ml_excessive_scrolling', + 'ml_slow_resources', + 'custom', + 'js_exception' + ); + + CREATE TABLE issues + ( + issue_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + type issue_type NOT NULL, + context_string text NOT NULL, + context jsonb DEFAULT NULL + ); + CREATE INDEX ON issues (issue_id, type); + CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); + +-- --- errors.sql --- + + CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); + CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); + CREATE TABLE errors + ( + error_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + 
source error_source NOT NULL, + name text DEFAULT NULL, + message text NOT NULL, + payload jsonb NOT NULL, + status error_status NOT NULL DEFAULT 'unresolved', + parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, + stacktrace jsonb, --to save the stacktrace and not query S3 another time + stacktrace_parsed_at timestamp + ); + CREATE INDEX errors_error_id_idx ON errors (error_id); + CREATE INDEX ON errors (project_id, source); + CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); + CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); + CREATE INDEX errors_project_id_idx ON public.errors (project_id); + CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); + + CREATE TABLE user_favorite_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + + CREATE TABLE user_viewed_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); + CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + + +-- --- sessions.sql --- + CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); + CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 
'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); + CREATE TYPE platform AS ENUM ('web','ios','android'); + + CREATE TABLE sessions + ( + session_id bigint PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + tracker_version text NOT NULL, + start_ts bigint NOT NULL, + duration integer NULL, + rev_id text DEFAULT NULL, + platform platform NOT NULL DEFAULT 'web', + is_snippet boolean NOT NULL DEFAULT FALSE, + user_id text DEFAULT NULL, + user_anonymous_id text DEFAULT NULL, + user_uuid uuid NOT NULL, + user_agent text DEFAULT NULL, + user_os text NOT NULL, + user_os_version text DEFAULT NULL, + user_browser text DEFAULT NULL, + user_browser_version text DEFAULT NULL, + user_device text NOT NULL, + user_device_type device_type NOT NULL, + user_device_memory_size integer DEFAULT NULL, + user_device_heap_size bigint DEFAULT NULL, + user_country country NOT NULL, + pages_count integer NOT NULL DEFAULT 0, + events_count integer NOT NULL 
DEFAULT 0, + errors_count integer NOT NULL DEFAULT 0, + watchdogs_score bigint NOT NULL DEFAULT 0, + issue_score bigint NOT NULL DEFAULT 0, + issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL +-- , +-- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL + ); + CREATE INDEX ON sessions (project_id, start_ts); + CREATE INDEX ON sessions (project_id, user_id); + CREATE INDEX ON sessions (project_id, user_anonymous_id); + CREATE INDEX ON sessions (project_id, user_device); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX ON sessions (project_id, metadata_1); + CREATE INDEX ON sessions (project_id, metadata_2); + CREATE INDEX ON sessions (project_id, metadata_3); + CREATE INDEX ON sessions (project_id, metadata_4); + CREATE INDEX ON sessions (project_id, metadata_5); + CREATE INDEX ON sessions (project_id, metadata_6); + CREATE INDEX ON sessions (project_id, metadata_7); + CREATE INDEX ON sessions (project_id, metadata_8); + CREATE INDEX ON sessions (project_id, metadata_9); + CREATE INDEX ON sessions (project_id, metadata_10); +-- CREATE INDEX ON sessions (rehydration_id); + CREATE INDEX ON sessions (project_id, watchdogs_score DESC); + CREATE INDEX platform_idx ON public.sessions (platform); + + CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); + CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); + CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); + CREATE INDEX sessions_metadata4_gin_idx ON 
public.sessions USING GIN (metadata_4 gin_trgm_ops); + CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); + CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); + CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); + CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); + CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); + CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); + CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); + CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); + CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); + CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); + CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); + CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; + + + ALTER TABLE public.sessions + ADD CONSTRAINT web_browser_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_browser ISNULL)); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_agent_constraint CHECK ( + (sessions.platform = 'web' AND 
sessions.user_agent NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_agent ISNULL)); + + + CREATE TABLE user_viewed_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + CREATE TABLE user_favorite_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + +-- --- assignments.sql --- + + create table assigned_sessions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + issue_id text NOT NULL, + provider oauth_provider NOT NULL, + created_by integer NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + provider_data jsonb default '{}'::jsonb NOT NULL + ); + CREATE INDEX ON assigned_sessions (session_id); + +-- --- events_common.sql --- + + CREATE SCHEMA IF NOT EXISTS events_common; + + CREATE TYPE events_common.custom_level AS ENUM ('info','error'); + + CREATE TABLE events_common.customs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + name text NOT NULL, + payload jsonb NOT NULL, + level events_common.custom_level NOT NULL DEFAULT 'info', + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.customs (name); + CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); + CREATE INDEX ON events_common.customs (timestamp); + + + CREATE TABLE events_common.issues + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, + payload jsonb DEFAULT NULL, + PRIMARY KEY (session_id, 
timestamp, seq_index) + ); + + + CREATE TABLE events_common.requests + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + url text NOT NULL, + duration integer NOT NULL, + success boolean NOT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.requests (url); + CREATE INDEX ON events_common.requests (duration); + CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); + CREATE INDEX ON events_common.requests (timestamp); + CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE + WHEN url LIKE 'http://%' + THEN 7 + WHEN url LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + +-- --- events.sql --- + CREATE SCHEMA IF NOT EXISTS events; + + CREATE TABLE events.pages + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + host text NOT NULL, + path text NOT NULL, + base_path text NOT NULL, + referrer text DEFAULT NULL, + base_referrer text DEFAULT NULL, + dom_building_time integer DEFAULT NULL, + dom_content_loaded_time integer DEFAULT NULL, + load_time integer DEFAULT NULL, + first_paint_time integer DEFAULT NULL, + first_contentful_paint_time integer DEFAULT NULL, + speed_index integer DEFAULT NULL, + visually_complete integer DEFAULT NULL, + time_to_interactive integer DEFAULT NULL, + response_time bigint DEFAULT NULL, + response_end bigint DEFAULT NULL, + ttfb integer DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.pages (session_id); + CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); + CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); + CREATE INDEX ON events.pages (timestamp); + CREATE INDEX pages_base_path_gin_idx2 ON events.pages 
USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); + CREATE INDEX pages_base_path_idx ON events.pages (base_path); + CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); + CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); + CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, + length(base_referrer) - (CASE + WHEN base_referrer LIKE 'http://%' + THEN 7 + WHEN base_referrer LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + CREATE INDEX ON events.pages (response_time); + CREATE INDEX ON events.pages (response_end); + CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); + CREATE INDEX pages_path_idx ON events.pages (path); + CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; + CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; + CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; + CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); + CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); + + CREATE TABLE events.clicks + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.clicks (session_id); + CREATE INDEX ON events.clicks (label); + CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); + CREATE INDEX ON events.clicks (timestamp); + CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); + CREATE INDEX clicks_url_idx ON events.clicks (url); + CREATE INDEX clicks_url_gin_idx ON 
events.clicks USING GIN (url gin_trgm_ops); + CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); + CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); + + + CREATE TABLE events.inputs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.inputs (session_id); + CREATE INDEX ON events.inputs (label, value); + CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); + CREATE INDEX inputs_label_idx ON events.inputs (label); + CREATE INDEX ON events.inputs (timestamp); + CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); + + CREATE TABLE events.errors + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.errors (session_id); + CREATE INDEX ON events.errors (timestamp); + CREATE INDEX errors_error_id_idx ON events.errors (error_id); + + + CREATE TABLE events.graphql + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.graphql (name); + CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.graphql (timestamp); + + CREATE TABLE events.state_actions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY 
(session_id, message_id) + ); + CREATE INDEX ON events.state_actions (name); + CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.state_actions (timestamp); + + CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); + CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); + CREATE TABLE events.resources + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + duration bigint NULL, + type events.resource_type NOT NULL, + url text NOT NULL, + url_host text NOT NULL, + url_hostpath text NOT NULL, + success boolean NOT NULL, + status smallint NULL, + method events.resource_method NULL, + ttfb bigint NULL, + header_size bigint NULL, + encoded_body_size integer NULL, + decoded_body_size integer NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.resources (session_id); + CREATE INDEX ON events.resources (timestamp); + CREATE INDEX ON events.resources (success); + CREATE INDEX ON events.resources (status); + CREATE INDEX ON events.resources (type); + CREATE INDEX ON events.resources (duration) WHERE duration > 0; + CREATE INDEX ON events.resources (url_host); + + CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); + CREATE INDEX resources_url_idx ON events.resources (url); + CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); + CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); + + + CREATE TABLE events.performance + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + message_id bigint NOT NULL, + min_fps smallint NOT NULL, + avg_fps smallint NOT NULL, + max_fps smallint NOT NULL, + min_cpu smallint NOT 
NULL, + avg_cpu smallint NOT NULL, + max_cpu smallint NOT NULL, + min_total_js_heap_size bigint NOT NULL, + avg_total_js_heap_size bigint NOT NULL, + max_total_js_heap_size bigint NOT NULL, + min_used_js_heap_size bigint NOT NULL, + avg_used_js_heap_size bigint NOT NULL, + max_used_js_heap_size bigint NOT NULL, + PRIMARY KEY (session_id, message_id) + ); -- --- autocomplete.sql --- -CREATE TABLE autocomplete -( - value text NOT NULL, - type text NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE -); + CREATE TABLE autocomplete + ( + value text NOT NULL, + type text NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE + ); -CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); -CREATE index autocomplete_project_id_idx ON autocomplete (project_id); -CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); -CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); + CREATE index autocomplete_project_id_idx ON autocomplete (project_id); + CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); + CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); -- --- jobs.sql --- -CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed'); -CREATE TYPE job_action AS ENUM ('delete_user_data'); -CREATE TABLE jobs -( - job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - description text NOT NULL, - status job_status NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - action job_action NOT NULL, - reference_id text NOT NULL, - created_at timestamp default timezone('utc'::text, now()) NOT NULL, - updated_at timestamp default timezone('utc'::text, now()) NULL, - start_at timestamp NOT NULL, - errors text NULL -); -CREATE 
INDEX ON jobs (status); -CREATE INDEX ON jobs (start_at); + CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed'); + CREATE TYPE job_action AS ENUM ('delete_user_data'); + CREATE TABLE jobs + ( + job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + description text NOT NULL, + status job_status NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + action job_action NOT NULL, + reference_id text NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + updated_at timestamp default timezone('utc'::text, now()) NULL, + start_at timestamp NOT NULL, + errors text NULL + ); + CREATE INDEX ON jobs (status); + CREATE INDEX ON jobs (start_at); + + CREATE TABLE roles + ( + role_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + description text DEFAULT NULL, + permissions text[] NOT NULL DEFAULT '{}', + protected bool NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL + ); + + + raise notice 'DB created'; + END IF; + END; + +$$ +LANGUAGE plpgsql; COMMIT; diff --git a/ee/scripts/helm/roles/openreplay/defaults/main.yaml b/ee/scripts/helm/roles/openreplay/defaults/main.yaml index e7d0d5b0b..eb9071ff3 100644 --- a/ee/scripts/helm/roles/openreplay/defaults/main.yaml +++ b/ee/scripts/helm/roles/openreplay/defaults/main.yaml @@ -4,7 +4,6 @@ app_name: "" db_name: "" db_list: - "minio" - - "nfs-server-provisioner" - "postgresql" - "redis" - "clickhouse" diff --git a/frontend/app/Router.js b/frontend/app/Router.js index bbbd98c3c..643351d67 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -69,7 +69,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); organisation: state.getIn([ 'user', 'client', 'name' ]), tenantId: state.getIn([ 'user', 'client', 
'tenantId' ]), tenants: state.getIn(['user', 'tenants']), - existingTenant: state.getIn(['user', 'existingTenant']), + existingTenant: state.getIn(['user', 'authDetails', 'tenants']), onboarding: state.getIn([ 'user', 'onboarding' ]) }; }, { diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index babb7a7c6..02e575033 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -80,6 +80,7 @@ export default class APIClient { path !== '/targets_temp' && !path.includes('/metadata/session_search') && !path.includes('/watchdogs/rules') && + !path.includes('/assist/credentials') && !!this.siteId && siteIdRequiredPaths.some(sidPath => path.startsWith(sidPath)) ) { diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx index 1c6521a6b..0327b8254 100644 --- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx +++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx @@ -10,21 +10,21 @@ import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream export interface Props { - remoteStream: MediaStream | null, + incomeStream: MediaStream | null, localStream: LocalStream | null, userId: String, endCall: () => void } -const ChatWindow: FC = function ChatWindow({ userId, remoteStream, localStream, endCall }) { +const ChatWindow: FC = function ChatWindow({ userId, incomeStream, localStream, endCall }) { const [localVideoEnabled, setLocalVideoEnabled] = useState(false) const [remoteVideoEnabled, setRemoteVideoEnabled] = useState(false) - + useEffect(() => { - if (!remoteStream) { return } + if (!incomeStream) { return } const iid = setInterval(() => { - const settings = remoteStream.getVideoTracks()[0]?.getSettings() + const settings = incomeStream.getVideoTracks()[0]?.getSettings() const isDummyVideoTrack = !!settings ? 
(settings.width === 2 || settings.frameRate === 0) : true console.log(isDummyVideoTrack, settings) const shouldBeEnabled = !isDummyVideoTrack @@ -33,7 +33,7 @@ const ChatWindow: FC = function ChatWindow({ userId, remoteStream, localS } }, 1000) return () => clearInterval(iid) - }, [ remoteStream, localVideoEnabled ]) + }, [ incomeStream, localVideoEnabled ]) const minimize = !localVideoEnabled && !remoteVideoEnabled @@ -48,7 +48,7 @@ const ChatWindow: FC = function ChatWindow({ userId, remoteStream, localS
- +
diff --git a/frontend/app/components/Assist/components/AssistActions/AassistActions.css b/frontend/app/components/Assist/components/AssistActions/AassistActions.css index 85f5867c6..8a5758d90 100644 --- a/frontend/app/components/Assist/components/AssistActions/AassistActions.css +++ b/frontend/app/components/Assist/components/AssistActions/AassistActions.css @@ -1,11 +1,4 @@ -.inCall { - & svg { - fill: $red - } - color: $red; -} - .disabled { opacity: 0.5; pointer-events: none; -} \ No newline at end of file +} diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index e3d1dfcf9..fd0505e7f 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -31,16 +31,19 @@ interface Props { userId: String, toggleChatWindow: (state) => void, calling: CallingState, - peerConnectionStatus: ConnectionStatus + peerConnectionStatus: ConnectionStatus, + remoteControlEnabled: boolean, + hasPermission: boolean, + isEnterprise: boolean, } -function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus }: Props) { - const [ remoteStream, setRemoteStream ] = useState(null); +function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled, hasPermission, isEnterprise }: Props) { + const [ incomeStream, setIncomeStream ] = useState(null); const [ localStream, setLocalStream ] = useState(null); - const [ endCall, setEndCall ] = useState<()=>void>(()=>{}); + const [ callObject, setCallObject ] = useState<{ end: ()=>void, toggleRemoteControl: ()=>void } | null >(null); useEffect(() => { - return endCall + return callObject?.end() }, []) useEffect(() => { @@ -49,13 +52,12 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus } }, [peerConnectionStatus]) - function call() { 
RequestLocalStream().then(lStream => { setLocalStream(lStream); - setEndCall(() => callPeer( + setCallObject(callPeer( lStream, - setRemoteStream, + setIncomeStream, lStream.stop.bind(lStream), onReject, onError @@ -74,6 +76,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus } const inCall = calling !== CallingState.False; + const cannotCall = (peerConnectionStatus !== ConnectionStatus.Connected) || (isEnterprise && !hasPermission) return (
@@ -83,11 +86,11 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus className={ cn( 'cursor-pointer p-2 mr-2 flex items-center', - {[stl.inCall] : inCall }, - {[stl.disabled]: peerConnectionStatus !== ConnectionStatus.Connected} + // {[stl.inCall] : inCall }, + {[stl.disabled]: cannotCall} ) } - onClick={ inCall ? endCall : confirmCall} + onClick={ inCall ? callObject?.end : confirmCall} role="button" > - { inCall ? 'End Call' : 'Call' } + { inCall ? 'End Call' : 'Call' }
} content={ `Call ${userId ? userId : 'User'}` } @@ -103,16 +106,41 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus inverted position="top right" /> + { calling === CallingState.True && +
+ + { 'Remote Control' } +
+ }
- { inCall && } + { inCall && callObject && }
) } -const con = connect(null, { toggleChatWindow }) +const con = connect(state => { + const permissions = state.getIn([ 'user', 'account', 'permissions' ]) || [] + return { + hasPermission: permissions.includes('ASSIST_CALL'), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', + } +}, { toggleChatWindow }) export default con(connectPlayer(state => ({ calling: state.calling, + remoteControlEnabled: state.remoteControl, peerConnectionStatus: state.peerConnectionStatus, }))(AssistActions)) diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js index 720bc713d..a71c54cf9 100644 --- a/frontend/app/components/BugFinder/BugFinder.js +++ b/frontend/app/components/BugFinder/BugFinder.js @@ -18,7 +18,7 @@ import withLocationHandlers from "HOCs/withLocationHandlers"; import { fetch as fetchFilterVariables } from 'Duck/sources'; import { fetchList as fetchIntegrationVariables, fetchSources } from 'Duck/customField'; import { RehydrateSlidePanel } from './WatchDogs/components'; -import { setActiveTab } from 'Duck/sessions'; +import { setActiveTab, setFunnelPage } from 'Duck/sessions'; import SessionsMenu from './SessionsMenu/SessionsMenu'; import SessionFlowList from './SessionFlowList/SessionFlowList'; import { LAST_7_DAYS } from 'Types/app/period'; @@ -74,7 +74,8 @@ const allowedQueryKeys = [ fetchSiteList, fetchFunnelsList, resetFunnel, - resetFunnelFilters + resetFunnelFilters, + setFunnelPage }) @withPageTitle("Sessions - OpenReplay") export default class BugFinder extends React.PureComponent { @@ -114,6 +115,10 @@ export default class BugFinder extends React.PureComponent { } } + componentDidMount() { + this.props.setFunnelPage(false); + } + toggleRehydratePanel = () => { this.setState({ showRehydratePanel: !this.state.showRehydratePanel }) } diff --git a/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx 
index c07af0f99..dfcfeb90e 100644 --- a/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx @@ -4,6 +4,7 @@ import { connect } from 'react-redux'; import { NoContent, Loader } from 'UI'; import { List, Map } from 'immutable'; import SessionItem from 'Shared/SessionItem'; +import withPermissions from 'HOCs/withPermissions' import { KEYS } from 'Types/filter/customFilter'; import { applyFilter, addAttribute } from 'Duck/filters'; import Filter from 'Types/filter'; @@ -38,7 +39,7 @@ function LiveSessionList(props: Props) { } else { props.addAttribute({ label: 'Anonymous ID', key: 'USERANONYMOUSID', type: "USERANONYMOUSID", operator: 'is', value: userAnonymousId }) } - + props.applyFilter() } @@ -77,8 +78,12 @@ function LiveSessionList(props: Props) { ) } -export default connect(state => ({ - list: state.getIn(['sessions', 'liveSessions']), - loading: state.getIn([ 'sessions', 'loading' ]), - filters: state.getIn([ 'filters', 'appliedFilter' ]), -}), { fetchList, applyFilter, addAttribute })(LiveSessionList) +export default withPermissions(['ASSIST_LIVE'])(connect( + (state) => ({ + list: state.getIn(['sessions', 'liveSessions']), + loading: state.getIn([ 'sessions', 'loading' ]), + filters: state.getIn([ 'filters', 'appliedFilter' ]), + }), + { + fetchList, applyFilter, addAttribute } +)(LiveSessionList)); diff --git a/frontend/app/components/Client/Client.js b/frontend/app/components/Client/Client.js index f8a3ee0c8..6cae36710 100644 --- a/frontend/app/components/Client/Client.js +++ b/frontend/app/components/Client/Client.js @@ -15,6 +15,7 @@ import styles from './client.css'; import cn from 'classnames'; import PreferencesMenu from './PreferencesMenu'; import Notifications from './Notifications'; +import Roles from './Roles'; @connect((state) => ({ appearance: state.getIn([ 'user', 'account', 'appearance' ]), @@ -42,6 +43,7 @@ export default class Client extends 
React.PureComponent { + ) diff --git a/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js b/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js new file mode 100644 index 000000000..6d8233826 --- /dev/null +++ b/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js @@ -0,0 +1,60 @@ +import Highlight from 'react-highlight' +import ToggleContent from 'Shared/ToggleContent' +import DocLink from 'Shared/DocLink/DocLink'; + +const AxiosDoc = (props) => { + const { projectKey } = props; + return ( +
+
This plugin allows you to capture axios requests and inspect them later on while replaying session recordings. This is very useful for understanding and fixing issues.
+ +
Installation
+ + {`npm i @openreplay/tracker-axios`} + + +
Usage
+

Initialize the @openreplay/tracker package as usual then load the axios plugin. Note that OpenReplay axios plugin requires axios@^0.21.2 as a peer dependency.

+
+ +
Usage
+ + {`import tracker from '@openreplay/tracker'; +import trackerAxios from '@openreplay/tracker-axios'; +const tracker = new OpenReplay({ + projectKey: '${projectKey}' +}); +tracker.use(trackerAxios(options)); // check list of available options below +tracker.start();`} + + } + second={ + + {`import OpenReplay from '@openreplay/tracker/cjs'; +import trackerAxios from '@openreplay/tracker-axios/cjs'; +const tracker = new OpenReplay({ + projectKey: '${projectKey}' +}); +tracker.use(trackerAxios(options)); // check list of available options below +//... +function MyApp() { + useEffect(() => { // use componentDidMount in case of React Class Component + tracker.start(); + }, []) +//... +}`} + + } + /> + + +
+ ) +}; + +AxiosDoc.displayName = "AxiosDoc"; + +export default AxiosDoc; diff --git a/frontend/app/components/Client/Integrations/AxiosDoc/index.js b/frontend/app/components/Client/Integrations/AxiosDoc/index.js new file mode 100644 index 000000000..a5a8a1873 --- /dev/null +++ b/frontend/app/components/Client/Integrations/AxiosDoc/index.js @@ -0,0 +1 @@ +export { default } from './AxiosDoc' \ No newline at end of file diff --git a/frontend/app/components/Client/Integrations/Integrations.js b/frontend/app/components/Client/Integrations/Integrations.js index f34050055..855ff8511 100644 --- a/frontend/app/components/Client/Integrations/Integrations.js +++ b/frontend/app/components/Client/Integrations/Integrations.js @@ -29,6 +29,7 @@ import FetchDoc from './FetchDoc'; import MobxDoc from './MobxDoc'; import ProfilerDoc from './ProfilerDoc'; import AssistDoc from './AssistDoc'; +import AxiosDoc from './AxiosDoc/AxiosDoc'; const NONE = -1; const SENTRY = 0; @@ -51,6 +52,7 @@ const FETCH = 16; const MOBX = 17; const PROFILER = 18; const ASSIST = 19; +const AXIOS = 20; const TITLE = { [ SENTRY ]: 'Sentry', @@ -73,6 +75,7 @@ const TITLE = { [ MOBX ] : 'MobX', [ PROFILER ] : 'Profiler', [ ASSIST ] : 'Assist', + [ AXIOS ] : 'Axios', } const DOCS = [REDUX, VUE, GRAPHQL, NGRX, FETCH, MOBX, PROFILER, ASSIST] @@ -191,6 +194,8 @@ export default class Integrations extends React.PureComponent { return case ASSIST: return + case AXIOS: + return default: return null; } @@ -295,7 +300,14 @@ export default class Integrations extends React.PureComponent { onClick={ () => this.showIntegrationConfig(NGRX) } // integrated={ sentryIntegrated } /> - + this.showIntegrationConfig(MOBX) } + // integrated={ sentryIntegrated } + /> this.showIntegrationConfig(FETCH) } // integrated={ sentryIntegrated } /> - - this.showIntegrationConfig(MOBX) } - // integrated={ sentryIntegrated } - /> - this.showIntegrationConfig(PROFILER) } // integrated={ sentryIntegrated } /> + 
this.showIntegrationConfig(AXIOS) } + // integrated={ sentryIntegrated } + /> !r.protected).map(r => ({ text: r.name, value: r.roleId })).toJS(), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', }), { init, save, edit, deleteMember, fetchList, - generateInviteLink + generateInviteLink, + fetchRoles }) @withPageTitle('Users - OpenReplay Preferences') class ManageUsers extends React.PureComponent { state = { showModal: false, remaining: this.props.account.limits.teamMember.remaining, invited: false } + // writeOption = (e, { name, value }) => this.props.edit({ [ name ]: value }); onChange = (e, { name, value }) => this.props.edit({ [ name ]: value }); onChangeCheckbox = ({ target: { checked, name } }) => this.props.edit({ [ name ]: checked }); setFocus = () => this.focusElement.focus(); closeModal = () => this.setState({ showModal: false }); componentWillMount = () => { this.props.fetchList(); + this.props.fetchRoles(); } adminLabel = (user) => { @@ -76,81 +84,99 @@ class ManageUsers extends React.PureComponent { }); } - formContent = (member, account) => ( -
-
-
- - { this.focusElement = ref; } } - name="name" - value={ member.name } - onChange={ this.onChange } - className={ styles.input } - id="name-field" - /> -
+ formContent = () => { + const { member, account, isEnterprise, roles } = this.props; -
- - -
- { !account.smtp && -
- SMTP is not configured. Please follow (see here how to set it up). You can still add new users, but you’d have to manually copy then send them the invitation link. -
- } -
-
-
-
- - +
+ + +
+ { !account.smtp && +
+ SMTP is not configured. Please follow (see here how to set it up). You can still add new users, but you’d have to manually copy then send them the invitation link. +
+ } +
+ +
{ 'Can manage Projects and team members.' }
+
+ + { isEnterprise && ( +
+ + +
+ )} + + +
+
+ + +
+ { !member.joined && member.invitationLink && + + }
- { !member.joined && member.invitationLink && - - }
-
- ) + ) + } init = (v) => { this.props.init(v); @@ -160,7 +186,7 @@ class ManageUsers extends React.PureComponent { render() { const { - members, member, loading, account, hideHeader = false, + members, loading, account, hideHeader = false } = this.props; const { showModal, remaining, invited } = this.state; const isAdmin = account.admin || account.superAdmin; @@ -173,7 +199,7 @@ class ManageUsers extends React.PureComponent { title="Invite People" size="small" isDisplayed={ showModal } - content={ this.formContent(member, account) } + content={ this.formContent() } onClose={ this.closeModal } />
@@ -194,7 +220,7 @@ class ManageUsers extends React.PureComponent { />
} - // disabled={ canAddUsers } + disabled={ canAddUsers } content={ `${ !canAddUsers ? (!isAdmin ? PERMISSION_WARNING : LIMIT_WARNING) : 'Add team member' }` } size="tiny" inverted diff --git a/frontend/app/components/Client/ManageUsers/UserItem.js b/frontend/app/components/Client/ManageUsers/UserItem.js index 3c533080f..b40b5182a 100644 --- a/frontend/app/components/Client/ManageUsers/UserItem.js +++ b/frontend/app/components/Client/ManageUsers/UserItem.js @@ -7,6 +7,7 @@ const UserItem = ({ user, adminLabel, deleteHandler, editHandler, generateInvite
{ user.name || user.email }
{ adminLabel &&
{ adminLabel }
} + { user.roleName &&
{ user.roleName }
}
{ user.expiredInvitation && !user.joined && { history.push(clientRoute(tab)); @@ -50,7 +50,6 @@ function PreferencesMenu({ activeTab, appearance, history }) { {
- setTab(CLIENT_TABS.MANAGE_USERS) } /> -
+
+ + { isEnterprise && ( +
+ setTab(CLIENT_TABS.MANAGE_ROLES) } + /> +
+ )}
({ appearance: state.getIn([ 'user', 'account', 'appearance' ]), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', }))(withRouter(PreferencesMenu)); diff --git a/frontend/app/components/Client/ProfileSettings/ProfileSettings.js b/frontend/app/components/Client/ProfileSettings/ProfileSettings.js index c6fbda5ec..5df0d1aec 100644 --- a/frontend/app/components/Client/ProfileSettings/ProfileSettings.js +++ b/frontend/app/components/Client/ProfileSettings/ProfileSettings.js @@ -11,10 +11,11 @@ import { connect } from 'react-redux'; @withPageTitle('Account - OpenReplay Preferences') @connect(state => ({ account: state.getIn([ 'user', 'account' ]), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', })) export default class ProfileSettings extends React.PureComponent { render() { - const { account } = this.props; + const { account, isEnterprise } = this.props; return (
@@ -55,15 +56,19 @@ export default class ProfileSettings extends React.PureComponent {
-
+ { !isEnterprise && ( + <> +
+
+
+

{ 'Data Collection' }

+
{ 'Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.' }
+
+
+
+ + )} -
-
-

{ 'Data Collection' }

-
{ 'Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.' }
-
-
-
{ account.license && ( <>
diff --git a/frontend/app/components/Client/ProfileSettings/TenantKey.js b/frontend/app/components/Client/ProfileSettings/TenantKey.js index 8e76bdb43..e6c43591d 100644 --- a/frontend/app/components/Client/ProfileSettings/TenantKey.js +++ b/frontend/app/components/Client/ProfileSettings/TenantKey.js @@ -4,37 +4,35 @@ import { connect } from 'react-redux'; import styles from './profileSettings.css'; @connect(state => ({ - key: state.getIn([ 'user', 'client', 'tenantKey' ]), - loading: state.getIn([ 'user', 'updateAccountRequest', 'loading' ]) || - state.getIn([ 'user', 'putClientRequest', 'loading' ]), + tenantKey: state.getIn([ 'user', 'client', 'tenantKey' ]), })) export default class TenantKey extends React.PureComponent { state = { copied: false } copyHandler = () => { - const { key } = this.props; + const { tenantKey } = this.props; this.setState({ copied: true }); - copy(key); + copy(tenantKey); setTimeout(() => { this.setState({ copied: false }); }, 1000); }; render() { - const { key } = this.props; + const { tenantKey } = this.props; const { copied } = this.state; return (
- +
void, + edit: (role: any) => void, + instance: any, + roles: any[], + deleteRole: (id: any) => void, + fetchList: () => Promise, + account: any, + permissionsMap: any +} + +function Roles(props: Props) { + const { loading, instance, roles, init, edit, deleteRole, account, permissionsMap } = props + const [showModal, setShowmModal] = useState(false) + const isAdmin = account.admin || account.superAdmin; + + console.log('permissionsMap', permissionsMap) + + + useEffect(() => { + props.fetchList() + }, []) + + const closeModal = () => { + setShowmModal(false) + setTimeout(() => { + init() + }, 100) + } + + const editHandler = role => { + init(role) + setShowmModal(true) + } + + const deleteHandler = async (role) => { + if (await confirm({ + header: 'Roles', + confirmation: `Are you sure you want to remove this role?` + })) { + deleteRole(role.roleId) + } + } + + return ( + + + } + onClose={ closeModal } + /> +
+
+
+

Manage Roles and Permissions

+ + setShowmModal(true) } + /> +
+ } + disabled={ isAdmin } + size="tiny" + inverted + position="top left" + /> +
+
+ + +
+ {roles.map(role => ( + + ))} +
+
+
+ + + ) +} + +export default connect(state => { + const permissions = state.getIn(['roles', 'permissions']) + const permissionsMap = {} + permissions.forEach(p => { + permissionsMap[p.value] = p.name + }); + return { + instance: state.getIn(['roles', 'instance']) || null, + permissionsMap: permissionsMap, + roles: state.getIn(['roles', 'list']), + loading: state.getIn(['roles', 'fetchRequest', 'loading']), + account: state.getIn([ 'user', 'account' ]) + } +}, { init, edit, fetchList, deleteRole })(Roles) \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx b/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx new file mode 100644 index 000000000..0dd56dfd9 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import Role from 'Types/role' + +interface Props { + role: Role +} +function Permissions(props: Props) { + return ( +
+ +
+ ); +} + +export default Permissions; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/Permissions/index.ts b/frontend/app/components/Client/Roles/components/Permissions/index.ts new file mode 100644 index 000000000..659544a53 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/Permissions/index.ts @@ -0,0 +1 @@ +export { default } from './Permissions'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx new file mode 100644 index 000000000..7b79f6651 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx @@ -0,0 +1,101 @@ +import React, { useRef, useEffect } from 'react' +import { connect } from 'react-redux' +import stl from './roleForm.css' +import { save, edit } from 'Duck/roles' +import { Input, Button, Checkbox } from 'UI' + +interface Permission { + name: string, + value: string +} + +interface Props { + role: any, + edit: (role: any) => void, + save: (role: any) => Promise, + closeModal: () => void, + saving: boolean, + permissions: Array[] +} + +const RoleForm = ({ role, closeModal, edit, save, saving, permissions }: Props) => { + let focusElement = useRef(null) + const _save = () => { + save(role).then(() => { + closeModal() + }) + } + + const write = ({ target: { value, name } }) => edit({ [ name ]: value }) + + const onChangeOption = (e) => { + const { permissions } = role + const index = permissions.indexOf(e) + const _perms = permissions.contains(e) ? permissions.remove(index) : permissions.push(e) + edit({ permissions: _perms }) + } + + useEffect(() => { + focusElement && focusElement.current && focusElement.current.focus() + }, []) + + return ( +
+ +
+ + +
+ +
+ { permissions.map((permission: any, index) => ( +
+ onChangeOption(permission.value) } + label={permission.name} + /> +
+ ))} +
+ + +
+
+ + +
+
+
+ ); +} + +export default connect(state => ({ + role: state.getIn(['roles', 'instance']), + permissions: state.getIn(['roles', 'permissions']), + saving: state.getIn([ 'roles', 'saveRequest', 'loading' ]), +}), { edit, save })(RoleForm); \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/index.ts b/frontend/app/components/Client/Roles/components/RoleForm/index.ts new file mode 100644 index 000000000..3bb62ee58 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/index.ts @@ -0,0 +1 @@ +export { default } from './RoleForm'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css b/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css new file mode 100644 index 000000000..a0c5934c8 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css @@ -0,0 +1,21 @@ +.form { + padding: 0 20px; + + & .formGroup { + margin-bottom: 15px; + } + & label { + display: block; + margin-bottom: 5px; + font-weight: 500; + } + + & .input { + width: 100%; + } + + & input[type=checkbox] { + margin-right: 10px; + height: 13px; + } +} \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx new file mode 100644 index 000000000..c4cdb7a25 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx @@ -0,0 +1,48 @@ +import React from 'react' +import { Icon } from 'UI' +import stl from './roleItem.css' +import cn from 'classnames' + +function PermisionLabel({ permission }: any) { + return ( +
{ permission }
+ ); +} + +interface Props { + role: any, + deleteHandler?: (role: any) => void, + editHandler?: (role: any) => void, + permissions: any +} +function RoleItem({ role, deleteHandler, editHandler, permissions }: Props) { + return ( +
+ +
+ { role.name } +
+ {role.permissions.map((permission: any) => ( + + // { permissions[permission].name } + ))} +
+
+ +
+ { !!deleteHandler && +
deleteHandler(role) } id="trash"> + +
+ } + { !!editHandler && +
editHandler(role) }> + +
+ } +
+
+ ); +} + +export default RoleItem; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/index.ts b/frontend/app/components/Client/Roles/components/RoleItem/index.ts new file mode 100644 index 000000000..645d37fd1 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/index.ts @@ -0,0 +1 @@ +export { default } from './RoleItem' \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css new file mode 100644 index 000000000..7173d1c33 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css @@ -0,0 +1,47 @@ +.wrapper { + display: flex; + align-items: center; + width: 100%; + border-bottom: solid thin #e6e6e6; + padding: 10px 0px; +} + +.actions { + margin-left: auto; + /* opacity: 0; */ + transition: all 0.4s; + display: flex; + align-items: center; + & .button { + padding: 5px; + cursor: pointer; + margin-left: 10px; + display: flex; + align-items: center; + justify-content: center; + &:hover { + & svg { + fill: $teal-dark; + } + } + &.disabled { + pointer-events: none; + opacity: 0.5; + } + } + + & .disabled { + pointer-events: none; + opacity: 0.5; + } +} + +.label { + margin-left: 10px; + padding: 0 10px; + border-radius: 3px; + background-color: $gray-lightest; + font-size: 12px; + border: solid thin $gray-light; + width: fit-content; +} \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/index.ts b/frontend/app/components/Client/Roles/index.ts new file mode 100644 index 000000000..9e6fe3912 --- /dev/null +++ b/frontend/app/components/Client/Roles/index.ts @@ -0,0 +1 @@ +export { default } from './Roles'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/roles.css b/frontend/app/components/Client/Roles/roles.css new file mode 100644 index 000000000..819111686 --- /dev/null +++ 
b/frontend/app/components/Client/Roles/roles.css @@ -0,0 +1,13 @@ +.wrapper { + padding: 0; +} +.tabHeader { + display: flex; + align-items: center; + margin-bottom: 25px; + + & .tabTitle { + margin: 0 15px 0 0; + font-weight: 400 !important; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Dashboard.js b/frontend/app/components/Dashboard/Dashboard.js index ffbc87b41..2e5cba630 100644 --- a/frontend/app/components/Dashboard/Dashboard.js +++ b/frontend/app/components/Dashboard/Dashboard.js @@ -1,6 +1,7 @@ import { connect } from 'react-redux'; import cn from 'classnames'; import withPageTitle from 'HOCs/withPageTitle'; +import withPermissions from 'HOCs/withPermissions' import { setPeriod, setPlatform, fetchMetadataOptions } from 'Duck/dashboard'; import { NoContent } from 'UI'; import { WIDGET_KEYS } from 'Types/dashboard'; @@ -103,6 +104,7 @@ function isInViewport(el) { ); } +@withPermissions(['METRICS'], 'page-margin container-90') @connect(state => ({ period: state.getIn([ 'dashboard', 'period' ]), comparing: state.getIn([ 'dashboard', 'comparing' ]), diff --git a/frontend/app/components/Errors/Errors.js b/frontend/app/components/Errors/Errors.js index b4c5fce92..f9e7b5c9b 100644 --- a/frontend/app/components/Errors/Errors.js +++ b/frontend/app/components/Errors/Errors.js @@ -1,5 +1,6 @@ import { connect } from 'react-redux'; import withSiteIdRouter from 'HOCs/withSiteIdRouter'; +import withPermissions from 'HOCs/withPermissions' import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo"; import { getRE } from 'App/utils'; import { fetchBookmarks } from "Duck/errors"; @@ -33,6 +34,7 @@ function getStatusLabel(status) { } } +@withPermissions(['ERRORS'], 'page-margin container-90') @withSiteIdRouter @connect(state => ({ list: state.getIn([ "errors", "list" ]), diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js index 88715c174..87f7983b7 
100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js @@ -9,7 +9,6 @@ function FunnelDropdown(props) { const writeOption = (e, { name, value }) => { const { siteId, history } = props; - console.log(value) history.push(withSiteId(funnelRoute(parseInt(value)), siteId)); } diff --git a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js index 0bb049d24..6672bc580 100644 --- a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js +++ b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js @@ -28,7 +28,7 @@ function FunnelIssueDetails(props) {
- +
) diff --git a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js index 3aef18003..707049faa 100644 --- a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js +++ b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js @@ -2,19 +2,27 @@ import React, { useState, useEffect } from 'react' import { connect } from 'react-redux' import SessionItem from 'Shared/SessionItem' import { fetchSessions, fetchSessionsFiltered } from 'Duck/funnels' +import { setFunnelPage } from 'Duck/sessions' import { LoadMoreButton, NoContent, Loader } from 'UI' import FunnelSessionsHeader from '../FunnelSessionsHeader' const PER_PAGE = 10; function FunnelSessionList(props) { - const { list, sessionsTotal, sessionsSort, inDetails = false } = props; + const { funnelId, issueId, list, sessionsTotal, sessionsSort, inDetails = false } = props; const [showPages, setShowPages] = useState(1) const displayedCount = Math.min(showPages * PER_PAGE, list.size); const addPage = () => setShowPages(showPages + 1); + useEffect(() => { + props.setFunnelPage({ + funnelId, + issueId + }) + }, []) + return (
@@ -24,7 +32,7 @@ function FunnelSessionList(props) { subtext="Please try changing your search parameters." icon="exclamation-circle" show={ list.size === 0} - > + > { list.take(displayedCount).map(session => ( + />
) @@ -51,4 +59,4 @@ export default connect(state => ({ liveFilters: state.getIn(['funnelFilters', 'appliedFilter']), funnelFilters: state.getIn(['funnels', 'funnelFilters']), sessionsSort: state.getIn(['funnels', 'sessionsSort']), -}), { fetchSessions, fetchSessionsFiltered })(FunnelSessionList) +}), { fetchSessions, fetchSessionsFiltered, setFunnelPage })(FunnelSessionList) diff --git a/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js b/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js index c598b1543..3a58ccca0 100644 --- a/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js +++ b/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js @@ -11,7 +11,6 @@ const sortOptions = Object.entries(sortOptionsMap) .map(([ value, text ]) => ({ value, text })); function FunnelSessionsHeader({ sessionsCount, inDetails = false }) { - const onSort = () => {} return (
diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index ba5a6a410..33074a390 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -4,28 +4,41 @@ import { Icon, Loader, Button, Link } from 'UI'; import { login } from 'Duck/user'; import { forgotPassword, signup } from 'App/routes'; import ReCAPTCHA from 'react-google-recaptcha'; +import { withRouter } from 'react-router-dom'; import stl from './login.css'; import cn from 'classnames'; +import { setJwt } from 'Duck/jwt'; const FORGOT_PASSWORD = forgotPassword(); const SIGNUP_ROUTE = signup(); const recaptchaRef = React.createRef(); @connect( - state => ({ + (state, props) => ({ errors: state.getIn([ 'user', 'loginRequest', 'errors' ]), loading: state.getIn([ 'user', 'loginRequest', 'loading' ]), - existingTenant: state.getIn(['user', 'existingTenant']) + authDetails: state.getIn(['user', 'authDetails']), + params: new URLSearchParams(props.location.search) }), - { login, }, + { login, setJwt }, ) @withPageTitle('Login - OpenReplay') +@withRouter export default class Login extends React.Component { state = { email: '', password: '', }; + componentDidMount() { + const { params } = this.props; + const jwt = params.get('jwt') + if (jwt) { + this.props.setJwt(jwt); + window.location.href = '/'; + } + } + handleSubmit = (token) => { const { email, password } = this.state; this.props.login({ email: email.trim(), password, 'g-recaptcha-response': token }).then(() => { @@ -45,7 +58,8 @@ export default class Login extends React.Component { write = ({ target: { value, name } }) => this.setState({ [ name ]: value }) render() { - const { errors, loading, existingTenant } = this.props; + const { errors, loading, authDetails } = this.props; + return (
@@ -63,7 +77,7 @@ export default class Login extends React.Component {

Login to OpenReplay

- { !existingTenant &&
Don't have an account? Sign up
} + { !authDetails.tenants &&
Don't have an account? Sign up
}
{ window.ENV.CAPTCHA_ENABLED && ( @@ -126,6 +140,14 @@ export default class Login extends React.Component {
+ { authDetails.sso && ( + + )}
diff --git a/frontend/app/components/Login/login.css b/frontend/app/components/Login/login.css index 4ab843f18..04a0768c7 100644 --- a/frontend/app/components/Login/login.css +++ b/frontend/app/components/Login/login.css @@ -144,4 +144,8 @@ > & label { margin-bottom: 10px !important; } +} + +.sso { + border-top: solid thin $gray-light; } \ No newline at end of file diff --git a/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js b/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js index 014fac088..fddccaf51 100644 --- a/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js +++ b/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js @@ -28,7 +28,7 @@ const MetadataList = (props) => { header: 'Metadata', confirmation: `Are you sure you want to remove?` })) { - this.props.remove(site.id, field.index); + props.remove(site.id, field.index); } } diff --git a/frontend/app/components/Session/LivePlayer.js b/frontend/app/components/Session/LivePlayer.js index 2cccce911..5664a6abd 100644 --- a/frontend/app/components/Session/LivePlayer.js +++ b/frontend/app/components/Session/LivePlayer.js @@ -2,7 +2,8 @@ import { useEffect } from 'react'; import { connect } from 'react-redux'; import { Loader } from 'UI'; import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player'; -import { +import { withRequest } from 'HOCs' +import { PlayerProvider, connectPlayer, init as initPlayer, @@ -30,17 +31,24 @@ const InitLoader = connectPlayer(state => ({ }))(Loader); -function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, live, fullscreen, jwt, config }) { +function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, live, fullscreen, jwt, loadingCredentials, assistCredendials, request }) { useEffect(() => { - initPlayer(session, jwt, config); + if (!loadingCredentials) { + initPlayer(session, jwt, assistCredendials); + } return () => cleanPlayer() - 
}, [ session.sessionId ]); + }, [ session.sessionId, loadingCredentials, assistCredendials ]); // LAYOUT (TODO: local layout state - useContext or something..) - useEffect(() => () => { - toggleFullscreen(false); - closeBottomBlock(); + useEffect(() => { + request(); + return () => { + toggleFullscreen(false); + closeBottomBlock(); + } }, []) + + return ( @@ -54,15 +62,18 @@ function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, l ); } - -export default connect(state => ({ - session: state.getIn([ 'sessions', 'current' ]), - showAssist: state.getIn([ 'sessions', 'showChatWindow' ]), - jwt: state.get('jwt'), - config: state.getIn([ 'user', 'account', 'iceServers' ]), - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), -}), { - toggleFullscreen, - closeBottomBlock, -})(WebPlayer) - +export default withRequest({ + initialData: null, + endpoint: '/assist/credentials', + dataWrapper: data => data, + dataName: 'assistCredendials', + loadingName: 'loadingCredentials', +})(connect( + state => ({ + session: state.getIn([ 'sessions', 'current' ]), + showAssist: state.getIn([ 'sessions', 'showChatWindow' ]), + jwt: state.get('jwt'), + fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), + }), + { toggleFullscreen, closeBottomBlock }, +)(WebPlayer)); \ No newline at end of file diff --git a/frontend/app/components/Session_/Autoplay/Autoplay.js b/frontend/app/components/Session_/Autoplay/Autoplay.js index 654626cff..ef501f3e6 100644 --- a/frontend/app/components/Session_/Autoplay/Autoplay.js +++ b/frontend/app/components/Session_/Autoplay/Autoplay.js @@ -30,7 +30,7 @@ function Autoplay(props) { tooltip={'Autoplay'} /> - +
diff --git a/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js b/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js index 6d24be57b..8b26459f3 100644 --- a/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js +++ b/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js @@ -1,6 +1,6 @@ import React, { useCallback, useState } from 'react'; import { connect } from 'react-redux'; -import { NoContent, IconButton } from 'UI'; +import { NoContent, IconButton, Popup } from 'UI'; import withToggle from 'HOCs/withToggle'; import MetadataItem from './MetadataItem'; import stl from './metadata.css'; @@ -9,19 +9,32 @@ export default connect(state => ({ metadata: state.getIn([ 'sessions', 'current', 'metadata' ]), }))(function Metadata ({ metadata }) { const [ visible, setVisible ] = useState(false); - const toggle = useCallback(() => setVisible(v => !v), []); + const toggle = useCallback(() => metadata.size > 0 && setVisible(v => !v), []); return ( <> - + } + content={ +
+ Metadata must be explicitly specified from the dashboard from Preferences > Metadata. +
+ } + on="click" + disabled={metadata.length > 0} + size="tiny" + inverted + position="top center" /> { visible &&
diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index 266cf1cd1..e3e3bcc7a 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -97,12 +97,17 @@ function getStorageName(type) { showExceptions: state.exceptionsList.length > 0, showLongtasks: state.longtasksList.length > 0, })) -@connect((state, props) => ({ - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), - bottomBlock: state.getIn([ 'components', 'player', 'bottomBlock' ]), - showStorage: props.showStorage || !state.getIn(['components', 'player', 'hiddenHints', 'storage']), - showStack: props.showStack || !state.getIn(['components', 'player', 'hiddenHints', 'stack']), -}), { +@connect((state, props) => { + const permissions = state.getIn([ 'user', 'account', 'permissions' ]) || []; + const isEnterprise = state.getIn([ 'user', 'client', 'edition' ]) === 'ee'; + return { + disabled: props.disabled || (isEnterprise && !permissions.includes('DEV_TOOLS')), + fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), + bottomBlock: state.getIn([ 'components', 'player', 'bottomBlock' ]), + showStorage: props.showStorage || !state.getIn(['components', 'player', 'hiddenHints', 'storage']), + showStack: props.showStack || !state.getIn(['components', 'player', 'hiddenHints', 'stack']), + } +}, { fullscreenOn, fullscreenOff, toggleBottomBlock, diff --git a/frontend/app/components/Session_/PlayerBlockHeader.js b/frontend/app/components/Session_/PlayerBlockHeader.js index dee980e00..9a14541b2 100644 --- a/frontend/app/components/Session_/PlayerBlockHeader.js +++ b/frontend/app/components/Session_/PlayerBlockHeader.js @@ -2,7 +2,7 @@ import { connect } from 'react-redux'; import { withRouter } from 'react-router-dom'; import { browserIcon, osIcon, deviceTypeIcon } from 'App/iconNames'; import { formatTimeOrDate } from 
'App/date'; -import { sessions as sessionsRoute, funnelIssue as funnelIssueRoute, withSiteId } from 'App/routes'; +import { sessions as sessionsRoute, funnel as funnelRoute, funnelIssue as funnelIssueRoute, withSiteId } from 'App/routes'; import { Icon, CountryFlag, IconButton, BackLink } from 'UI'; import { toggleFavorite } from 'Duck/sessions'; import cn from 'classnames'; @@ -36,12 +36,13 @@ function capitalise(str) { local: state.getIn(['sessions', 'timezone']), funnelRef: state.getIn(['funnels', 'navRef']), siteId: state.getIn([ 'user', 'siteId' ]), + funnelPage: state.getIn(['sessions', 'funnelPage']), }), { toggleFavorite, fetchListIntegration }) @withRouter export default class PlayerBlockHeader extends React.PureComponent { - componentDidMount() { + componentDidMount() { if (!this.props.issuesFetched) this.props.fetchListIntegration('issues') } @@ -53,10 +54,13 @@ export default class PlayerBlockHeader extends React.PureComponent { ); backHandler = () => { - const { history, siteId } = this.props; - if (history.action !== 'POP') - history.goBack(); - else + const { history, siteId, funnelPage } = this.props; + if (funnelPage) { + if (funnelPage.get('issueId')) { + history.push(withSiteId(funnelIssueRoute(funnelPage.get('funnelId'), funnelPage.get('issueId')), siteId)) + } else + history.push(withSiteId(funnelRoute(funnelPage.get('funnelId')), siteId)); + } else history.push(withSiteId(SESSIONS_ROUTE), siteId); } @@ -87,6 +91,7 @@ export default class PlayerBlockHeader extends React.PureComponent { jiraConfig, fullscreen, } = this.props; + const { history, siteId } = this.props; return (
@@ -111,7 +116,7 @@ export default class PlayerBlockHeader extends React.PureComponent { { live && } { !live && ( <> - +
BaseComponent => +@connect((state, props) => ({ + permissions: state.getIn([ 'user', 'account', 'permissions' ]) || [], + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', +})) +class extends React.PureComponent { + render() { + const hasPermission = this.props.permissions.some(permission => requiredPermissions.includes(permission)); + + return !this.props.isEnterprise || hasPermission ? :
+ } +} \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/NoPermission.tsx b/frontend/app/components/ui/NoPermission/NoPermission.tsx new file mode 100644 index 000000000..eaf43d4aa --- /dev/null +++ b/frontend/app/components/ui/NoPermission/NoPermission.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import stl from './noPermission.css' +import { Icon } from 'UI'; + +function NoPermission(props) { + return ( +
+ +
Not allowed
+ You don’t have the necessary permissions to access this feature. Please check with your admin. +
+ ); +} + +export default NoPermission; \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/index.ts b/frontend/app/components/ui/NoPermission/index.ts new file mode 100644 index 000000000..c826daf1d --- /dev/null +++ b/frontend/app/components/ui/NoPermission/index.ts @@ -0,0 +1 @@ +export { default } from './NoPermission'; \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/noPermission.css b/frontend/app/components/ui/NoPermission/noPermission.css new file mode 100644 index 000000000..f4296757c --- /dev/null +++ b/frontend/app/components/ui/NoPermission/noPermission.css @@ -0,0 +1,59 @@ +.wrapper { + margin: auto; + width: 100%; + text-align: center; + min-height: 100px; + display: flex; + align-items: center; + flex-direction: column; + justify-content: center; + color: $gray-medium; + font-weight: 300; + transition: all 0.2s; + padding-top: 40px; + + &.small { + & .title { + font-size: 20px !important; + } + + & .subtext { + font-size: 16px; + } + } +} + +.title { + font-size: 32px; + margin-bottom: 15px; +} + +.subtext { + font-size: 16px; + margin-bottom: 20px; +} + + +.icon { + display: block; + margin: auto; + background-image: svg-load(no-results.svg, fill=#CCC); + background-repeat: no-repeat; + background-size: contain; + background-position: center center; + width: 166px; + height: 166px; + margin-bottom: 20px; +} + +.emptyIcon { + display: block; + margin: auto; + background-image: svg-load(empty-state.svg, fill=#CCC); + background-repeat: no-repeat; + background-size: contain; + background-position: center center; + width: 166px; + height: 166px; + margin-bottom: 20px; +} diff --git a/frontend/app/components/ui/index.js b/frontend/app/components/ui/index.js index fe9609f16..669be843a 100644 --- a/frontend/app/components/ui/index.js +++ b/frontend/app/components/ui/index.js @@ -52,5 +52,6 @@ export { default as QuestionMarkHint } from './QuestionMarkHint'; export { default as TimelinePointer } 
from './TimelinePointer'; export { default as CopyButton } from './CopyButton'; export { default as HighlightCode } from './HighlightCode'; +export { default as NoPermission } from './NoPermission'; export { Input, Modal, Form, Message, Card } from 'semantic-ui-react'; diff --git a/frontend/app/duck/index.js b/frontend/app/duck/index.js index 53771ca04..c8d7a7c65 100644 --- a/frontend/app/duck/index.js +++ b/frontend/app/duck/index.js @@ -33,6 +33,7 @@ import announcements from './announcements'; import errors from './errors'; import funnels from './funnels'; import config from './config'; +import roles from './roles'; export default combineReducers({ jwt, @@ -66,6 +67,7 @@ export default combineReducers({ errors, funnels, config, + roles, ...integrations, ...sources, }); diff --git a/frontend/app/duck/jwt.js b/frontend/app/duck/jwt.js index 946dcaa6a..4d4147f34 100644 --- a/frontend/app/duck/jwt.js +++ b/frontend/app/duck/jwt.js @@ -10,3 +10,10 @@ export default (state = null, action = {}) => { } return state; }; + +export function setJwt(data) { + return { + type: UPDATE, + data, + }; +} diff --git a/frontend/app/duck/roles.js b/frontend/app/duck/roles.js new file mode 100644 index 000000000..f1dd1ecec --- /dev/null +++ b/frontend/app/duck/roles.js @@ -0,0 +1,32 @@ +import { List, Map } from 'immutable'; +import Role from 'Types/role'; +import crudDuckGenerator from './tools/crudDuck'; +import { reduceDucks } from 'Duck/tools'; + +const crudDuck = crudDuckGenerator('client/role', Role, { idKey: 'roleId' }); +export const { fetchList, init, edit, remove, } = crudDuck.actions; + +const initialState = Map({ + list: List(), + permissions: List([ + { name: 'Session Replay', value: 'SESSION_REPLAY' }, + { name: 'Developer Tools', value: 'DEV_TOOLS' }, + { name: 'Errors', value: 'ERRORS' }, + { name: 'Metrics', value: 'METRICS' }, + { name: 'Assist Live', value: 'ASSIST_LIVE' }, + { name: 'Assist Call', value: 'ASSIST_CALL' }, + ]) +}); + +const reducer = (state = 
initialState, action = {}) => { + return state; +}; + +export function save(instance) { + return { + types: crudDuck.actionTypes.SAVE.toArray(), + call: client => instance.roleId ? client.post(`/client/roles/${ instance.roleId }`, instance.toData()) : client.put(`/client/roles`, instance.toData()), + }; +} + +export default reduceDucks(crudDuck, { initialState, reducer }).reducer; diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index b2cbc8a66..e82602799 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -24,6 +24,7 @@ const SET_TIMEZONE = 'sessions/SET_TIMEZONE'; const SET_EVENT_QUERY = 'sessions/SET_EVENT_QUERY'; const SET_AUTOPLAY_VALUES = 'sessions/SET_AUTOPLAY_VALUES'; const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW'; +const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG'; const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB'; @@ -54,7 +55,8 @@ const initialState = Map({ visitedEvents: List(), insights: List(), insightFilters: defaultDateFilters, - host: '' + host: '', + funnelPage: Map(), }); const reducer = (state = initialState, action = {}) => { @@ -117,9 +119,11 @@ const reducer = (state = initialState, action = {}) => { } + const sessionIds = list.map(({ sessionId }) => sessionId ).toJS(); + return state .set('list', list) - .set('sessionIds', list.map(({ sessionId }) => sessionId ).toJS()) + .set('sessionIds', sessionIds) .set('favoriteList', list.filter(({ favorite }) => favorite)) .set('total', total) .set('keyMap', keyMap) @@ -236,7 +240,8 @@ const reducer = (state = initialState, action = {}) => { return state.set('showChatWindow', action.state) case FETCH_INSIGHTS.SUCCESS:  return state.set('insights', List(action.data).sort((a, b) => b.count - a.count)); - + case SET_FUNNEL_PAGE_FLAG: + return state.set('funnelPage', action.funnelPage ? 
Map(action.funnelPage) : false); default: return state; } @@ -364,3 +369,10 @@ export function setEventFilter(filter) { } } +export function setFunnelPage(funnelPage) { + return { + type: SET_FUNNEL_PAGE_FLAG, + funnelPage + } +} + diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 5edd112ae..13ff44af5 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -31,7 +31,7 @@ const initialState = Map({ passwordRequestError: false, passwordErrors: List(), tenants: [], - existingTenant: true, + authDetails: {}, onboarding: false }); @@ -70,7 +70,7 @@ const reducer = (state = initialState, action = {}) => { case FETCH_ACCOUNT.SUCCESS: return state.set('account', Account(action.data)).set('passwordErrors', List()); case FETCH_TENANTS.SUCCESS: - return state.set('existingTenant', action.data); + return state.set('authDetails', action.data); // return state.set('tenants', action.data.map(i => ({ text: i.name, value: i.tenantId}))); case UPDATE_PASSWORD.FAILURE: return state.set('passwordErrors', List(action.errors)) diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index 5e64ed436..c742c10b5 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -118,11 +118,11 @@ export default class MessageDistributor extends StatedScreen { private navigationStartOffset: number = 0; private lastMessageTime: number = 0; - constructor(private readonly session: any /*Session*/, jwt: string, config: string) { + constructor(private readonly session: any /*Session*/, jwt: string, config) { super(); this.pagesManager = new PagesManager(this, this.session.isMobile) this.mouseManager = new MouseManager(this); - this.assistManager = new AssistManager(session, config, this); + this.assistManager = new AssistManager(session, this, config); this.sessionStart = this.session.startedAt; diff 
--git a/frontend/app/player/MessageDistributor/PrimitiveReader.ts b/frontend/app/player/MessageDistributor/PrimitiveReader.ts index 6ee5ade4e..b49955074 100644 --- a/frontend/app/player/MessageDistributor/PrimitiveReader.ts +++ b/frontend/app/player/MessageDistributor/PrimitiveReader.ts @@ -1,6 +1,10 @@ export default class PrimitiveReader { protected p = 0 constructor(protected readonly buf: Uint8Array) {} + + hasNext() { + return this.p < this.buf.length + } readUint() { var r = 0, s = 1, b; diff --git a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts index 1ec4f5932..e2cd635fd 100644 --- a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts +++ b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts @@ -76,9 +76,8 @@ export default abstract class BaseScreen { //return this.boundingRect; } - getInternalCoordinates({ x, y }: Point): Point { + getInternalViewportCoordinates({ x, y }: Point): Point { const { x: overlayX, y: overlayY, width } = this.getBoundingClientRect(); - //console.log("x y ", x,y,'ovx y', overlayX, overlayY, width) const screenWidth = this.overlay.offsetWidth; @@ -89,7 +88,19 @@ export default abstract class BaseScreen { return { x: screenX, y: screenY }; } + getInternalCoordinates(p: Point): Point { + const { x, y } = this.getInternalViewportCoordinates(p); + + const docEl = this.document?.documentElement + const scrollX = docEl ? docEl.scrollLeft : 0 + const scrollY = docEl ? 
docEl.scrollTop : 0 + + return { x: x+scrollX, y: y+scrollY }; + } + getElementFromInternalPoint({ x, y }: Point): Element | null { + // elementFromPoint && elementFromPoints require viewpoint-related coordinates, + // not document-related return this.document?.elementFromPoint(x, y) || null; } @@ -108,16 +119,21 @@ export default abstract class BaseScreen { } getElementFromPoint(point: Point): Element | null { - return this.getElementFromInternalPoint(this.getInternalCoordinates(point)); + return this.getElementFromInternalPoint(this.getInternalViewportCoordinates(point)); } getElementsFromPoint(point: Point): Element[] { - return this.getElementsFromInternalPoint(this.getInternalCoordinates(point)); + return this.getElementsFromInternalPoint(this.getInternalViewportCoordinates(point)); } getElementBySelector(selector: string): Element | null { if (!selector) return null; - return this.document?.querySelector(selector) || null; + try { + return this.document?.querySelector(selector) || null; + } catch (e) { + console.error("Can not select element. 
", e) + return null + } } display(flag: boolean = true) { diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index 3e05ec7c2..f11c0c70a 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -47,11 +47,13 @@ export function getStatusText(status: ConnectionStatus): string { export interface State { calling: CallingState, peerConnectionStatus: ConnectionStatus, + remoteControl: boolean, } export const INITIAL_STATE: State = { calling: CallingState.False, peerConnectionStatus: ConnectionStatus.Connecting, + remoteControl: false, } const MAX_RECONNECTION_COUNT = 4; @@ -115,10 +117,8 @@ function resolveCSS(baseURL: string, css: string): string { return rewriteCSSLinks(css, rawurl => resolveURL(baseURL, rawurl)); } - export default class AssistManager { - constructor(private session, private config, private md: MessageDistributor) {} - + constructor(private session, private md: MessageDistributor, private config) {} private setStatus(status: ConnectionStatus) { if (status === ConnectionStatus.Connecting) { @@ -148,8 +148,6 @@ export default class AssistManager { } this.setStatus(ConnectionStatus.Connecting) import('peerjs').then(({ default: Peer }) => { - // @ts-ignore - const iceServers = iceServerConfigFromString(this.config); const _config = { // @ts-ignore host: new URL(window.ENV.API_EDP).host, @@ -157,9 +155,9 @@ export default class AssistManager { port: location.protocol === 'https:' ? 443 : 80, } - if (iceServers) { + if (this.config) { _config['config'] = { - iceServers: iceServers, + iceServers: this.config, sdpSemantics: 'unified-plan', iceTransportPolicy: 'relay', }; @@ -351,12 +349,29 @@ export default class AssistManager { } } - private onMouseMove = (e: MouseEvent ): void => { - const conn = this.dataConnection; - if (!conn) { return; } - // @ts-ignore ??? 
+ // private mmtid?:ReturnType + private onMouseMove = (e: MouseEvent): void => { + // this.mmtid && clearTimeout(this.mmtid) + // this.mmtid = setTimeout(() => { const data = this.md.getInternalCoordinates(e); - conn.send({ x: Math.round(data.x), y: Math.round(data.y) }); + this.send({ x: Math.round(data.x), y: Math.round(data.y) }); + // }, 5) + } + + + // private wtid?: ReturnType + // private scrollDelta: [number, number] = [0,0] + private onWheel = (e: WheelEvent): void => { + e.preventDefault() + //throttling makes movements less smooth + // this.wtid && clearTimeout(this.wtid) + // this.scrollDelta[0] += e.deltaX + // this.scrollDelta[1] += e.deltaY + // this.wtid = setTimeout(() => { + this.send({ type: "scroll", delta: [ e.deltaX, e.deltaY ]})//this.scrollDelta }); + this.onMouseMove(e) + // this.scrollDelta = [0,0] + // }, 20) } private onMouseClick = (e: MouseEvent): void => { @@ -364,9 +379,30 @@ export default class AssistManager { if (!conn) { return; } const data = this.md.getInternalCoordinates(e); // const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager + const el = this.md.getElementFromInternalPoint(data) + if (el instanceof HTMLElement) { + el.focus() + el.oninput = e => e.preventDefault(); + el.onkeydown = e => e.preventDefault(); + } conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) }); } + private toggleRemoteControl = (flag?: boolean) => { + const state = getState().remoteControl; + const newState = typeof flag === 'boolean' ? 
flag : !state; + if (state === newState) { return } + if (newState) { + this.md.overlay.addEventListener("click", this.onMouseClick); + this.md.overlay.addEventListener("wheel", this.onWheel) + update({ remoteControl: true }) + } else { + this.md.overlay.removeEventListener("click", this.onMouseClick); + this.md.overlay.removeEventListener("wheel", this.onWheel); + update({ remoteControl: false }) + } + } + private localCallData: { localStream: LocalStream, onStream: (s: MediaStream)=>void, @@ -375,12 +411,13 @@ export default class AssistManager { onError?: ()=> void } | null = null - call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function { + call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): { end: Function, toggleRemoteControl: Function } { this.localCallData = { localStream, onStream, onCallEnd: () => { onCallEnd(); + this.toggleRemoteControl(false); this.md.overlay.removeEventListener("mousemove", this.onMouseMove); this.md.overlay.removeEventListener("click", this.onMouseClick); update({ calling: CallingState.False }); @@ -390,7 +427,10 @@ export default class AssistManager { onError, } this._call() - return this.initiateCallEnd; + return { + end: this.initiateCallEnd, + toggleRemoteControl: this.toggleRemoteControl, + } } private _call() { @@ -402,7 +442,7 @@ export default class AssistManager { const call = this.peer.call(this.peerID, this.localCallData.localStream.stream); this.localCallData.localStream.onVideoTrack(vTrack => { - const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") + const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") if (!sender) { //logger.warn("No video sender found") return diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts 
b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index cd5e77b02..f226c1b4e 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -120,16 +120,18 @@ export default class DOMManager extends ListWalker { private applyMessage = (msg: Message): void => { let node; + let doc: Document | null; switch (msg.tp) { case "create_document": - // @ts-ignore ?? - this.screen.document.open(); - // @ts-ignore ?? - this.screen.document.write(`${ msg.doctype || "" }`); - // @ts-ignore ?? - this.screen.document.close(); - // @ts-ignore ?? - const fRoot = this.screen.document.documentElement; + doc = this.screen.document; + if (!doc) { + logger.error("No iframe document found", msg) + return; + } + doc.open(); + doc.write(""); + doc.close(); + const fRoot = doc.documentElement; fRoot.innerText = ''; this.nl = [ fRoot ]; @@ -147,7 +149,7 @@ export default class DOMManager extends ListWalker { this.insertNode(msg); break; case "create_element_node": - // console.log('elementnode', msg) + // console.log('elementnode', msg) if (msg.svg) { this.nl[ msg.id ] = document.createElementNS('http://www.w3.org/2000/svg', msg.tag); } else { @@ -213,7 +215,7 @@ export default class DOMManager extends ListWalker { // @ts-ignore node.data = msg.data; if (node instanceof HTMLStyleElement) { - const doc = this.screen.document + doc = this.screen.document doc && rewriteNodeStyleSheet(doc, node) } break; @@ -229,7 +231,11 @@ export default class DOMManager extends ListWalker { node.sheet.insertRule(msg.rule, msg.index) } catch (e) { logger.warn(e, msg) - node.sheet.insertRule(msg.rule) + try { + node.sheet.insertRule(msg.rule) + } catch (e) { + logger.warn("Cannot insert rule.", e, msg) + } } break; case "css_delete_rule": @@ -247,21 +253,27 @@ export default class DOMManager extends ListWalker { } break; case "create_i_frame_document": - // console.log('ifr', msg) node = this.nl[ msg.frameID ]; - if 
(!(node instanceof HTMLIFrameElement)) { - logger.warn("create_i_frame_document message. Node is not iframe") - return; - } - console.log("iframe", msg) - // await new Promise(resolve => { node.onload = resolve }) + // console.log('ifr', msg, node) - const doc = node.contentDocument; - if (!doc) { - logger.warn("No iframe doc", msg, node, node.contentDocument); + if (node instanceof HTMLIFrameElement) { + doc = node.contentDocument; + if (!doc) { + logger.warn("No iframe doc", msg, node, node.contentDocument); + return; + } + this.nl[ msg.id ] = doc.documentElement return; + } else if (node instanceof Element) { // shadow DOM + try { + this.nl[ msg.id ] = node.attachShadow({ mode: 'open' }) + } catch(e) { + logger.warn("Can not attach shadow dom", e, msg) + } + } else { + logger.warn("Context message host is not Element", msg) } - this.nl[ msg.id ] = doc.documentElement + break; //not sure what to do with this one //case "disconnected": diff --git a/frontend/app/player/MessageDistributor/messages.ts b/frontend/app/player/MessageDistributor/messages.ts index 6d9f3244c..bb391b9f4 100644 --- a/frontend/app/player/MessageDistributor/messages.ts +++ b/frontend/app/player/MessageDistributor/messages.ts @@ -5,6 +5,7 @@ import PrimitiveReader from './PrimitiveReader'; export const ID_TP_MAP = { 0: "timestamp", + 2: "session_disconnect", 4: "set_page_location", 5: "set_viewport_size", 6: "set_viewport_scroll", @@ -38,6 +39,13 @@ export const ID_TP_MAP = { 59: "long_task", 69: "mouse_click", 70: "create_i_frame_document", + 90: "ios_session_start", + 93: "ios_custom_event", + 96: "ios_screen_changes", + 100: "ios_click_event", + 102: "ios_performance_event", + 103: "ios_log", + 105: "ios_network_call", } as const; @@ -46,6 +54,11 @@ export interface Timestamp { timestamp: number, } +export interface SessionDisconnect { + tp: "session_disconnect", + timestamp: number, +} + export interface SetPageLocation { tp: "set_page_location", url: string, @@ -271,12 +284,82 @@ export 
interface CreateIFrameDocument { id: number, } +export interface IosSessionStart { + tp: "ios_session_start", + timestamp: number, + projectID: number, + trackerVersion: string, + revID: string, + userUUID: string, + userOS: string, + userOSVersion: string, + userDevice: string, + userDeviceType: string, + userCountry: string, +} -export type Message = Timestamp | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument; +export interface IosCustomEvent { + tp: "ios_custom_event", + timestamp: number, + length: number, + name: string, + payload: string, +} + +export interface IosScreenChanges { + tp: "ios_screen_changes", + timestamp: number, + length: number, + x: number, + y: number, + width: number, + height: number, +} + +export interface IosClickEvent { + tp: "ios_click_event", + timestamp: number, + length: number, + label: string, + x: number, + y: number, +} + +export interface IosPerformanceEvent { + tp: "ios_performance_event", + timestamp: number, + length: number, + name: string, + value: number, +} + +export interface IosLog { + tp: "ios_log", + timestamp: number, + length: number, + severity: string, + content: string, +} + +export interface IosNetworkCall { + tp: "ios_network_call", + timestamp: number, + length: number, + duration: number, + headers: string, + body: string, + url: string, + success: boolean, + method: string, + status: number, +} + + +export type Message = Timestamp | SessionDisconnect | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | 
RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument | IosSessionStart | IosCustomEvent | IosScreenChanges | IosClickEvent | IosPerformanceEvent | IosLog | IosNetworkCall; export default function (r: PrimitiveReader): Message | null { - const ui= r.readUint() - switch (ui) { + const tp = r.readUint() + switch (tp) { case 0: return { @@ -284,6 +367,12 @@ export default function (r: PrimitiveReader): Message | null { timestamp: r.readUint(), }; + case 2: + return { + tp: ID_TP_MAP[2], + timestamp: r.readUint(), + }; + case 4: return { tp: ID_TP_MAP[4], @@ -542,10 +631,85 @@ export default function (r: PrimitiveReader): Message | null { id: r.readUint(), }; + case 90: + return { + tp: ID_TP_MAP[90], + timestamp: r.readUint(), + projectID: r.readUint(), + trackerVersion: r.readString(), + revID: r.readString(), + userUUID: r.readString(), + userOS: r.readString(), + userOSVersion: r.readString(), + userDevice: r.readString(), + userDeviceType: r.readString(), + userCountry: r.readString(), + }; + + case 93: + return { + tp: ID_TP_MAP[93], + timestamp: r.readUint(), + length: r.readUint(), + name: r.readString(), + payload: r.readString(), + }; + + case 96: + return { + tp: ID_TP_MAP[96], + timestamp: r.readUint(), + length: r.readUint(), + x: r.readUint(), + y: r.readUint(), + width: r.readUint(), + height: r.readUint(), + }; + + case 100: + return { + tp: ID_TP_MAP[100], + timestamp: r.readUint(), + length: r.readUint(), + label: r.readString(), + x: r.readUint(), + y: r.readUint(), + }; + + case 102: + return { + tp: ID_TP_MAP[102], + timestamp: r.readUint(), + length: r.readUint(), + name: r.readString(), + value: r.readUint(), + }; + + case 103: + return { + 
tp: ID_TP_MAP[103], + timestamp: r.readUint(), + length: r.readUint(), + severity: r.readString(), + content: r.readString(), + }; + + case 105: + return { + tp: ID_TP_MAP[105], + timestamp: r.readUint(), + length: r.readUint(), + duration: r.readUint(), + headers: r.readString(), + body: r.readString(), + url: r.readString(), + success: r.readBoolean(), + method: r.readString(), + status: r.readUint(), + }; + default: - console.log("wtf is this", ui) - r.readUint(); // IOS skip timestamp - r.skip(r.readUint()); + throw new Error(`Unrecognizable message type: ${ tp }`) return null; } } diff --git a/frontend/app/player/ios/ImagePlayer.js b/frontend/app/player/ios/ImagePlayer.js index 824ee397a..4fd22776a 100644 --- a/frontend/app/player/ios/ImagePlayer.js +++ b/frontend/app/player/ios/ImagePlayer.js @@ -21,7 +21,7 @@ import { createListState, createScreenListState, } from './lists'; -import Parser from './parser'; +import Parser from './Parser'; import PerformanceList from './PerformanceList'; const HIGHEST_SPEED = 3; diff --git a/frontend/app/player/ios/Parser.ts b/frontend/app/player/ios/Parser.ts new file mode 100644 index 000000000..f202e9306 --- /dev/null +++ b/frontend/app/player/ios/Parser.ts @@ -0,0 +1,35 @@ +import readMessage from '../MessageDistributor/messages'; +import PrimitiveReader from '../MessageDistributor/PrimitiveReader'; + + +export default class Parser { + private reader: PrimitiveReader + private error: boolean = false + constructor(byteArray) { + this.reader = new PrimitiveReader(byteArray) + } + + parseEach(cb) { + while (this.hasNext()) { + const msg = this.next(); + if (msg !== null) { + cb(msg); + } + } + } + + hasNext() { + return !this.error && this.reader.hasNext(); + } + + next() { + try { + return readMessage(this.reader) + } catch(e) { + console.warn(e) + this.error = true + return null + } + } + +} \ No newline at end of file diff --git a/frontend/app/player/ios/parser.js b/frontend/app/player/ios/parser.js deleted file mode 
100644 index 83ddaec50..000000000 --- a/frontend/app/player/ios/parser.js +++ /dev/null @@ -1,31 +0,0 @@ -import readMessage from '../MessageDistributor/messages'; - - -export default class Parser { - _p = 0 - _data - _error = null - constructor(byteArray) { - this._data = byteArray; - } - - parseEach(cb) { - while (this.hasNext()) { - const msg = this.parseNext(); - if (msg !== null) { - cb(msg); - } - } - } - - hasNext() { - return !this._error && this._data.length > this._p; - } - - parseNext() { - let msg; - [ msg, this._p ] = readMessage(this._data, this._p); - return msg - } - -} \ No newline at end of file diff --git a/frontend/app/routes.js b/frontend/app/routes.js index df032fe32..2ca5fd672 100644 --- a/frontend/app/routes.js +++ b/frontend/app/routes.js @@ -60,6 +60,7 @@ export const CLIENT_TABS = { INTEGRATIONS: 'integrations', PROFILE: 'account', MANAGE_USERS: 'manage-users', + MANAGE_ROLES: 'manage-roles', SITES: 'projects', CUSTOM_FIELDS: 'metadata', WEBHOOKS: 'webhooks', diff --git a/frontend/app/svg/icons/remote-control.svg b/frontend/app/svg/icons/remote-control.svg new file mode 100644 index 000000000..64087850c --- /dev/null +++ b/frontend/app/svg/icons/remote-control.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/app/svg/icons/shield-lock.svg b/frontend/app/svg/icons/shield-lock.svg new file mode 100644 index 000000000..1a1a49084 --- /dev/null +++ b/frontend/app/svg/icons/shield-lock.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/types/account/account.js b/frontend/app/types/account/account.js index 3d9374fc1..fc1ff958e 100644 --- a/frontend/app/types/account/account.js +++ b/frontend/app/types/account/account.js @@ -13,6 +13,7 @@ export default Member.extend({ smtp: false, license: '', expirationDate: undefined, + permissions: [], iceServers: undefined }, { fromJS: ({ current = {}, ...account})=> ({ diff --git a/frontend/app/types/client/client.js b/frontend/app/types/client/client.js 
index c1a33114f..ec0a20aac 100644 --- a/frontend/app/types/client/client.js +++ b/frontend/app/types/client/client.js @@ -8,9 +8,11 @@ export default Record({ loggerOptions: LoggerOptions(), apiKey: undefined, tenantId: undefined, + tenantKey: '', name: undefined, sites: List(), - optOut: true + optOut: true, + edition: '', }, { fromJS: ({ projects, diff --git a/frontend/app/types/member.js b/frontend/app/types/member.js index f712c7347..03495784c 100644 --- a/frontend/app/types/member.js +++ b/frontend/app/types/member.js @@ -11,7 +11,9 @@ export default Record({ superAdmin: false, joined: false, expiredInvitation: false, - invitationLink: '' + roleId: undefined, + roleName: undefined, + invitationLink: '', }, { idKey: 'id', methods: { diff --git a/frontend/app/types/role.js b/frontend/app/types/role.js new file mode 100644 index 000000000..52a74d400 --- /dev/null +++ b/frontend/app/types/role.js @@ -0,0 +1,30 @@ +import Record from 'Types/Record'; +import { validateName } from 'App/validate'; +import { List } from 'immutable'; + +export default Record({ + roleId: undefined, + name: '', + permissions: List(), + protected: false, + description: '' +}, { + idKey: 'roleId', + methods: { + validate() { + return validateName(this.name, { diacritics: true }); + }, + toData() { + const js = this.toJS(); + delete js.key; + delete js.protected; + return js; + }, + }, + fromJS({ permissions, ...rest }) { + return { + ...rest, + permissions: List(permissions) + } + }, +}); diff --git a/frontend/env.js b/frontend/env.js index 9218bcd70..34b577228 100644 --- a/frontend/env.js +++ b/frontend/env.js @@ -13,7 +13,7 @@ const oss = { ORIGIN: () => 'window.location.origin', API_EDP: () => 'window.location.origin + "/api"', ASSETS_HOST: () => 'window.location.origin + "/assets"', - VERSION: '1.3.5', + VERSION: '1.3.6', SOURCEMAP: true, MINIO_ENDPOINT: process.env.MINIO_ENDPOINT, MINIO_PORT: process.env.MINIO_PORT, @@ -21,7 +21,7 @@ const oss = { MINIO_ACCESS_KEY: 
process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, ICE_SERVERS: process.env.ICE_SERVERS, - TRACKER_VERSION: '3.4.7', // trackerInfo.version, + TRACKER_VERSION: '3.4.10', // trackerInfo.version, } module.exports = { diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 20c75956d..16cba8159 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -22,6 +22,8 @@ "UI/*": ["./app/components/ui/*"], "Duck": ["./app/duck"], "Duck/*": ["./app/duck/*"], + "HOCs": ["./app/components/hocs"], + "HOCs/*": ["./app/components/hocs/*"], "Shared": ["./app/components/shared"], "Shared/*": ["./app/components/shared/*"], "Player": ["./app/player"], diff --git a/scripts/helm/app/chalice.yaml b/scripts/helm/app/chalice.yaml index 3a50d166b..fcbea8ed6 100644 --- a/scripts/helm/app/chalice.yaml +++ b/scripts/helm/app/chalice.yaml @@ -57,10 +57,12 @@ env: # Enable logging for python app # Ref: https://stackoverflow.com/questions/43969743/logs-in-kubernetes-pod-not-showing-up PYTHONUNBUFFERED: '0' - version_number: '1.3.5' + version_number: '1.3.6' SAML2_MD_URL: '' idp_entityId: '' idp_sso_url: '' idp_x509cert: '' idp_sls_url: '' + idp_name: '' + assist_secret: '' iceServers: '' diff --git a/scripts/helm/app/http.yaml b/scripts/helm/app/http.yaml index b34105a6c..7f2792018 100644 --- a/scripts/helm/app/http.yaml +++ b/scripts/helm/app/http.yaml @@ -24,7 +24,7 @@ resources: env: ASSETS_ORIGIN: /sessions-assets # TODO: full path (with the minio prefix) TOKEN_SECRET: secret_token_string # TODO: generate on buld - S3_BUCKET_IMAGES_IOS: sessions-mobile-assets + S3_BUCKET_IOS_IMAGES: sessions-mobile-assets AWS_ACCESS_KEY_ID: "minios3AccessKeyS3cr3t" AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" AWS_REGION: us-east-1 diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql new file mode 100644 index 000000000..cc4704f26 --- /dev/null +++ 
b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -0,0 +1,16 @@ +BEGIN; + +CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; +CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; +CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + + +CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); +CREATE INDEX errors_error_id_idx ON errors (error_id); +CREATE INDEX errors_error_id_idx ON events.errors (error_id); + +CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues(issue_id,timestamp); +CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp); +CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); + +COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 06cd47dd7..5bdb3f1c7 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1,8 +1,8 @@ BEGIN; --- --- public.sql --- +-- Schemas and functions definitions: +CREATE SCHEMA IF NOT EXISTS events_common; +CREATE SCHEMA IF NOT EXISTS events; -CREATE EXTENSION IF NOT EXISTS pg_trgm; -CREATE EXTENSION IF NOT EXISTS pgcrypto; -- --- accounts.sql --- CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS @@ -23,797 +23,7 @@ begin end; $$ LANGUAGE plpgsql; - - -CREATE TABLE public.tenants -( - tenant_id integer NOT NULL DEFAULT 1, - user_id text NOT NULL DEFAULT generate_api_key(20), - name text NOT NULL, - api_key text NOT NULL DEFAULT generate_api_key(20), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - edition varchar(3) NOT NULL, - version_number text NOT NULL, - license text NULL, - 
opt_out bool NOT NULL DEFAULT FALSE, - t_projects integer NOT NULL DEFAULT 1, - t_sessions bigint NOT NULL DEFAULT 0, - t_users integer NOT NULL DEFAULT 1, - t_integrations integer NOT NULL DEFAULT 0, - CONSTRAINT onerow_uni CHECK (tenant_id = 1) -); - -CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); - -CREATE TABLE users -( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - 
"avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, - api_key text UNIQUE default generate_api_key(20) not null, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT '{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE -); - -CREATE TABLE basic_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, - changed_at timestamp, - UNIQUE (user_id) -); - -CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); -CREATE TABLE oauth_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - provider oauth_provider NOT NULL, - provider_user_id text NOT NULL, - token text NOT NULL, - UNIQUE (user_id, provider) -); - --- --- projects.sql --- - -CREATE TABLE projects -( - project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), - name text NOT NULL, - active boolean NOT NULL, - sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - max_session_duration integer NOT NULL DEFAULT 7200000, - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - 
metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL, - gdpr jsonb NOT NULL DEFAULT '{ - "maskEmails": true, - "sampleRate": 33, - "maskNumbers": false, - "defaultInputMode": "plain" - }'::jsonb -- ?????? -); -CREATE INDEX ON public.projects (project_key); - -CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS -$$ -BEGIN - PERFORM pg_notify('project', row_to_json(NEW)::text); - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update - AFTER INSERT OR UPDATE - ON projects - FOR EACH ROW -EXECUTE PROCEDURE notify_project(); - --- --- alerts.sql --- - -CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); - -CREATE TABLE alerts -( - alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - name text NOT NULL, - description text NULL DEFAULT NULL, - active boolean NOT NULL DEFAULT TRUE, - detection_method alert_detection_method NOT NULL, - query jsonb NOT NULL, - deleted_at timestamp NULL DEFAULT NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{ - "renotifyInterval": 1440 - }'::jsonb -); - - -CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS -$$ -DECLARE - clone jsonb; -BEGIN - clone = to_jsonb(NEW); - clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); - IF NEW.deleted_at NOTNULL THEN - clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); - END IF; - PERFORM pg_notify('alert', clone::text); - RETURN NEW; -END ; -$$ LANGUAGE plpgsql; - - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON alerts - FOR EACH ROW -EXECUTE PROCEDURE notify_alert(); - --- --- webhooks.sql --- - -create type webhook_type as enum ('webhook', 'slack', 'email'); - 
-create table webhooks -( - webhook_id integer generated by default as identity - constraint webhooks_pkey - primary key, - endpoint text not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - auth_header text, - type webhook_type not null, - index integer default 0 not null, - name varchar(100) -); - --- --- notifications.sql --- - -CREATE TABLE notifications -( - notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id integer REFERENCES users (user_id) ON DELETE CASCADE, - title text NOT NULL, - description text NOT NULL, - button_text varchar(80) NULL, - button_url text NULL, - image_url text NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{}'::jsonb -); - -CREATE INDEX notifications_user_id_index ON notifications (user_id); -CREATE INDEX notifications_created_at_index ON notifications (created_at DESC); -CREATE INDEX notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); - -CREATE TABLE user_viewed_notifications -( - user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, - notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, - constraint user_viewed_notifications_pkey primary key (user_id, notification_id) -); - --- --- funnels.sql --- - -CREATE TABLE funnels -( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text not null, - filter jsonb not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False -); - -CREATE INDEX ON public.funnels (user_id, is_public); - --- --- announcements.sql --- - -create type announcement_type as enum ('notification', 
'alert'); - -create table announcements -( - announcement_id serial not null - constraint announcements_pk - primary key, - title text not null, - description text not null, - button_text varchar(30), - button_url text, - image_url text, - created_at timestamp default timezone('utc'::text, now()) not null, - type announcement_type default 'notification'::announcement_type not null -); - --- --- integrations.sql --- - -CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); -CREATE TABLE integrations -( - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - provider integration_provider NOT NULL, - options jsonb NOT NULL, - request_data jsonb NOT NULL DEFAULT '{}'::jsonb, - PRIMARY KEY (project_id, provider) -); - -CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS -$$ -BEGIN - IF NEW IS NULL THEN - PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); - ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN - PERFORM pg_notify('integration', row_to_json(NEW)::text); - END IF; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON integrations - FOR EACH ROW -EXECUTE PROCEDURE notify_integration(); - - -create table jira_cloud -( - user_id integer not null - constraint jira_cloud_pk - primary key - constraint jira_cloud_users_fkey - references users - on delete cascade, - username text not null, - token text not null, - url text -); - --- --- issues.sql --- - -CREATE TYPE issue_type AS ENUM ( - 'click_rage', - 'dead_click', - 'excessive_scrolling', - 'bad_request', - 'missing_resource', - 'memory', - 'cpu', - 'slow_resource', - 'slow_page_load', - 'crash', - 'ml_cpu', - 'ml_memory', - 'ml_dead_click', - 'ml_click_rage', - 'ml_mouse_thrashing', - 
'ml_excessive_scrolling', - 'ml_slow_resources', - 'custom', - 'js_exception' - ); - -CREATE TABLE issues -( - issue_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - type issue_type NOT NULL, - context_string text NOT NULL, - context jsonb DEFAULT NULL -); -CREATE INDEX ON issues (issue_id, type); -CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); -CREATE INDEX issues_project_id_idx ON issues (project_id); - --- --- errors.sql --- - -CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); -CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); -CREATE TABLE errors -( - error_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - source error_source NOT NULL, - name text DEFAULT NULL, - message text NOT NULL, - payload jsonb NOT NULL, - status error_status NOT NULL DEFAULT 'unresolved', - parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, - stacktrace jsonb, --to save the stacktrace and not query S3 another time - stacktrace_parsed_at timestamp -); -CREATE INDEX ON errors (project_id, source); -CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); -CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); -CREATE INDEX errors_project_id_idx ON public.errors (project_id); -CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); -CREATE INDEX errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception'; -CREATE INDEX errors_project_id_error_id_idx ON public.errors (project_id, error_id); -CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception'; - -CREATE 
TABLE user_favorite_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); - -CREATE TABLE user_viewed_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); -CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); -CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - - --- --- sessions.sql --- -CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); -CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 
'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); -CREATE TYPE platform AS ENUM ('web','ios','android'); - -CREATE TABLE sessions -( - session_id bigint PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - tracker_version text NOT NULL, - start_ts bigint NOT NULL, - duration integer NULL, - rev_id text DEFAULT NULL, - platform platform NOT NULL DEFAULT 'web', - is_snippet boolean NOT NULL DEFAULT FALSE, - user_id text DEFAULT NULL, - user_anonymous_id text DEFAULT NULL, - user_uuid uuid NOT NULL, - user_agent text DEFAULT NULL, - user_os text NOT NULL, - user_os_version text DEFAULT NULL, - user_browser text DEFAULT NULL, - user_browser_version text DEFAULT NULL, - user_device text NOT NULL, - user_device_type device_type NOT NULL, - user_device_memory_size integer DEFAULT NULL, - user_device_heap_size bigint DEFAULT NULL, - user_country country NOT NULL, - pages_count integer NOT NULL DEFAULT 0, - events_count integer NOT NULL DEFAULT 0, - errors_count integer NOT NULL DEFAULT 0, - watchdogs_score bigint NOT NULL DEFAULT 0, - issue_score bigint NOT NULL DEFAULT 0, - issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL --- , --- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL -); -CREATE INDEX ON sessions (project_id, start_ts); -CREATE INDEX ON sessions (project_id, user_id); -CREATE INDEX ON sessions (project_id, user_anonymous_id); -CREATE INDEX ON sessions 
(project_id, user_device); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX ON sessions (project_id, metadata_1); -CREATE INDEX ON sessions (project_id, metadata_2); -CREATE INDEX ON sessions (project_id, metadata_3); -CREATE INDEX ON sessions (project_id, metadata_4); -CREATE INDEX ON sessions (project_id, metadata_5); -CREATE INDEX ON sessions (project_id, metadata_6); -CREATE INDEX ON sessions (project_id, metadata_7); -CREATE INDEX ON sessions (project_id, metadata_8); -CREATE INDEX ON sessions (project_id, metadata_9); -CREATE INDEX ON sessions (project_id, metadata_10); --- CREATE INDEX ON sessions (rehydration_id); -CREATE INDEX ON sessions (project_id, watchdogs_score DESC); -CREATE INDEX platform_idx ON public.sessions (platform); - -CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); -CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); -CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); -CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); -CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); -CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); -CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); -CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); -CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); -CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); -CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); -CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); -CREATE 
INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); -CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); -CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); -CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0; -CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; -CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0; -CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; - -ALTER TABLE public.sessions - ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_browser ISNULL)); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_agent_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_agent ISNULL)); - - - -CREATE TABLE user_viewed_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - -CREATE TABLE user_favorite_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - - 
--- --- assignments.sql --- - -create table assigned_sessions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - issue_id text NOT NULL, - provider oauth_provider NOT NULL, - created_by integer NOT NULL, - created_at timestamp default timezone('utc'::text, now()) NOT NULL, - provider_data jsonb default '{}'::jsonb NOT NULL -); -CREATE INDEX ON assigned_sessions (session_id); - --- --- events_common.sql --- - -CREATE SCHEMA events_common; - -CREATE TYPE events_common.custom_level AS ENUM ('info','error'); - -CREATE TABLE events_common.customs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - name text NOT NULL, - payload jsonb NOT NULL, - level events_common.custom_level NOT NULL DEFAULT 'info', - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.customs (name); -CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); -CREATE INDEX ON events_common.customs (timestamp); - - -CREATE TABLE events_common.issues -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, - payload jsonb DEFAULT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); - - -CREATE TABLE events_common.requests -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - url text NOT NULL, - duration integer NOT NULL, - success boolean NOT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.requests (url); -CREATE INDEX ON events_common.requests (duration); -CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); -CREATE INDEX ON events_common.requests (timestamp); -CREATE INDEX 
requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); - -- --- events.sql --- -CREATE SCHEMA events; - -CREATE TABLE events.pages -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - host text NOT NULL, - path text NOT NULL, - base_path text NOT NULL, - referrer text DEFAULT NULL, - base_referrer text DEFAULT NULL, - dom_building_time integer DEFAULT NULL, - dom_content_loaded_time integer DEFAULT NULL, - load_time integer DEFAULT NULL, - first_paint_time integer DEFAULT NULL, - first_contentful_paint_time integer DEFAULT NULL, - speed_index integer DEFAULT NULL, - visually_complete integer DEFAULT NULL, - time_to_interactive integer DEFAULT NULL, - response_time bigint DEFAULT NULL, - response_end bigint DEFAULT NULL, - ttfb integer DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.pages (session_id); -CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); -CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); -CREATE INDEX ON events.pages (timestamp); -CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); -CREATE INDEX pages_base_path_idx ON events.pages (base_path); -CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); -CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); -CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - (CASE - WHEN base_referrer LIKE 'http://%' - THEN 7 - WHEN base_referrer LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); -CREATE INDEX ON events.pages (response_time); -CREATE INDEX ON events.pages (response_end); 
-CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); -CREATE INDEX pages_path_idx ON events.pages (path); -CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; -CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; -CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; -CREATE INDEX pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0; -CREATE INDEX pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0; -CREATE INDEX pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0; -CREATE INDEX pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0; -CREATE INDEX pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0; -CREATE INDEX pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL; -CREATE INDEX pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL; -CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR first_paint_time > 0 OR - dom_content_loaded_time > 0 OR ttfb > 0 OR - time_to_interactive > 0; -CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; -CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; -CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); - - -CREATE TABLE events.clicks -( - session_id bigint NOT NULL REFERENCES sessions 
(session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, - selector text DEFAULT '' NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.clicks (session_id); -CREATE INDEX ON events.clicks (label); -CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); -CREATE INDEX ON events.clicks (timestamp); -CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); -CREATE INDEX clicks_url_idx ON events.clicks (url); -CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); -CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); - - -CREATE TABLE events.inputs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.inputs (session_id); -CREATE INDEX ON events.inputs (label, value); -CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); -CREATE INDEX inputs_label_idx ON events.inputs (label); -CREATE INDEX ON events.inputs (timestamp); -CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); - -CREATE TABLE events.errors -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.errors (session_id); -CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id); -CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp); -CREATE INDEX 
errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id); -CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); - -CREATE TABLE events.graphql -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.graphql (name); -CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.graphql (timestamp); - -CREATE TABLE events.state_actions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.state_actions (name); -CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.state_actions (timestamp); - -CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); -CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); -CREATE TABLE events.resources -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - duration bigint NULL, - type events.resource_type NOT NULL, - url text NOT NULL, - url_host text NOT NULL, - url_hostpath text NOT NULL, - success boolean NOT NULL, - status smallint NULL, - method events.resource_method NULL, - ttfb bigint NULL, - header_size bigint NULL, - encoded_body_size integer NULL, - decoded_body_size integer NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.resources (session_id); -CREATE INDEX ON events.resources (status); -CREATE INDEX ON events.resources (type); -CREATE INDEX ON 
events.resources (duration) WHERE duration > 0; -CREATE INDEX ON events.resources (url_host); - -CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); -CREATE INDEX resources_url_idx ON events.resources (url); -CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); -CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); -CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL; -CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp); -CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type); -CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch'; -CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE; -CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script'); -CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img'; -CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id); - -CREATE TABLE events.performance -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - message_id bigint NOT NULL, - min_fps smallint NOT NULL, - avg_fps smallint NOT NULL, - max_fps smallint NOT NULL, - min_cpu smallint NOT NULL, - avg_cpu smallint NOT NULL, - max_cpu smallint NOT NULL, - min_total_js_heap_size bigint NOT NULL, - avg_total_js_heap_size bigint NOT NULL, - max_total_js_heap_size bigint NOT NULL, - 
min_used_js_heap_size bigint NOT NULL, - avg_used_js_heap_size bigint NOT NULL, - max_used_js_heap_size bigint NOT NULL, - PRIMARY KEY (session_id, message_id) -); - CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS $$ @@ -838,39 +48,875 @@ BEGIN END; $$ LANGUAGE plpgsql IMMUTABLE; +-- --- integrations.sql --- + +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +-- --- alerts.sql --- + +CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS +$$ +DECLARE + clone jsonb; +BEGIN + clone = to_jsonb(NEW); + clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); + IF NEW.deleted_at NOTNULL THEN + clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); + END IF; + PERFORM pg_notify('alert', clone::text); + RETURN NEW; +END ; +$$ LANGUAGE plpgsql; + +-- --- projects.sql --- + +CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS +$$ +BEGIN + PERFORM pg_notify('project', row_to_json(NEW)::text); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- All tables and types: + +DO +$$ + BEGIN + IF EXISTS(SELECT + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = 'tenants') THEN + raise notice 'DB exists, skipping creation query'; + ELSE + raise notice 'Creating DB'; + + -- --- public.sql --- + + CREATE EXTENSION IF NOT EXISTS pg_trgm; + CREATE EXTENSION IF NOT EXISTS pgcrypto; +-- --- accounts.sql --- + + CREATE TABLE IF NOT EXISTS public.tenants + ( + tenant_id integer NOT NULL DEFAULT 1, + user_id text NOT NULL DEFAULT generate_api_key(20), + name text NOT 
NULL, + api_key text NOT NULL DEFAULT generate_api_key(20), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + edition varchar(3) NOT NULL, + version_number text NOT NULL, + license text NULL, + opt_out bool NOT NULL DEFAULT FALSE, + t_projects integer NOT NULL DEFAULT 1, + t_sessions bigint NOT NULL DEFAULT 0, + t_users integer NOT NULL DEFAULT 1, + t_integrations integer NOT NULL DEFAULT 0, + CONSTRAINT onerow_uni CHECK (tenant_id = 1) + ); + + CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); + + CREATE TABLE users + ( + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + appearance jsonb NOT NULL default '{ + "role": "dev", + "dashboard": { + "cpu": true, + "fps": false, + "avgCpu": true, + "avgFps": true, + "errors": true, + "crashes": true, + "overview": true, + "sessions": true, + "topMetrics": true, + "callsErrors": true, + "pageMetrics": true, + "performance": true, + "timeToRender": false, + "userActivity": false, + "avgFirstPaint": false, + "countSessions": true, + "errorsPerType": true, + "slowestImages": true, + "speedLocation": true, + "slowestDomains": true, + "avgPageLoadTime": true, + "avgTillFirstBit": false, + "avgTimeToRender": true, + "avgVisitedPages": false, + "avgImageLoadTime": true, + "busiestTimeOfDay": true, + "errorsPerDomains": true, + "missingResources": true, + "resourcesByParty": true, + "sessionsFeedback": false, + "slowestResources": true, + "avgUsedJsHeapSize": true, + "domainsErrors_4xx": true, + "domainsErrors_5xx": true, + "memoryConsumption": true, + "pagesDomBuildtime": false, + "pagesResponseTime": true, + "avgRequestLoadTime": true, + "avgSessionDuration": false, + "sessionsPerBrowser": false, + "applicationActivity": true, + 
"sessionsFrustration": false, + "avgPagesDomBuildtime": true, + "avgPagesResponseTime": false, + "avgTimeToInteractive": true, + "resourcesCountByType": true, + "resourcesLoadingTime": true, + "avgDomContentLoadStart": true, + "avgFirstContentfulPixel": false, + "resourceTypeVsResponseEnd": true, + "impactedSessionsByJsErrors": true, + "impactedSessionsBySlowPages": true, + "resourcesVsVisuallyComplete": true, + "pagesResponseTimeDistribution": true + }, + "sessionsLive": false, + "sessionsDevtools": true + }'::jsonb, + api_key text UNIQUE default generate_api_key(20) not null, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT '{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE + ); + + CREATE TABLE basic_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + password text DEFAULT NULL, + generated_password boolean NOT NULL DEFAULT false, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + changed_at timestamp, + UNIQUE (user_id) + ); + + CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); + CREATE TABLE oauth_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + provider oauth_provider NOT NULL, + provider_user_id text NOT NULL, + token text NOT NULL, + UNIQUE (user_id, provider) + ); + +-- --- projects.sql --- + + CREATE TABLE projects + ( + project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), + name text NOT NULL, + active boolean NOT NULL, + sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + 
max_session_duration integer NOT NULL DEFAULT 7200000, + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL, + gdpr jsonb NOT NULL DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "plain" + }'::jsonb -- ?????? + ); + CREATE INDEX ON public.projects (project_key); + + CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS + $$ + BEGIN + PERFORM pg_notify('project', row_to_json(NEW)::text); + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + + CREATE TRIGGER on_insert_or_update + AFTER INSERT OR UPDATE + ON projects + FOR EACH ROW + EXECUTE PROCEDURE notify_project(); + +-- --- alerts.sql --- + + CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); + + CREATE TABLE alerts + ( + alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + name text NOT NULL, + description text NULL DEFAULT NULL, + active boolean NOT NULL DEFAULT TRUE, + detection_method alert_detection_method NOT NULL, + query jsonb NOT NULL, + deleted_at timestamp NULL DEFAULT NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{ + "renotifyInterval": 1440 + }'::jsonb + ); + + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON alerts + FOR EACH ROW + EXECUTE PROCEDURE notify_alert(); + +-- --- webhooks.sql --- + + create type webhook_type as enum ('webhook', 'slack', 'email'); + + create table webhooks + ( + webhook_id integer generated by default as identity + constraint webhooks_pkey + primary key, + endpoint text not null, + created_at timestamp default timezone('utc'::text, now()) not null, + 
deleted_at timestamp, + auth_header text, + type webhook_type not null, + index integer default 0 not null, + name varchar(100) + ); + +-- --- notifications.sql --- + + CREATE TABLE notifications + ( + notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + user_id integer REFERENCES users (user_id) ON DELETE CASCADE, + title text NOT NULL, + description text NOT NULL, + button_text varchar(80) NULL, + button_url text NULL, + image_url text NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{}'::jsonb + ); + + CREATE INDEX notifications_user_id_index ON notifications (user_id); + CREATE INDEX notifications_created_at_index ON notifications (created_at DESC); + CREATE INDEX notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); + + CREATE TABLE user_viewed_notifications + ( + user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, + notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, + constraint user_viewed_notifications_pkey primary key (user_id, notification_id) + ); + +-- --- funnels.sql --- + + CREATE TABLE funnels + ( + funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + + CREATE INDEX ON public.funnels (user_id, is_public); + +-- --- announcements.sql --- + + create type announcement_type as enum ('notification', 'alert'); + + create table announcements + ( + announcement_id serial not null + constraint announcements_pk + primary key, + title text not null, + description text not null, + button_text varchar(30), + 
button_url text, + image_url text, + created_at timestamp default timezone('utc'::text, now()) not null, + type announcement_type default 'notification'::announcement_type not null + ); + +-- --- integrations.sql --- + + CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); + CREATE TABLE integrations + ( + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + provider integration_provider NOT NULL, + options jsonb NOT NULL, + request_data jsonb NOT NULL DEFAULT '{}'::jsonb, + PRIMARY KEY (project_id, provider) + ); + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON integrations + FOR EACH ROW + EXECUTE PROCEDURE notify_integration(); + + + create table jira_cloud + ( + user_id integer not null + constraint jira_cloud_pk + primary key + constraint jira_cloud_users_fkey + references users + on delete cascade, + username text not null, + token text not null, + url text + ); + +-- --- issues.sql --- + + CREATE TYPE issue_type AS ENUM ( + 'click_rage', + 'dead_click', + 'excessive_scrolling', + 'bad_request', + 'missing_resource', + 'memory', + 'cpu', + 'slow_resource', + 'slow_page_load', + 'crash', + 'ml_cpu', + 'ml_memory', + 'ml_dead_click', + 'ml_click_rage', + 'ml_mouse_thrashing', + 'ml_excessive_scrolling', + 'ml_slow_resources', + 'custom', + 'js_exception' + ); + + CREATE TABLE issues + ( + issue_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + type issue_type NOT NULL, + context_string text NOT NULL, + context jsonb DEFAULT NULL + ); + CREATE INDEX ON issues (issue_id, type); + CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); + CREATE INDEX issues_project_id_idx ON issues (project_id); + +-- --- errors.sql --- + + CREATE TYPE error_source AS ENUM 
('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); + CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); + CREATE TABLE errors + ( + error_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + source error_source NOT NULL, + name text DEFAULT NULL, + message text NOT NULL, + payload jsonb NOT NULL, + status error_status NOT NULL DEFAULT 'unresolved', + parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, + stacktrace jsonb, --to save the stacktrace and not query S3 another time + stacktrace_parsed_at timestamp + ); + CREATE INDEX ON errors (project_id, source); + CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); + CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); + CREATE INDEX errors_project_id_idx ON public.errors (project_id); + CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); + CREATE INDEX errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception'; + CREATE INDEX errors_project_id_error_id_idx ON public.errors (project_id, error_id); + CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception'; + + CREATE TABLE user_favorite_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + + CREATE TABLE user_viewed_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); + CREATE INDEX user_viewed_errors_error_id_idx ON 
public.user_viewed_errors (error_id); + + +-- --- sessions.sql --- + CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); + CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); + CREATE TYPE platform AS ENUM ('web','ios','android'); + + CREATE TABLE sessions + ( + session_id bigint PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + tracker_version text NOT NULL, + start_ts bigint NOT NULL, + duration integer NULL, + rev_id text DEFAULT NULL, + 
platform platform NOT NULL DEFAULT 'web', + is_snippet boolean NOT NULL DEFAULT FALSE, + user_id text DEFAULT NULL, + user_anonymous_id text DEFAULT NULL, + user_uuid uuid NOT NULL, + user_agent text DEFAULT NULL, + user_os text NOT NULL, + user_os_version text DEFAULT NULL, + user_browser text DEFAULT NULL, + user_browser_version text DEFAULT NULL, + user_device text NOT NULL, + user_device_type device_type NOT NULL, + user_device_memory_size integer DEFAULT NULL, + user_device_heap_size bigint DEFAULT NULL, + user_country country NOT NULL, + pages_count integer NOT NULL DEFAULT 0, + events_count integer NOT NULL DEFAULT 0, + errors_count integer NOT NULL DEFAULT 0, + watchdogs_score bigint NOT NULL DEFAULT 0, + issue_score bigint NOT NULL DEFAULT 0, + issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL +-- , +-- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL + ); + CREATE INDEX ON sessions (project_id, start_ts); + CREATE INDEX ON sessions (project_id, user_id); + CREATE INDEX ON sessions (project_id, user_anonymous_id); + CREATE INDEX ON sessions (project_id, user_device); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX ON sessions (project_id, metadata_1); + CREATE INDEX ON sessions (project_id, metadata_2); + CREATE INDEX ON sessions (project_id, metadata_3); + CREATE INDEX ON sessions (project_id, metadata_4); + CREATE INDEX ON sessions (project_id, metadata_5); + CREATE INDEX ON sessions (project_id, metadata_6); + CREATE INDEX ON sessions (project_id, metadata_7); + CREATE INDEX ON sessions (project_id, metadata_8); + 
CREATE INDEX ON sessions (project_id, metadata_9); + CREATE INDEX ON sessions (project_id, metadata_10); +-- CREATE INDEX ON sessions (rehydration_id); + CREATE INDEX ON sessions (project_id, watchdogs_score DESC); + CREATE INDEX platform_idx ON public.sessions (platform); + + CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); + CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); + CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); + CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); + CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); + CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); + CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); + CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); + CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); + CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); + CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); + CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); + CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); + CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); + CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); + CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX sessions_start_ts_idx 
ON public.sessions (start_ts) WHERE duration > 0; + CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; + CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0; + CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; + CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; + CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; + + ALTER TABLE public.sessions + ADD CONSTRAINT web_browser_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_browser ISNULL)); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_agent_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_agent ISNULL)); + + + CREATE TABLE user_viewed_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + CREATE TABLE user_favorite_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + +-- --- assignments.sql --- + + create table assigned_sessions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + issue_id text NOT NULL, + provider oauth_provider NOT NULL, + created_by integer NOT NULL, + 
created_at timestamp default timezone('utc'::text, now()) NOT NULL, + provider_data jsonb default '{}'::jsonb NOT NULL + ); + CREATE INDEX ON assigned_sessions (session_id); + +-- --- events_common.sql --- + + CREATE SCHEMA IF NOT EXISTS events_common; + + CREATE TYPE events_common.custom_level AS ENUM ('info','error'); + + CREATE TABLE events_common.customs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + name text NOT NULL, + payload jsonb NOT NULL, + level events_common.custom_level NOT NULL DEFAULT 'info', + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.customs (name); + CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); + CREATE INDEX ON events_common.customs (timestamp); + + + CREATE TABLE events_common.issues + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, + payload jsonb DEFAULT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) + ); + + + CREATE TABLE events_common.requests + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + url text NOT NULL, + duration integer NOT NULL, + success boolean NOT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.requests (url); + CREATE INDEX ON events_common.requests (duration); + CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); + CREATE INDEX ON events_common.requests (timestamp); + CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE + WHEN url LIKE 'http://%' + THEN 7 + WHEN url LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + +-- --- events.sql 
--- + CREATE SCHEMA IF NOT EXISTS events; + + CREATE TABLE events.pages + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + host text NOT NULL, + path text NOT NULL, + base_path text NOT NULL, + referrer text DEFAULT NULL, + base_referrer text DEFAULT NULL, + dom_building_time integer DEFAULT NULL, + dom_content_loaded_time integer DEFAULT NULL, + load_time integer DEFAULT NULL, + first_paint_time integer DEFAULT NULL, + first_contentful_paint_time integer DEFAULT NULL, + speed_index integer DEFAULT NULL, + visually_complete integer DEFAULT NULL, + time_to_interactive integer DEFAULT NULL, + response_time bigint DEFAULT NULL, + response_end bigint DEFAULT NULL, + ttfb integer DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.pages (session_id); + CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); + CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); + CREATE INDEX ON events.pages (timestamp); + CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); + CREATE INDEX pages_base_path_idx ON events.pages (base_path); + CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); + CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); + CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, + length(base_referrer) - (CASE + WHEN base_referrer LIKE 'http://%' + THEN 7 + WHEN base_referrer LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + CREATE INDEX ON events.pages (response_time); + CREATE INDEX ON events.pages (response_end); + CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); + CREATE INDEX pages_path_idx ON events.pages (path); + CREATE INDEX pages_visually_complete_idx ON 
events.pages (visually_complete) WHERE visually_complete > 0; + CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; + CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; + CREATE INDEX pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0; + CREATE INDEX pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0; + CREATE INDEX pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0; + CREATE INDEX pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0; + CREATE INDEX pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0; + CREATE INDEX pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL; + CREATE INDEX pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL; + CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR + first_paint_time > 0 OR + dom_content_loaded_time > 0 OR + ttfb > 0 OR + time_to_interactive > 0; + CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; + CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; + CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); + CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + + + CREATE TABLE events.clicks + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint 
NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.clicks (session_id); + CREATE INDEX ON events.clicks (label); + CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); + CREATE INDEX ON events.clicks (timestamp); + CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); + CREATE INDEX clicks_url_idx ON events.clicks (url); + CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); + CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); + + + CREATE TABLE events.inputs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.inputs (session_id); + CREATE INDEX ON events.inputs (label, value); + CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); + CREATE INDEX inputs_label_idx ON events.inputs (label); + CREATE INDEX ON events.inputs (timestamp); + CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); + + CREATE TABLE events.errors + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.errors (session_id); + CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id); + CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp); + CREATE INDEX errors_timestamp_error_id_session_id_idx ON 
events.errors (timestamp, error_id, session_id); + CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); + + CREATE TABLE events.graphql + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.graphql (name); + CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.graphql (timestamp); + + CREATE TABLE events.state_actions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.state_actions (name); + CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.state_actions (timestamp); + + CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); + CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); + CREATE TABLE events.resources + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + duration bigint NULL, + type events.resource_type NOT NULL, + url text NOT NULL, + url_host text NOT NULL, + url_hostpath text NOT NULL, + success boolean NOT NULL, + status smallint NULL, + method events.resource_method NULL, + ttfb bigint NULL, + header_size bigint NULL, + encoded_body_size integer NULL, + decoded_body_size integer NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.resources (session_id); + CREATE INDEX ON events.resources (status); + CREATE INDEX ON events.resources (type); + CREATE INDEX ON events.resources 
(duration) WHERE duration > 0; + CREATE INDEX ON events.resources (url_host); + + CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); + CREATE INDEX resources_url_idx ON events.resources (url); + CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); + CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); + CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL; + CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp); + CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type); + CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch'; + CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE; + CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script'); + CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img'; + CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id); + + CREATE TABLE events.performance + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + message_id bigint NOT NULL, + min_fps smallint NOT NULL, + avg_fps smallint NOT NULL, + max_fps smallint NOT NULL, + min_cpu smallint NOT NULL, + avg_cpu smallint NOT NULL, + max_cpu smallint NOT NULL, + min_total_js_heap_size bigint NOT NULL, + avg_total_js_heap_size bigint NOT NULL, + max_total_js_heap_size bigint NOT NULL, + 
min_used_js_heap_size bigint NOT NULL, + avg_used_js_heap_size bigint NOT NULL, + max_used_js_heap_size bigint NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + -- --- autocomplete.sql --- -CREATE TABLE autocomplete -( - value text NOT NULL, - type text NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE -); + CREATE TABLE autocomplete + ( + value text NOT NULL, + type text NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE + ); -CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); -CREATE index autocomplete_project_id_idx ON autocomplete (project_id); -CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); -CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); + CREATE index autocomplete_project_id_idx ON autocomplete (project_id); + CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); + CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); -- --- jobs.sql --- -CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed'); -CREATE TYPE job_action AS ENUM ('delete_user_data'); -CREATE TABLE jobs -( - job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - description text NOT NULL, - status job_status NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - action job_action NOT NULL, - reference_id text NOT NULL, - created_at timestamp default timezone('utc'::text, now()) NOT NULL, - updated_at timestamp default timezone('utc'::text, now()) NULL, - start_at timestamp NOT NULL, - errors text NULL -); -CREATE INDEX ON jobs (status); -CREATE INDEX ON jobs (start_at); -CREATE INDEX jobs_project_id_idx ON jobs (project_id); + CREATE TYPE job_status AS ENUM 
('scheduled','running','cancelled','failed','completed'); + CREATE TYPE job_action AS ENUM ('delete_user_data'); + CREATE TABLE jobs + ( + job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + description text NOT NULL, + status job_status NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + action job_action NOT NULL, + reference_id text NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + updated_at timestamp default timezone('utc'::text, now()) NULL, + start_at timestamp NOT NULL, + errors text NULL + ); + CREATE INDEX ON jobs (status); + CREATE INDEX ON jobs (start_at); + CREATE INDEX jobs_project_id_idx ON jobs (project_id); -COMMIT; + + raise notice 'DB created'; + END IF; + END; + +$$ +LANGUAGE plpgsql; + +COMMIT; \ No newline at end of file diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml index 2e4e993c8..bf41a28c2 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml @@ -5,10 +5,13 @@ metadata: namespace: {{ .Release.Namespace }} data: location.list: |- + location ~* /general_stats { + deny all; + } location /healthz { return 200 'OK'; } - location ~ ^/(mobs|sessions-assets|frontend|static|sourcemaps)/ { + location ~ ^/(mobs|sessions-assets|frontend|static|sourcemaps|ios-images)/ { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; @@ -59,6 +62,7 @@ data: proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "Upgrade"; proxy_set_header Host $host; + proxy_set_header X-Forwarded-Proto $scheme; proxy_pass http://chalice-openreplay.app.svc.cluster.local:8000; } location /assist/ { diff --git a/scripts/helm/roles/openreplay/tasks/install-dbs.yaml 
b/scripts/helm/roles/openreplay/tasks/install-dbs.yaml index 899f0eda2..56448c43b 100644 --- a/scripts/helm/roles/openreplay/tasks/install-dbs.yaml +++ b/scripts/helm/roles/openreplay/tasks/install-dbs.yaml @@ -21,7 +21,7 @@ file="{{ item|basename }}" kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "rm -rf /tmp/$file" kubectl cp -n db $file postgresql-postgresql-0:/tmp/ - kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log + kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -v ON_ERROR_STOP=1 -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log args: chdir: db/init_dbs/postgresql with_fileglob: diff --git a/scripts/helm/roles/openreplay/templates/alerts.yaml b/scripts/helm/roles/openreplay/templates/alerts.yaml index 09f33c8bd..b2a91832b 100644 --- a/scripts/helm/roles/openreplay/templates/alerts.yaml +++ b/scripts/helm/roles/openreplay/templates/alerts.yaml @@ -5,6 +5,7 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}/{{ postgres_db_name }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/assets.yaml b/scripts/helm/roles/openreplay/templates/assets.yaml index 6383f4f2c..740617166 100644 --- a/scripts/helm/roles/openreplay/templates/assets.yaml +++ b/scripts/helm/roles/openreplay/templates/assets.yaml @@ -6,7 +6,13 @@ image: env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" + S3_BUCKET_ASSETS: "{{ assets_bucket }}" LICENSE_KEY: "{{ enterprise_edition_license }}" + AWS_ENDPOINT: "{{ s3_endpoint }}" + AWS_REGION: "{{ aws_region }}" 
+ REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/chalice.yaml b/scripts/helm/roles/openreplay/templates/chalice.yaml index 3ab238e72..68c1cf9ff 100644 --- a/scripts/helm/roles/openreplay/templates/chalice.yaml +++ b/scripts/helm/roles/openreplay/templates/chalice.yaml @@ -12,9 +12,34 @@ env: S3_SECRET: "{{ minio_secret_key }}" sourcemaps_bucket_key: "{{ minio_access_key }}" sourcemaps_bucket_secret: "{{ minio_secret_key }}" - S3_HOST: "https://{{ domain_name }}" SITE_URL: "https://{{ domain_name }}" jwt_secret: "{{ jwt_secret_key }}" + pg_host: "{{ postgres_endpoint }}" + pg_port: "{{ postgres_port }}" + pg_dbname: "{{ postgres_db_name }}" + pg_user: "{{ postgres_db_user }}" + pg_password: "{{ postgres_db_password }}" + EMAIL_HOST: "{{ email_host }}" + EMAIL_PORT: "{{ email_port }}" + EMAIL_USER: "{{ email_user }}" + EMAIL_PASSWORD: "{{ email_password }}" + EMAIL_USE_TLS: "{{ email_use_tls }}" + EMAIL_USE_SSL: "{{ email_use_ssl }}" + EMAIL_SSL_KEY: "{{ email_ssl_key }}" + EMAIL_SSL_CERT: "{{ email_ssl_cert }}" + EMAIL_FROM: "{{ email_from }}" + AWS_DEFAULT_REGION: "{{ aws_region }}" + sessions_region: "{{ aws_region }}" + sessions_bucket: "{{ recordings_bucket }}" + sourcemaps_bucket: "{{ sourcemaps_bucket }}" + js_cache_bucket: "{{ assets_bucket }}" + # In case of minio, the instance is running inside kubernetes, + # which is accessible via nginx ingress.
+{% if s3_endpoint == "http://minio.db.svc.cluster.local:9000" %} + S3_HOST: "https://{{ domain_name }}" +{% else %} + S3_HOST: "{{ s3_endpoint }}" +{% endif %} {% if env is defined and env.chalice is defined and env.chalice%} {{ env.chalice | to_nice_yaml | trim | indent(2) }} {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/db.yaml b/scripts/helm/roles/openreplay/templates/db.yaml index ab8609111..bc128593d 100644 --- a/scripts/helm/roles/openreplay/templates/db.yaml +++ b/scripts/helm/roles/openreplay/templates/db.yaml @@ -5,6 +5,10 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}" + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/ender.yaml b/scripts/helm/roles/openreplay/templates/ender.yaml index 5749e4a52..b5d256b2d 100644 --- a/scripts/helm/roles/openreplay/templates/ender.yaml +++ b/scripts/helm/roles/openreplay/templates/ender.yaml @@ -5,6 +5,9 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/http.yaml b/scripts/helm/roles/openreplay/templates/http.yaml index 6383f4f2c..a6f9d86b4 100644 --- a/scripts/helm/roles/openreplay/templates/http.yaml +++ b/scripts/helm/roles/openreplay/templates/http.yaml @@ -7,6 +7,11 @@ env: 
AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" LICENSE_KEY: "{{ enterprise_edition_license }}" + AWS_REGION: "{{ aws_region }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}" + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/integrations.yaml b/scripts/helm/roles/openreplay/templates/integrations.yaml index 5749e4a52..f7ea17428 100644 --- a/scripts/helm/roles/openreplay/templates/integrations.yaml +++ b/scripts/helm/roles/openreplay/templates/integrations.yaml @@ -5,6 +5,11 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}" + # + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/sink.yaml b/scripts/helm/roles/openreplay/templates/sink.yaml index 5749e4a52..b5d256b2d 100644 --- a/scripts/helm/roles/openreplay/templates/sink.yaml +++ b/scripts/helm/roles/openreplay/templates/sink.yaml @@ -5,6 +5,9 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is 
defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/storage.yaml b/scripts/helm/roles/openreplay/templates/storage.yaml index 6383f4f2c..7a4e080d5 100644 --- a/scripts/helm/roles/openreplay/templates/storage.yaml +++ b/scripts/helm/roles/openreplay/templates/storage.yaml @@ -7,6 +7,14 @@ env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" LICENSE_KEY: "{{ enterprise_edition_license }}" + AWS_ENDPOINT: "{{ s3_endpoint }}" + AWS_REGION_WEB: "{{ aws_region }}" + AWS_REGION_IOS: "{{ aws_region }}" + S3_BUCKET_WEB: "{{ recordings_bucket }}" + S3_BUCKET_IOS: "{{ recordings_bucket }}" + REDIS_STRING: "{{ redis_endpoint }}" + KAFKA_SERVERS: "{{ kafka_endpoint }}" + KAFKA_USE_SSL: "{{ kafka_ssl }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/utilities.yaml b/scripts/helm/roles/openreplay/templates/utilities.yaml index 3ae1efca8..fb7eb0ae0 100644 --- a/scripts/helm/roles/openreplay/templates/utilities.yaml +++ b/scripts/helm/roles/openreplay/templates/utilities.yaml @@ -12,6 +12,7 @@ env: S3_SECRET: "{{ minio_secret_key }}" S3_HOST: "https://{{ domain_name }}" jwt_secret: "{{ jwt_secret_key }}" + AWS_DEFAULT_REGION: "{{ aws_region }}" {% if env is defined and env.chalice is defined and env.chalice%} {{ env.chalice | to_nice_yaml | trim | indent(2) }} {% endif %} diff --git a/scripts/helm/vars.yaml b/scripts/helm/vars.yaml index 23fcf8035..12e2fcac9 100644 --- a/scripts/helm/vars.yaml +++ b/scripts/helm/vars.yaml @@ -86,3 +86,27 @@ db_resource_override: # memory: 256Mi redis: {} clickhouse: {} + +## Sane defaults +s3_endpoint: "http://minio.db.svc.cluster.local:9000" +aws_region: "us-east-1" +assets_bucket: sessions-assets +recordings_bucket: mobs +sourcemaps_bucket: sourcemaps +kafka_endpoint: 
kafka.db.svc.cluster.local:9042 +kafka_ssl: false +postgres_endpoint: postgresql.db.svc.cluster.local +postgres_port: 5432 +postgres_db_name: postgres +postgres_db_user: postgres +postgres_db_password: asayerPostgres +redis_endpoint: redis-master.db.svc.cluster.local:6379 +email_host: '' +email_port: '587' +email_user: '' +email_password: '' +email_use_tls: 'true' +email_use_ssl: 'false' +email_ssl_key: '' +email_ssl_cert: '' +email_from: OpenReplay diff --git a/scripts/helm/vars_template.yaml b/scripts/helm/vars_template.yaml index 6b8fa167d..dc63067f1 100644 --- a/scripts/helm/vars_template.yaml +++ b/scripts/helm/vars_template.yaml @@ -86,3 +86,27 @@ db_resource_override: # memory: 256Mi redis: {{ db_resource_override.redis|default({}) }} clickhouse: {{ db_resource_override.clickhouse|default({}) }} + +## Sane defaults +s3_endpoint: "{{ s3_endpoint }}" +aws_region: "{{ aws_region }}" +assets_bucket: "{{ assets_bucket }}" +recordings_bucket: "{{ recordings_bucket }}" +sourcemaps_bucket: "{{ sourcemaps_bucket }}" +kafka_endpoint: "{{ kafka_endpoint }}" +kafka_ssl: "{{ kafka_ssl }}" +postgres_endpoint: "{{ postgres_endpoint }}" +postgres_port: "{{ postgres_port }}" +postgres_db_name: "{{ postgres_db_name }}" +postgres_db_user: "{{ postgres_db_user }}" +postgres_db_password: "{{ postgres_db_password }}" +redis_endpoint: "{{ redis_endpoint }}" +email_host: "{{ email_host }}" +email_port: "{{ email_port }}" +email_user: "{{ email_user }}" +email_password: "{{ email_password }}" +email_use_tls: "{{ email_use_tls }}" +email_use_ssl: "{{ email_use_ssl }}" +email_ssl_key: "{{ email_ssl_key }}" +email_ssl_cert: "{{ email_ssl_cert }}" +email_from: "{{ email_from }}" diff --git a/tracker/tracker-assist/package-lock.json b/tracker/tracker-assist/package-lock.json index e791c4134..cb3354060 100644 --- a/tracker/tracker-assist/package-lock.json +++ b/tracker/tracker-assist/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-assist", - "version": "3.2.0", + 
"version": "3.4.8", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.4.7", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.7.tgz", - "integrity": "sha512-E9ZwjPwo9WbThV9nAQbK8EKLwJcLBgQG51ND3LB+p21xaz0WcMETIaJDFFmHhhwvkCQ1Vi43gK3cjoOoHF4XFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -811,9 +811,9 @@ } }, "typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index d068dbc6b..345d42ecb 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "3.4.7", + "version": "3.4.9", "keywords": [ "WebRTC", "assistance", @@ -24,12 +24,12 @@ "peerjs": "^1.3.2" }, "peerDependencies": { - "@openreplay/tracker": "^3.4.0" + "@openreplay/tracker": "^3.4.8" }, "devDependencies": { - "@openreplay/tracker": "^3.4.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-assist/src/BufferingConnection.ts 
b/tracker/tracker-assist/src/BufferingConnection.ts new file mode 100644 index 000000000..e90970c21 --- /dev/null +++ b/tracker/tracker-assist/src/BufferingConnection.ts @@ -0,0 +1,45 @@ +import type { DataConnection } from 'peerjs'; + +// TODO: proper Message type export from tracker in 3.5.0 +interface Message { + encode(w: any): boolean; +} + +// 16kb should be max according to specification +const crOrFf: boolean = + typeof navigator !== "undefined" && + (navigator.userAgent.indexOf("Chrom") !== -1 || // Chrome && Chromium + navigator.userAgent.indexOf("Firefox") !== -1); + +const MESSAGES_PER_SEND = crOrFf ? 500 : 100 + +// Bffering required in case of webRTC +export default class BufferingConnection { + private readonly buffer: Message[][] = [] + private buffering: boolean = false + + constructor(readonly conn: DataConnection, + private readonly msgsPerSend: number = MESSAGES_PER_SEND){} + private sendNext() { + if (this.buffer.length) { + setTimeout(() => { + this.conn.send(this.buffer.shift()) + this.sendNext() + }, 15) + } else { + this.buffering = false + } + } + + send(messages: Message[]) { + if (!this.conn.open) { return; } + let i = 0; + while (i < messages.length) { + this.buffer.push(messages.slice(i, i+=this.msgsPerSend)) + } + if (!this.buffering) { + this.buffering = true + this.sendNext(); + } + } +} \ No newline at end of file diff --git a/tracker/tracker-assist/src/CallWindow.ts b/tracker/tracker-assist/src/CallWindow.ts index 3356449ce..f22aa6a7a 100644 --- a/tracker/tracker-assist/src/CallWindow.ts +++ b/tracker/tracker-assist/src/CallWindow.ts @@ -129,7 +129,7 @@ export default class CallWindow { }) } - private aRemote: HTMLAudioElement | null = null; + private aRemote: HTMLAudioElement | null = null; private checkRemoteVideoInterval: ReturnType setRemoteStream(rStream: MediaStream) { this.load.then(() => { @@ -149,14 +149,14 @@ export default class CallWindow { // Hack to determine if the remote video is enabled if 
(this.checkRemoteVideoInterval) { clearInterval(this.checkRemoteVideoInterval) } // just in case - let enable = false + let enabled = false this.checkRemoteVideoInterval = setInterval(() => { const settings = rStream.getVideoTracks()[0]?.getSettings() //console.log(settings) const isDummyVideoTrack = !!settings && (settings.width === 2 || settings.frameRate === 0) - const shouldEnable = !isDummyVideoTrack - if (enable !== shouldEnable) { - this.toggleRemoteVideoUI(enable=shouldEnable) + const shouldBeEnabled = !isDummyVideoTrack + if (enabled !== shouldBeEnabled) { + this.toggleRemoteVideoUI(enabled=shouldBeEnabled) } }, 1000) }) @@ -178,7 +178,7 @@ export default class CallWindow { private localStream: LocalStream | null = null; // TODO: on construction? - setLocalStream(lStream: LocalStream) { + setLocalStream(lStream: LocalStream) { this.localStream = lStream } @@ -207,7 +207,7 @@ export default class CallWindow { private toggleAudio() { const enabled = this.localStream?.toggleAudio() || false this.toggleAudioUI(enabled) - // if (!this.audioBtn) { return; } + // if (!this.audioBtn) { return; } // if (enabled) { // this.audioBtn.classList.remove("muted"); // this.audioBtn.childNodes[1].textContent = "Mute"; @@ -218,7 +218,7 @@ export default class CallWindow { } private toggleVideoUI(enabled: boolean) { - if (!this.videoBtn || !this.videoContainer) { return; } + if (!this.videoBtn || !this.videoContainer) { return; } if (enabled) { this.videoContainer.classList.add("local") this.videoBtn.classList.remove("off"); @@ -239,7 +239,7 @@ export default class CallWindow { this.vLocal.srcObject = this.localStream.stream } }) - }) + }) } remove() { @@ -247,10 +247,10 @@ export default class CallWindow { clearInterval(this.tsInterval) clearInterval(this.checkRemoteVideoInterval) if (this.iframe.parentElement) { - document.body.removeChild(this.iframe) + document.body.removeChild(this.iframe) } if (this.aRemote && this.aRemote.parentElement) { - 
document.body.removeChild(this.aRemote) + document.body.removeChild(this.aRemote) } sessionStorage.removeItem(SS_START_TS_KEY) } diff --git a/tracker/tracker-assist/src/Mouse.ts b/tracker/tracker-assist/src/Mouse.ts index 03558ce1c..51fb67e8e 100644 --- a/tracker/tracker-assist/src/Mouse.ts +++ b/tracker/tracker-assist/src/Mouse.ts @@ -1,8 +1,7 @@ - - export default class Mouse { private mouse: HTMLDivElement + private position: [number,number] = [0,0] constructor() { this.mouse = document.createElement('div'); Object.assign(this.mouse.style, { @@ -17,13 +16,18 @@ export default class Mouse { document.body.appendChild(this.mouse); } - move({x, y}: {x?: number, y?: number}) { + move({x, y}: {x: number, y: number}) { + this.position = [x, y]; Object.assign(this.mouse.style, { left: `${x || 0}px`, top: `${y || 0}px` }) } + getPosition(): [ number, number] { + return this.position; + } + remove() { if (this.mouse.parentElement) { document.body.removeChild(this.mouse); diff --git a/tracker/tracker-assist/src/_slim.ts b/tracker/tracker-assist/src/_slim.ts index 72c52dbd3..ce86863be 100644 --- a/tracker/tracker-assist/src/_slim.ts +++ b/tracker/tracker-assist/src/_slim.ts @@ -5,4 +5,4 @@ */ // @ts-ignore -window.parcelRequire = window.parcelRequire || undefined; +typeof window !== "undefined" && (window.parcelRequire = window.parcelRequire || undefined); diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index b7fadeb81..c1e452a34 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -1,19 +1,21 @@ -import './_slim'; -import Peer, { MediaConnection } from 'peerjs'; +import './_slim.js'; +import Peer from 'peerjs'; import type { DataConnection } from 'peerjs'; import { App, Messages } from '@openreplay/tracker'; import type Message from '@openreplay/tracker'; -import Mouse from './Mouse'; -import CallWindow from './CallWindow'; -import ConfirmWindow from './ConfirmWindow'; -import 
RequestLocalStream from './LocalStream'; +import BufferingConnection from './BufferingConnection.js'; +import Mouse from './Mouse.js'; +import CallWindow from './CallWindow.js'; +import ConfirmWindow from './ConfirmWindow.js'; +import RequestLocalStream from './LocalStream.js'; export interface Options { confirmText: string, confirmStyle: Object, // Styles object session_calling_peer_key: string, - config: Object + config: RTCConfiguration, + __messages_per_send?: number, } enum CallingState { @@ -22,17 +24,26 @@ enum CallingState { False, }; -export default function(opts: Partial = {}) { +//@ts-ignore peerjs hack for webpack5 (?!) TODO: ES/node modules; +Peer = Peer.default || Peer; + +// type IncomeMessages = +// "call_end" | +// { type: "agent_name", name: string } | +// { type: "click", x: number, y: number } | +// { x: number, y: number } + +export default function(opts?: Partial) { const options: Options = Object.assign( { - confirmText: "You have a call. Do you want to answer?", + confirmText: "You have an incoming call. Do you want to answer?", confirmStyle: {}, session_calling_peer_key: "__openreplay_calling_peer", - config: null + config: null, }, opts, ); - return function(app: App | null, appOptions: { __DISABLE_SECURE_MODE?: boolean } = {}) { + return function(app: App | null, appOptions: { __debug_log?: boolean, __DISABLE_SECURE_MODE?: boolean } = {}) { // @ts-ignore if (app === null || !navigator?.mediaDevices?.getUserMedia) { // 93.04% browsers return; @@ -48,6 +59,7 @@ export default function(opts: Partial = {}) { let assistDemandedRestart = false let peer : Peer | null = null + // This is required because internal peerjs connection list is not stable. 
https://peerjs.com/docs.html#peerconnections const openDataConnections: Record = {} app.addCommitCallback(function(messages) { @@ -56,61 +68,42 @@ export default function(opts: Partial = {}) { app.attachStopCallback(function() { if (assistDemandedRestart) { return; } - peer && peer.destroy(); + if (peer) { + peer.destroy(); + log('Peer destroyed!') + } }); app.attachStartCallback(function() { if (assistDemandedRestart) { return; } - const peerID = `${app.projectKey}-${app.getSessionID()}` + const peerID = `${app.getProjectKey()}-${app.getSessionID()}` const _opt = { // @ts-ignore host: app.getHost(), path: '/assist', port: location.protocol === 'http:' && appOptions.__DISABLE_SECURE_MODE ? 80 : 443, + //debug: // 0 Print nothing //1 Prints only errors. / 2 Prints errors and warnings. / 3 Prints all logs. } if (options.config) { _opt['config'] = options.config } peer = new Peer(peerID, _opt); - console.log('OpenReplay tracker-assist peerID:', peerID) - peer.on('error', e => console.log("OpenReplay tracker-assist peer error: ", e.type, e)) - peer.on('connection', function(conn) { + log('Peer created: ', peer) + peer.on('error', e => warn("Peer error: ", e.type, e)) + peer.on('connection', function(conn) { window.addEventListener("beforeunload", () => conn.open && conn.send("unload")); + log('Connecting...') - console.log('OpenReplay tracker-assist: Connecting...') conn.on('open', function() { - - console.log('OpenReplay tracker-assist: connection opened.') - - // TODO: onClose - const buffer: Message[][] = []; - let buffering = false; - function sendNext() { - if (buffer.length) { - setTimeout(() => { - conn.send(buffer.shift()); - sendNext(); - }, 50); - } else { - buffering = false; - } - } - + log('Connection opened.') assistDemandedRestart = true; app.stop(); - //@ts-ignore (should update tracker dependency) - app.addCommitCallback((messages: Array): void => { - if (!conn.open) { return; } // TODO: clear commit callbacks on connection close - let i = 0; - while 
(i < messages.length) { - buffer.push(messages.slice(i, i+=1000)); - } - if (!buffering) { - buffering = true; - sendNext(); - } - }); - app.start().then(() => { assistDemandedRestart = false; }); + openDataConnections[conn.peer] = new BufferingConnection(conn, options.__messages_per_send) + conn.on('close', () => { + log("Connection close: ", conn.peer) + delete openDataConnections[conn.peer] // TODO: check if works properly + }) + app.start().then(() => { assistDemandedRestart = false }) }); }); @@ -118,10 +111,11 @@ export default function(opts: Partial = {}) { let callingState: CallingState = CallingState.False; peer.on('call', function(call) { + log("Call: ", call) if (!peer) { return; } - const dataConn: DataConnection | undefined = peer - .connections[call.peer].find(c => c.type === 'data'); - if (callingState !== CallingState.False || !dataConn) { + const dataConn: DataConnection | undefined = + openDataConnections[call.peer]?.conn; + if (callingState !== CallingState.False || !dataConn || !dataConn.open) { call.close(); warn("Call closed instantly: ", callingState, dataConn, dataConn.open) return; @@ -134,7 +128,7 @@ export default function(opts: Partial = {}) { sessionStorage.removeItem(options.session_calling_peer_key); } callingState = newState; - } + } const notifyCallEnd = () => { dataConn.open && dataConn.send("call_end"); @@ -208,26 +202,51 @@ export default function(opts: Partial = {}) { document.addEventListener("click", onInteraction) }); dataConn.on('data', (data: any) => { + if (!data) { return } if (data === "call_end") { - //console.log('receiving callend on call') + log('"call_end" received') onCallEnd(); return; } - // if (data && typeof data.video === 'boolean') { - // log('Recieved video toggle signal: ', data.video) - // callUI.toggleRemoteVideo(data.video) - // } - if (data && typeof data.name === 'string') { - //console.log("name",data) + if (data.name === 'string') { + log("Name received: ", data) callUI.setAssistentName(data.name); } 
- if (data && typeof data.x === 'number' && typeof data.y === 'number') { + if (data.type === "scroll" && Array.isArray(data.delta)) { + const scrEl = document.scrollingElement || document.documentElement + const [mouseX, mouseY] = mouse.getPosition() + const [dX, dY] = data.delta; + const el = document.elementFromPoint(mouseX-scrEl.scrollLeft, mouseY-scrEl.scrollTop) + let scrolled = false // what would be the browser-like logic? + if (el) { + if(el.scrollWidth > el.clientWidth) { + el.scrollLeft += data.delta[0] + scrolled = true + } + if (el && el.scrollHeight > el.clientHeight) { + el.scrollTop += data.delta[1] + scrolled = true + } + } + if (!scrolled) { + window.scroll(scrEl.scrollLeft + data.delta[0], scrEl.scrollTop + data.delta[1]) + } + } + if (data.type === "click" && typeof data.x === 'number' && typeof data.y === 'number') { + const el = document.elementFromPoint(data.x, data.y) + if (el instanceof HTMLElement) { + el.click() + el.focus() + } + return + } + if (typeof data.x === 'number' && typeof data.y === 'number') { mouse.move(data); } }); lStream.onVideoTrack(vTrack => { - const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") + const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") if (!sender) { warn("No video sender found") return diff --git a/tracker/tracker-assist/tsconfig.json b/tracker/tracker-assist/tsconfig.json index bb8f6a4c4..95d4f9408 100644 --- a/tracker/tracker-assist/tsconfig.json +++ b/tracker/tracker-assist/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "allowSyntheticDefaultImports": true, "declaration": true, "outDir": "./lib" diff --git a/tracker/tracker-axios/README.md b/tracker/tracker-axios/README.md index a092c4032..068fe3190 100644 --- a/tracker/tracker-axios/README.md +++ b/tracker/tracker-axios/README.md @@ -21,22 +21,25 @@ const tracker = new Tracker({ }); 
tracker.start(); -tracker.use(trackerAxios()); +tracker.use(trackerAxios({ /* options here*/ })); ``` Options: ```ts { instance: AxiosInstance; // default: axios - failuresOnly: boolean; // default: true + failuresOnly: boolean; // default: false captureWhen: (AxiosRequestConfig) => boolean; // default: () => true sessionTokenHeader: string; // default: undefined + ignoreHeaders: Array | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ] } ``` By default plugin connects to the static `axios` instance, but you can specify one with the `instance` option. -Set `failuresOnly` option to `false` if you want to record every single request regardless of the status code. By default only failed requests are captured, when the axios' promise is rejected. You can also [regulate](https://github.com/axios/axios#request-config) this axios behaviour with the `validateStatus` option. +Set `failuresOnly` option to `true` if you want to record only failed requests, when the axios' promise is rejected. You can also [regulate](https://github.com/axios/axios#request-config) axios failing behaviour with the `validateStatus` option. `captureWhen` parameter allows you to set a filter on what should be captured. The function will be called with the axios config object and expected to return `true` or `false`. In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use `sessionTokenHeader` option to specify the header name. This header will be appended automatically to the each axios request and will contain OpenReplay session identificator value. + +You can define list of headers that you don't want to capture with the `ignoreHeaders` options. Set its value to `false` if you want to catch them all (`true` if opposite). By default plugin ignores the list of headers that might be sensetive such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`. 
diff --git a/tracker/tracker-axios/package-lock.json b/tracker/tracker-axios/package-lock.json index 49bb3588d..9894d6bc8 100644 --- a/tracker/tracker-axios/package-lock.json +++ b/tracker/tracker-axios/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-axios", - "version": "3.0.1", + "version": "3.4.1", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.3.tgz", - "integrity": "sha512-50C2cwJFENeHNjXVV90uIA5YE1bxfGbhI8e76Nfw9Pg+GVN38DcvGhr3PJ3OKjioT9V4gXBbvtE/RDGRaJJWLA==", + "version": "3.4.9", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.9.tgz", + "integrity": "sha512-7w1ddAboWu6NN926ySMUsKG6kmlYM0BYelSRIPM1xdoddLMRKZT4XaggLYjFezNSi9UJ9WYI8qwMHFIkS9lhCQ==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -782,9 +782,9 @@ } }, "typescript": { - "version": "3.9.9", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", - "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-axios/package.json b/tracker/tracker-axios/package.json index 9f1bce79b..06d2b41a0 100644 --- a/tracker/tracker-axios/package.json +++ b/tracker/tracker-axios/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-axios", "description": "Tracker plugin for axios requests recording", - "version": "3.0.1", + "version": "3.4.2", "keywords": [ "axios", "logging", @@ -20,14 +20,14 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", 
"axios": "^0.21.2" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.9", "axios": "^0.21.2", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-axios/src/index.ts b/tracker/tracker-axios/src/index.ts index 3c28260ca..f9d53e025 100644 --- a/tracker/tracker-axios/src/index.ts +++ b/tracker/tracker-axios/src/index.ts @@ -1,24 +1,31 @@ -import { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'; +import type { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'; import axios from 'axios'; import { App, Messages } from '@openreplay/tracker'; -import { getExceptionMessage } from '@openreplay/tracker/lib/modules/exception'; // TODO: export from tracker root -import { buildFullPath } from './url'; +import { getExceptionMessage } from '@openreplay/tracker/lib/modules/exception.js'; // TODO: export from tracker root +import { buildFullPath } from './url.js'; export interface Options { sessionTokenHeader?: string; instance: AxiosInstance; failuresOnly: boolean; captureWhen: (AxiosRequestConfig) => boolean; - //ingoreHeaders: Array | boolean; + ignoreHeaders: Array | boolean; +} + + +function isAxiosResponse(r: any): r is AxiosResponse { + return typeof r === "object" && + typeof r.config === "object" && + typeof r.status === "number" } export default function(opts: Partial = {}) { const options: Options = Object.assign( { instance: axios, - failuresOnly: true, + failuresOnly: false, captureWhen: () => true, - //ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ], + ignoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ], }, opts, ); @@ -27,48 +34,80 @@ export default function(opts: Partial = {}) { return; } - const sendFetchMessage = (response: AxiosResponse) => { + const ihOpt = options.ignoreHeaders + const isHIgnoring = Array.isArray(ihOpt) + ? 
name => ihOpt.includes(name) + : () => ihOpt + + const sendFetchMessage = (res: AxiosResponse) => { // @ts-ignore - const startTime: number = response.config.__openreplayStartTs; + const startTime: number = res.config.__openreplayStartTs; const duration = performance.now() - startTime; if (typeof startTime !== 'number') { return; } - let requestData: string = ''; - if (typeof response.config.data === 'string') { - requestData = response.config.data; + let reqBody: string = ''; + if (typeof res.config.data === 'string') { + reqBody = res.config.data; } else { try { - requestData = JSON.stringify(response.config.data) || ''; - } catch (e) {} + reqBody = JSON.stringify(res.config.data) || ''; + } catch (e) {} // TODO: app debug } - let responseData: string = ''; - if (typeof response.data === 'string') { - responseData = response.data; + let resBody: string = ''; + if (typeof res.data === 'string') { + resBody = res.data; } else { try { - responseData = JSON.stringify(response.data) || ''; + resBody = JSON.stringify(res.data) || ''; } catch (e) {} } + const reqHs: Record = {} + const resHs: Record = {} + // TODO: type safe axios headers + if (ihOpt !== true) { + function writeReqHeader([n, v]: [string, string]) { + if (!isHIgnoring(n)) { reqHs[n] = v } + } + if (res.config.headers instanceof Headers) { + res.config.headers.forEach((v, n) => writeReqHeader([n, v])) + } else if (Array.isArray(res.config.headers)) { + res.config.headers.forEach(writeReqHeader); + } else if (typeof res.config.headers === 'object') { + Object.entries(res.config.headers as Record).forEach(writeReqHeader) + } + + // TODO: type safe axios headers + if (typeof res.headers === 'object') { + Object.entries(res.headers as Record).forEach(([v, n]) => { if (!isHIgnoring(n)) resHs[n] = v }) + } + } + // Why can't axios propogate the final request URL somewhere? 
- const fullURL = buildFullPath(response.config.baseURL, options.instance.getUri(response.config)); + const fullURL = buildFullPath(res.config.baseURL, options.instance.getUri(res.config)); app.send( Messages.Fetch( - typeof response.config.method === 'string' ? response.config.method.toUpperCase() : 'GET', + typeof res.config.method === 'string' ? res.config.method.toUpperCase() : 'GET', fullURL, - requestData, - responseData, - response.status, + JSON.stringify({ + headers: reqHs, + body: reqBody, + }), + JSON.stringify({ + headers: resHs, + body: resBody, + }), + res.status, startTime + performance.timing.navigationStart, duration, ), ); } - + // TODO: why app.safe doesn't work here? options.instance.interceptors.request.use(function (config) { if (options.sessionTokenHeader) { const sessionToken = app.getSessionToken(); @@ -80,7 +119,7 @@ export default function(opts: Partial = {}) { config.headers.append(options.sessionTokenHeader, sessionToken); } else if (Array.isArray(config.headers)) { config.headers.push([options.sessionTokenHeader, sessionToken]); - } else { + } else if (typeof config.headers === 'object') { config.headers[options.sessionTokenHeader] = sessionToken; } } @@ -113,6 +152,11 @@ export default function(opts: Partial = {}) { app.send(getExceptionMessage(error, [])); } + // TODO: common case (selector) + if (isAxiosResponse(error)) { + sendFetchMessage(error) + } + return Promise.reject(error); }); } diff --git a/tracker/tracker-axios/tsconfig.json b/tracker/tracker-axios/tsconfig.json index ce07a685b..dd1ee258f 100644 --- a/tracker/tracker-axios/tsconfig.json +++ b/tracker/tracker-axios/tsconfig.json @@ -7,6 +7,7 @@ "module": "es6", "moduleResolution": "node", "declaration": true, - "outDir": "./lib" + "outDir": "./lib", + "lib": ["es6", "dom", "es2017"] // is all necessary? 
} } diff --git a/tracker/tracker-fetch/README.md b/tracker/tracker-fetch/README.md index d72201e35..b7fca2e4b 100644 --- a/tracker/tracker-fetch/README.md +++ b/tracker/tracker-fetch/README.md @@ -1,7 +1,7 @@ # OpenReplay Tracker Fetch plugin Tracker plugin to support tracking of the `fetch` requests payload. -Additionally it populates the requests with `sessionID` header for backend logging. +Additionally it populates the requests with `sessionToken` header for backend logging. ## Installation @@ -23,13 +23,24 @@ const tracker = new Tracker({ }); tracker.start(); -export const fetch = tracker.use(trackerFetch({ - sessionTokenHeader: 'X-Session-ID', // optional - failuresOnly: true //optional -})); +export const fetch = tracker.use(trackerFetch({ /* options here*/ })); fetch('https://my.api.io/resource').then(response => response.json()).then(body => console.log(body)); ``` -In case you use OpenReplay integrations (sentry, bugsnag or others), you can use `sessionTokenHeader` option to specify the header name. This header will be appended automatically to the each fetch request and will contain OpenReplay session identificator value. -Set `failuresOnly` option to `true` if you want to record only requests with the status code >= 400. \ No newline at end of file +Options: +```ts +{ + failuresOnly: boolean, // default false + sessionTokenHeader: string | undefined, // default undefined + ignoreHeaders: Array | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ] +} + +``` + +Set `failuresOnly` option to `true` if you want to record only requests with the status code >= 400. + +In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use `sessionTokenHeader` option to specify the header name. This header will be appended automatically to the each fetch request and will contain OpenReplay session identificator value. 
+ +You can define list of headers that you don't want to capture with the `ignoreHeaders` options. Set its value to `false` if you want to catch them all (`true` if opposite). By default plugin ignores the list of headers that might be sensetive such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`. + diff --git a/tracker/tracker-fetch/package-lock.json b/tracker/tracker-fetch/package-lock.json index b8d27b87e..16afd2f75 100644 --- a/tracker/tracker-fetch/package-lock.json +++ b/tracker/tracker-fetch/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-fetch", - "version": "3.0.0", + "version": "3.4.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/tracker/tracker-fetch/package.json b/tracker/tracker-fetch/package.json index 237a6e326..1d650bf6e 100644 --- a/tracker/tracker-fetch/package.json +++ b/tracker/tracker-fetch/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-fetch", "description": "Tracker plugin for fetch requests recording ", - "version": "3.0.0", + "version": "3.4.1", "keywords": [ "fetch", "logging", diff --git a/tracker/tracker-fetch/src/index.ts b/tracker/tracker-fetch/src/index.ts index 3bfa30034..fbce7ac31 100644 --- a/tracker/tracker-fetch/src/index.ts +++ b/tracker/tracker-fetch/src/index.ts @@ -2,13 +2,17 @@ import { App, Messages } from '@openreplay/tracker'; export interface Options { sessionTokenHeader?: string; - failuresOnly?: boolean; + replaceDefault: boolean; // overrideDefault ? + failuresOnly: boolean; + ignoreHeaders: Array | boolean; } export default function(opts: Partial = {}) { const options: Options = Object.assign( { + replaceDefault: false, failuresOnly: false, + ignoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ], }, opts, ); @@ -18,7 +22,12 @@ export default function(opts: Partial = {}) { return window.fetch; } - return async (input: RequestInfo, init: RequestInit = {}) => { + const ihOpt = options.ignoreHeaders + const isHIgnoring = Array.isArray(ihOpt) + ? 
name => ihOpt.includes(name) + : () => ihOpt + + const fetch = async (input: RequestInfo, init: RequestInit = {}) => { if (typeof input !== 'string') { return window.fetch(input, init); } @@ -44,20 +53,50 @@ export default function(opts: Partial = {}) { return response } const r = response.clone(); - r.text().then(text => + + r.text().then(text => { + const reqHs: Record = {} + const resHs: Record = {} + if (ihOpt !== true) { + function writeReqHeader([n, v]) { + if (!isHIgnoring(n)) { reqHs[n] = v } + } + if (init.headers instanceof Headers) { + init.headers.forEach((v, n) => writeReqHeader([n, v])) + } else if (Array.isArray(init.headers)) { + init.headers.forEach(writeReqHeader); + } else if (typeof init.headers === 'object') { + Object.entries(init.headers).forEach(writeReqHeader) + } + + r.headers.forEach((v, n) => { if (!isHIgnoring(n)) resHs[n] = v }) + } + const req = JSON.stringify({ + headers: reqHs, + body: typeof init.body === 'string' ? init.body : '', + }) + const res = JSON.stringify({ + headers: resHs, + body: text, + }) app.send( Messages.Fetch( - typeof init.method === 'string' ? init.method : 'GET', + typeof init.method === 'string' ? init.method.toUpperCase() : 'GET', input, - typeof init.body === 'string' ? 
init.body : '', - text, + req, + res, r.status, startTime + performance.timing.navigationStart, duration, ), - ), - ); + ) + }); return response; }; + if (options.replaceDefault) { + window.fetch = fetch + } + return fetch; }; + } diff --git a/tracker/tracker-fetch/tsconfig.json b/tracker/tracker-fetch/tsconfig.json index ce07a685b..258c2f510 100644 --- a/tracker/tracker-fetch/tsconfig.json +++ b/tracker/tracker-fetch/tsconfig.json @@ -7,6 +7,7 @@ "module": "es6", "moduleResolution": "node", "declaration": true, - "outDir": "./lib" + "outDir": "./lib", + "lib": ["es6", "dom", "es2017"] } } diff --git a/tracker/tracker-ngrx/package-lock.json b/tracker/tracker-ngrx/package-lock.json index 7870facb0..60cfabefe 100644 --- a/tracker/tracker-ngrx/package-lock.json +++ b/tracker/tracker-ngrx/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-ngrx/package.json b/tracker/tracker-ngrx/package.json index 
d79dce81b..614c96f18 100644 --- a/tracker/tracker-ngrx/package.json +++ b/tracker/tracker-ngrx/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-ngrx", "description": "Tracker plugin for NgRx state recording", - "version": "3.0.0", + "version": "3.4.8", "keywords": [ "ngrx", "logging", @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^5.1.0", + "@openreplay/tracker": "^3.4.8", "@ngrx/store": ">=4" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-ngrx/src/index.ts b/tracker/tracker-ngrx/src/index.ts index 508cf24b0..e967bacc7 100644 --- a/tracker/tracker-ngrx/src/index.ts +++ b/tracker/tracker-ngrx/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from './syncod'; +import { Encoder, sha1 } from './syncod/index.js'; export interface Options { actionFilter: (action: any) => boolean; diff --git a/tracker/tracker-ngrx/tsconfig.json b/tracker/tracker-ngrx/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-ngrx/tsconfig.json +++ b/tracker/tracker-ngrx/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } diff --git a/tracker/tracker-redux/package-lock.json b/tracker/tracker-redux/package-lock.json index 4d070f717..3cb97282f 100644 --- a/tracker/tracker-redux/package-lock.json +++ b/tracker/tracker-redux/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + 
"resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-redux/package.json b/tracker/tracker-redux/package.json index 62c806e2f..87a365754 100644 --- a/tracker/tracker-redux/package.json +++ b/tracker/tracker-redux/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-redux", "description": "Tracker plugin for Redux state recording", - "version": "3.0.0", + "version": "3.4.8", "keywords": [ "redux", "logging", @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "redux": "^4.0.0" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-redux/src/index.ts b/tracker/tracker-redux/src/index.ts index a3b5fee3a..5a4749e71 100644 --- a/tracker/tracker-redux/src/index.ts +++ b/tracker/tracker-redux/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from './syncod'; +import { Encoder, sha1 } from './syncod/index.js'; export interface Options { actionFilter: (action: any) => boolean; diff 
--git a/tracker/tracker-redux/tsconfig.json b/tracker/tracker-redux/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-redux/tsconfig.json +++ b/tracker/tracker-redux/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } diff --git a/tracker/tracker-vuex/package-lock.json b/tracker/tracker-vuex/package-lock.json index 04e6eb201..35fd251fe 100644 --- a/tracker/tracker-vuex/package-lock.json +++ b/tracker/tracker-vuex/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", - "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-vuex/package.json b/tracker/tracker-vuex/package.json index a9f09f100..005a9393c 100644 --- a/tracker/tracker-vuex/package.json +++ b/tracker/tracker-vuex/package.json @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + 
"@openreplay/tracker": "^3.4.8", "@ngrx/store": ">=4" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-vuex/src/index.ts b/tracker/tracker-vuex/src/index.ts index 20b1e2b83..30333904e 100644 --- a/tracker/tracker-vuex/src/index.ts +++ b/tracker/tracker-vuex/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from "./syncod"; +import { Encoder, sha1 } from "./syncod/index.js"; export interface Options { filter: (mutation: any, state: any) => boolean; diff --git a/tracker/tracker-vuex/tsconfig.json b/tracker/tracker-vuex/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-vuex/tsconfig.json +++ b/tracker/tracker-vuex/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } diff --git a/tracker/tracker/package-lock.json b/tracker/tracker/package-lock.json index 6eba67e3c..287203b30 100644 --- a/tracker/tracker/package-lock.json +++ b/tracker/tracker/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker", - "version": "3.3.0", + "version": "3.4.7", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -293,11 +293,6 @@ "to-fast-properties": "^2.0.0" } }, - "@medv/finder": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@medv/finder/-/finder-2.0.0.tgz", - "integrity": "sha512-gV4jOsGpiWNDGd8Dw7tod1Fc9Gc7StaOT4oZ/6srHRWtsHU+HYWzmkYsa3Qy/z0e9tY1WpJ9wWdBFGskfbzoug==" - }, "@nodelib/fs.scandir": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz", @@ -480,9 +475,9 @@ } }, "acorn": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", - "integrity": 
"sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true }, "acorn-jsx": { @@ -504,18 +499,26 @@ } }, "ansi-escapes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", - "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "requires": { - "type-fest": "^0.8.1" + "type-fest": "^0.21.3" + }, + "dependencies": { + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + } } }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true }, "ansi-styles": { @@ -615,9 +618,9 @@ } }, "cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": 
"sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", "dev": true }, "co": { @@ -1089,9 +1092,9 @@ } }, "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" @@ -1184,24 +1187,84 @@ "dev": true }, "inquirer": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", - "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", + "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", "dev": true, "requires": { "ansi-escapes": "^4.2.1", - "chalk": "^2.4.2", + "chalk": "^4.1.0", "cli-cursor": "^3.1.0", - "cli-width": "^2.0.0", + "cli-width": "^3.0.0", "external-editor": "^3.0.3", "figures": "^3.0.0", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "mute-stream": "0.0.8", - "run-async": "^2.2.0", - "rxjs": "^6.5.3", + "run-async": "^2.4.0", + "rxjs": "^6.6.0", "string-width": "^4.1.0", - "strip-ansi": "^5.1.0", + "strip-ansi": "^6.0.0", "through": "^2.3.6" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "is-extglob": { @@ -1231,12 +1294,6 @@ "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=", "dev": true }, - "is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", - "dev": true - }, "isexe": { "version": "2.0.0", 
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -1343,9 +1400,9 @@ } }, "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "dev": true }, "merge-stream": { @@ -1376,18 +1433,18 @@ } }, "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true }, "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "dev": true, "requires": { - "minimist": "0.0.8" + "minimist": "^1.2.5" } }, "ms": { @@ -1424,9 +1481,9 @@ } }, "onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, "requires": { "mimic-fn": "^2.1.0" @@ -1474,9 +1531,9 @@ "dev": true }, "path-parse": { - "version": "1.0.6", 
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, "picomatch": { @@ -1617,13 +1674,10 @@ } }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", - "dev": true, - "requires": { - "is-promise": "^2.1.0" - } + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true }, "run-parallel": { "version": "1.1.9", @@ -1632,9 +1686,9 @@ "dev": true }, "rxjs": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", - "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", + "version": "6.6.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", + "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", "dev": true, "requires": { "tslib": "^1.9.0" @@ -1683,9 +1737,9 @@ "dev": true }, "signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", "dev": true }, "slash": { @@ -1749,23 +1803,23 @@ "integrity": 
"sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==" }, "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "strip-ansi": "^6.0.1" }, "dependencies": { "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } } } @@ -1922,9 +1976,9 @@ "dev": true }, "typescript": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.4.tgz", - "integrity": "sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "uri-js": { diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index d88d366da..e128ee0e0 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": 
"@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.0", + "version": "3.4.9", "keywords": [ "logging", "replay" @@ -35,10 +35,9 @@ "rollup": "^2.17.0", "rollup-plugin-terser": "^6.1.0", "semver": "^6.3.0", - "typescript": "^4.3.4" + "typescript": "^4.6.0-dev.20211126" }, "dependencies": { - "@medv/finder": "^2.0.0", "error-stack-parser": "^2.0.6" }, "engines": { diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 5efea1cf9..54fe9050f 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -1,17 +1,17 @@ -import { timestamp, log, warn } from '../utils'; -import { Timestamp, TechnicalInfo, PageClose } from '../../messages'; -import Message from '../../messages/message'; -import Nodes from './nodes'; -import Observer from './observer'; -import Ticker from './ticker'; +import { timestamp, log, warn } from "../utils.js"; +import { Timestamp, PageClose } from "../../messages/index.js"; +import Message from "../../messages/message.js"; +import Nodes from "./nodes.js"; +import Observer from "./observer.js"; +import Ticker from "./ticker.js"; -import { deviceMemory, jsHeapSizeLimit } from '../modules/performance'; +import { deviceMemory, jsHeapSizeLimit } from "../modules/performance.js"; -import type { Options as ObserverOptions } from './observer'; +import type { Options as ObserverOptions } from "./observer.js"; -import type { Options as WebworkerOptions, WorkerMessageData } from '../../messages/webworker'; +import type { Options as WebworkerOptions, WorkerMessageData } from "../../messages/webworker.js"; -interface OnStartInfo { +export interface OnStartInfo { sessionID: string, sessionToken: string, userUUID: string, @@ -24,10 +24,11 @@ export type Options = { session_pageno_key: string; local_uuid_key: string; ingestPoint: string; - resourceBaseHref: string, // resourceHref? + resourceBaseHref: string | null, // resourceHref? 
//resourceURLRewriter: (url: string) => string | boolean, __is_snippet: boolean; __debug_report_edp: string | null; + __debug_log: boolean; onStart?: (info: OnStartInfo) => void; } & ObserverOptions & WebworkerOptions; @@ -43,7 +44,7 @@ export default class App { readonly ticker: Ticker; readonly projectKey: string; private readonly messages: Array = []; - private readonly observer: Observer; + /*private*/ readonly observer: Observer; // temp, for fast security fix. TODO: separate security/obscure module with nodeCallback that incapsulates `textMasked` functionality from Observer private readonly startCallbacks: Array = []; private readonly stopCallbacks: Array = []; private readonly commitCallbacks: Array = []; @@ -67,9 +68,10 @@ export default class App { session_pageno_key: '__openreplay_pageno', local_uuid_key: '__openreplay_uuid', ingestPoint: DEFAULT_INGEST_POINT, - resourceBaseHref: '', + resourceBaseHref: null, __is_snippet: false, __debug_report_edp: null, + __debug_log: false, obscureTextEmails: true, obscureTextNumbers: false, captureIFrames: false, @@ -90,10 +92,9 @@ export default class App { new Blob([`WEBWORKER_BODY`], { type: 'text/javascript' }), ), ); - // this.worker.onerror = e => { - // this.send(new TechnicalInfo("webworker_error", JSON.stringify(e))); - // /* TODO: send report */ - // } + this.worker.onerror = e => { + this._debug("webworker_error", e) + } let lastTs = timestamp(); let fileno = 0; this.worker.onmessage = ({ data }: MessageEvent) => { @@ -114,11 +115,11 @@ export default class App { this.attachEventListener(document, 'mouseleave', alertWorker, false, false); this.attachEventListener(document, 'visibilitychange', alertWorker, false); } catch (e) { - this.sendDebugReport("worker_start", e); + this._debug("worker_start", e); } } - private sendDebugReport(context: string, e: any) { + private _debug(context: string, e: any) { if(this.options.__debug_report_edp !== null) { fetch(this.options.__debug_report_edp, { method: 'POST', @@ 
-129,6 +130,9 @@ export default class App { }) }); } + if(this.options.__debug_log) { + warn("OpenReplay error: ", context, e) + } } send(message: Message, urgent = false): void { @@ -149,9 +153,13 @@ export default class App { } } - addCommitCallback(cb: CommitCallback): void { + attachCommitCallback(cb: CommitCallback): void { this.commitCallbacks.push(cb) } + // @Depricated (TODO: remove in 3.5.*) + addCommitCallback(cb: CommitCallback): void { + this.attachCommitCallback(cb) + } safe void>(fn: T): T { @@ -160,12 +168,11 @@ export default class App { try { fn.apply(this, args); } catch (e) { - app.send(new TechnicalInfo("error", JSON.stringify({ - time: timestamp(), - name: e.name, - message: e.message, - stack: e.stack - }))); + app._debug("safe_fn_call", e) + // time: timestamp(), + // name: e.name, + // message: e.message, + // stack: e.stack } } as any // TODO: correct typing } @@ -210,8 +217,10 @@ export default class App { return this.projectKey } getBaseHref(): string { - if (this.options.resourceBaseHref) { + if (typeof this.options.resourceBaseHref === 'string') { return this.options.resourceBaseHref + } else if (typeof this.options.resourceBaseHref === 'object') { + //switch between types } if (document.baseURI) { return document.baseURI @@ -221,6 +230,12 @@ export default class App { ?.getElementsByTagName("base")[0] ?.getAttribute("href") || location.origin + location.pathname } + resolveResourceURL(resourceURL: string): string { + const base = new URL(this.getBaseHref()) + base.pathname += "/" + new URL(resourceURL).pathname + base.pathname.replace(/\/+/g, "/") + return base.toString() + } isServiceURL(url: string): boolean { return url.startsWith(this.options.ingestPoint) @@ -308,13 +323,14 @@ export default class App { return onStartInfo; }) .catch(e => { - this.stop(); + sessionStorage.removeItem(this.options.session_token_key) + this.stop() warn("OpenReplay was unable to start. 
", e) - this.sendDebugReport("session_start", e); - throw e; + this._debug("session_start", e); + throw e }) } - return Promise.reject("Player is active"); + return Promise.reject("Player is already active"); } start(reset: boolean = false): Promise { diff --git a/tracker/tracker/src/main/app/observer.ts b/tracker/tracker/src/main/app/observer.ts index 493c7aaac..3ed5088af 100644 --- a/tracker/tracker/src/main/app/observer.ts +++ b/tracker/tracker/src/main/app/observer.ts @@ -1,4 +1,4 @@ -import { stars, hasOpenreplayAttribute } from '../utils'; +import { stars, hasOpenreplayAttribute } from "../utils.js"; import { CreateDocument, CreateElementNode, @@ -11,8 +11,8 @@ import { MoveNode, RemoveNode, CreateIFrameDocument, -} from '../../messages'; -import App from './index'; +} from "../../messages/index.js"; +import App from "./index.js"; interface Window extends WindowProxy { HTMLInputElement: typeof HTMLInputElement, @@ -234,30 +234,43 @@ export default class Observer { this.app.send(new SetNodeAttribute(id, name, value)); } + /* TODO: abstract sanitation */ + getInnerTextSecure(el: HTMLElement): string { + const id = this.app.nodes.getID(el) + if (!id) { return '' } + return this.checkObscure(id, el.innerText) + + } + + private checkObscure(id: number, data: string): string { + if (this.textMasked.has(id)) { + return data.replace( + /[^\f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]/g, + '█', + ); + } + if (this.options.obscureTextNumbers) { + data = data.replace(/\d/g, '0'); + } + if (this.options.obscureTextEmails) { + data = data.replace( + /([^\s]+)@([^\s]+)\.([^\s]+)/g, + (...f: Array) => + stars(f[1]) + '@' + stars(f[2]) + '.' 
+ stars(f[3]), + ); + } + return data + } + private sendNodeData(id: number, parentElement: Element, data: string): void { if (this.isInstance(parentElement, HTMLStyleElement) || this.isInstance(parentElement, SVGStyleElement)) { this.app.send(new SetCSSDataURLBased(id, data, this.app.getBaseHref())); return; } - if (this.textMasked.has(id)) { - data = data.replace( - /[^\f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]/g, - '█', - ); - } else { - if (this.options.obscureTextNumbers) { - data = data.replace(/\d/g, '0'); - } - if (this.options.obscureTextEmails) { - data = data.replace( - /([^\s]+)@([^\s]+)\.([^\s]+)/g, - (...f: Array) => - stars(f[1]) + '@' + stars(f[2]) + '.' + stars(f[3]), - ); - } - } + data = this.checkObscure(id, data) this.app.send(new SetNodeData(id, data)); } + /* end TODO: abstract sanitation */ private bindNode(node: Node): void { const r = this.app.nodes.registerNode(node); @@ -279,6 +292,7 @@ export default class Observer { ? NodeFilter.FILTER_REJECT : NodeFilter.FILTER_ACCEPT, }, + // @ts-ignore false, ); while (walker.nextNode()) { @@ -411,15 +425,17 @@ export default class Observer { private iframeObservers: Observer[] = []; private handleIframe(iframe: HTMLIFrameElement): void { - const handle = () => { - const context = iframe.contentWindow as Window | null + let context: Window | null = null + const handle = this.app.safe(() => { const id = this.app.nodes.getID(iframe) - if (!context || id === undefined) { return } - + if (id === undefined) { return } + if (iframe.contentWindow === context) { return } + context = iframe.contentWindow as Window | null; + if (!context) { return } const observer = new Observer(this.app, this.options, context) this.iframeObservers.push(observer) observer.observeIframe(id, context) - } + }) this.app.attachEventListener(iframe, "load", handle) handle() } diff --git a/tracker/tracker/src/main/app/ticker.ts b/tracker/tracker/src/main/app/ticker.ts index 51fa7f579..62ca69af9 100644 
--- a/tracker/tracker/src/main/app/ticker.ts +++ b/tracker/tracker/src/main/app/ticker.ts @@ -1,4 +1,4 @@ -import App from './index'; +import App from "./index.js"; type Callback = () => void; function wrap(callback: Callback, n: number): Callback { diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts index 79e2cbee8..6af325e57 100644 --- a/tracker/tracker/src/main/index.ts +++ b/tracker/tracker/src/main/index.ts @@ -1,33 +1,35 @@ -import App, { DEFAULT_INGEST_POINT } from './app'; -export { default as App } from './app'; +import App, { DEFAULT_INGEST_POINT } from "./app/index.js"; +export { default as App } from './app/index.js'; -import { UserID, UserAnonymousID, Metadata, RawCustomEvent, CustomIssue } from '../messages'; -import * as _Messages from '../messages'; +import { UserID, UserAnonymousID, Metadata, RawCustomEvent, CustomIssue } from "../messages/index.js"; +import * as _Messages from "../messages/index.js"; export const Messages = _Messages; -import Connection from './modules/connection'; -import Console from './modules/console'; -import Exception, { getExceptionMessageFromEvent, getExceptionMessage } from './modules/exception'; -import Img from './modules/img'; -import Input from './modules/input'; -import Mouse from './modules/mouse'; -import Timing from './modules/timing'; -import Performance from './modules/performance'; -import Scroll from './modules/scroll'; -import Viewport from './modules/viewport'; -import Longtasks from './modules/longtasks'; -import CSSRules from './modules/cssrules'; -import { IN_BROWSER, deprecationWarn } from './utils'; +import Connection from "./modules/connection.js"; +import Console from "./modules/console.js"; +import Exception, { getExceptionMessageFromEvent, getExceptionMessage } from "./modules/exception.js"; +import Img from "./modules/img.js"; +import Input from "./modules/input.js"; +import Mouse from "./modules/mouse.js"; +import Timing from "./modules/timing.js"; +import 
Performance from "./modules/performance.js"; +import Scroll from "./modules/scroll.js"; +import Viewport from "./modules/viewport.js"; +import Longtasks from "./modules/longtasks.js"; +import CSSRules from "./modules/cssrules.js"; +import { IN_BROWSER, deprecationWarn, DOCS_HOST } from "./utils.js"; + +import { Options as AppOptions } from "./app/index.js"; +import { Options as ConsoleOptions } from "./modules/console.js"; +import { Options as ExceptionOptions } from "./modules/exception.js"; +import { Options as InputOptions } from "./modules/input.js"; +import { Options as PerformanceOptions } from "./modules/performance.js"; +import { Options as TimingOptions } from "./modules/timing.js"; + +export type { OnStartInfo } from './app/index.js'; -import { Options as AppOptions } from './app'; -import { Options as ConsoleOptions } from './modules/console'; -import { Options as ExceptionOptions } from './modules/exception'; -import { Options as InputOptions } from './modules/input'; -import { Options as MouseOptions } from './modules/mouse'; -import { Options as PerformanceOptions } from './modules/performance'; -import { Options as TimingOptions } from './modules/timing'; export type Options = Partial< - AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & MouseOptions & PerformanceOptions & TimingOptions + AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & PerformanceOptions & TimingOptions > & { projectID?: number; // For the back compatibility only (deprecated) projectKey: string; @@ -41,13 +43,13 @@ const DOCS_SETUP = '/installation/setup-or'; function processOptions(obj: any): obj is Options { if (obj == null) { - console.error(`OpenReplay: invalid options argument type. Please, check documentation on https://docs.openreplay.com${ DOCS_SETUP }`); + console.error(`OpenReplay: invalid options argument type. 
Please, check documentation on ${DOCS_HOST}${DOCS_SETUP}`); return false; } if (typeof obj.projectKey !== 'string') { if (typeof obj.projectKey !== 'number') { if (typeof obj.projectID !== 'number') { // Back compatability - console.error(`OpenReplay: projectKey is missing or wrong type (string is expected). Please, check https://docs.openreplay.com${ DOCS_SETUP } for more information.`) + console.error(`OpenReplay: projectKey is missing or wrong type (string is expected). Please, check ${DOCS_HOST}${DOCS_SETUP} for more information.`) return false } else { obj.projectKey = obj.projectID.toString(); @@ -59,7 +61,7 @@ function processOptions(obj: any): obj is Options { } } if (typeof obj.sessionToken !== 'string' && obj.sessionToken != null) { - console.warn(`OpenReplay: invalid options argument type. Please, check documentation on https://docs.openreplay.com${ DOCS_SETUP }`) + console.warn(`OpenReplay: invalid options argument type. Please, check documentation on ${DOCS_HOST}${DOCS_SETUP}`) } return true; } @@ -70,11 +72,18 @@ export default class API { if (!IN_BROWSER || !processOptions(options)) { return; } + if ((window as any).__OPENREPLAY__) { + console.error("OpenReplay: one tracker instance has been initialised already") + return + } if (!options.__DISABLE_SECURE_MODE && location.protocol !== 'https:') { console.error("OpenReplay: Your website must be publicly accessible and running on SSL in order for OpenReplay to properly capture and replay the user session. You can disable this check by setting `__DISABLE_SECURE_MODE` option to `true` if you are testing in localhost. Keep in mind, that asset files on a local machine are not available to the outside world. 
This might affect tracking if you use css files.") return; } - const doNotTrack = options.respectDoNotTrack && (navigator.doNotTrack == '1' || window.doNotTrack == '1'); + const doNotTrack = options.respectDoNotTrack && + (navigator.doNotTrack == '1' + // @ts-ignore + || window.doNotTrack == '1'); this.app = doNotTrack || !('Map' in window) || !('Set' in window) || @@ -94,14 +103,14 @@ export default class API { Exception(this.app, options); Img(this.app); Input(this.app, options); - Mouse(this.app, options); + Mouse(this.app); Timing(this.app, options); Performance(this.app, options); Scroll(this.app); Longtasks(this.app); - (window as any).__OPENREPLAY__ = (window as any).__OPENREPLAY__ || this; + (window as any).__OPENREPLAY__ = this; } else { - console.log("OpenReplay: browser doesn't support API required for tracking.") + console.log("OpenReplay: browser doesn't support API required for tracking or doNotTrack is set to 1.") const req = new XMLHttpRequest(); const orig = options.ingestPoint || DEFAULT_INGEST_POINT; req.open("POST", orig + "/v1/web/not-started"); @@ -131,15 +140,15 @@ export default class API { return this.isActive(); } - start(): void { + start() /*: Promise*/ { if (!IN_BROWSER) { - console.error(`OpenReplay: you are trying to start Tracker on a node.js environment. If you want to use OpenReplay with SSR, please, use componentDidMount or useEffect API for placing the \`tracker.start()\` line. Check documentation on https://docs.openreplay.com${ DOCS_SETUP }`) - return; + console.error(`OpenReplay: you are trying to start Tracker on a node.js environment. If you want to use OpenReplay with SSR, please, use componentDidMount or useEffect API for placing the \`tracker.start()\` line. 
Check documentation on ${DOCS_HOST}${DOCS_SETUP}`) + return Promise.reject("Trying to start not in browser."); } if (this.app === null) { - return; + return Promise.reject("Browser doesn't support required api, or doNotTrack is active."); } - this.app.start(); + return this.app.start(); } stop(): void { if (this.app === null) { diff --git a/tracker/tracker/src/main/modules/connection.ts b/tracker/tracker/src/main/modules/connection.ts index bd582954f..a2767790c 100644 --- a/tracker/tracker/src/main/modules/connection.ts +++ b/tracker/tracker/src/main/modules/connection.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { ConnectionInformation } from '../../messages'; +import App from "../app/index.js"; +import { ConnectionInformation } from "../../messages/index.js"; export default function(app: App): void { const connection: diff --git a/tracker/tracker/src/main/modules/console.ts b/tracker/tracker/src/main/modules/console.ts index 251ff8ca1..bbf259735 100644 --- a/tracker/tracker/src/main/modules/console.ts +++ b/tracker/tracker/src/main/modules/console.ts @@ -1,6 +1,6 @@ -import App from '../app'; -import { IN_BROWSER } from '../utils'; -import { ConsoleLog } from '../../messages'; +import App from "../app/index.js"; +import { IN_BROWSER } from "../utils.js"; +import { ConsoleLog } from "../../messages/index.js"; const printError: (e: Error) => string = IN_BROWSER && 'InstallTrigger' in window // detect Firefox @@ -110,7 +110,7 @@ export default function (app: App, opts: Partial): void { return; } - const sendConsoleLog = app.safe((level: string, args: any[]): void => + const sendConsoleLog = app.safe((level: string, args: unknown[]): void => app.send(new ConsoleLog(level, printf(args))), ); @@ -121,18 +121,35 @@ export default function (app: App, opts: Partial): void { app.attachStartCallback(reset); app.ticker.attach(reset, 33, false); - options.consoleMethods.forEach((method) => { - if (consoleMethods.indexOf(method) === -1) { - console.error(`Asayer: 
unsupported console method ${method}`); - return; - } - const fn = (console as any)[method]; - (console as any)[method] = function (...args: any[]): void { - fn.apply(this, args); - if (n++ > options.consoleThrottling) { + const patchConsole = (console: Console) => + options.consoleMethods!.forEach((method) => { + if (consoleMethods.indexOf(method) === -1) { + console.error(`OpenReplay: unsupported console method "${method}"`); return; } - sendConsoleLog(method, args); - }; - }); + const fn = (console as any)[method]; + (console as any)[method] = function (...args: unknown[]): void { + fn.apply(this, args); + if (n++ > options.consoleThrottling) { + return; + } + sendConsoleLog(method, args); + }; + }); + patchConsole(window.console); + + app.nodes.attachNodeCallback(app.safe(node => { + if (node instanceof HTMLIFrameElement) { + let context = node.contentWindow + if (context) { + patchConsole((context as (Window & typeof globalThis)).console) + } + app.attachEventListener(node, "load", () => { + if (node.contentWindow !== context) { + context = node.contentWindow + patchConsole((context as (Window & typeof globalThis)).console) + } + }) + } + })) } diff --git a/tracker/tracker/src/main/modules/cssrules.ts b/tracker/tracker/src/main/modules/cssrules.ts index 54166f717..18aa3f154 100644 --- a/tracker/tracker/src/main/modules/cssrules.ts +++ b/tracker/tracker/src/main/modules/cssrules.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { CSSInsertRuleURLBased, CSSDeleteRule, TechnicalInfo } from '../../messages'; +import App from "../app/index.js"; +import { CSSInsertRuleURLBased, CSSDeleteRule, TechnicalInfo } from "../../messages/index.js"; export default function(app: App | null) { if (app === null) { diff --git a/tracker/tracker/src/main/modules/exception.ts b/tracker/tracker/src/main/modules/exception.ts index 6a4720c35..45fe37465 100644 --- a/tracker/tracker/src/main/modules/exception.ts +++ b/tracker/tracker/src/main/modules/exception.ts @@ -1,6 +1,6 @@ 
-import App from '../app'; -import { JSException } from '../../messages'; -import Message from '../../messages/message'; +import App from "../app/index.js"; +import { JSException } from "../../messages/index.js"; +import Message from "../../messages/message.js"; import ErrorStackParser from 'error-stack-parser'; export interface Options { diff --git a/tracker/tracker/src/main/modules/img.ts b/tracker/tracker/src/main/modules/img.ts index e20a4d531..61e793b89 100644 --- a/tracker/tracker/src/main/modules/img.ts +++ b/tracker/tracker/src/main/modules/img.ts @@ -1,6 +1,6 @@ -import { timestamp, isURL } from '../utils'; -import App from '../app'; -import { ResourceTiming, SetNodeAttributeURLBased } from '../../messages'; +import { timestamp, isURL } from "../utils.js"; +import App from "../app/index.js"; +import { ResourceTiming, SetNodeAttributeURLBased } from "../../messages/index.js"; export default function (app: App): void { const sendImgSrc = app.safe(function (this: HTMLImageElement): void { diff --git a/tracker/tracker/src/main/modules/input.ts b/tracker/tracker/src/main/modules/input.ts index 96ca2f7c2..746c26f8f 100644 --- a/tracker/tracker/src/main/modules/input.ts +++ b/tracker/tracker/src/main/modules/input.ts @@ -1,6 +1,6 @@ -import { normSpaces, IN_BROWSER, getLabelAttribute, hasOpenreplayAttribute } from '../utils'; -import App from '../app'; -import { SetInputTarget, SetInputValue, SetInputChecked } from '../../messages'; +import { normSpaces, IN_BROWSER, getLabelAttribute, hasOpenreplayAttribute } from "../utils.js"; +import App from "../app/index.js"; +import { SetInputTarget, SetInputValue, SetInputChecked } from "../../messages/index.js"; function isInput(node: any): node is HTMLInputElement { if (!(node instanceof HTMLInputElement)) { diff --git a/tracker/tracker/src/main/modules/longtasks.ts b/tracker/tracker/src/main/modules/longtasks.ts index e74110b71..0f3a7e82a 100644 --- a/tracker/tracker/src/main/modules/longtasks.ts +++ 
b/tracker/tracker/src/main/modules/longtasks.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { LongTask } from '../../messages'; +import App from "../app/index.js"; +import { LongTask } from "../../messages/index.js"; // https://w3c.github.io/performance-timeline/#the-performanceentry-interface interface TaskAttributionTiming extends PerformanceEntry { @@ -47,5 +47,5 @@ export default function (app: App): void { const observer: PerformanceObserver = new PerformanceObserver((list) => list.getEntries().forEach(longTask), ); - observer.observe({ entryTypes: ['longtask'], buffered: true }); + observer.observe({ entryTypes: ['longtask'] }); } \ No newline at end of file diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index a0a526b93..3ec70e844 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -1,10 +1,30 @@ -import type { Options as FinderOptions } from '../vendors/finder/finder'; -import { finder } from '../vendors/finder/finder'; -import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils'; -import App from '../app'; -import { MouseMove, MouseClick } from '../../messages'; -import { getInputLabel } from './input'; +import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from "../utils.js"; +import App from "../app/index.js"; +import { MouseMove, MouseClick } from "../../messages/index.js"; +import { getInputLabel } from "./input.js"; +function _getSelector(target: Element): string { + let el: Element | null = target + let selector: string | null = null + do { + if (el.id) { + return `#${el.id}` + (selector ? ` > ${selector}` : '') + } + selector = + el.className.split(' ') + .map(cn => cn.trim()) + .filter(cn => cn !== '') + .reduce((sel, cn) => `${sel}.${cn}`, el.tagName.toLowerCase()) + + (selector ? 
` > ${selector}` : ''); + if (el === document.body) { + return selector + } + el = el.parentElement + } while (el !== document.body && el !== null) + return selector +} + +//TODO: fix (typescript doesn't allow work when the guard is inside the function) function getTarget(target: EventTarget | null): Element | null { if (target instanceof Element) { return _getTarget(target); @@ -51,47 +71,32 @@ function _getTarget(target: Element): Element | null { return target === document.documentElement ? null : target; } -function getTargetLabel(target: Element): string { - const dl = getLabelAttribute(target); - if (dl !== null) { - return dl; - } - const tag = target.tagName.toUpperCase(); - if (tag === 'INPUT') { - return getInputLabel(target as HTMLInputElement) - } - if (tag === 'BUTTON' || - tag === 'A' || - tag === 'LI' || - (target as HTMLElement).onclick != null || - target.getAttribute('role') === 'button' - ) { - const label: string = (target as HTMLElement).innerText || ''; - return normSpaces(label).slice(0, 100); - } - return ''; -} +export default function (app: App): void { + // const options: Options = Object.assign( + // {}, + // opts, + // ); -interface HeatmapsOptions { - finder: FinderOptions, -} - -export interface Options { - heatmaps: boolean | HeatmapsOptions; -} - -export default function (app: App, opts: Partial): void { - const options: Options = Object.assign( - { - heatmaps: { - finder: { - threshold: 5, - maxNumberOfTries: 600, - }, - }, - }, - opts, - ); + function getTargetLabel(target: Element): string { + const dl = getLabelAttribute(target); + if (dl !== null) { + return dl; + } + const tag = target.tagName.toUpperCase(); + if (tag === 'INPUT') { + return getInputLabel(target as HTMLInputElement) + } + if (tag === 'BUTTON' || + tag === 'A' || + tag === 'LI' || + (target as HTMLElement).onclick != null || + target.getAttribute('role') === 'button' + ) { + const label: string = app.observer.getInnerTextSecure(target as HTMLElement); + return 
normSpaces(label).slice(0, 100); + } + return ''; + } let mousePositionX = -1; let mousePositionY = -1; @@ -115,9 +120,7 @@ export default function (app: App, opts: Partial): void { const selectorMap: {[id:number]: string} = {}; function getSelector(id: number, target: Element): string { - if (options.heatmaps === false) { return '' } - return selectorMap[id] = selectorMap[id] || - finder(target, options.heatmaps === true ? undefined : options.heatmaps.finder); + return selectorMap[id] = selectorMap[id] || _getSelector(target); } app.attachEventListener( diff --git a/tracker/tracker/src/main/modules/performance.ts b/tracker/tracker/src/main/modules/performance.ts index 7deac3ef5..8eb7701eb 100644 --- a/tracker/tracker/src/main/modules/performance.ts +++ b/tracker/tracker/src/main/modules/performance.ts @@ -1,6 +1,6 @@ -import App from '../app'; -import { IN_BROWSER } from '../utils'; -import { PerformanceTrack } from '../../messages'; +import App from "../app/index.js"; +import { IN_BROWSER } from "../utils.js"; +import { PerformanceTrack } from "../../messages/index.js"; type Perf = { diff --git a/tracker/tracker/src/main/modules/scroll.ts b/tracker/tracker/src/main/modules/scroll.ts index c3e4b37cc..0f54ba8f9 100644 --- a/tracker/tracker/src/main/modules/scroll.ts +++ b/tracker/tracker/src/main/modules/scroll.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { SetViewportScroll, SetNodeScroll } from '../../messages'; +import App from "../app/index.js"; +import { SetViewportScroll, SetNodeScroll } from "../../messages/index.js"; export default function (app: App): void { let documentScroll = false; diff --git a/tracker/tracker/src/main/modules/timing.ts b/tracker/tracker/src/main/modules/timing.ts index e6e6df8ea..60e30019f 100644 --- a/tracker/tracker/src/main/modules/timing.ts +++ b/tracker/tracker/src/main/modules/timing.ts @@ -1,7 +1,7 @@ -import { isURL } from '../utils'; -import App from '../app'; -import { ResourceTiming, PageLoadTiming, 
PageRenderTiming } from '../../messages'; -import type Message from '../../messages/message'; +import { isURL } from "../utils.js"; +import App from "../app/index.js"; +import { ResourceTiming, PageLoadTiming, PageRenderTiming } from "../../messages/index.js"; +import type Message from "../../messages/message.js"; // Inspired by https://github.com/WPO-Foundation/RUM-SpeedIndex/blob/master/src/rum-speedindex.js @@ -122,7 +122,7 @@ export default function (app: App, opts: Partial): void { let resources: ResourcesTimeMap | null = {} function resourceTiming(entry: PerformanceResourceTiming): void { - if (entry.duration <= 0 || !isURL(entry.name) || app.isServiceURL(entry.name)) return; + if (entry.duration < 0 || !isURL(entry.name) || app.isServiceURL(entry.name)) return; if (resources !== null) { resources[entry.name] = entry.startTime + entry.duration; } diff --git a/tracker/tracker/src/main/modules/viewport.ts b/tracker/tracker/src/main/modules/viewport.ts index c780c65e4..626eadd12 100644 --- a/tracker/tracker/src/main/modules/viewport.ts +++ b/tracker/tracker/src/main/modules/viewport.ts @@ -1,9 +1,9 @@ -import App from '../app'; +import App from "../app/index.js"; import { SetPageLocation, SetViewportSize, SetPageVisibility, -} from '../../messages'; +} from "../../messages/index.js"; export default function (app: App): void { let url: string, width: number, height: number; diff --git a/tracker/tracker/src/main/tsconfig.json b/tracker/tracker/src/main/tsconfig.json index 0a92ed914..f6ac938a6 100644 --- a/tracker/tracker/src/main/tsconfig.json +++ b/tracker/tracker/src/main/tsconfig.json @@ -6,5 +6,6 @@ }, "references": [ { "path": "../messages" } - ] + ], + "exclude": ["app/observer"] } diff --git a/tracker/tracker/src/main/utils.ts b/tracker/tracker/src/main/utils.ts index 586d02ecb..5a8700f31 100644 --- a/tracker/tracker/src/main/utils.ts +++ b/tracker/tracker/src/main/utils.ts @@ -21,7 +21,8 @@ export const IN_BROWSER = !(typeof window === "undefined"); export 
const log = console.log export const warn = console.warn -const DOCS_HOST = 'https://docs.openreplay.com'; +export const DOCS_HOST = 'https://docs.openreplay.com'; + const warnedFeatures: { [key: string]: boolean; } = {}; export function deprecationWarn(nameOfFeature: string, useInstead: string, docsPath: string = "/"): void { if (warnedFeatures[ nameOfFeature ]) { @@ -56,3 +57,4 @@ export function hasOpenreplayAttribute(e: Element, name: string): boolean { return false; } + diff --git a/tracker/tracker/src/messages/index.ts b/tracker/tracker/src/messages/index.ts index 210f534cb..f3267bee0 100644 --- a/tracker/tracker/src/messages/index.ts +++ b/tracker/tracker/src/messages/index.ts @@ -1,6 +1,6 @@ // Auto-generated, do not edit -import Message from './message'; -import Writer from './writer'; +import Message from "./message.js"; +import Writer from "./writer.js"; function bindNew( Class: C & { new(...args: A): T } diff --git a/tracker/tracker/src/messages/message.ts b/tracker/tracker/src/messages/message.ts index a2bf0864d..aeb8619de 100644 --- a/tracker/tracker/src/messages/message.ts +++ b/tracker/tracker/src/messages/message.ts @@ -1,4 +1,4 @@ -import Writer from './writer'; +import Writer from "./writer.js"; export default interface Message { encode(w: Writer): boolean; diff --git a/tracker/tracker/src/messages/writer.ts b/tracker/tracker/src/messages/writer.ts index 5ce52d330..6947420bc 100644 --- a/tracker/tracker/src/messages/writer.ts +++ b/tracker/tracker/src/messages/writer.ts @@ -77,6 +77,9 @@ export default class Writer { return this.offset <= this.size; } uint(value: number): boolean { + if (value < 0 || value > Number.MAX_SAFE_INTEGER) { + value = 0 + } while (value >= 0x80) { this.data[this.offset++] = value % 0x100 | 0x80; value = Math.floor(value / 128); diff --git a/tracker/tracker/src/webworker/index.ts b/tracker/tracker/src/webworker/index.ts index 1c6cde40f..d94d77577 100644 --- a/tracker/tracker/src/webworker/index.ts +++ 
b/tracker/tracker/src/webworker/index.ts @@ -1,8 +1,8 @@ -import { classes, BatchMeta, Timestamp, SetPageVisibility, CreateDocument } from '../messages'; -import Message from '../messages/message'; -import Writer from '../messages/writer'; +import { classes, BatchMeta, Timestamp, SetPageVisibility, CreateDocument } from "../messages/index.js"; +import Message from "../messages/message.js"; +import Writer from "../messages/writer.js"; -import type { WorkerMessageData } from '../messages/webworker'; +import type { WorkerMessageData } from "../messages/webworker.js"; const SEND_INTERVAL = 20 * 1000; @@ -49,7 +49,7 @@ function sendBatch(batch: Uint8Array):void { if (this.status >= 400) { // TODO: test workflow. After 400+ it calls /start for some reason reset(); sendQueue.length = 0; - if (this.status === 403) { // Unauthorised (Token expired) + if (this.status === 401) { // Unauthorised (Token expired) self.postMessage("restart") return } @@ -74,6 +74,7 @@ function sendBatch(batch: Uint8Array):void { attemptsCount++; setTimeout(() => sendBatch(batch), ATTEMPT_TIMEOUT); } + // TODO: handle offline exception req.send(batch.buffer); } diff --git a/tracker/tracker/src/webworker/transformer.js.temp b/tracker/tracker/src/webworker/transformer.js.temp deleted file mode 100644 index cf80d681b..000000000 --- a/tracker/tracker/src/webworker/transformer.js.temp +++ /dev/null @@ -1,21 +0,0 @@ -import Message from '../messages/message'; - - - - -class MessageTransformer { - private urlRewriter?: URLRewriter - - constructor() { - - } - - transform(m: Message): Message { - if (m instanceof SetNodeAttribute) { - if (m.name == "src" || m.name == "href") { - sendAssetForCache - } - } - } - -} \ No newline at end of file diff --git a/tracker/tracker/tsconfig-base.json b/tracker/tracker/tsconfig-base.json index 3d99959a9..9af9edb73 100644 --- a/tracker/tracker/tsconfig-base.json +++ b/tracker/tracker/tsconfig-base.json @@ -9,6 +9,6 @@ "alwaysStrict": true, "target": "es6", "module": 
"es6", - "moduleResolution": "node" + "moduleResolution": "nodenext" } } diff --git a/utilities/servers/sourcemaps-handler.js b/utilities/servers/sourcemaps-handler.js index fca74758b..91917104d 100644 --- a/utilities/servers/sourcemaps-handler.js +++ b/utilities/servers/sourcemaps-handler.js @@ -46,7 +46,7 @@ module.exports.sourcemapReader = async event => { console.log(err); return reject(err); } - const sourcemap = data.Body.toString(); + let sourcemap = data.Body.toString(); return new sourceMap.SourceMapConsumer(sourcemap) .then(consumer => { @@ -91,10 +91,14 @@ module.exports.sourcemapReader = async event => { // console.log(result); results.push(result); } - + consumer = undefined; // Use this code if you don't use the http event with the LAMBDA-PROXY integration return resolve(results); - }); + }) + .finally(() => { + sourcemap = undefined; + }) + }); }); }; \ No newline at end of file