From 3f52cefad2b313a53f55e0ba8c32e03bc61a545e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Sun, 5 Sep 2021 11:39:53 +0100 Subject: [PATCH 001/218] feat(db): added new foreign-key indexes --- scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql | 4 ++++ scripts/helm/db/init_dbs/postgresql/init_schema.sql | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql b/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql index dd8f6f318..fa0fdebdc 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql @@ -1,5 +1,9 @@ BEGIN; CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); +CREATE INDEX projects_tenant_id_idx ON projects(tenant_id); +CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id); +CREATE INDEX issues_project_id_idx ON issues(project_id); + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 419ed9c9b..babaf93e6 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -172,6 +172,7 @@ CREATE TABLE projects "defaultInputMode": "plain" }'::jsonb -- ?????? ); +CREATE INDEX projects_tenant_id_idx ON projects(tenant_id); CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS $$ @@ -247,7 +248,7 @@ create table webhooks index integer default 0 not null, name varchar(100) ); - +CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id); -- --- notifications.sql --- @@ -387,6 +388,7 @@ CREATE TABLE issues ); CREATE INDEX ON issues (issue_id, type); CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); +CREATE INDEX issues_project_id_idx ON issues(project_id); -- --- errors.sql --- From cc84329547524ad7fd847d2b7ff89861cf48b474 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Sun, 5 Sep 2021 11:41:16 +0100 Subject: [PATCH 002/218] feat(db): added new foreign-key indexes --- scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql | 1 + scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 + 2 files changed, 2 insertions(+) diff --git a/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql b/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql index fa0fdebdc..5b5a8b3de 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql @@ -4,6 +4,7 @@ CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timesta CREATE INDEX projects_tenant_id_idx ON projects(tenant_id); CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id); CREATE INDEX issues_project_id_idx ON issues(project_id); +CREATE INDEX jobs_project_id_idx ON jobs(project_id); COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index babaf93e6..65811f7ba 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -872,5 +872,6 @@ CREATE TABLE jobs ); CREATE INDEX ON jobs (status); CREATE INDEX ON jobs (start_at); +CREATE INDEX jobs_project_id_idx ON jobs(project_id); COMMIT; From bec4bc37d55fb45c23b67e74c21dd0735d2e811b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Sun, 5 Sep 2021 14:25:43 +0100 Subject: [PATCH 003/218] feat(api): pg_client fixed exception handler --- 
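The 1.4.0.sql migration in the two patches above wraps its CREATE INDEX statements in BEGIN/COMMIT, which blocks writes to each table while the index builds. A minimal sketch of the non-blocking variant for large live deployments, assuming a reachable openreplay database; CREATE INDEX CONCURRENTLY refuses to run inside a transaction block, hence the autocommit toggle:

import psycopg2

# Sketch only: the DSN is an assumption; index name and column come from 1.4.0.sql above.
conn = psycopg2.connect("dbname=openreplay")
conn.autocommit = True  # CONCURRENTLY cannot run inside BEGIN/COMMIT
with conn.cursor() as cur:
    cur.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_tenant_id_idx"
                " ON projects (tenant_id);")
conn.close()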
api/chalicelib/utils/pg_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index ba72868d6..c54e514ec 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -51,7 +51,7 @@ class PostgresClient: try: self.connection.commit() self.cursor.close() - except: + except Exception as error: print("Error while committing/closing PG-connection", error) raise error finally: From f147ab0bd1f5888540a04ec65a0980f7b2eb2e2a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 6 Sep 2021 23:56:51 +0100 Subject: [PATCH 004/218] feat(api): changed change password response --- api/chalicelib/core/users.py | 23 +++++++++++++++++++++-- ee/api/chalicelib/core/users.py | 24 ++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index a87a6023f..917328910 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -426,8 +426,27 @@ def change_password(tenant_id, user_id, email, old_password, new_password): if auth is None: return {"errors": ["wrong password"]} changes = {"password": new_password, "generatedPassword": False} - return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes), - "jwt": authenticate(email, new_password)["jwt"]} + user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) + r = authenticate(user['email'], new_password) + tenant_id = r.pop("tenantId") + + r["limits"] = { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} + + c = tenants.get_by_tenant_id(tenant_id) + c.pop("createdAt") + c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, + stack_integrations=True) + c["smtp"] = helper.has_smtp() + return { + 'jwt': r.pop('jwt'), + 'data': { + "user": r, + "client": c + } + } def set_password_invitation(user_id, new_password): diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 034a9549d..6c3434255 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -436,8 +436,27 @@ def change_password(tenant_id, user_id, email, old_password, new_password): if auth is None: return {"errors": ["wrong password"]} changes = {"password": new_password, "generatedPassword": False} - return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes), - "jwt": authenticate(email, new_password)["jwt"]} + user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) + r = authenticate(user['email'], new_password) + + tenant_id = r.pop("tenantId") + r["limits"] = { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} + + c = tenants.get_by_tenant_id(tenant_id) + c.pop("createdAt") + c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, + stack_integrations=True) + c["smtp"] = helper.has_smtp() + return { + 'jwt': r.pop('jwt'), + 'data': { + "user": r, + "client": c, + } + } def set_password_invitation(tenant_id, user_id, new_password): @@ -457,6 +476,7 @@ def set_password_invitation(tenant_id, user_id, new_password): c.pop("createdAt") c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) + c["smtp"] = helper.has_smtp() return { 'jwt': r.pop('jwt'), 'data': { From c5a27794150f735b864e257b8c4ad348d3dccf42 
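Patch 004 reshapes the change_password response so it matches a fresh login: a top-level jwt plus a data envelope holding the user (with limits) and the client (with projects and smtp). A hedged sketch of the resulting shape, all values illustrative:

# Illustrative only: the envelope change_password returns after patch 004.
response = {
    "jwt": "<token from authenticate()>",
    "data": {
        "user": {"email": "jane@example.com",
                 "limits": {"teamMember": -1, "projects": -1, "metadata": {}}},
        "client": {"projects": [], "smtp": False},
    },
}
assert set(response) == {"jwt", "data"}
assert set(response["data"]) == {"user", "client"}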
Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Sep 2021 20:33:30 +0200 Subject: [PATCH 005/218] feat(api): insights retention --- api/chalicelib/blueprints/subs/bp_insights.py | 11 ++++ api/chalicelib/core/insights.py | 55 ++++++++++++++++++- 2 files changed, 65 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 062e8fdc8..8c79e2663 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -28,6 +28,17 @@ def get_insights_journey(projectId, context): return {"data": insights.get_journey(project_id=projectId, **{**data, **args})} + +@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) +def get_users_retention(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.get_retention(project_id=projectId, **{**data, **args})} + # # # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index d470e1537..37042e64d 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -98,8 +98,61 @@ def get_journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimest params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) + # print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() return __transform_journey(rows) + + +def __compute_retention_percentage(rows): + if rows is None or len(rows) == 0: + return rows + t = -1 + for r in rows: + if r["week"] == 0: + t = r["usersCount"] + r["percentage"] = r["usersCount"] / t + return rows + + +@dev.timed +def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts Date: Tue, 21 Sep 2021 17:19:12 +0200 Subject: [PATCH 006/218] feat(api): insights retention: fill missing values feat(api): insights retention: max date restrictions --- api/chalicelib/core/insights.py | 61 ++++++++++++++++++++++++++++++--- api/chalicelib/utils/TimeUTC.py | 9 +++++ 2 files changed, 66 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 37042e64d..8e8611dfb 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -116,9 +116,61 @@ def __compute_retention_percentage(rows): return rows +def __complete_retention(rows, 
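__compute_retention_percentage in patch 005 normalizes each cohort against its week-0 size: t is reset whenever a week-0 row opens a new cohort, and every row in that cohort divides by it. A pure-Python walk-through with invented counts:

# Invented counts; mirrors the normalization loop introduced in patch 005.
rows = [
    {"week": 0, "usersCount": 200},
    {"week": 1, "usersCount": 80},
    {"week": 2, "usersCount": 50},
]
t = -1
for r in rows:
    if r["week"] == 0:
        t = r["usersCount"]  # new cohort: remember its week-0 size
    r["percentage"] = r["usersCount"] / t
print([r["percentage"] for r in rows])  # [1.0, 0.4, 0.25]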
start_date, end_date=None): + if rows is None or len(rows) == 0: + return rows + max_week = 10 + week = 0 + delta_date = 0 + while max_week > 0: + start_date += TimeUTC.MS_WEEK + if end_date is not None and start_date >= end_date: + break + delta = 0 + if delta_date + week >= len(rows) \ + or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + rows.insert(delta_date + week + i, neutral) + delta = i + else: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if delta_date + week + i < len(rows) \ + and i != rows[delta_date + week + i]["week"]: + rows.insert(delta_date + week + i, neutral) + elif delta_date + week + i >= len(rows): + rows.append(neutral) + delta = i + week += delta + max_week -= 1 + delta_date += 1 + return rows + + @dev.timed -def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], +def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * 7 * 24 * 60 * 60 * 1000 pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) @@ -130,6 +182,7 @@ def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTime FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week FROM sessions WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL AND NOT EXISTS((SELECT 1 FROM sessions AS bsess WHERE bsess.start_ts Date: Tue, 21 Sep 2021 17:22:53 +0200 Subject: [PATCH 007/218] feat(api): insights retention changed end date --- api/chalicelib/core/insights.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 8e8611dfb..c39fb5cea 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -170,7 +170,7 @@ def __complete_retention(rows, start_date, end_date=None): def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * 7 * 24 * 60 * 60 * 1000 + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) From 09176692d081cf44ec63f75624a29b6c6d2c2b54 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Sep 2021 20:32:52 +0200 Subject: [PATCH 008/218] feat(api): insights feature retention --- api/chalicelib/blueprints/subs/bp_insights.py | 13 ++- api/chalicelib/core/insights.py | 95 ++++++++++++++++++- 2 files changed, 102 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 8c79e2663..cdb207c21 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ 
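Patches 006 and 007 pin the retention window to whole weeks: startTimestamp is truncated with TimeUTC.trunc_week and endTimestamp is rederived as ten weeks later, with TimeUTC.MS_WEEK replacing the 10 * 7 * 24 * 60 * 60 * 1000 literal. The arithmetic in isolation, assuming MS_WEEK is that same millisecond constant:

# Assumes MS_WEEK == 7 days in ms, matching the literal patch 007 replaces.
MS_WEEK = 7 * 24 * 60 * 60 * 1000
start_ts = 1_630_886_400_000      # an already week-truncated ms timestamp (illustrative)
end_ts = start_ts + 10 * MS_WEEK  # exclusive bound: ten cohort weeks
assert (end_ts - start_ts) // MS_WEEK == 10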
b/api/chalicelib/blueprints/subs/bp_insights.py @@ -37,7 +37,18 @@ def get_users_retention(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_retention(project_id=projectId, **{**data, **args})} + return {"data": insights.get_users_retention(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) +def get_feature_retention(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.get_feature_retention(project_id=projectId, **{**data, **args})} # # diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index c39fb5cea..1df47eda2 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -167,13 +167,13 @@ def __complete_retention(rows, start_date, end_date=None): @dev.timed -def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): +def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) - + pg_sub_query.append("user_id IS NOT NULL") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, @@ -181,8 +181,7 @@ def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTim ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL + WHERE {" AND ".join(pg_sub_query)} AND NOT EXISTS((SELECT 1 FROM sessions AS bsess WHERE bsess.start_ts= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_table = JOURNEY_TYPES["PAGES"]["table"] + event_column = JOURNEY_TYPES["PAGES"]["column"] + extra_values = {"value": "/"} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + # TODO: This will change later when the search is clear + default = False + extra_values["value"] = f["value"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + 
# print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + extra_values["value"] = row["value"] + + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_retention_percentage(helper.list_to_camel_case(rows)) + return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) From d4ffd10af67318ee5bf772c38f9058b09a57f408 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 22 Sep 2021 15:04:35 +0200 Subject: [PATCH 009/218] feat(api): changed insights feature retention query feat(db): optimized indexes for feature retention --- api/chalicelib/core/insights.py | 28 +++++++++---------- .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 7 +++++ .../db/init_dbs/postgresql/init_schema.sql | 11 +++++--- 3 files changed, 28 insertions(+), 18 deletions(-) create mode 100644 scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 1df47eda2..18390d987 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -249,7 +249,6 @@ def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70) LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() if row is not None: @@ -259,19 +258,20 @@ def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70) FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, COUNT(DISTINCT connexions_list.user_id) AS users_count, ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) - WHERE bsess.start_ts 0; +CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + +COMMIT; \ No newline at end of file diff --git 
a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 65811f7ba..586f84e74 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -172,7 +172,7 @@ CREATE TABLE projects "defaultInputMode": "plain" }'::jsonb -- ?????? ); -CREATE INDEX projects_tenant_id_idx ON projects(tenant_id); +CREATE INDEX projects_tenant_id_idx ON projects (tenant_id); CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS $$ @@ -248,7 +248,7 @@ create table webhooks index integer default 0 not null, name varchar(100) ); -CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id); +CREATE INDEX webhooks_tenant_id_idx ON webhooks (tenant_id); -- --- notifications.sql --- @@ -388,7 +388,7 @@ CREATE TABLE issues ); CREATE INDEX ON issues (issue_id, type); CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); -CREATE INDEX issues_project_id_idx ON issues(project_id); +CREATE INDEX issues_project_id_idx ON issues (project_id); -- --- errors.sql --- @@ -522,6 +522,8 @@ CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0; CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; +CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; +CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; ALTER TABLE public.sessions ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR @@ -679,6 +681,7 @@ CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE respon CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); +CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; CREATE TABLE events.clicks @@ -872,6 +875,6 @@ CREATE TABLE jobs ); CREATE INDEX ON jobs (status); CREATE INDEX ON jobs (start_at); -CREATE INDEX jobs_project_id_idx ON jobs(project_id); +CREATE INDEX jobs_project_id_idx ON jobs (project_id); COMMIT; From 1769ee6b328fa09e32d29e2da6fc198c568bfe90 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 22 Sep 2021 19:13:42 +0200 Subject: [PATCH 010/218] feat(api): changed insights feature popularity frequency --- api/chalicelib/blueprints/subs/bp_insights.py | 11 ++++ api/chalicelib/core/insights.py | 61 +++++++++++++++++++ 2 files changed, 72 insertions(+) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index cdb207c21..64030dd3d 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -50,6 +50,17 @@ def get_feature_retention(projectId, context): 
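The partial indexes added here only pay off because their predicates repeat verbatim in the insight queries: length(base_path) > 2 in the index matches the length({event_column}) > 2 filters the Python code emits. A hedged sketch for verifying the planner picks one up, assuming a reachable openreplay database:

import psycopg2

# Sketch only: the query predicate must imply the index's WHERE clause for
# pages_base_path_base_pathLNGT2_idx to be considered by the planner.
with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
    cur.execute("""EXPLAIN
                   SELECT base_path, COUNT(*)
                   FROM events.pages
                   WHERE length(base_path) > 2
                   GROUP BY base_path;""")
    print("\n".join(row[0] for row in cur.fetchall()))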
return {"data": insights.get_feature_retention(project_id=projectId, **{**data, **args})} + +@app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST']) +def get_feature_popularity_frequency(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})} + # # # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 18390d987..a94e75fdb 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -294,3 +294,64 @@ def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70) rows = cur.fetchall() rows = __compute_retention_percentage(helper.list_to_camel_case(rows)) return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + + +@dev.timed +def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return [] + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL + GROUP BY value + ORDER BY count DESC + LIMIT 7;""" + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + popularity = cur.fetchall() + pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value;""" + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + frequencies = cur.fetchall() + total_usage = sum([f["count"] for f in frequencies]) + frequencies = {f["value"]: f["count"] for f in frequencies} + for p in popularity: + p["popularity"] = p.pop("count") / 
all_user_count + p["frequency"] = frequencies[p["value"]] / total_usage + + return popularity From fbc2677fc6c1adf4651c85cf5cb952cacd622a4c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 22 Sep 2021 20:06:28 +0200 Subject: [PATCH 011/218] feat(api): changed insights users acquisition --- api/chalicelib/blueprints/subs/bp_insights.py | 17 +++- api/chalicelib/core/insights.py | 89 ++++++++++++++++--- 2 files changed, 92 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 64030dd3d..c4b38c7f6 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -29,6 +29,17 @@ def get_insights_journey(projectId, context): return {"data": insights.get_journey(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/users_acquisition', methods=['GET', 'POST']) +def get_users_acquisition(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.get_users_acquisition(project_id=projectId, **{**data, **args})} + + @app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) def get_users_retention(projectId, context): data = app.current_request.json_body @@ -40,15 +51,15 @@ def get_users_retention(projectId, context): return {"data": insights.get_users_retention(project_id=projectId, **{**data, **args})} -@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) -def get_feature_retention(projectId, context): +@app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST']) +def get_feature_acquisition(projectId, context): data = app.current_request.json_body if data is None: data = {} params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_feature_retention(project_id=projectId, **{**data, **args})} + return {"data": insights.get_feature_acquisition(project_id=projectId, **{**data, **args})} @app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index a94e75fdb..e13608762 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -105,7 +105,7 @@ def get_journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimest return __transform_journey(rows) -def __compute_retention_percentage(rows): +def __compute_weekly_percentage(rows): if rows is None or len(rows) == 0: return rows t = -1 @@ -117,8 +117,30 @@ def __compute_retention_percentage(rows): def __complete_retention(rows, start_date, end_date=None): - if rows is None or len(rows) == 0: - return rows + if rows is None: + return [] + max_week = 10 + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if i < len(rows) \ + and i != rows[i]["week"]: + rows.insert(i, neutral) + elif i >= len(rows): + rows.append(neutral) + return rows + + +def __complete_acquisition(rows, start_date, end_date=None): + if rows is None: + return [] max_week = 10 week = 0 delta_date = 0 @@ -174,6 +196,51 @@ def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), pg_sub_query = 
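feature_popularity_frequency in patch 010 reduces each feature to two ratios: popularity, the distinct users who touched it over all identified users, and frequency, its share of all recorded usages. A pure-Python restatement with invented numbers:

# Invented numbers; restates the ratio computation from patch 010.
all_user_count = 500
popularity = [{"value": "/checkout", "count": 120}, {"value": "/search", "count": 300}]
frequencies = {"/checkout": 900, "/search": 2100}

total_usage = sum(frequencies.values())
for p in popularity:
    p["popularity"] = p.pop("count") / all_user_count       # 0.24 and 0.6
    p["frequency"] = frequencies[p["value"]] / total_usage  # 0.3 and 0.7
print(popularity)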
__get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts < %(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <=sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id + ) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + + +@dev.timed +def get_users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, @@ -184,7 +251,7 @@ def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), WHERE {" AND ".join(pg_sub_query)} AND NOT EXISTS((SELECT 1 FROM sessions AS bsess - WHERE bsess.start_ts Date: Thu, 23 Sep 2021 18:53:36 +0200 Subject: [PATCH 012/218] feat(api): changed insights feature acquisition result feat(api): insights feature retention --- api/chalicelib/blueprints/subs/bp_insights.py | 10 ++ api/chalicelib/core/insights.py | 137 +++++++++++++++--- 2 files changed, 130 insertions(+), 17 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index c4b38c7f6..a7529908d 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -51,6 +51,16 @@ def get_users_retention(projectId, context): return {"data": insights.get_users_retention(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) +def get_feature_rentention(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = 
app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.get_feature_retention(project_id=projectId, **{**data, **args})} + @app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST']) def get_feature_acquisition(projectId, context): data = app.current_request.json_body diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index e13608762..e4ed90e24 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -204,6 +204,7 @@ def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), FROM (SELECT DISTINCT user_id FROM sessions WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 AND NOT EXISTS((SELECT 1 FROM sessions AS bsess WHERE bsess.start_ts < %(startTimestamp)s @@ -225,11 +226,14 @@ def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) + # print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + return { + "startTimestamp": startTimestamp, + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } @dev.timed @@ -274,13 +278,16 @@ def get_users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + return { + "startTimestamp": startTimestamp, + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } @dev.timed -def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): +def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -288,20 +295,21 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 pg_sub_query.append("user_id IS NOT NULL") pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - event_table = JOURNEY_TYPES["PAGES"]["table"] - event_column = JOURNEY_TYPES["PAGES"]["column"] - extra_values = {"value": "/"} + event_type = "PAGES" + event_value = "/" + extra_values = {} default = True for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] + event_type = f["value"] + elif f["type"] == "EVENT_VALUE" and JOURNEY_TYPES.get(f["value"]): + event_value = f["value"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - # TODO: This will change later 
when the search is clear default = False - extra_values["value"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] pg_sub_query.append(f"feature.{event_column} = %(value)s") with pg_client.PostgresClient() as cur: @@ -319,7 +327,99 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() if row is not None: - extra_values["value"] = row["value"] + event_value = row["value"] + extra_values["value"] = event_value + + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + INNER JOIN events.pages AS feature USING (session_id) + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <= sessions.start_ts + AND sessions.project_id = 1 + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE" and JOURNEY_TYPES.get(f["value"]): + event_value = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + 
pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + default = False + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, @@ -329,7 +429,6 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week FROM sessions INNER JOIN {event_table} AS feature USING (session_id) WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL AND NOT EXISTS((SELECT 1 FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) WHERE bsess.start_ts<%(startTimestamp)s @@ -345,7 +444,7 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 WHERE users_list.user_id = sessions.user_id AND first_connexion_week <= DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = 1 + AND sessions.project_id = %(project_id)s AND sessions.start_ts < (%(endTimestamp)s - 1) AND feature.timestamp >= %(startTimestamp)s AND feature.timestamp < %(endTimestamp)s @@ -360,7 +459,11 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } @dev.timed From 89ea81ae7743077241bea2eee18a799f22288042 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 23 Sep 2021 19:28:40 +0200 Subject: [PATCH 013/218] feat(api): insights active users --- api/chalicelib/blueprints/subs/bp_insights.py | 22 ++++-- api/chalicelib/core/insights.py | 71 ++++++++++++++----- api/chalicelib/utils/TimeUTC.py | 7 ++ 3 files changed, 78 insertions(+), 22 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index a7529908d..463b52d3d 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -26,7 +26,7 @@ def get_insights_journey(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_journey(project_id=projectId, **{**data, **args})} + return {"data": insights.journey(project_id=projectId, 
**{**data, **args})} @app.route('/{projectId}/insights/users_acquisition', methods=['GET', 'POST']) @@ -37,7 +37,7 @@ def get_users_acquisition(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_users_acquisition(project_id=projectId, **{**data, **args})} + return {"data": insights.users_acquisition(project_id=projectId, **{**data, **args})} @app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) @@ -48,7 +48,7 @@ def get_users_retention(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_users_retention(project_id=projectId, **{**data, **args})} + return {"data": insights.users_retention(project_id=projectId, **{**data, **args})} @app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) @@ -59,7 +59,8 @@ def get_feature_rentention(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_feature_retention(project_id=projectId, **{**data, **args})} + return {"data": insights.feature_retention(project_id=projectId, **{**data, **args})} + @app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST']) def get_feature_acquisition(projectId, context): @@ -69,7 +70,7 @@ def get_feature_acquisition(projectId, context): params = app.current_request.query_params args = dashboard.dashboard_args(params) - return {"data": insights.get_feature_acquisition(project_id=projectId, **{**data, **args})} + return {"data": insights.feature_acquisition(project_id=projectId, **{**data, **args})} @app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST']) @@ -82,6 +83,17 @@ def get_feature_popularity_frequency(projectId, context): return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})} + +@app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) +def get_users_active(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.users_active(project_id=projectId, **{**data, **args})} + # # # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index e4ed90e24..03f96ea25 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -1,5 +1,4 @@ from chalicelib.core import sessions_metas -from chalicelib.utils import args_transformer from chalicelib.utils import helper, dev from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -32,7 +31,7 @@ JOURNEY_TYPES = { @dev.timed -def get_journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): +def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) event_start = None @@ -189,8 +188,8 @@ def __complete_acquisition(rows, start_date, end_date=None): @dev.timed -def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): +def users_retention(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -237,9 +236,9 @@ def get_users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), @dev.timed -def get_users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): +def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -285,9 +284,9 @@ def get_users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70) @dev.timed -def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): +def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -302,12 +301,12 @@ def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70) for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] - elif f["type"] == "EVENT_VALUE" and JOURNEY_TYPES.get(f["value"]): + elif f["type"] == "EVENT_VALUE": event_value = f["value"] + default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - default = False event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] pg_sub_query.append(f"feature.{event_column} = %(value)s") @@ -375,9 +374,9 @@ def get_feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70) @dev.timed -def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): +def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -392,12 +391,12 @@ def get_feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-7 for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] - elif f["type"] == "EVENT_VALUE" and JOURNEY_TYPES.get(f["value"]): + elif f["type"] == "EVENT_VALUE": event_value = f["value"] + default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - default = False event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] @@ -525,3 +524,41 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da 
p["frequency"] = frequencies[p["value"]] / total_usage return popularity + + +@dev.timed +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + period = "DAY" + for f in filters: + if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: + period = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(users), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT user_id + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp) AS chart;""" + params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, + "project_id": project_id, + "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( + startTimestamp), + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return row_users diff --git a/api/chalicelib/utils/TimeUTC.py b/api/chalicelib/utils/TimeUTC.py index 9cd353eb4..c95359a00 100644 --- a/api/chalicelib/utils/TimeUTC.py +++ b/api/chalicelib/utils/TimeUTC.py @@ -115,6 +115,13 @@ class TimeUTC: def get_utc_offset(): return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000) + @staticmethod + def trunc_day(timestamp): + dt = TimeUTC.from_ms_timestamp(timestamp) + return TimeUTC.datetime_to_timestamp(dt + .replace(hour=0, minute=0, second=0, microsecond=0) + .astimezone(pytz.utc)) + @staticmethod def trunc_week(timestamp): dt = TimeUTC.from_ms_timestamp(timestamp) From ff44fa2a8fc2ab1400246f038a71f70c406a5f92 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 23 Sep 2021 20:13:10 +0200 Subject: [PATCH 014/218] feat(api): insights power users --- api/chalicelib/blueprints/subs/bp_insights.py | 11 ++++++++ api/chalicelib/core/insights.py | 26 +++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 463b52d3d..acf4184b2 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -94,6 +94,17 @@ def get_users_active(projectId, context): return {"data": insights.users_active(project_id=projectId, **{**data, **args})} + +@app.route('/{projectId}/insights/users_power', methods=['GET', 'POST']) +def get_users_power(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.users_power(project_id=projectId, **{**data, **args})} + # # # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 03f96ea25..a591cbd62 100644 --- a/api/chalicelib/core/insights.py +++ 
b/api/chalicelib/core/insights.py @@ -533,6 +533,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args) + pg_sub_query_chart.append("user_id IS NOT NULL") period = "DAY" for f in filters: if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: @@ -562,3 +563,28 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime row_users = cur.fetchone() return row_users + + +@dev.timed +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[],**args): + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) + pg_sub_query.append("user_id IS NOT NULL") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition + FROM (SELECT number_of_days, COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days) AS day_users_partition;""" + params = {"project_id": project_id, + "startTimestamp": startTimestamp,"endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return helper.dict_to_camel_case(row_users) From f23b273a7073dd618da32080ce24ce214e988ad4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Sep 2021 18:51:14 +0200 Subject: [PATCH 015/218] feat(api): insights users slipping --- api/chalicelib/blueprints/subs/bp_insights.py | 10 +++ api/chalicelib/core/insights.py | 68 ++++++++++++++++++- 2 files changed, 76 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index acf4184b2..f0dc226bc 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -105,6 +105,16 @@ def get_users_power(projectId, context): return {"data": insights.users_power(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/users_slipping', methods=['GET', 'POST']) +def get_users_slipping(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.users_slipping(project_id=projectId, **{**data, **args})} + # # # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index a591cbd62..a94a7d429 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -567,7 +567,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime @dev.timed def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[],**args): + filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) pg_sub_query.append("user_id IS NOT NULL") @@ -581,10 +581,74 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes GROUP BY number_of_days ORDER BY number_of_days) AS 
day_users_partition;""" params = {"project_id": project_id, - "startTimestamp": startTimestamp,"endTimestamp": endTimestamp, **__get_constraint_values(args)} + "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} # print(cur.mogrify(pg_query, params)) # print("---------------------") cur.execute(cur.mogrify(pg_query, params)) row_users = cur.fetchone() return helper.dict_to_camel_case(row_users) + + +@dev.timed +def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + + pg_query = f"""SELECT user_id, last_time,interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen + FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY user_id) AS user_last_usage + INNER JOIN sessions USING (user_id) + WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000 + GROUP BY user_id, last_time,interactions_count;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": helper.list_to_camel_case(rows) + } From 8e4c6187d5c63df3d6be897365c73c819f7c53c2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Sep 2021 19:18:53 +0200 Subject: [PATCH 016/218] feat(api): insights users slipping added interactions count --- api/chalicelib/core/insights.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/insights.py 
b/api/chalicelib/core/insights.py index a94a7d429..421072a26 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -633,7 +633,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi event_value = row["value"] extra_values["value"] = event_value - pg_query = f"""SELECT user_id, last_time,interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen + pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count FROM {event_table} AS feature INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} From 0babeb6b60639dc6ac952f416c7e702041033318 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Sep 2021 19:39:58 +0200 Subject: [PATCH 017/218] feat(api): insights feature intensity --- api/chalicelib/blueprints/subs/bp_insights.py | 12 +++++++ api/chalicelib/core/insights.py | 35 +++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index f0dc226bc..fc40885b7 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -84,6 +84,17 @@ def get_feature_popularity_frequency(projectId, context): return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/feature_intensity', methods=['GET', 'POST']) +def get_feature_intensity(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_intensity(project_id=projectId, **{**data, **args})} + + @app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) def get_users_active(projectId, context): data = app.current_request.json_body @@ -105,6 +116,7 @@ def get_users_power(projectId, context): return {"data": insights.users_power(project_id=projectId, **{**data, **args})} + @app.route('/{projectId}/insights/users_slipping', methods=['GET', 'POST']) def get_users_slipping(projectId, context): data = app.current_request.json_body diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 421072a26..7c06e4c8c 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -526,6 +526,41 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da return popularity +@dev.timed +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + pg_sub_query.append(f"length({event_column})>2") + 
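# A sketch of the interpolated query below for the default CLICK feature, using
# the JOURNEY_TYPES mapping (events.clicks / label); constraints are elided:
#   SELECT label AS value, AVG(DISTINCT session_id) AS avg
#   FROM events.clicks AS feature INNER JOIN sessions USING (session_id)
#   WHERE <project/time constraints> AND length(label) > 2
#   GROUP BY value ORDER BY avg DESC LIMIT 7;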
with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value + ORDER BY avg DESC + LIMIT 7;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return rows + + @dev.timed def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], From 7e6592de3f5e0bc485727bda5eff2b6c7456db63 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Sep 2021 20:06:06 +0200 Subject: [PATCH 018/218] feat(api): insights feature adoption --- api/chalicelib/blueprints/subs/bp_insights.py | 11 +++ api/chalicelib/core/insights.py | 70 +++++++++++++++++++ 2 files changed, 81 insertions(+) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index fc40885b7..30894028b 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -95,6 +95,17 @@ def get_feature_intensity(projectId, context): return {"data": insights.feature_intensity(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/feature_adoption', methods=['GET', 'POST']) +def get_feature_adoption(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})} + + @app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) def get_users_active(projectId, context): data = app.current_request.json_body diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 7c06e4c8c..0df8e63b0 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -526,6 +526,76 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da return popularity +@dev.timed +def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, 
params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + adoption = cur.fetchone()["count"] / all_user_count + return {"target": all_user_count, "adoption": adoption, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + @dev.timed def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], From b64b052fe9c1fc39a0746496b5405fb83a5700b6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Sep 2021 20:26:05 +0200 Subject: [PATCH 019/218] feat(api): insights feature adoption top user --- api/chalicelib/blueprints/subs/bp_insights.py | 10 +++ api/chalicelib/core/insights.py | 61 ++++++++++++++++++- 2 files changed, 69 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 30894028b..1e016c238 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -105,6 +105,16 @@ def get_feature_adoption(projectId, context): return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/feature_adoption_top_users', methods=['GET', 'POST']) +def get_feature_adoption(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_adoption_top_users(project_id=projectId, **{**data, **args})} + @app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) def get_users_active(projectId, context): diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 0df8e63b0..e639ba55a 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -530,8 +530,6 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da 
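# Worked example of the metric computed below: feature_adoption() returns
# adoption = COUNT(DISTINCT users of the feature) / COUNT(DISTINCT active users);
# e.g. 120 feature users out of 480 active users gives target = 480, adoption = 0.25.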
def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) event_type = "CLICK" @@ -596,6 +594,65 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +@dev.timed +def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1 + ORDER BY 2 DESC + LIMIT 10;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + @dev.timed def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], From a2f3a4c779eab90ed974addf49c42a8528fe23f6 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Sun, 26 Sep 2021 17:19:25 +0200 Subject: [PATCH 020/218] fix(frontend): remove slowing console --- frontend/app/player/MessageDistributor/managers/DOMManager.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts 
b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index cd5e77b02..e42041f91 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -253,7 +253,6 @@ export default class DOMManager extends ListWalker { logger.warn("create_i_frame_document message. Node is not iframe") return; } - console.log("iframe", msg) // await new Promise(resolve => { node.onload = resolve }) const doc = node.contentDocument; From 6c6289794202beb5404749c53f24182a56869a81 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 27 Sep 2021 16:32:03 +0200 Subject: [PATCH 021/218] feat(api): optimized weekly report feat(db): changed indexes to optimize weekly report --- api/chalicelib/core/weekly_report.py | 26 ++++++++++++------- .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 11 ++++++++ .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 3 +++ 3 files changed, 30 insertions(+), 10 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index 23ff97446..e0e6e0fa5 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -1,4 +1,5 @@ from chalicelib.utils import pg_client, helper +from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.helper import environ from chalicelib.utils.helper import get_issue_title @@ -30,7 +31,11 @@ def edit_config(user_id, weekly_report): def cron(): with pg_client.PostgresClient() as cur: - cur.execute("""\ + params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), + "1_week_ago": TimeUTC.midnight(delta_days=-7), + "2_week_ago": TimeUTC.midnight(delta_days=-14), + "5_week_ago": TimeUTC.midnight(delta_days=-35)} + cur.execute(cur.mogrify("""\ SELECT project_id, name AS project_name, users.emails AS emails, @@ -44,7 +49,7 @@ def cron(): SELECT sessions.project_id FROM public.sessions WHERE sessions.project_id = projects.project_id - AND start_ts >= (EXTRACT(EPOCH FROM now() - INTERVAL '3 days') * 1000)::BIGINT + AND start_ts >= %(3_days_ago)s LIMIT 1) AS recently_active USING (project_id) INNER JOIN LATERAL ( SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails @@ -54,14 +59,14 @@ def cron(): AND users.weekly_report ) AS users ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT ) AS week_0_issues ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id @@ -69,16 +74,17 @@ def cron(): AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT ) AS week_1_issues ON (TRUE) LEFT JOIN LATERAL ( - SELECT COUNT(issues.*) AS count + SELECT COUNT(1) AS count FROM events_common.issues INNER JOIN public.sessions USING (session_id) WHERE sessions.project_id = projects.project_id AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT ) AS month_1_issues ON (TRUE) - WHERE projects.deleted_at ISNULL;""") + WHERE projects.deleted_at 
ISNULL;"""), params) projects_data = cur.fetchall() for p in projects_data: + params["project_id"] = p["project_id"] print(f"checking {p['project_name']} : {p['project_id']}") if len(p["emails"]) == 0 \ or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0: @@ -104,7 +110,7 @@ def cron(): DATE_TRUNC('day', now()) - INTERVAL '1 day', '1 day'::INTERVAL ) AS timestamp_i - ORDER BY timestamp_i;""", {"project_id": p["project_id"]})) + ORDER BY timestamp_i;""", params)) days_partition = cur.fetchall() max_days_partition = max(x['issues_count'] for x in days_partition) for d in days_partition: @@ -120,7 +126,7 @@ def cron(): AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT GROUP BY type ORDER BY count DESC, type - LIMIT 4;""", {"project_id": p["project_id"]})) + LIMIT 4;""", params)) issues_by_type = cur.fetchall() max_issues_by_type = sum(i["count"] for i in issues_by_type) for i in issues_by_type: @@ -149,7 +155,7 @@ def cron(): '1 day'::INTERVAL ) AS timestamp_i GROUP BY timestamp_i - ORDER BY timestamp_i;""", {"project_id": p["project_id"]})) + ORDER BY timestamp_i;""", params)) issues_breakdown_by_day = cur.fetchall() for i in issues_breakdown_by_day: i["sum"] = sum(x["count"] for x in i["partition"]) @@ -195,7 +201,7 @@ def cron(): WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT GROUP BY type - ORDER BY issue_count DESC;""", {"project_id": p["project_id"]})) + ORDER BY issue_count DESC;""", params)) issues_breakdown_list = cur.fetchall() if len(issues_breakdown_list) > 4: others = {"type": "Others", diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql new file mode 100644 index 000000000..2139861e3 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -0,0 +1,11 @@ +BEGIN; + +CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; +CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; +CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + +CREATE INDEX users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; +CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues(issue_id,timestamp); +CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp); +CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); +COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql index cfa7b19a2..48b67870e 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -4,4 +4,7 @@ CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id I CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; +CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues(issue_id,timestamp); +CREATE 
INDEX issues_timestamp_idx ON events_common.issues (timestamp); +CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); COMMIT; \ No newline at end of file From 2838ea5e8702901d2e2a24adadaf53df74ba9939 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Sep 2021 15:31:51 +0200 Subject: [PATCH 022/218] feat(api): reset password reuse the same token for 5min to bypass double request issue --- api/chalicelib/blueprints/bp_core_dynamic.py | 6 +++++- api/chalicelib/core/users.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py index af674f762..b695ab54a 100644 --- a/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/api/chalicelib/blueprints/bp_core_dynamic.py @@ -360,7 +360,11 @@ def process_invitation_link(): return {"errors": ["invitation not found"]} if user["expiredInvitation"]: return {"errors": ["expired invitation, please ask your admin to send a new one"]} - pass_token = users.allow_password_change(user_id=user["userId"]) + if user["expiredChange"] is not None and not user["expiredChange"] \ + and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: + pass_token = user["changePwdToken"] + else: + pass_token = users.allow_password_change(user_id=user["userId"]) return Response( status_code=307, body='', diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 917328910..c001ea5e2 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -526,7 +526,8 @@ def get_by_invitation_token(token, pass_token=None): *, DATE_PART('day',timezone('utc'::text, now()) \ - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, - change_pwd_expire_at <= timezone('utc'::text, now()) AS expired_change + change_pwd_expire_at <= timezone('utc'::text, now()) AS expired_change, + (EXTRACT(EPOCH FROM current_timestamp-basic_authentication.change_pwd_expire_at))::BIGINT AS change_pwd_age FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE invitation_token = %(token)s {"AND change_pwd_token = %(pass_token)s" if pass_token else ""} LIMIT 1;""", From d234c38cc9eed72d4f8a5de0688be8173cbb90f6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Sep 2021 15:34:20 +0200 Subject: [PATCH 023/218] feat(api): EE reset password reuse the same token for 5min to bypass double request issue --- ee/api/chalicelib/blueprints/bp_core_dynamic.py | 6 +++++- ee/api/chalicelib/core/users.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 9abd4cab8..de2ae3bfb 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -364,7 +364,11 @@ def process_invitation_link(): return {"errors": ["invitation not found"]} if user["expiredInvitation"]: return {"errors": ["expired invitation, please ask your admin to send a new one"]} - pass_token = users.allow_password_change(user_id=user["userId"]) + if user["expiredChange"] is not None and not user["expiredChange"] \ + and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: + pass_token = user["changePwdToken"] + else: + pass_token = users.allow_password_change(user_id=user["userId"]) return Response( status_code=307, body='', diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py 
index 6c3434255..ec96b68ac 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -541,7 +541,8 @@ def get_by_invitation_token(token, pass_token=None): *, DATE_PART('day',timezone('utc'::text, now()) \ - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, - change_pwd_expire_at <= timezone('utc'::text, now()) AS expired_change + change_pwd_expire_at <= timezone('utc'::text, now()) AS expired_change, + (EXTRACT(EPOCH FROM current_timestamp-basic_authentication.change_pwd_expire_at))::BIGINT AS change_pwd_age FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE invitation_token = %(token)s {"AND change_pwd_token = %(pass_token)s" if pass_token else ""} LIMIT 1;""", From b61f2ed27cd3507e743a2179302935418874a695 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 29 Sep 2021 17:06:15 +0200 Subject: [PATCH 024/218] feat(api): insights auto complete --- api/chalicelib/blueprints/subs/bp_insights.py | 59 ++++----- api/chalicelib/core/insights.py | 115 +++++++++++++++++- 2 files changed, 131 insertions(+), 43 deletions(-) diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py index 1e016c238..6546bfd12 100644 --- a/api/chalicelib/blueprints/subs/bp_insights.py +++ b/api/chalicelib/blueprints/subs/bp_insights.py @@ -9,15 +9,6 @@ app = Blueprint(__name__) _overrides.chalice_app(app) -# -# @app.route('/{projectId}/dashboard/metadata', methods=['GET']) -# def get_metadata_map(projectId, context): -# metamap = [] -# for m in metadata.get(project_id=projectId): -# metamap.append({"name": m["key"], "key": f"metadata{m['index']}"}) -# return {"data": metamap} -# -# @app.route('/{projectId}/insights/journey', methods=['GET', 'POST']) def get_insights_journey(projectId, context): data = app.current_request.json_body @@ -105,6 +96,7 @@ def get_feature_adoption(projectId, context): return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})} + @app.route('/{projectId}/insights/feature_adoption_top_users', methods=['GET', 'POST']) def get_feature_adoption(projectId, context): data = app.current_request.json_body @@ -116,6 +108,17 @@ def get_feature_adoption(projectId, context): return {"data": insights.feature_adoption_top_users(project_id=projectId, **{**data, **args})} +@app.route('/{projectId}/insights/feature_adoption_daily_usage', methods=['GET', 'POST']) +def get_feature_adoption_daily_usage(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_adoption_daily_usage(project_id=projectId, **{**data, **args})} + + @app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) def get_users_active(projectId, context): data = app.current_request.json_body @@ -148,31 +151,13 @@ def get_users_slipping(projectId, context): return {"data": insights.users_slipping(project_id=projectId, **{**data, **args})} -# -# -# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) -# def get_dashboard_autocomplete(projectId, widget, context): -# params = app.current_request.query_params -# if params is None or params.get('q') is None or len(params.get('q')) == 0: -# return {"data": []} -# params['q'] = '^' + params['q'] -# -# if widget in ['performance']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# 
platform=params.get('platform', None), performance=True) -# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', -# 'impacted_sessions_by_slow_pages', 'pages_response_time']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), pages_only=True) -# elif widget in ['resources_loading_time']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), performance=False) -# elif widget in ['time_between_events', 'events']: -# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, -# platform=params.get('platform', None), performance=False, events_only=True) -# elif widget in ['metadata']: -# data = dashboard.search(params.get('q', ''), None, project_id=projectId, -# platform=params.get('platform', None), metadata=True, key=params.get("key")) -# else: -# return {"errors": [f"unsupported widget: {widget}"]} -# return {'data': data} + +@app.route('/{projectId}/insights/search', methods=['GET']) +def get_insights_autocomplete(projectId, context): + params = app.current_request.query_params + if params is None or params.get('q') is None or len(params.get('q')) == 0: + return {"data": []} + # params['q'] = '^' + params['q'] + + return {'data': insights.search(params.get('q', ''), project_id=projectId, + platform=params.get('platform', None), feature_type=params.get("key"))} diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index e639ba55a..a427ce51f 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -11,8 +11,8 @@ def __transform_journey(rows): nodes = [] links = [] for r in rows: - source = r["source_event"][r["source_event"].index("_"):] - target = r["target_event"][r["target_event"].index("_"):] + source = r["source_event"][r["source_event"].index("_") + 1:] + target = r["target_event"][r["target_event"].index("_") + 1:] if source not in nodes: nodes.append(source) if target not in nodes: @@ -25,7 +25,7 @@ JOURNEY_DEPTH = 5 JOURNEY_TYPES = { "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"}, "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, - "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, + # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} } @@ -97,7 +97,7 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) + print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() @@ -596,8 +596,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end @dev.timed def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) pg_sub_query.append("user_id IS NOT NULL") @@ -653,6 +652,69 @@ def 
feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +@dev.timed +def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query_chart.append(f"length({event_column})>2") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(session_id), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT session_id + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + @dev.timed def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], @@ -814,3 +876,44 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], "chart": helper.list_to_camel_case(rows) } + + +@dev.timed +def search(text, feature_type, 
project_id, platform=None): + if not feature_type: + resource_type = "ALL" + data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) + return data + + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True, + data={} if platform is None else {"platform": platform}) + + params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, + "endTimestamp": TimeUTC.now(), + "project_id": project_id, + "value": helper.string_to_sql_like(text.lower()), + "platform_0": platform} + if feature_type == "ALL": + with pg_client.PostgresClient() as cur: + sub_queries = [] + for e in JOURNEY_TYPES: + sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" + FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s + LIMIT 10)""") + pg_query = "UNION ALL".join(sub_queries) + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + elif JOURNEY_TYPES.get(feature_type) is not None: + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s + LIMIT 10;""" + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + else: + return [] + return [helper.dict_to_camel_case(row) for row in rows] From 7e6229d3777d7d84d31a4f0446787e1b176dcdfe Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 30 Sep 2021 15:12:20 +0200 Subject: [PATCH 025/218] feat(api): insights fixes and optimizations feat(db): insights indexes --- api/chalicelib/core/insights.py | 73 ++++++++++++------- .../db/init_dbs/postgresql/init_schema.sql | 3 +- .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 3 +- 4 files changed, 50 insertions(+), 30 deletions(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index a427ce51f..df9b5c5b7 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -97,7 +97,7 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) + # print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() @@ -225,7 +225,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(cur.mogrify(pg_query, params)) + print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) @@ -273,7 +273,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(cur.mogrify(pg_query, params)) + 
print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) @@ -328,7 +328,8 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en if row is not None: event_value = row["value"] extra_values["value"] = event_value - + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, COUNT(DISTINCT connexions_list.user_id) AS users_count, ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users @@ -347,8 +348,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en GROUP BY user_id) AS users_list LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, user_id - FROM sessions - INNER JOIN events.pages AS feature USING (session_id) + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) WHERE users_list.user_id = sessions.user_id AND %(startTimestamp)s <= sessions.start_ts AND sessions.project_id = 1 @@ -362,7 +362,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) + print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) @@ -419,7 +419,8 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), if row is not None: event_value = row["value"] extra_values["value"] = event_value - + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, COUNT(DISTINCT connexions_list.user_id) AS users_count, @@ -454,7 +455,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) + print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) @@ -475,12 +476,14 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da time_constraint=True) event_table = JOURNEY_TYPES["CLICK"]["table"] event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] with pg_client.PostgresClient() as cur: pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count @@ -488,7 +491,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da WHERE {" AND ".join(pg_sub_query)} AND user_id IS NOT NULL;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, - 
"endTimestamp": endTimestamp, **__get_constraint_values(args)} + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} # print(cur.mogrify(pg_query, params)) # print("---------------------") cur.execute(cur.mogrify(pg_query, params)) @@ -505,16 +508,18 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da GROUP BY value ORDER BY count DESC LIMIT 7;""" - # print(cur.mogrify(pg_query, params)) - # print("---------------------") + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") cur.execute(cur.mogrify(pg_query, params)) popularity = cur.fetchall() pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count FROM {event_table} AS feature INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY value;""" - # print(cur.mogrify(pg_query, params)) - # print("---------------------") + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") cur.execute(cur.mogrify(pg_query, params)) frequencies = cur.fetchall() total_usage = sum([f["count"] for f in frequencies]) @@ -544,6 +549,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] with pg_client.PostgresClient() as cur: @@ -552,7 +558,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end WHERE {" AND ".join(pg_sub_query)} AND user_id IS NOT NULL;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} # print(cur.mogrify(pg_query, params)) # print("---------------------") cur.execute(cur.mogrify(pg_query, params)) @@ -562,7 +568,6 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end {"type": "EVENT_VALUE", "value": event_value}], } pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append(f"length({event_column})>2") if default: # get most used value pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count @@ -579,6 +584,8 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end if row is not None: event_value = row["value"] extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_sub_query.append(f"feature.{event_column} = %(value)s") pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count FROM {event_table} AS feature INNER JOIN sessions USING (session_id) @@ -612,12 +619,12 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] with pg_client.PostgresClient() as cur: pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - 
pg_sub_query.append(f"length({event_column})>2") if default: # get most used value pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count @@ -634,6 +641,8 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days if row is not None: event_value = row["value"] extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_sub_query.append(f"feature.{event_column} = %(value)s") pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count FROM {event_table} AS feature @@ -671,20 +680,20 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] with pg_client.PostgresClient() as cur: pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query_chart.append(f"length({event_column})>2") pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append(f"length({event_column})>2") if default: # get most used value pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} + AND length({event_column})>2 GROUP BY value ORDER BY count DESC LIMIT 1;""" @@ -695,6 +704,8 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da if row is not None: event_value = row["value"] extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") pg_query = f"""SELECT generated_timestamp AS timestamp, COALESCE(COUNT(session_id), 0) AS count @@ -707,8 +718,8 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da ORDER BY generated_timestamp;""" params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") + print(cur.mogrify(pg_query, params)) + print("---------------------") cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() return {"users": helper.list_to_camel_case(rows), @@ -725,12 +736,14 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en pg_sub_query.append("feature.timestamp < %(endTimestamp)s") event_table = JOURNEY_TYPES["CLICK"]["table"] event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] pg_sub_query.append(f"length({event_column})>2") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg @@ -740,10 +753,10 @@ def feature_intensity(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), en ORDER BY avg DESC LIMIT 7;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - - # print(cur.mogrify(pg_query, params)) - # print("---------------------") + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # TODO: solve full scan issue + print(cur.mogrify(pg_query, params)) + print("---------------------") cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() @@ -759,11 +772,13 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime pg_sub_query_chart.append("user_id IS NOT NULL") period = "DAY" + extra_values = {} for f in filters: if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: period = f["value"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] with pg_client.PostgresClient() as cur: pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart @@ -780,7 +795,8 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime "project_id": project_id, "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( startTimestamp), - "endTimestamp": endTimestamp, **__get_constraint_values(args)} + "endTimestamp": endTimestamp, **__get_constraint_values(args), + **extra_values} # print(cur.mogrify(pg_query, params)) # print("---------------------") cur.execute(cur.mogrify(pg_query, params)) @@ -856,7 +872,8 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi if row is not None: event_value = row["value"] extra_values["value"] = event_value - + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count FROM {event_table} AS feature INNER JOIN sessions USING (session_id) @@ -874,7 +891,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi return { "startTimestamp": startTimestamp, "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": helper.list_to_camel_case(rows) + "list": helper.list_to_camel_case(rows) } diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 03801fb3d..e4a711117 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -682,7 +682,7 @@ CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHE CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); - +CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); CREATE TABLE events.clicks ( @@ -702,6 +702,7 @@ CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, sess CREATE INDEX clicks_url_idx ON events.clicks (url); CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); CREATE INDEX 
clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); +CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); CREATE TABLE events.inputs diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql index cfa7b19a2..bcbece5c9 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -4,4 +4,5 @@ CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id I CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; +CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 586f84e74..6c3dd1f23 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -682,7 +682,7 @@ CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; - +CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); CREATE TABLE events.clicks ( @@ -702,6 +702,7 @@ CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, sess CREATE INDEX clicks_url_idx ON events.clicks (url); CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); +CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); CREATE TABLE events.inputs From dfe60e5bf3a23d12429c69684f564b12985d9af5 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 30 Sep 2021 19:44:27 +0200 Subject: [PATCH 026/218] fix(backend-http): resolve url if not cachable --- backend/pkg/url/assets/url.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go index cc12b583b..09304db5c 100644 --- a/backend/pkg/url/assets/url.go +++ b/backend/pkg/url/assets/url.go @@ -48,11 +48,11 @@ func isCachable(rawurl string) bool { func GetFullCachableURL(baseURL string, relativeURL string) (string, bool) { if !isRelativeCachable(relativeURL) { - return "", false + return relativeURL, false } fullURL := ResolveURL(baseURL, relativeURL) if !isCachable(fullURL) { - return "", false + return fullURL, false } return fullURL, true } @@ -77,7 +77,7 @@ func GetCachePathForAssets(sessionID uint64, rawurl string) string { func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string { fullURL, cachable := GetFullCachableURL(baseURL, relativeURL) if !cachable { - return relativeURL + return fullURL } u := url.URL{ From 0fcf9e425ab55e6c560d3e76f8092ed58c1778e3 Mon Sep 17 00:00:00 2001 From: ShiKhu 
Date: Thu, 30 Sep 2021 21:43:17 +0200 Subject: [PATCH 027/218] feat(tracker): 3.4.1: uint encoding safety check, log fixes, debug logs --- tracker/tracker/package-lock.json | 2 +- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/index.ts | 45 +++++++++++++-------- tracker/tracker/src/main/index.ts | 18 +++++---- tracker/tracker/src/main/modules/console.ts | 2 +- tracker/tracker/src/main/utils.ts | 4 +- tracker/tracker/src/messages/writer.ts | 3 ++ tracker/tracker/src/webworker/index.ts | 3 +- 8 files changed, 50 insertions(+), 29 deletions(-) diff --git a/tracker/tracker/package-lock.json b/tracker/tracker/package-lock.json index 6eba67e3c..8d1c160b5 100644 --- a/tracker/tracker/package-lock.json +++ b/tracker/tracker/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker", - "version": "3.3.0", + "version": "3.4.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index d88d366da..cb70c0082 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.0", + "version": "3.4.1", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 5efea1cf9..b52359dec 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -1,5 +1,5 @@ import { timestamp, log, warn } from '../utils'; -import { Timestamp, TechnicalInfo, PageClose } from '../../messages'; +import { Timestamp, PageClose } from '../../messages'; import Message from '../../messages/message'; import Nodes from './nodes'; import Observer from './observer'; @@ -24,10 +24,11 @@ export type Options = { session_pageno_key: string; local_uuid_key: string; ingestPoint: string; - resourceBaseHref: string, // resourceHref? + resourceBaseHref: string | null, // resourceHref? 
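+  // null means: fall back to document.baseURI or the <base> tag (see getBaseHref below)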
  //resourceURLRewriter: (url: string) => string | boolean,
   __is_snippet: boolean;
   __debug_report_edp: string | null;
+  __debug_log: boolean;
   onStart?: (info: OnStartInfo) => void;
 } & ObserverOptions &
   WebworkerOptions;
@@ -67,9 +68,10 @@ export default class App {
       session_pageno_key: '__openreplay_pageno',
       local_uuid_key: '__openreplay_uuid',
       ingestPoint: DEFAULT_INGEST_POINT,
-      resourceBaseHref: '',
+      resourceBaseHref: null,
       __is_snippet: false,
       __debug_report_edp: null,
+      __debug_log: false,
       obscureTextEmails: true,
       obscureTextNumbers: false,
       captureIFrames: false,
@@ -90,10 +92,9 @@ export default class App {
         new Blob([`WEBWORKER_BODY`], { type: 'text/javascript' }),
       ),
     );
-    // this.worker.onerror = e => {
-    //   this.send(new TechnicalInfo("webworker_error", JSON.stringify(e)));
-    //   /* TODO: send report */
-    // }
+    this.worker.onerror = e => {
+      this._debug("webworker_error", e)
+    }
     let lastTs = timestamp();
     let fileno = 0;
     this.worker.onmessage = ({ data }: MessageEvent) => {
@@ -114,11 +115,11 @@ export default class App {
       this.attachEventListener(document, 'mouseleave', alertWorker, false, false);
       this.attachEventListener(document, 'visibilitychange', alertWorker, false);
     } catch (e) {
-      this.sendDebugReport("worker_start", e);
+      this._debug("worker_start", e);
     }
   }
 
-  private sendDebugReport(context: string, e: any) {
+  private _debug(context: string, e: any) {
     if(this.options.__debug_report_edp !== null) {
       fetch(this.options.__debug_report_edp, {
         method: 'POST',
@@ -129,6 +130,9 @@ export default class App {
         })
       });
     }
+    if(this.options.__debug_log) {
+      warn("OpenReplay error: ", context, e)
+    }
   }
 
   send(message: Message, urgent = false): void {
@@ -160,12 +164,11 @@ export default class App {
     try {
       fn.apply(this, args);
     } catch (e) {
-      app.send(new TechnicalInfo("error", JSON.stringify({
-        time: timestamp(),
-        name: e.name,
-        message: e.message,
-        stack: e.stack
-      })));
+      this._debug("safe_fn_call", e)
+      // time: timestamp(),
+      // name: e.name,
+      // message: e.message,
+      // stack: e.stack
     }
   } as any // TODO: correct typing
 }
@@ -210,8 +213,10 @@ export default class App {
     return this.projectKey
   }
   getBaseHref(): string {
-    if (this.options.resourceBaseHref) {
+    if (typeof this.options.resourceBaseHref === 'string') {
       return this.options.resourceBaseHref
+    } else if (typeof this.options.resourceBaseHref === 'object') {
+      //switch between types
     }
     if (document.baseURI) {
       return document.baseURI
@@ -221,6 +226,12 @@ export default class App {
       ?.getElementsByTagName("base")[0]
       ?.getAttribute("href") || location.origin + location.pathname
   }
+  resolveResourceURL(resourceURL: string): string {
+    const base = new URL(this.getBaseHref())
+    base.pathname += "/" + new URL(resourceURL).pathname
+    base.pathname = base.pathname.replace(/\/+/g, "/")
+    return base.toString()
+  }
 
   isServiceURL(url: string): boolean {
     return url.startsWith(this.options.ingestPoint)
@@ -310,7 +321,7 @@ export default class App {
       .catch(e => {
        this.stop();
        warn("OpenReplay was unable to start. 
", e) - this.sendDebugReport("session_start", e); + this._debug("session_start", e); throw e; }) } diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts index 79e2cbee8..ca0dd9208 100644 --- a/tracker/tracker/src/main/index.ts +++ b/tracker/tracker/src/main/index.ts @@ -17,7 +17,7 @@ import Scroll from './modules/scroll'; import Viewport from './modules/viewport'; import Longtasks from './modules/longtasks'; import CSSRules from './modules/cssrules'; -import { IN_BROWSER, deprecationWarn } from './utils'; +import { IN_BROWSER, deprecationWarn, DOCS_HOST } from './utils'; import { Options as AppOptions } from './app'; import { Options as ConsoleOptions } from './modules/console'; @@ -41,13 +41,13 @@ const DOCS_SETUP = '/installation/setup-or'; function processOptions(obj: any): obj is Options { if (obj == null) { - console.error(`OpenReplay: invalid options argument type. Please, check documentation on https://docs.openreplay.com${ DOCS_SETUP }`); + console.error(`OpenReplay: invalid options argument type. Please, check documentation on ${DOCS_HOST}${DOCS_SETUP}`); return false; } if (typeof obj.projectKey !== 'string') { if (typeof obj.projectKey !== 'number') { if (typeof obj.projectID !== 'number') { // Back compatability - console.error(`OpenReplay: projectKey is missing or wrong type (string is expected). Please, check https://docs.openreplay.com${ DOCS_SETUP } for more information.`) + console.error(`OpenReplay: projectKey is missing or wrong type (string is expected). Please, check ${DOCS_HOST}${DOCS_SETUP} for more information.`) return false } else { obj.projectKey = obj.projectID.toString(); @@ -59,7 +59,7 @@ function processOptions(obj: any): obj is Options { } } if (typeof obj.sessionToken !== 'string' && obj.sessionToken != null) { - console.warn(`OpenReplay: invalid options argument type. Please, check documentation on https://docs.openreplay.com${ DOCS_SETUP }`) + console.warn(`OpenReplay: invalid options argument type. Please, check documentation on ${DOCS_HOST}${DOCS_SETUP}`) } return true; } @@ -70,6 +70,10 @@ export default class API { if (!IN_BROWSER || !processOptions(options)) { return; } + if ((window as any).__OPENREPLAY__) { + console.error("OpenReplay: one tracker instance has been initialised already") + return + } if (!options.__DISABLE_SECURE_MODE && location.protocol !== 'https:') { console.error("OpenReplay: Your website must be publicly accessible and running on SSL in order for OpenReplay to properly capture and replay the user session. You can disable this check by setting `__DISABLE_SECURE_MODE` option to `true` if you are testing in localhost. Keep in mind, that asset files on a local machine are not available to the outside world. This might affect tracking if you use css files.") return; @@ -99,9 +103,9 @@ export default class API { Performance(this.app, options); Scroll(this.app); Longtasks(this.app); - (window as any).__OPENREPLAY__ = (window as any).__OPENREPLAY__ || this; + (window as any).__OPENREPLAY__ = this; } else { - console.log("OpenReplay: browser doesn't support API required for tracking.") + console.log("OpenReplay: browser doesn't support API required for tracking or doNotTrack is set to 1.") const req = new XMLHttpRequest(); const orig = options.ingestPoint || DEFAULT_INGEST_POINT; req.open("POST", orig + "/v1/web/not-started"); @@ -133,7 +137,7 @@ export default class API { start(): void { if (!IN_BROWSER) { - console.error(`OpenReplay: you are trying to start Tracker on a node.js environment. 
If you want to use OpenReplay with SSR, please, use componentDidMount or useEffect API for placing the \`tracker.start()\` line. Check documentation on https://docs.openreplay.com${ DOCS_SETUP }`) + console.error(`OpenReplay: you are trying to start Tracker on a node.js environment. If you want to use OpenReplay with SSR, please, use componentDidMount or useEffect API for placing the \`tracker.start()\` line. Check documentation on ${DOCS_HOST}${DOCS_SETUP}`) return; } if (this.app === null) { diff --git a/tracker/tracker/src/main/modules/console.ts b/tracker/tracker/src/main/modules/console.ts index 251ff8ca1..34be0264a 100644 --- a/tracker/tracker/src/main/modules/console.ts +++ b/tracker/tracker/src/main/modules/console.ts @@ -123,7 +123,7 @@ export default function (app: App, opts: Partial): void { options.consoleMethods.forEach((method) => { if (consoleMethods.indexOf(method) === -1) { - console.error(`Asayer: unsupported console method ${method}`); + console.error(`OpenReplay: unsupported console method "${method}"`); return; } const fn = (console as any)[method]; diff --git a/tracker/tracker/src/main/utils.ts b/tracker/tracker/src/main/utils.ts index 586d02ecb..5a8700f31 100644 --- a/tracker/tracker/src/main/utils.ts +++ b/tracker/tracker/src/main/utils.ts @@ -21,7 +21,8 @@ export const IN_BROWSER = !(typeof window === "undefined"); export const log = console.log export const warn = console.warn -const DOCS_HOST = 'https://docs.openreplay.com'; +export const DOCS_HOST = 'https://docs.openreplay.com'; + const warnedFeatures: { [key: string]: boolean; } = {}; export function deprecationWarn(nameOfFeature: string, useInstead: string, docsPath: string = "/"): void { if (warnedFeatures[ nameOfFeature ]) { @@ -56,3 +57,4 @@ export function hasOpenreplayAttribute(e: Element, name: string): boolean { return false; } + diff --git a/tracker/tracker/src/messages/writer.ts b/tracker/tracker/src/messages/writer.ts index 5ce52d330..6947420bc 100644 --- a/tracker/tracker/src/messages/writer.ts +++ b/tracker/tracker/src/messages/writer.ts @@ -77,6 +77,9 @@ export default class Writer { return this.offset <= this.size; } uint(value: number): boolean { + if (value < 0 || value > Number.MAX_SAFE_INTEGER) { + value = 0 + } while (value >= 0x80) { this.data[this.offset++] = value % 0x100 | 0x80; value = Math.floor(value / 128); diff --git a/tracker/tracker/src/webworker/index.ts b/tracker/tracker/src/webworker/index.ts index 1c6cde40f..d680bfab3 100644 --- a/tracker/tracker/src/webworker/index.ts +++ b/tracker/tracker/src/webworker/index.ts @@ -49,7 +49,7 @@ function sendBatch(batch: Uint8Array):void { if (this.status >= 400) { // TODO: test workflow. 
After 400+ it calls /start for some reason reset(); sendQueue.length = 0; - if (this.status === 403) { // Unauthorised (Token expired) + if (this.status === 401) { // Unauthorised (Token expired) self.postMessage("restart") return } @@ -74,6 +74,7 @@ function sendBatch(batch: Uint8Array):void { attemptsCount++; setTimeout(() => sendBatch(batch), ATTEMPT_TIMEOUT); } + // TODO: handle offline exception req.send(batch.buffer); } From 6a600eaecb1ac06564d99ae835e1ea0ef9bd2548 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 1 Oct 2021 11:06:10 +0200 Subject: [PATCH 028/218] env(backend): CACHE_ASSETS default true --- backend/Dockerfile | 2 +- scripts/helm/app/http.yaml | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 3e60e0e89..8353b8f63 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -41,7 +41,7 @@ ENV TZ=UTC \ AWS_REGION_WEB=eu-central-1 \ AWS_REGION_IOS=eu-west-1 \ AWS_REGION_ASSETS=eu-central-1 \ - CACHE_ASSETS=false \ + CACHE_ASSETS=true \ ASSETS_SIZE_LIMIT=6291456 \ FS_CLEAN_HRS=72 diff --git a/scripts/helm/app/http.yaml b/scripts/helm/app/http.yaml index 5a9ae09ac..f594df201 100644 --- a/scripts/helm/app/http.yaml +++ b/scripts/helm/app/http.yaml @@ -29,7 +29,6 @@ env: AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" AWS_REGION: us-east-1 POSTGRES_STRING: postgres://postgres:asayerPostgres@postgresql.db.svc.cluster.local:5432 - CACHE_ASSETS: false # REDIS_STRING: redis-master.db.svc.cluster.local:6379 KAFKA_SERVERS: kafka.db.svc.cluster.local:9092 From 577d15dab86ff8fe22a58beec095a460e28a4543 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 1 Oct 2021 11:22:40 +0200 Subject: [PATCH 029/218] dev(backend-http): force component update --- backend/services/http/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/services/http/main.go b/backend/services/http/main.go index 29181718f..dc2eb1720 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -24,7 +24,7 @@ import ( "openreplay/backend/services/http/uaparser" ) - + var rewriter *assets.Rewriter var producer types.Producer var pgconn *cache.PGCache From d6914cc3c8d46309eadafaa090a7931b3e78e9fa Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 1 Oct 2021 19:42:08 +0200 Subject: [PATCH 030/218] feat(tracker-fetch): capture headers --- tracker/tracker-fetch/README.md | 25 +++++++++---- tracker/tracker-fetch/package.json | 2 +- tracker/tracker-fetch/src/index.ts | 55 ++++++++++++++++++++++++----- tracker/tracker-fetch/tsconfig.json | 3 +- 4 files changed, 68 insertions(+), 17 deletions(-) diff --git a/tracker/tracker-fetch/README.md b/tracker/tracker-fetch/README.md index d72201e35..e16980efc 100644 --- a/tracker/tracker-fetch/README.md +++ b/tracker/tracker-fetch/README.md @@ -1,7 +1,7 @@ # OpenReplay Tracker Fetch plugin Tracker plugin to support tracking of the `fetch` requests payload. -Additionally it populates the requests with `sessionID` header for backend logging. +Additionally it populates the requests with `sessionToken` header for backend logging. 
## Installation

```bash
npm i @openreplay/tracker-fetch
```

## Usage

```js
import Tracker from '@openreplay/tracker';
import trackerFetch from '@openreplay/tracker-fetch';

const tracker = new Tracker({
  projectKey: YOUR_PROJECT_KEY,
});
tracker.start();

-export const fetch = tracker.use(trackerFetch({
-  sessionTokenHeader: 'X-Session-ID', // optional
-  failuresOnly: true //optional
-}));
+export const fetch = tracker.use(trackerFetch({ /* options here*/ }));

 fetch('https://my.api.io/resource').then(response => response.json()).then(body => console.log(body));
 ```
-In case you use OpenReplay integrations (sentry, bugsnag or others), you can use `sessionTokenHeader` option to specify the header name. This header will be appended automatically to the each fetch request and will contain OpenReplay session identificator value.
-Set `failuresOnly` option to `true` if you want to record only requests with the status code >= 400.
\ No newline at end of file
+Options:
+```ts
+{
+  failuresOnly: boolean, // default false
+  sessionTokenHeader: string | undefined, // default undefined
+  ingoreHeaders: Array<string> | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ]
+}
+
+```
+
+Set `failuresOnly` option to `true` if you want to record only requests with the status code >= 400.
+
+In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use the `sessionTokenHeader` option to specify the header name. This header will be appended automatically to each fetch request and will contain the OpenReplay session identifier value.
+
+You can define a list of headers that you don't want to capture with the `ingoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
+
diff --git a/tracker/tracker-fetch/package.json b/tracker/tracker-fetch/package.json
index 237a6e326..7ad82df8b 100644
--- a/tracker/tracker-fetch/package.json
+++ b/tracker/tracker-fetch/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker-fetch",
   "description": "Tracker plugin for fetch requests recording ",
-  "version": "3.0.0",
+  "version": "3.4.0",
   "keywords": [
     "fetch",
     "logging",
diff --git a/tracker/tracker-fetch/src/index.ts b/tracker/tracker-fetch/src/index.ts
index 3bfa30034..a411d29d6 100644
--- a/tracker/tracker-fetch/src/index.ts
+++ b/tracker/tracker-fetch/src/index.ts
@@ -2,13 +2,17 @@ import { App, Messages } from '@openreplay/tracker';
 
 export interface Options {
   sessionTokenHeader?: string;
-  failuresOnly?: boolean;
+  replaceDefault: boolean; // overrideDefault ?
+  failuresOnly: boolean;
+  ingoreHeaders: Array<string> | boolean;
 }
 
 export default function(opts: Partial<Options> = {}) {
   const options: Options = Object.assign(
     {
+      replaceDefault: false,
       failuresOnly: false,
+      ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
     },
     opts,
   );
@@ -18,7 +22,12 @@ export default function(opts: Partial<Options> = {}) {
     return window.fetch;
   }
 
-  return async (input: RequestInfo, init: RequestInit = {}) => {
+  const ihOpt = options.ingoreHeaders
+  const isHIgnoring = Array.isArray(ihOpt)
+    ? 
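+      // a list ignores exactly the named headers; a plain boolean ignores all of them (true) or none (false)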
name => ihOpt.includes(name) + : () => ihOpt + + const fetch = async (input: RequestInfo, init: RequestInit = {}) => { if (typeof input !== 'string') { return window.fetch(input, init); } @@ -44,20 +53,50 @@ export default function(opts: Partial = {}) { return response } const r = response.clone(); - r.text().then(text => + + r.text().then(text => { + const reqHs: Record = {} + const resHs: Record = {} + if (ihOpt !== true) { + function writeReqHeader([n, v]) { + if (!isHIgnoring(n)) { reqHs[n] = v } + } + if (init.headers instanceof Headers) { + init.headers.forEach((v, n) => writeReqHeader([n, v])) + } else if (Array.isArray(init.headers)) { + init.headers.forEach(writeReqHeader); + } else if (typeof init.headers === 'object') { + Object.entries(init.headers).forEach(writeReqHeader) + } + + r.headers.forEach((v, n) => { if (!isHIgnoring(n)) resHs[n] = v }) + } + const req = JSON.stringify({ + headers: reqHs, + body: typeof init.body === 'string' ? init.body : '', + }) + const res = JSON.stringify({ + headers: resHs, + body: text, + }) app.send( Messages.Fetch( - typeof init.method === 'string' ? init.method : 'GET', + typeof init.method === 'string' ? init.method.toUpperCase() : 'GET', input, - typeof init.body === 'string' ? init.body : '', - text, + req, + res, r.status, startTime + performance.timing.navigationStart, duration, ), - ), - ); + ) + }); return response; }; + if (options.replaceDefault) { + window.fetch = fetch + } + return fetch; }; + } diff --git a/tracker/tracker-fetch/tsconfig.json b/tracker/tracker-fetch/tsconfig.json index ce07a685b..258c2f510 100644 --- a/tracker/tracker-fetch/tsconfig.json +++ b/tracker/tracker-fetch/tsconfig.json @@ -7,6 +7,7 @@ "module": "es6", "moduleResolution": "node", "declaration": true, - "outDir": "./lib" + "outDir": "./lib", + "lib": ["es6", "dom", "es2017"] } } From f44f2273a7e3150ecd57bb93700828d41db3d5df Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 1 Oct 2021 19:47:34 +0200 Subject: [PATCH 031/218] feat(tracker-axios): 3.4.0 capture headers --- tracker/tracker-axios/README.md | 9 ++-- tracker/tracker-axios/package.json | 2 +- tracker/tracker-axios/src/index.ts | 74 +++++++++++++++++++++-------- tracker/tracker-axios/tsconfig.json | 3 +- 4 files changed, 62 insertions(+), 26 deletions(-) diff --git a/tracker/tracker-axios/README.md b/tracker/tracker-axios/README.md index a092c4032..3175f3789 100644 --- a/tracker/tracker-axios/README.md +++ b/tracker/tracker-axios/README.md @@ -21,22 +21,25 @@ const tracker = new Tracker({ }); tracker.start(); -tracker.use(trackerAxios()); +tracker.use(trackerAxios({ /* options here*/ })); ``` Options: ```ts { instance: AxiosInstance; // default: axios - failuresOnly: boolean; // default: true + failuresOnly: boolean; // default: false captureWhen: (AxiosRequestConfig) => boolean; // default: () => true sessionTokenHeader: string; // default: undefined + ingoreHeaders: Array | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ] } ``` By default plugin connects to the static `axios` instance, but you can specify one with the `instance` option. -Set `failuresOnly` option to `false` if you want to record every single request regardless of the status code. By default only failed requests are captured, when the axios' promise is rejected. You can also [regulate](https://github.com/axios/axios#request-config) this axios behaviour with the `validateStatus` option. +Set `failuresOnly` option to `true` if you want to record only failed requests, when the axios' promise is rejected. 
You can also [regulate](https://github.com/axios/axios#request-config) this failure behaviour with the `validateStatus` option.
 
 `captureWhen` parameter allows you to set a filter on what should be captured. The function will be called with the axios config object and is expected to return `true` or `false`.
 
 In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use the `sessionTokenHeader` option to specify the header name. This header will be appended automatically to each axios request and will contain the OpenReplay session identifier value.
+
+You can define a list of headers that you don't want to capture with the `ingoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
diff --git a/tracker/tracker-axios/package.json b/tracker/tracker-axios/package.json
index 9f1bce79b..a10308cf4 100644
--- a/tracker/tracker-axios/package.json
+++ b/tracker/tracker-axios/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker-axios",
   "description": "Tracker plugin for axios requests recording",
-  "version": "3.0.1",
+  "version": "3.4.0",
   "keywords": [
     "axios",
     "logging",
diff --git a/tracker/tracker-axios/src/index.ts b/tracker/tracker-axios/src/index.ts
index 3c28260ca..9a7420a26 100644
--- a/tracker/tracker-axios/src/index.ts
+++ b/tracker/tracker-axios/src/index.ts
@@ -9,16 +9,16 @@ export interface Options {
   instance: AxiosInstance;
   failuresOnly: boolean;
   captureWhen: (AxiosRequestConfig) => boolean;
-  //ingoreHeaders: Array<string> | boolean;
+  ingoreHeaders: Array<string> | boolean;
 }
 
 export default function(opts: Partial<Options> = {}) {
   const options: Options = Object.assign(
     {
       instance: axios,
-      failuresOnly: true,
+      failuresOnly: false,
       captureWhen: () => true,
-      //ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
+      ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
     },
     opts,
   );
@@ -27,48 +27,80 @@ export default function(opts: Partial<Options> = {}) {
     return;
   }
 
+  const ihOpt = options.ingoreHeaders
+  const isHIgnoring = Array.isArray(ihOpt)
+    ? 
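+      // a list ignores exactly the named headers; a plain boolean ignores all of them (true) or none (false)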
name => ihOpt.includes(name)
+    : () => ihOpt
+
-  const sendFetchMessage = (response: AxiosResponse) => {
+  const sendFetchMessage = (res: AxiosResponse) => {
     // @ts-ignore
-    const startTime: number = response.config.__openreplayStartTs;
+    const startTime: number = res.config.__openreplayStartTs;
     const duration = performance.now() - startTime;
     if (typeof startTime !== 'number') {
       return;
     }
 
-    let requestData: string = '';
-    if (typeof response.config.data === 'string') {
-      requestData = response.config.data;
+    let reqBody: string = '';
+    if (typeof res.config.data === 'string') {
+      reqBody = res.config.data;
     } else {
       try {
-        requestData = JSON.stringify(response.config.data) || '';
-      } catch (e) {}
+        reqBody = JSON.stringify(res.config.data) || '';
+      } catch (e) {} // TODO: app debug
     }
-    let responseData: string = '';
-    if (typeof response.data === 'string') {
-      responseData = response.data;
+    let resBody: string = '';
+    if (typeof res.data === 'string') {
+      resBody = res.data;
     } else {
       try {
-        responseData = JSON.stringify(response.data) || '';
+        resBody = JSON.stringify(res.data) || '';
       } catch (e) {}
     }
+    const reqHs: Record<string, string> = {}
+    const resHs: Record<string, string> = {}
+    // TODO: type safe axios headers
+    if (ihOpt !== true) {
+      function writeReqHeader([n, v]: [string, string]) {
+        if (!isHIgnoring(n)) { reqHs[n] = v }
+      }
+      if (res.config.headers instanceof Headers) {
+        res.config.headers.forEach((v, n) => writeReqHeader([n, v]))
+      } else if (Array.isArray(res.config.headers)) {
+        res.config.headers.forEach(writeReqHeader);
+      } else if (typeof res.config.headers === 'object') {
+        Object.entries(res.config.headers as Record<string, string>).forEach(writeReqHeader)
+      }
+
+      // TODO: type safe axios headers
+      if (typeof res.headers === 'object') {
+        Object.entries(res.headers as Record<string, string>).forEach(([n, v]) => { if (!isHIgnoring(n)) resHs[n] = v })
+      }
+    }
+
     // Why can't axios propagate the final request URL somewhere?
-    const fullURL = buildFullPath(response.config.baseURL, options.instance.getUri(response.config));
+    const fullURL = buildFullPath(res.config.baseURL, options.instance.getUri(res.config));
     app.send(
       Messages.Fetch(
-        typeof response.config.method === 'string' ? response.config.method.toUpperCase() : 'GET',
+        typeof res.config.method === 'string' ? res.config.method.toUpperCase() : 'GET',
         fullURL,
-        requestData,
-        responseData,
-        response.status,
+        JSON.stringify({
+          headers: reqHs,
+          body: reqBody,
+        }),
+        JSON.stringify({
+          headers: resHs,
+          body: resBody,
+        }),
+        res.status,
         startTime + performance.timing.navigationStart,
         duration,
       ),
     );
   }
-
+  // TODO: why doesn't app.safe work here?
   options.instance.interceptors.request.use(function (config) {
     if (options.sessionTokenHeader) {
       const sessionToken = app.getSessionToken();
@@ -80,7 +112,7 @@ export default function(opts: Partial<Options> = {}) {
         config.headers.append(options.sessionTokenHeader, sessionToken);
       } else if (Array.isArray(config.headers)) {
         config.headers.push([options.sessionTokenHeader, sessionToken]);
-      } else {
+      } else if (typeof config.headers === 'object') {
         config.headers[options.sessionTokenHeader] = sessionToken;
       }
     }
diff --git a/tracker/tracker-axios/tsconfig.json b/tracker/tracker-axios/tsconfig.json
index ce07a685b..dd1ee258f 100644
--- a/tracker/tracker-axios/tsconfig.json
+++ b/tracker/tracker-axios/tsconfig.json
@@ -7,6 +7,7 @@
     "module": "es6",
     "moduleResolution": "node",
     "declaration": true,
-    "outDir": "./lib"
+    "outDir": "./lib",
+    "lib": ["es6", "dom", "es2017"] // is all necessary? 
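+    // likely yes: es2017 is needed for Object.entries above, dom for the Headers type, es6 as the base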
}
}
From e165632f63917909862e5bef1c7e413e91a3ab2e Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Mon, 4 Oct 2021 15:12:36 +0530
Subject: [PATCH 032/218] fix(tracker) - this._debug err, typo

---
 tracker/tracker-axios/README.md       | 4 ++--
 tracker/tracker-axios/src/index.ts    | 6 +++---
 tracker/tracker-fetch/README.md       | 4 ++--
 tracker/tracker-fetch/src/index.ts    | 6 +++---
 tracker/tracker/src/main/app/index.ts | 2 +-
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/tracker/tracker-axios/README.md b/tracker/tracker-axios/README.md
index 3175f3789..068fe3190 100644
--- a/tracker/tracker-axios/README.md
+++ b/tracker/tracker-axios/README.md
@@ -31,7 +31,7 @@ Options:
   failuresOnly: boolean; // default: false
   captureWhen: (AxiosRequestConfig) => boolean; // default: () => true
   sessionTokenHeader: string; // default: undefined
-  ingoreHeaders: Array<string> | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ]
+  ignoreHeaders: Array<string> | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ]
 }
 ```
@@ -42,4 +42,4 @@ Set `failuresOnly` option to `true` if you want to record only failed requests,
 
 In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use the `sessionTokenHeader` option to specify the header name. This header will be appended automatically to each axios request and will contain the OpenReplay session identifier value.
 
-You can define a list of headers that you don't want to capture with the `ingoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
+You can define a list of headers that you don't want to capture with the `ignoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
diff --git a/tracker/tracker-axios/src/index.ts b/tracker/tracker-axios/src/index.ts
index 9a7420a26..01eb79dca 100644
--- a/tracker/tracker-axios/src/index.ts
+++ b/tracker/tracker-axios/src/index.ts
@@ -9,7 +9,7 @@ export interface Options {
   instance: AxiosInstance;
   failuresOnly: boolean;
   captureWhen: (AxiosRequestConfig) => boolean;
-  ingoreHeaders: Array<string> | boolean;
+  ignoreHeaders: Array<string> | boolean;
 }
 
 export default function(opts: Partial<Options> = {}) {
@@ -18,7 +18,7 @@ export default function(opts: Partial<Options> = {}) {
       instance: axios,
       failuresOnly: false,
       captureWhen: () => true,
-      ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
+      ignoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
     },
     opts,
   );
@@ -27,7 +27,7 @@ export default function(opts: Partial<Options> = {}) {
     return;
   }
 
-  const ihOpt = options.ingoreHeaders
+  const ihOpt = options.ignoreHeaders
   const isHIgnoring = Array.isArray(ihOpt)
     ? 
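       // a list ignores exactly the named headers; a plain boolean ignores all of them (true) or none (false)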
name => ihOpt.includes(name)
     : () => ihOpt
 
diff --git a/tracker/tracker-fetch/README.md b/tracker/tracker-fetch/README.md
index e16980efc..b7fca2e4b 100644
--- a/tracker/tracker-fetch/README.md
+++ b/tracker/tracker-fetch/README.md
@@ -33,7 +33,7 @@ Options:
 {
   failuresOnly: boolean, // default false
   sessionTokenHeader: string | undefined, // default undefined
-  ingoreHeaders: Array<string> | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ]
+  ignoreHeaders: Array<string> | boolean, // default [ 'Cookie', 'Set-Cookie', 'Authorization' ]
 }
 
@@ -42,5 +42,5 @@ Set `failuresOnly` option to `true` if you want to record only requests with the status code >= 400.
 
 In case you use [OpenReplay integrations (sentry, bugsnag or others)](https://docs.openreplay.com/integrations), you can use the `sessionTokenHeader` option to specify the header name. This header will be appended automatically to each fetch request and will contain the OpenReplay session identifier value.
 
-You can define a list of headers that you don't want to capture with the `ingoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
+You can define a list of headers that you don't want to capture with the `ignoreHeaders` option. Set its value to `false` to capture them all, or to `true` to capture none. By default the plugin ignores a list of headers that might be sensitive, such as `[ 'Cookie', 'Set-Cookie', 'Authorization' ]`.
 
diff --git a/tracker/tracker-fetch/src/index.ts b/tracker/tracker-fetch/src/index.ts
index a411d29d6..fbce7ac31 100644
--- a/tracker/tracker-fetch/src/index.ts
+++ b/tracker/tracker-fetch/src/index.ts
@@ -4,7 +4,7 @@ export interface Options {
   sessionTokenHeader?: string;
   replaceDefault: boolean; // overrideDefault ?
   failuresOnly: boolean;
-  ingoreHeaders: Array<string> | boolean;
+  ignoreHeaders: Array<string> | boolean;
 }
 
 export default function(opts: Partial<Options> = {}) {
@@ -12,7 +12,7 @@ export default function(opts: Partial<Options> = {}) {
     {
       replaceDefault: false,
       failuresOnly: false,
-      ingoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
+      ignoreHeaders: [ 'Cookie', 'Set-Cookie', 'Authorization' ],
     },
     opts,
   );
@@ -22,7 +22,7 @@ export default function(opts: Partial<Options> = {}) {
     return window.fetch;
   }
 
-  const ihOpt = options.ingoreHeaders
+  const ihOpt = options.ignoreHeaders
   const isHIgnoring = Array.isArray(ihOpt)
     ? 
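       // a list ignores exactly the named headers; a plain boolean ignores all of them (true) or none (false)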
name => ihOpt.includes(name) : () => ihOpt diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index b52359dec..b50bb5731 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -164,7 +164,7 @@ export default class App { try { fn.apply(this, args); } catch (e) { - this._debug("safe_fn_call", e) + app._debug("safe_fn_call", e) // time: timestamp(), // name: e.name, // message: e.message, From bf44bcbef4e90e52b14945fef87031579b369597 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 4 Oct 2021 15:20:53 +0530 Subject: [PATCH 033/218] change(plugins) - version number --- tracker/tracker-axios/package-lock.json | 2 +- tracker/tracker-axios/package.json | 2 +- tracker/tracker-fetch/package-lock.json | 2 +- tracker/tracker-fetch/package.json | 2 +- tracker/tracker/package.json | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tracker/tracker-axios/package-lock.json b/tracker/tracker-axios/package-lock.json index 49bb3588d..244d16daf 100644 --- a/tracker/tracker-axios/package-lock.json +++ b/tracker/tracker-axios/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-axios", - "version": "3.0.1", + "version": "3.4.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/tracker/tracker-axios/package.json b/tracker/tracker-axios/package.json index a10308cf4..0d6de4ca1 100644 --- a/tracker/tracker-axios/package.json +++ b/tracker/tracker-axios/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-axios", "description": "Tracker plugin for axios requests recording", - "version": "3.4.0", + "version": "3.4.1", "keywords": [ "axios", "logging", diff --git a/tracker/tracker-fetch/package-lock.json b/tracker/tracker-fetch/package-lock.json index b8d27b87e..16afd2f75 100644 --- a/tracker/tracker-fetch/package-lock.json +++ b/tracker/tracker-fetch/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-fetch", - "version": "3.0.0", + "version": "3.4.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/tracker/tracker-fetch/package.json b/tracker/tracker-fetch/package.json index 7ad82df8b..1d650bf6e 100644 --- a/tracker/tracker-fetch/package.json +++ b/tracker/tracker-fetch/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-fetch", "description": "Tracker plugin for fetch requests recording ", - "version": "3.4.0", + "version": "3.4.1", "keywords": [ "fetch", "logging", diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index cb70c0082..a72574f17 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.1", + "version": "3.4.3", "keywords": [ "logging", "replay" From 0a0be4c43d9b785968b2438d39ac908b6e9b0ac7 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 4 Oct 2021 18:07:16 +0200 Subject: [PATCH 034/218] feat(tracker): 3.4.4 heatmaps default false --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/modules/mouse.ts | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index a72574f17..41934717c 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.3", + "version": "3.4.4", "keywords": [ "logging", "replay" diff --git 
a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts
index a0a526b93..40bcbeb61 100644
--- a/tracker/tracker/src/main/modules/mouse.ts
+++ b/tracker/tracker/src/main/modules/mouse.ts
@@ -83,12 +83,12 @@ export interface Options {
 export default function (app: App, opts: Partial<Options>): void {
   const options: Options = Object.assign(
     {
-      heatmaps: {
-        finder: {
-          threshold: 5,
-          maxNumberOfTries: 600,
-        },
-      },
+      heatmaps: false // {
+      //   finder: {
+      //     threshold: 5,
+      //     maxNumberOfTries: 600,
+      //   },
+      // },
     },
     opts,
   );
From 00ae88b60fda088b2149d0106db53701d4029caf Mon Sep 17 00:00:00 2001
From: ShiKhu
Date: Mon, 4 Oct 2021 22:32:28 +0200
Subject: [PATCH 035/218] feat(tracker-assist): 3.4.3 data connections map; buffering code split; optional logs

---
 tracker/tracker-assist/package-lock.json      |  8 +-
 tracker/tracker-assist/package.json           |  6 +-
 .../tracker-assist/src/BufferingConnection.ts | 36 ++++++++
 tracker/tracker-assist/src/index.ts           | 84 +++++++++----------
 4 files changed, 81 insertions(+), 53 deletions(-)
 create mode 100644 tracker/tracker-assist/src/BufferingConnection.ts

diff --git a/tracker/tracker-assist/package-lock.json b/tracker/tracker-assist/package-lock.json
index 9c778051a..8781de1bf 100644
--- a/tracker/tracker-assist/package-lock.json
+++ b/tracker/tracker-assist/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "@openreplay/tracker-assist",
-  "version": "3.1.1",
+  "version": "3.4.3",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -63,9 +63,9 @@
       }
     },
     "@openreplay/tracker": {
-      "version": "3.3.0",
-      "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.3.0.tgz",
-      "integrity": "sha512-g9sOG01VaiRLw4TcUbux8j3moa7gsGs8rjZPEVJ5SJqxjje9R7tpUD5UId9ne7QdHSoiHfrWFk3TNRLpXyvImg==",
+      "version": "3.4.4",
+      "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.4.tgz",
+      "integrity": "sha512-IcuxwwTt1RtLZw9QlQVAVNqoybv0ZkD2ZDk2FeHEQ/+BItsMhG61/4/lB2yXKLTLr6ydeKTzwYvxfr1vwxn2dw==",
       "dev": true,
       "requires": {
         "@medv/finder": "^2.0.0",
diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json
index fec7cf002..d871a51b4 100644
--- a/tracker/tracker-assist/package.json
+++ b/tracker/tracker-assist/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker-assist",
   "description": "Tracker plugin for screen assistance through the WebRTC",
-  "version": "3.2.0",
+  "version": "3.4.3",
   "keywords": [
     "WebRTC",
     "assistance",
@@ -24,10 +24,10 @@
     "peerjs": "^1.3.2"
   },
   "peerDependencies": {
-    "@openreplay/tracker": "^3.4.0"
+    "@openreplay/tracker": "^3.4.3"
   },
   "devDependencies": {
-    "@openreplay/tracker": "^3.4.0",
+    "@openreplay/tracker": "^3.4.3",
     "prettier": "^1.18.2",
     "replace-in-files-cli": "^1.0.0",
     "typescript": "^3.6.4"
diff --git a/tracker/tracker-assist/src/BufferingConnection.ts b/tracker/tracker-assist/src/BufferingConnection.ts
new file mode 100644
index 000000000..766bd7892
--- /dev/null
+++ b/tracker/tracker-assist/src/BufferingConnection.ts
@@ -0,0 +1,36 @@
+import type { DataConnection } from 'peerjs';
+
+// TODO: proper Message type export from tracker in 3.5.0
+interface Message {
+  encode(w: any): boolean;
+}
+
+// Buffering is required in case of WebRTC
+export default class BufferingConnection {
+  private readonly buffer: Message[][] = []
+  private buffering: boolean = false
+
+  constructor(readonly conn: DataConnection){}
+  private sendNext() {
+    if (this.buffer.length) {
+      setTimeout(() => {
+        this.conn.send(this.buffer.shift())
+        this.sendNext()
+      }, 50)
+    } else { 
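+      // queue drained: clear the flag so the next send() call restarts the timer loop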
this.buffering = false + } + } + + send(messages: Message[]) { + if (!this.conn.open) { return; } + let i = 0; + while (i < messages.length) { + this.buffer.push(messages.slice(i, i+=1000)) + } + if (!this.buffering) { + this.buffering = true + this.sendNext(); + } + } +} \ No newline at end of file diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index a5c6510fa..c782ed55e 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -4,6 +4,7 @@ import type { DataConnection } from 'peerjs'; import { App, Messages } from '@openreplay/tracker'; import type Message from '@openreplay/tracker'; +import BufferingConnection from './BufferingConnection'; import Mouse from './Mouse'; import CallWindow from './CallWindow'; import ConfirmWindow from './ConfirmWindow'; @@ -31,14 +32,27 @@ export default function(opts: Partial = {}) { }, opts, ); - return function(app: App | null, appOptions: { __DISABLE_SECURE_MODE?: boolean } = {}) { + return function(app: App | null, appOptions: { __debug_log?: boolean, __DISABLE_SECURE_MODE?: boolean } = {}) { // @ts-ignore if (app === null || !navigator?.mediaDevices?.getUserMedia) { // 93.04% browsers return; } - let assistDemandedRestart = false; - let peer : Peer | null = null; + function log(...args) { + // TODO: use warn/log from assist + appOptions.__debug_log && console.log("OpenReplay Assist. ", ...args) + } + function warn(...args) { + appOptions.__debug_log && console.warn("OpenReplay Assist. ", ...args) + } + + let assistDemandedRestart = false + let peer : Peer | null = null + const openDataConnections: Record = {} + + app.addCommitCallback(function(messages) { + Object.values(openDataConnections).forEach(buffConn => buffConn.send(messages)) + }) app.attachStopCallback(function() { if (assistDemandedRestart) { return; } @@ -47,62 +61,40 @@ export default function(opts: Partial = {}) { app.attachStartCallback(function() { if (assistDemandedRestart) { return; } - const peerID = `${app.projectKey}-${app.getSessionID()}` + const peerID = `${app.getProjectKey()}-${app.getSessionID()}` peer = new Peer(peerID, { // @ts-ignore host: app.getHost(), path: '/assist', port: location.protocol === 'http:' && appOptions.__DISABLE_SECURE_MODE ? 
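+        /* plain-HTTP port only makes sense for explicitly insecure local development */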
80 : 443,
     });
-    console.log('OpenReplay tracker-assist peerID:', peerID)
-    peer.on('error', e => console.log("OpenReplay tracker-assist peer error: ", e.type, e))
-    peer.on('connection', function(conn) {
+    log('Peer created: ', peer)
+    peer.on('error', e => warn("Peer error: ", e.type, e))
+    peer.on('connection', function(conn) {
       window.addEventListener("beforeunload", () => conn.open && conn.send("unload"));
+      log('Connecting...')
 
-      console.log('OpenReplay tracker-assist: Connecting...')
       conn.on('open', function() {
-
-        console.log('OpenReplay tracker-assist: connection opened.')
-
-        // TODO: onClose
-        const buffer: Message[][] = [];
-        let buffering = false;
-        function sendNext() {
-          if (buffer.length) {
-            setTimeout(() => {
-              conn.send(buffer.shift());
-              sendNext();
-            }, 50);
-          } else {
-            buffering = false;
-          }
-        }
-
+        log('Connection opened.')
         assistDemandedRestart = true;
         app.stop();
-        //@ts-ignore (should update tracker dependency)
-        app.addCommitCallback((messages: Array<Message>): void => {
-          if (!conn.open) { return; } // TODO: clear commit callbacks on connection close
-          let i = 0;
-          while (i < messages.length) {
-            buffer.push(messages.slice(i, i+=1000));
-          }
-          if (!buffering) {
-            buffering = true;
-            sendNext();
-          }
-        });
-        app.start().then(() => { assistDemandedRestart = false; });
+        openDataConnections[conn.peer] = new BufferingConnection(conn)
+        conn.on('close', () => {
+          log("Connection close: ", conn.peer)
+          delete openDataConnections[conn.peer] // TODO: check if works properly
+        })
+        app.start().then(() => { assistDemandedRestart = false })
       });
     });
 
     let callingState: CallingState = CallingState.False;
     peer.on('call', function(call) {
+      log("Call: ", call)
       if (!peer) { return; }
-      const dataConn: DataConnection | undefined = peer
-        .connections[call.peer].find(c => c.type === 'data');
-      if (callingState !== CallingState.False || !dataConn) {
+      const dataConn: DataConnection | undefined =
+        openDataConnections[call.peer]?.conn;
+      if (callingState !== CallingState.False || !dataConn || !dataConn.open) {
         call.close();
         return;
       }
@@ -131,7 +123,7 @@
       confirmAnswer = confirm.mount();
       dataConn.on('data', (data) => { // if call closed by a caller before confirm
         if (data === "call_end") {
-          //console.log('OpenReplay tracker-assist: receiving callend onconfirm')
+          log("Received call_end during confirm opened")
           setCallingState(CallingState.False);
           confirm.remove();
         }
@@ -157,7 +149,7 @@
         setCallingState(CallingState.False);
       }
       const initiateCallEnd = () => {
-        //console.log("callend initiated")
+        log("initiateCallEnd")
         call.close()
         notifyCallEnd();
         onCallEnd();
@@ -204,12 +196,12 @@
       });
       dataConn.on('data', (data: any) => {
         if (data === "call_end") {
-          //console.log('receiving callend on call')
+          log('Received call_end during call')
           onCallEnd();
           return;
         }
         if (data && typeof data.name === 'string') {
-          //console.log("name",data)
+          log('Received name: ', data.name)
          callUI.setAssistentName(data.name);
         }
         if (data && typeof data.x === 'number' && typeof data.y === 'number') {
@@ -223,7 +215,7 @@
         .catch(_ => { // TODO retry only if specific error
           navigator.mediaDevices.getUserMedia({audio:true}) // in case there is no camera on device
             .then(onCallConnect)
-            .catch(e => console.log("OpenReplay tracker-assist: cant reach media devices. ", e));
+            .catch(e => warn("Can't reach media devices. 
", e)); }); }).catch(); // in case of Confirm.remove() without any confirmation }); From b4fdea6ddd7d2141f404ae8d66f2d609a5e85f4b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 5 Oct 2021 20:12:16 +0200 Subject: [PATCH 036/218] feat(api): insights journey --- .../chalicelib/blueprints/subs/bp_insights.py | 163 +++ ee/api/chalicelib/core/insights.py | 943 ++++++++++++++++++ ee/api/chalicelib/utils/SAML2_helper.py | 2 +- 3 files changed, 1107 insertions(+), 1 deletion(-) create mode 100644 ee/api/chalicelib/blueprints/subs/bp_insights.py create mode 100644 ee/api/chalicelib/core/insights.py diff --git a/ee/api/chalicelib/blueprints/subs/bp_insights.py b/ee/api/chalicelib/blueprints/subs/bp_insights.py new file mode 100644 index 000000000..6546bfd12 --- /dev/null +++ b/ee/api/chalicelib/blueprints/subs/bp_insights.py @@ -0,0 +1,163 @@ +from chalice import Blueprint +from chalicelib.utils import helper +from chalicelib import _overrides + +from chalicelib.core import dashboard, insights +from chalicelib.core import metadata + +app = Blueprint(__name__) +_overrides.chalice_app(app) + + +@app.route('/{projectId}/insights/journey', methods=['GET', 'POST']) +def get_insights_journey(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.journey(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/users_acquisition', methods=['GET', 'POST']) +def get_users_acquisition(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.users_acquisition(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) +def get_users_retention(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.users_retention(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) +def get_feature_rentention(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_retention(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST']) +def get_feature_acquisition(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_acquisition(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST']) +def get_feature_popularity_frequency(projectId, context): + data = app.current_request.json_body + if data is None: + data = {} + params = app.current_request.query_params + args = dashboard.dashboard_args(params) + + return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})} + + +@app.route('/{projectId}/insights/feature_intensity', methods=['GET', 'POST']) +def get_feature_intensity(projectId, context): + data = 
app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.feature_intensity(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/feature_adoption', methods=['GET', 'POST'])
+def get_feature_adoption(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/feature_adoption_top_users', methods=['GET', 'POST'])
+def get_feature_adoption_top_users(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.feature_adoption_top_users(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/feature_adoption_daily_usage', methods=['GET', 'POST'])
+def get_feature_adoption_daily_usage(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.feature_adoption_daily_usage(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/users_active', methods=['GET', 'POST'])
+def get_users_active(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.users_active(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/users_power', methods=['GET', 'POST'])
+def get_users_power(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.users_power(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/users_slipping', methods=['GET', 'POST'])
+def get_users_slipping(projectId, context):
+    data = app.current_request.json_body
+    if data is None:
+        data = {}
+    params = app.current_request.query_params
+    args = dashboard.dashboard_args(params)
+
+    return {"data": insights.users_slipping(project_id=projectId, **{**data, **args})}
+
+
+@app.route('/{projectId}/insights/search', methods=['GET'])
+def get_insights_autocomplete(projectId, context):
+    params = app.current_request.query_params
+    if params is None or params.get('q') is None or len(params.get('q')) == 0:
+        return {"data": []}
+    # params['q'] = '^' + params['q']
+
+    return {'data': insights.search(params.get('q', ''), project_id=projectId,
+                                    platform=params.get('platform', None), feature_type=params.get("key"))}
diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py
new file mode 100644
index 000000000..f8277b84c
--- /dev/null
+++ b/ee/api/chalicelib/core/insights.py
@@ -0,0 +1,943 @@
+from chalicelib.core import sessions_metas
+from chalicelib.utils import helper, dev
+from chalicelib.utils import pg_client
+from chalicelib.utils import ch_client
+from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.utils.metrics_helper import __get_step_size
+import math
+from chalicelib.core.dashboard import __get_constraint_values
+from chalicelib.core.dashboard import 
__get_basic_constraints, __get_meta_constraint + + +def __transform_journey(rows): + nodes = [] + links = [] + for r in rows: + source = r["source_event"][r["source_event"].index("_") + 1:] + target = r["target_event"][r["target_event"].index("_") + 1:] + if source not in nodes: + nodes.append(source) + if target not in nodes: + nodes.append(target) + links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) + return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} + + +JOURNEY_DEPTH = 5 +JOURNEY_TYPES = { + "PAGES": {"table": "pages", "column": "url_path"}, + "CLICK": {"table": "clicks", "column": "label"}, + # "VIEW": {"table": "events_ios.views", "column": "name"}, TODO: enable this for SAAS only + # "EVENT": {"table": "events_common.customs", "column": "name"} +} + + +@dev.timed +def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): + event_start = None + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + meta_condition = [] + for f in filters: + if f["type"] == "START_POINT": + event_start = f["value"] + elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + ch_sub_query = __get_basic_constraints(table_name=event_table, data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT source_event, + target_event, + MAX(target_id) max_target_id, + MAX(source_id) max_source_id, + count(*) AS value + FROM (SELECT toString(event_number) || '_' || value as target_event, + toString(session_rank) || '_' || toString(event_number) AS target_id, + lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event, lagInFrame(toString(session_rank) || '_' || toString(event_number)) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_id + FROM (SELECT session_rank, + datetime, + value, + row_number AS event_number + FROM (SELECT session_rank, + groupArray(datetime) AS arr_datetime, + groupArray(value) AS arr_value, + arrayEnumerate(arr_datetime) AS row_number + {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY datetime ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN datetime ELSE NULL END as mark" if event_start else ""} + FROM (SELECT session_id, + datetime, + value, + SUM(new_session) OVER (ORDER BY session_id, datetime) AS session_rank + FROM (SELECT *, + if(equals(source_timestamp, '1970-01-01'), 1, 0) AS new_session + FROM (SELECT session_id, + datetime, + {event_column} AS value, + lagInFrame(datetime) OVER (PARTITION BY session_id ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_timestamp + FROM {event_table} {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + ORDER BY session_id, datetime) AS related_events) AS ranked_events + 
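+                                     -- ranked_events: session_rank is a running sum of new_session flags, so it increases by one at each session boundary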
ORDER BY session_rank, datetime + ) AS processed + {") AS marked) AS maxed WHERE datetime >= max) AS filtered" if event_start else ""} + GROUP BY session_rank + ORDER BY session_rank) + ARRAY JOIN + arr_datetime AS datetime, + arr_value AS value, + row_number + ORDER BY session_rank ASC, + row_number ASC) AS sorted_events + WHERE event_number <= %(JOURNEY_DEPTH)s) AS final + WHERE not empty(source_event) + AND not empty(target_event) + GROUP BY source_event, target_event + ORDER BY value DESC + LIMIT 20;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, + **__get_constraint_values(args), **extra_values} + + rows = ch.execute(query=ch_query, params=params) + # print(ch_query % params) + return __transform_journey(rows) + + +def __compute_weekly_percentage(rows): + if rows is None or len(rows) == 0: + return rows + t = -1 + for r in rows: + if r["week"] == 0: + t = r["usersCount"] + r["percentage"] = r["usersCount"] / t + return rows + + +def __complete_retention(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if i < len(rows) \ + and i != rows[i]["week"]: + rows.insert(i, neutral) + elif i >= len(rows): + rows.append(neutral) + return rows + + +def __complete_acquisition(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + week = 0 + delta_date = 0 + while max_week > 0: + start_date += TimeUTC.MS_WEEK + if end_date is not None and start_date >= end_date: + break + delta = 0 + if delta_date + week >= len(rows) \ + or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + rows.insert(delta_date + week + i, neutral) + delta = i + else: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if delta_date + week + i < len(rows) \ + and i != rows[delta_date + week + i]["week"]: + rows.insert(delta_date + week + i, neutral) + elif delta_date + week + i >= len(rows): + rows.append(neutral) + delta = i + week += delta + max_week -= 1 + delta_date += 1 + return rows + + +@dev.timed +def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, + COUNT(DISTINCT 
connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts < %(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <=sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id + ) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = 1 + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def 
feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <= sessions.start_ts + AND sessions.project_id = 1 + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + 
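+        # Cohort query above: the NOT EXISTS sub-select drops users who had
+        # already used this feature value before %(startTimestamp)s, so only
+        # first-week adopters remain; the LEFT JOIN LATERAL then gathers every
+        # week in which those users came back to it. mogrify() renders the
+        # bound SQL for the log line above; execute() runs that same query.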
cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week + FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS raw_users_list) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING(session_id) + WHERE users_list.user_id = sessions.user_id + AND 
first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return [] + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL + GROUP BY value + ORDER BY count DESC + LIMIT 7;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + popularity = cur.fetchall() + pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + frequencies = cur.fetchall() + total_usage = sum([f["count"] for f in frequencies]) + frequencies = 
{f["value"]: f["count"] for f in frequencies} + for p in popularity: + p["popularity"] = p.pop("count") / all_user_count + p["frequency"] = frequencies[p["value"]] / total_usage + + return popularity + + +@dev.timed +def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + adoption = cur.fetchone()["count"] / all_user_count + return {"target": all_user_count, "adoption": adoption, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + 
time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1 + ORDER BY 2 DESC + LIMIT 10;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used 
value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND length({event_column})>2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(session_id), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT session_id + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + pg_sub_query.append(f"length({event_column})>2") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value + ORDER BY avg DESC + LIMIT 7;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # TODO: solve full scan issue + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return rows + + +@dev.timed +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + pg_sub_query_chart.append("user_id IS NOT NULL") + 
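+    # Active-users chart: generate_series emits one row per period between
+    # start and end, and the LEFT JOIN LATERAL below counts distinct user_ids
+    # inside each bucket, so periods with no activity still appear as 0.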
period = "DAY" + extra_values = {} + for f in filters: + if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: + period = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(users), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT user_id + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp) AS chart;""" + params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, + "project_id": project_id, + "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( + startTimestamp), + "endTimestamp": endTimestamp, **__get_constraint_values(args), + **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return row_users + + +@dev.timed +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) + pg_sub_query.append("user_id IS NOT NULL") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition + FROM (SELECT number_of_days, COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days) AS day_users_partition;""" + params = {"project_id": project_id, + "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return helper.dict_to_camel_case(row_users) + + +@dev.timed +def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used 
value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen + FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY user_id) AS user_last_usage + INNER JOIN sessions USING (user_id) + WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000 + GROUP BY user_id, last_time,interactions_count;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "list": helper.list_to_camel_case(rows) + } + + +@dev.timed +def search(text, feature_type, project_id, platform=None): + if not feature_type: + resource_type = "ALL" + data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) + return data + + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True, + data={} if platform is None else {"platform": platform}) + + params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, + "endTimestamp": TimeUTC.now(), + "project_id": project_id, + "value": helper.string_to_sql_like(text.lower()), + "platform_0": platform} + if feature_type == "ALL": + with pg_client.PostgresClient() as cur: + sub_queries = [] + for e in JOURNEY_TYPES: + sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" + FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s + LIMIT 10)""") + pg_query = "UNION ALL".join(sub_queries) + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + elif JOURNEY_TYPES.get(feature_type) is not None: + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s + LIMIT 10;""" + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + else: + return [] + return [helper.dict_to_camel_case(row) for row in rows] diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index af4612005..e3fc31f02 100644 --- 
a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -26,7 +26,7 @@ SAML2 = { } idp = None # SAML2 config handler -if len(environ.get("SAML2_MD_URL")) > 0: +if environ.get("SAML2_MD_URL") is not None and len(environ["SAML2_MD_URL"]) > 0: print("SAML2_MD_URL provided, getting IdP metadata config") from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser From 727336f7c589b55165bd37eda0f5c21509ac1b67 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 6 Oct 2021 13:28:38 +0200 Subject: [PATCH 037/218] feat(api): insights journey remove source_id&target_id --- api/chalicelib/core/insights.py | 6 +----- ee/api/chalicelib/core/insights.py | 10 +++------- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index df9b5c5b7..13faf2111 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -52,14 +52,10 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= with pg_client.PostgresClient() as cur: pg_query = f"""SELECT source_event, target_event, - MAX(target_id) max_target_id, - MAX(source_id) max_source_id, count(*) AS value FROM (SELECT event_number || '_' || value as target_event, - message_id AS target_id, - LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event, - LAG(message_id, 1) OVER ( PARTITION BY session_rank ) AS source_id + LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event FROM (SELECT value, session_rank, message_id, diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index f8277b84c..8a16aa98a 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -54,14 +54,10 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT source_event, target_event, - MAX(target_id) max_target_id, - MAX(source_id) max_source_id, count(*) AS value - FROM (SELECT toString(event_number) || '_' || value as target_event, - toString(session_rank) || '_' || toString(event_number) AS target_id, - lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event, lagInFrame(toString(session_rank) || '_' || toString(event_number)) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_id + FROM (SELECT toString(event_number) || '_' || value AS target_event, + lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event FROM (SELECT session_rank, datetime, value, From a9aca8533d68bd38b40b9d996dc0593dde3088da Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 6 Oct 2021 19:20:35 +0200 Subject: [PATCH 038/218] feat(api): insights users retention graph --- ee/api/chalicelib/core/insights.py | 67 +++++++++++++++--------------- 1 file changed, 34 insertions(+), 33 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 8a16aa98a..bc23d7190 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -195,42 +195,43 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT 
**args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + ch_sub_query = __get_basic_constraints(table_name='sessions', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + ch_sub_query.append("sessions.duration>0") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT toInt8((start_of_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users FROM (SELECT DISTINCT user_id - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess - WHERE bsess.start_ts < %(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - LIMIT 1)) - GROUP BY user_id) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions - WHERE users_list.user_id = sessions.user_id - AND %(startTimestamp)s <=sessions.start_ts - AND sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - GROUP BY connexion_week, user_id - ) AS connexions_list ON (TRUE) - GROUP BY week - ORDER BY week;""" - + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + ) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS start_of_week + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + ORDER BY start_of_week, user_id + ) AS all_connexions USING (user_id) + GROUP BY start_of_week + ORDER BY start_of_week;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + # print(ch_query % params) + rows = 
ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { "startTimestamp": startTimestamp, From 4a2e7f2b10ed4fc3692420766a868d7b51348b76 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 6 Oct 2021 19:57:26 +0200 Subject: [PATCH 039/218] feat(api): FOS insights changed queries --- api/chalicelib/core/insights.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 13faf2111..6b8b7b72f 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -206,7 +206,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT AND project_id = %(project_id)s AND bsess.user_id = sessions.user_id LIMIT 1)) - GROUP BY user_id) AS users_list + ) AS users_list LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, user_id FROM sessions @@ -245,7 +245,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, COUNT(DISTINCT connexions_list.user_id) AS users_count, ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week FROM sessions WHERE {" AND ".join(pg_sub_query)} AND NOT EXISTS((SELECT 1 @@ -261,7 +261,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en WHERE users_list.user_id = sessions.user_id AND first_connexion_week <= DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = 1 + AND sessions.project_id = %(project_id)s AND sessions.start_ts < (%(endTimestamp)s - 1) GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) GROUP BY first_connexion_week, week @@ -347,7 +347,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en FROM sessions INNER JOIN {event_table} AS feature USING (session_id) WHERE users_list.user_id = sessions.user_id AND %(startTimestamp)s <= sessions.start_ts - AND sessions.project_id = 1 + AND sessions.project_id = %(project_id)s AND sessions.start_ts < (%(endTimestamp)s - 1) AND feature.timestamp >= %(startTimestamp)s AND feature.timestamp < %(endTimestamp)s From ef1db21c13462807fb97a8d49f3f45b8c5b846d2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 6 Oct 2021 20:37:34 +0200 Subject: [PATCH 040/218] feat(api): insights users acquisition cohort --- ee/api/chalicelib/core/insights.py | 82 +++++++++++++++++------------- 1 file changed, 46 insertions(+), 36 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index bc23d7190..15f56a6fc 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -203,7 +203,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT ch_sub_query.append("sessions.duration>0") ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toInt8((start_of_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, COUNT(DISTINCT all_connexions.user_id) AS 
users_count, groupArray(100)(all_connexions.user_id) AS connected_users FROM (SELECT DISTINCT user_id @@ -220,14 +220,14 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT AND bmsess.user_id = sessions_metadata.user_id LIMIT 1)) ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS start_of_week + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week FROM sessions_metadata INNER JOIN sessions USING (session_id) WHERE {" AND ".join(ch_sub_query)} AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) - ORDER BY start_of_week, user_id + ORDER BY connexion_week, user_id ) AS all_connexions USING (user_id) - GROUP BY start_of_week - ORDER BY start_of_week;""" + GROUP BY connexion_week + ORDER BY connexion_week;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} # print(ch_query % params) @@ -245,41 +245,51 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, - FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess - WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id + ch_sub_query = __get_basic_constraints(table_name='sessions', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + ch_sub_query.append("sessions.duration>0") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, + week, + users_count, + connected_users + FROM ( + SELECT first_connexion_week, + toInt8((connexion_week - first_connexion_week) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT user_id, MIN(toStartOfWeek(sessions.datetime, 1)) AS first_connexion_week + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id LIMIT 1)) 
- GROUP BY user_id) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions - WHERE users_list.user_id = sessions.user_id - AND first_connexion_week <= - DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = 1 - AND sessions.start_ts < (%(endTimestamp)s - 1) - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week;""" + GROUP BY user_id) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + ORDER BY connexion_week, user_id + ) AS all_connexions USING (user_id) + WHERE first_connexion_week <= connexion_week + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week + ) AS full_data;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + # print(ch_query%params) + rows =ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { "startTimestamp": startTimestamp, From e7a942ee64988120da8015dd187bcd6edd0ffbdb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 7 Oct 2021 16:51:49 +0200 Subject: [PATCH 041/218] feat(api): insights feature retention graph --- ee/api/chalicelib/core/insights.py | 99 ++++++++++++++---------------- 1 file changed, 47 insertions(+), 52 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 15f56a6fc..00c8e71fb 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -289,7 +289,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} # print(ch_query%params) - rows =ch.execute(ch_query, params) + rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { "startTimestamp": startTimestamp, @@ -303,11 +303,12 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") event_type = "PAGES" event_value = "/" extra_values = {} @@ -319,66 +320,60 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, 
sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - pg_sub_query.append(f"feature.{event_column} = %(value)s") - with pg_client.PostgresClient() as cur: + with ch_client.ClickHouseClient() as ch: if default: # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] + # print(ch_query% params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users FROM (SELECT DISTINCT user_id - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) - WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - AND bfeature.timestamp<%(startTimestamp)s - AND bfeature.{event_column}=%(value)s - LIMIT 1)) - GROUP BY user_id) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE users_list.user_id = sessions.user_id - AND %(startTimestamp)s <= sessions.start_ts - AND sessions.project_id = 1 - AND sessions.start_ts < (%(endTimestamp)s - 1) - AND feature.timestamp >= %(startTimestamp)s - AND feature.timestamp < %(endTimestamp)s - AND feature.{event_column} = %(value)s - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY week - ORDER BY week;""" + FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND 
isNull((SELECT 1 + FROM sessions_metadata AS bmsess INNER JOIN {event_table} AS bsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + AND bsess.{event_column}=%(value)s + LIMIT 1)) + ) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week + FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + ORDER BY connexion_week, user_id + ) AS all_connexions USING (user_id) + GROUP BY connexion_week + ORDER BY connexion_week;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + print(ch_query % params) + rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { "startTimestamp": startTimestamp, From 9bc6251db22c2cb81a0228ff31efa8d4080a9989 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 7 Oct 2021 17:08:24 +0200 Subject: [PATCH 042/218] feat(api): insights feature acquisition cohort --- ee/api/chalicelib/core/insights.py | 138 +++++++++++++---------------- 1 file changed, 64 insertions(+), 74 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 00c8e71fb..6ff8b9481 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -241,8 +241,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT @dev.timed def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK ch_sub_query = __get_basic_constraints(table_name='sessions', data=args) @@ -299,8 +298,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en @dev.timed def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK ch_sub_query = __get_basic_constraints(table_name='feature', data=args) @@ -384,15 +382,16 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en @dev.timed def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not 
empty(user_id)") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + event_type = "PAGES" event_value = "/" extra_values = {} @@ -404,69 +403,65 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - - pg_sub_query.append(f"feature.{event_column} = %(value)s") - - with pg_client.PostgresClient() as cur: + with ch_client.ClickHouseClient() as ch: if default: # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] + # print(ch_query% params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, - FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week - FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) - WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - AND bfeature.timestamp<%(startTimestamp)s - AND bfeature.{event_column}=%(value)s - LIMIT 1)) - GROUP BY user_id) AS raw_users_list) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions INNER JOIN {event_table} AS feature USING(session_id) - WHERE users_list.user_id = sessions.user_id - AND first_connexion_week <= - DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - AND feature.timestamp >= %(startTimestamp)s - AND feature.timestamp < %(endTimestamp)s - AND feature.{event_column} = %(value)s - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week;""" + 
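+        # With no explicit EVENT_VALUE filter, the most used value found just
+        # above becomes the tracked feature; the condition appended below pins
+        # every cohort sub-query of the acquisition query to that one value.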
ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, + week, + users_count, + connected_users + FROM ( + SELECT first_connexion_week, + toInt8((connexion_week - first_connexion_week) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week + FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week + FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + ORDER BY connexion_week, user_id + ) AS all_connexions USING (user_id) + WHERE first_connexion_week <= connexion_week + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week + ) AS full_data;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + print(ch_query % params) + rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { "startTimestamp": startTimestamp, @@ -477,8 +472,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), @dev.timed def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -542,8 +536,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da @dev.timed def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): + filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) event_type = "CLICK" @@ -736,8 +729,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da @dev.timed -def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) @@ -773,8 +765,7 @@ def feature_intensity(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), en @dev.timed -def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args) @@ -815,8 +806,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime @dev.timed -def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) pg_sub_query.append("user_id IS NOT NULL") @@ -840,8 +830,8 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes @dev.timed -def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): +def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) pg_sub_query.append("user_id IS NOT NULL") From fbe359a4bf102dd8bdfb5bff84d78b3e9f122786 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 7 Oct 2021 17:30:33 +0200 Subject: [PATCH 043/218] feat(api): insights feature popularity and frequency --- ee/api/chalicelib/core/insights.py | 65 +++++++++++++++--------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 6ff8b9481..a96ccd70d 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -3,8 +3,6 @@ from chalicelib.utils import helper, dev from chalicelib.utils import pg_client from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.metrics_helper import __get_step_size -import math from chalicelib.core.dashboard import __get_constraint_values from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint @@ -475,8 +473,12 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + event_table = JOURNEY_TYPES["CLICK"]["table"] event_column = JOURNEY_TYPES["CLICK"]["column"] extra_values = {} @@ -485,46 +487,45 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + 
ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND user_id IS NOT NULL + AND not empty(user_id);""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - all_user_count = cur.fetchone()["count"] - if all_user_count == 0: + print(ch_query % params) + print("---------------------") + all_user_count = ch.execute(ch_query, params) + if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: return [] - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} + all_user_count = all_user_count[0]["count"] + ch_sub_query.append(f"length({event_column})>2") + ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} AND user_id IS NOT NULL + AND not empty(user_id) GROUP BY value ORDER BY count DESC LIMIT 7;""" - # TODO: solve full scan - print(cur.mogrify(pg_query, params)) + + print(ch_query % params) print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - popularity = cur.fetchall() - pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} + popularity = ch.execute(ch_query, params) + + ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} GROUP BY value;""" - # TODO: solve full scan - print(cur.mogrify(pg_query, params)) + + print(ch_query % params) print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - frequencies = cur.fetchall() + frequencies = ch.execute(ch_query, params) total_usage = sum([f["count"] for f in frequencies]) frequencies = {f["value"]: f["count"] for f in frequencies} for p in popularity: From d6a18940c939ac2b6607a28719d4e6bd0e270c90 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 7 Oct 2021 18:41:48 +0200 Subject: [PATCH 044/218] feat(api): insights feature adoption daily usage chart --- api/chalicelib/core/insights.py | 2 +- ee/api/chalicelib/core/insights.py | 72 ++++++++++++++---------------- 2 files changed, 34 insertions(+), 40 deletions(-) diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 6b8b7b72f..af4a48c09 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -718,7 +718,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da print("---------------------") 
cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() - return {"users": helper.list_to_camel_case(rows), + return {"chart": helper.list_to_camel_case(rows), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index a96ccd70d..7f22e24fd 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -3,7 +3,7 @@ from chalicelib.utils import helper, dev from chalicelib.utils import pg_client from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.core.dashboard import __get_constraint_values +from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint @@ -667,14 +667,11 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days @dev.timed def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, - chart=True, data=args) event_type = "CLICK" event_value = '/' extra_values = {} default = True + meta_condition = [] for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] @@ -682,50 +679,47 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.datetime >= %(startTimestamp)s") + meta_condition.append("sessions_metadata.datetime < %(endTimestamp)s") + extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: if default: # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND length({event_column})>2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row 
= cur.fetchone() - if row is not None: - event_value = row["value"] + # print(ch_query% params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(COUNT(session_id), 0) AS count - FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT DISTINCT session_id - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS users ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""" + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count + FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY day + ORDER BY day) AS raw_results;""" params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - return {"users": helper.list_to_camel_case(rows), + # print(ch_query % params) + rows = ch.execute(ch_query, params) + return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, + density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, + neutral={"count": 0}), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} From 567ee2323337a132fda6d36902d4165b75f17c6c Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Fri, 8 Oct 2021 00:40:02 +0530 Subject: [PATCH 045/218] chore(variable): define override variables for s3,pg,kafka etc --- .../roles/openreplay/templates/alerts.yaml | 1 + .../roles/openreplay/templates/assets.yaml | 5 +++++ .../roles/openreplay/templates/chalice.yaml | 16 ++++++++++++++ .../helm/roles/openreplay/templates/db.yaml | 4 ++++ .../roles/openreplay/templates/ender.yaml | 3 +++ .../helm/roles/openreplay/templates/http.yaml | 5 +++++ .../openreplay/templates/integrations.yaml | 5 +++++ .../helm/roles/openreplay/templates/sink.yaml | 3 +++ .../roles/openreplay/templates/storage.yaml | 6 ++++++ .../roles/openreplay/templates/utilities.yaml | 1 + scripts/helm/vars.yaml | 21 +++++++++++++++++++ scripts/helm/vars_template.yaml | 21 +++++++++++++++++++ 12 files changed, 91 insertions(+) diff --git a/scripts/helm/roles/openreplay/templates/alerts.yaml b/scripts/helm/roles/openreplay/templates/alerts.yaml index 09f33c8bd..b28a73a53 100644 --- a/scripts/helm/roles/openreplay/templates/alerts.yaml +++ b/scripts/helm/roles/openreplay/templates/alerts.yaml @@ -5,6 +5,7 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" + POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git 
a/scripts/helm/roles/openreplay/templates/assets.yaml b/scripts/helm/roles/openreplay/templates/assets.yaml
index 6383f4f2c..41f898260 100644
--- a/scripts/helm/roles/openreplay/templates/assets.yaml
+++ b/scripts/helm/roles/openreplay/templates/assets.yaml
@@ -7,6 +7,11 @@ env:
   AWS_ACCESS_KEY_ID: "{{ minio_access_key }}"
   AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  AWS_ENDPOINT: "{{ s3_endpoint }}"
+  AWS_REGION: "{{ aws_region }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/chalice.yaml b/scripts/helm/roles/openreplay/templates/chalice.yaml
index 3ab238e72..46de7488b 100644
--- a/scripts/helm/roles/openreplay/templates/chalice.yaml
+++ b/scripts/helm/roles/openreplay/templates/chalice.yaml
@@ -15,6 +15,22 @@ env:
   S3_HOST: "https://{{ domain_name }}"
   SITE_URL: "https://{{ domain_name }}"
   jwt_secret: "{{ jwt_secret_key }}"
+  pg_host: "{{ postgres_endpoint }}"
+  pg_port: "{{ postgres_port }}"
+  pg_dbname: "{{ postgres_db_name }}"
+  pg_user: "{{ postgres_db_user }}"
+  pg_password: "{{ postgres_db_password }}"
+  EMAIL_HOST: "{{ email_host }}"
+  EMAIL_PORT: "{{ email_port }}"
+  EMAIL_USER: "{{ email_user }}"
+  EMAIL_PASSWORD: "{{ email_password }}"
+  EMAIL_USE_TLS: "{{ email_use_tls }}"
+  EMAIL_USE_SSL: "{{ email_use_ssl }}"
+  EMAIL_SSL_KEY: "{{ email_ssl_key }}"
+  EMAIL_SSL_CERT: "{{ email_ssl_cert }}"
+  EMAIL_FROM: "{{ email_from }}"
+  AWS_DEFAULT_REGION: "{{ aws_default_region }}"
+  sessions_region: "{{ aws_default_region }}"
 {% if env is defined and env.chalice is defined and env.chalice%}
 {{ env.chalice | to_nice_yaml | trim | indent(2) }}
 {% endif %}
diff --git a/scripts/helm/roles/openreplay/templates/db.yaml b/scripts/helm/roles/openreplay/templates/db.yaml
index ab8609111..7456794c8 100644
--- a/scripts/helm/roles/openreplay/templates/db.yaml
+++ b/scripts/helm/roles/openreplay/templates/db.yaml
@@ -5,6 +5,10 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
 {% endif %}
diff --git a/scripts/helm/roles/openreplay/templates/ender.yaml b/scripts/helm/roles/openreplay/templates/ender.yaml
index 5749e4a52..b5d256b2d 100644
--- a/scripts/helm/roles/openreplay/templates/ender.yaml
+++ b/scripts/helm/roles/openreplay/templates/ender.yaml
@@ -5,6 +5,9 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/http.yaml b/scripts/helm/roles/openreplay/templates/http.yaml
index 6383f4f2c..da7b0979f 100644
--- a/scripts/helm/roles/openreplay/templates/http.yaml
+++ 
b/scripts/helm/roles/openreplay/templates/http.yaml
@@ -7,6 +7,11 @@ env:
   AWS_ACCESS_KEY_ID: "{{ minio_access_key }}"
   AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  AWS_REGION: "{{ aws_region }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/integrations.yaml b/scripts/helm/roles/openreplay/templates/integrations.yaml
index 5749e4a52..9cc8f8b76 100644
--- a/scripts/helm/roles/openreplay/templates/integrations.yaml
+++ b/scripts/helm/roles/openreplay/templates/integrations.yaml
@@ -5,6 +5,11 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  #
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/sink.yaml b/scripts/helm/roles/openreplay/templates/sink.yaml
index 5749e4a52..b5d256b2d 100644
--- a/scripts/helm/roles/openreplay/templates/sink.yaml
+++ b/scripts/helm/roles/openreplay/templates/sink.yaml
@@ -5,6 +5,9 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/storage.yaml b/scripts/helm/roles/openreplay/templates/storage.yaml
index 6383f4f2c..6a70f3a4c 100644
--- a/scripts/helm/roles/openreplay/templates/storage.yaml
+++ b/scripts/helm/roles/openreplay/templates/storage.yaml
@@ -7,6 +7,12 @@ env:
   AWS_ACCESS_KEY_ID: "{{ minio_access_key }}"
   AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
   LICENSE_KEY: "{{ enterprise_edition_license }}"
+  AWS_ENDPOINT: "{{ s3_endpoint }}"
+  AWS_REGION_WEB: "{{ aws_region }}"
+  AWS_REGION_IOS: "{{ aws_region }}"
+  REDIS_STRING: "{{ redis_endpoint }}"
+  KAFKA_SERVERS: "{{ kafka_endpoint }}"
+  KAFKA_USE_SSL: "{{ kafka_ssl }}"
 
 {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
 imagePullSecrets: []
diff --git a/scripts/helm/roles/openreplay/templates/utilities.yaml b/scripts/helm/roles/openreplay/templates/utilities.yaml
index 3ae1efca8..fb7eb0ae0 100644
--- a/scripts/helm/roles/openreplay/templates/utilities.yaml
+++ b/scripts/helm/roles/openreplay/templates/utilities.yaml
@@ -12,6 +12,7 @@ env:
   S3_SECRET: "{{ minio_secret_key }}"
   S3_HOST: "https://{{ domain_name }}"
   jwt_secret: "{{ jwt_secret_key }}"
+  AWS_DEFAULT_REGION: "{{ aws_region }}"
 {% if env is defined and env.chalice is defined and env.chalice%}
 {{ env.chalice | to_nice_yaml | trim | indent(2) }}
 {% endif %}
diff --git a/scripts/helm/vars.yaml b/scripts/helm/vars.yaml
index 710fbd496..ca0037b27 100644 --- a/scripts/helm/vars.yaml +++ b/scripts/helm/vars.yaml @@ -86,3 +86,24 @@ db_resource_override: # memory: 256Mi redis: {} clickhouse: {} + +## Sane defaults +s3_endpoint: "http://minio.db.svc.cluster.local:9000" +aws_region: "us-east-1" +kafka_endpoint: kafka.db.svc.cluster.local:9042 +kafka_ssl: false +postgres_endpoint: postgresql.db.svc.cluster.local +postgres_port: 5432 +postgres_db_name: postgres +postgres_db_user: postgres +postgres_db_password: asayerPostgres +redis_endpoint: redis-master.db.svc.cluster.local:6379 +email_host: '' +email_port: '587' +email_user: '' +email_password: '' +email_use_tls: 'true' +email_use_ssl: 'false' +email_ssl_key: '' +email_ssl_cert: '' +email_from: OpenReplay diff --git a/scripts/helm/vars_template.yaml b/scripts/helm/vars_template.yaml index 6d92b8a66..766ed6a02 100644 --- a/scripts/helm/vars_template.yaml +++ b/scripts/helm/vars_template.yaml @@ -86,3 +86,24 @@ db_resource_override: # memory: 256Mi redis: {{ db_resource_override.redis|default({}) }} clickhouse: {{ db_resource_override.clickhouse|default({}) }} + +## Sane defaults +s3_endpoint: "{{ s3_endpoint }}" +aws_region: "{{ aws_region }}" +kafka_endpoint: "{{ kafka_endpoint }}" +kafka_ssl: "{{ kafka_ssl }}" +postgres_endpoint: "{{ postgres_endpoint }}" +postgres_port: "{{ postgres_port }}" +postgres_db_name: "{{ postgres_db_name }}" +postgres_db_user: "{{ postgres_db_user }}" +postgres_db_password: "{{ postgres_db_password }}" +redis_endpoint: "{{ redis_endpoint }}" +email_host: "{{ email_host }}" +email_port: "{{ email_port }}" +email_user: "{{ email_user }}" +email_password: "{{ email_password }}" +email_use_tls: "{{ email_use_tls }}" +email_use_ssl: "{{ email_use_ssl }}" +email_ssl_key: "{{ email_ssl_key }}" +email_ssl_cert: "{{ email_ssl_cert }}" +email_from: "{{ email_from }}" From cfb87014f858ff5ff04ba25cd1ab6d41ec203942 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 16:38:56 +0200 Subject: [PATCH 046/218] feat(api): insights users active chart --- ee/api/chalicelib/core/insights.py | 61 ++++++++++++++++++------------ 1 file changed, 37 insertions(+), 24 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 7f22e24fd..f21ee23fa 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -759,45 +759,58 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en return rows +PERIOD_TO_FUNCTION = { + "DAY": "toStartOfDay", + "WEEK": "toStartOfWeek" +} + + @dev.timed def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, - chart=True, data=args) - - pg_sub_query_chart.append("user_id IS NOT NULL") + meta_condition = [] period = "DAY" extra_values = {} for f in filters: if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: period = f["value"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart - FROM (SELECT generated_timestamp AS timestamp, - COALESCE(COUNT(users), 0) AS count - FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS 
generated_timestamp - LEFT JOIN LATERAL ( SELECT DISTINCT user_id - FROM public.sessions - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS users ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp) AS chart;""" + period_function = PERIOD_TO_FUNCTION[period] + ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg + FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY period) AS daily_users;""" params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, "project_id": project_id, "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( - startTimestamp), - "endTimestamp": endTimestamp, **__get_constraint_values(args), + startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - row_users = cur.fetchone() + avg = ch.execute(ch_query, params) + if len(avg) == 0 or avg[0]["avg"] == 0: + return {"avg": 0, "chart": []} + avg=avg[0]["avg"] + ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count + FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY period + ORDER BY period) AS raw_results""" - return row_users + # print(pg_query%params) + # print("---------------------") + rows = ch.execute(ch_query, params) + return {"avg": avg, "chart": rows} @dev.timed From a174348a37fd230219d3b98ef301af8645565c12 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 17:14:33 +0200 Subject: [PATCH 047/218] feat(api): insights power users active bar chart --- ee/api/chalicelib/core/insights.py | 48 +++++++++++++++++++----------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index f21ee23fa..c17c5091f 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -799,7 +799,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime avg = ch.execute(ch_query, params) if len(avg) == 0 or avg[0]["avg"] == 0: return {"avg": 0, "chart": []} - avg=avg[0]["avg"] + avg = avg[0]["avg"] ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count FROM sessions_metadata INNER JOIN sessions USING (session_id) @@ -815,26 +815,40 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime @dev.timed def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = 
__get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) - pg_sub_query.append("user_id IS NOT NULL") + ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition - FROM (SELECT number_of_days, COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - GROUP BY 1) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days) AS day_users_partition;""" + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT AVG(count) AS avg + FROM(SELECT COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY user_id) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days) AS results;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - row_users = cur.fetchone() + avg = ch.execute(ch_query, params) + if len(avg) == 0 or avg[0]["avg"] == 0: + return {"avg": 0, "partition": []} + avg = avg[0]["avg"] + ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY user_id) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days;""" - return helper.dict_to_camel_case(row_users) + # print(ch_query%params) + # print("---------------------") + rows = ch.execute(ch_query, params) + + return {"avg": avg, "partition": helper.list_to_camel_case(rows)} @dev.timed From 7e07e763ff9ac06b5de8e32f32c59a610ef14f3b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 18:09:05 +0200 Subject: [PATCH 048/218] feat(api): insights slipping users list --- ee/api/chalicelib/core/insights.py | 61 +++++++++++++++--------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index c17c5091f..b41c6da76 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -854,15 +854,11 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes @dev.timed def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") event_type = "PAGES" event_value = "/" extra_values = {} default = True + meta_condition = [] for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] @@ -870,45 +866,50 @@ def 
users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
             event_value = f["value"]
             default = False
         elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
+            meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
             extra_values["user_id"] = f["value"]
 
     event_table = JOURNEY_TYPES[event_type]["table"]
     event_column = JOURNEY_TYPES[event_type]["column"]
-    pg_sub_query.append(f"feature.{event_column} = %(value)s")
-    with pg_client.PostgresClient() as cur:
+    ch_sub_query = __get_basic_constraints(table_name="feature", data=args)
+    meta_condition += __get_meta_constraint(args)
+    ch_sub_query += meta_condition
+    ch_sub_query.append("user_id IS NOT NULL")
+    ch_sub_query.append("not empty(user_id)")
+    ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
+    ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
+    with ch_client.ClickHouseClient() as ch:
         if default:
             # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                            FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                            WHERE {" AND ".join(pg_sub_query[:-1])}
-                              AND length({event_column}) > 2
+            ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
+                            FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
+                            WHERE {" AND ".join(ch_sub_query)}
                             GROUP BY value
                             ORDER BY count DESC
                             LIMIT 1;"""
             params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
                       **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
+            row = ch.execute(ch_query, params)
+            if len(row) > 0:
+                event_value = row[0]["value"]
         extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen
-                        FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
-                              FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                              WHERE {" AND ".join(pg_sub_query)}
-                              GROUP BY user_id) AS user_last_usage
-                                 INNER JOIN sessions USING (user_id)
-                        WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000
-                        GROUP BY user_id, last_time,interactions_count;"""
-
+        ch_sub_query.append(f"feature.{event_column} = %(value)s")
+        ch_query = f"""SELECT user_id,
+                              toUnixTimestamp(last_time)*1000 AS last_time,
+                              interactions_count,
+                              toUnixTimestamp(first_seen) * 1000 AS first_seen,
+                              toUnixTimestamp(last_seen) * 1000 AS last_seen
+                       FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen
+                             FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
+                                   FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
+                                   WHERE {" AND ".join(ch_sub_query)}
+                                   GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id)
+                             WHERE now() - last_time > 7 * 24 * 60 * 60
+                             GROUP BY user_id, last_time, interactions_count) AS raw_results;"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
                   **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        
rows = cur.fetchall() + # print(ch_query, params) + rows = ch.execute(ch_query, params) return { "startTimestamp": startTimestamp, "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], From 00c2c8d01d7768c4aa9c0e6387641c5249785cda Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 18:30:01 +0200 Subject: [PATCH 049/218] feat(api): insights feature intensity --- ee/api/chalicelib/core/insights.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index b41c6da76..0de31408e 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -726,35 +726,33 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da @dev.timed def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") event_table = JOURNEY_TYPES["CLICK"]["table"] event_column = JOURNEY_TYPES["CLICK"]["column"] extra_values = {} + meta_condition = [] for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] - pg_sub_query.append(f"length({event_column})>2") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY avg DESC LIMIT 7;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # TODO: solve full scan issue - print(cur.mogrify(pg_query, params)) - print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + # print(ch_query % params) + rows = ch.execute(ch_query, params) return rows From d82f2e6decc3c49aeb64034e2d623da01af25b93 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 18:44:34 +0200 Subject: [PATCH 050/218] feat(api): insights feature adoption --- ee/api/chalicelib/core/insights.py | 61 +++++++++++++++--------------- 1 file changed, 30 insertions(+), 31 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 
0de31408e..92d93bb27 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -538,12 +538,11 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da @dev.timed def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) event_type = "CLICK" event_value = '/' extra_values = {} default = True + meta_condition = [] for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] @@ -551,55 +550,55 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" + + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions_metadata INNER JOIN sessions AS feature USING(session_id) + WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} # print(cur.mogrify(pg_query, params)) # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - all_user_count = cur.fetchone()["count"] - if all_user_count == 0: + all_user_count = ch.execute(ch_query, params) + if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], } - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + all_user_count = all_user_count[0]["count"] if default: # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY count DESC LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] + row = ch.execute(ch_query, params) 
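+            # ch.execute returns a list of row dicts (an empty list when nothing
+            # matches), unlike cursor.fetchone(), which returned a dict or None;
+            # hence the len() check below instead of an "is not None" test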
+ if len(row) > 0: + event_value = row[0]["value"] extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) + # print(ch_query% params) # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - adoption = cur.fetchone()["count"] / all_user_count + adoption = ch.execute(ch_query, params) + adoption = adoption[0]["count"] / all_user_count return {"target": all_user_count, "adoption": adoption, "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} From 17cf937e48960be8e86ba0e0583f9ec1eca3d991 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 19:01:51 +0200 Subject: [PATCH 051/218] feat(api): insights feature adoption top users --- ee/api/chalicelib/core/insights.py | 52 ++++++++++++++---------------- 1 file changed, 24 insertions(+), 28 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 92d93bb27..308d3b131 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -606,13 +606,11 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end @dev.timed def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") event_type = "CLICK" event_value = '/' extra_values = {} default = True + meta_condition = [] for f in filters: if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_type = f["value"] @@ -620,45 +618,43 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(user_id)") + with ch_client.ClickHouseClient() as ch: 
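+        # "default" means no explicit EVENT_VALUE filter was supplied: look up
+        # the most-used value for this event type and use it as the target feature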
if default: # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY count DESC LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature - INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY 1 - ORDER BY 2 DESC + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY user_id + ORDER BY count DESC LIMIT 10;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + # print(pg_query%params) + rows = ch.execute(ch_query, params) return {"users": helper.list_to_camel_case(rows), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} From 086b7b6b0bfc9b1c580e13ea76429ffdc4f9a9b9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 19:13:08 +0200 Subject: [PATCH 052/218] feat(api): insights autocomplete --- ee/api/chalicelib/core/insights.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 308d3b131..7f302c14a 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -916,32 +916,33 @@ def search(text, feature_type, project_id, platform=None): resource_type = "ALL" data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) return data - - pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True, - data={} if platform is None else {"platform": platform}) - - params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, + args = {} if platform is None else {"platform": platform} + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + params = {"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, "endTimestamp": TimeUTC.now(), "project_id": project_id, "value": 
helper.string_to_sql_like(text.lower()), "platform_0": platform} if feature_type == "ALL": - with pg_client.PostgresClient() as cur: + with ch_client.ClickHouseClient() as ch: sub_queries = [] for e in JOURNEY_TYPES: sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" - FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) - WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s + FROM {JOURNEY_TYPES[e]["table"]} AS feature INNER JOIN sessions_metadata USING(session_id) + WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 LIMIT 10)""") - pg_query = "UNION ALL".join(sub_queries) + ch_query = "UNION ALL".join(sub_queries) # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + rows = ch.execute(ch_query, params) elif JOURNEY_TYPES.get(feature_type) is not None: with pg_client.PostgresClient() as cur: pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" - FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) - WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s + FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature INNER JOIN sessions_metadata USING(session_id) + WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 LIMIT 10;""" # print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) From 45daf914f5908162c57882d9e946b286869447c8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 8 Oct 2021 19:22:31 +0200 Subject: [PATCH 053/218] feat(api): insights cleanup --- ee/api/chalicelib/core/insights.py | 33 ++++++++++++++---------------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 7f302c14a..b7f2b7092 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -1,6 +1,5 @@ from chalicelib.core import sessions_metas from chalicelib.utils import helper, dev -from chalicelib.utils import pg_client from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps @@ -368,7 +367,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) + # print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -458,7 +457,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) + # print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -498,7 +497,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da AND not empty(user_id);""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - 
print(ch_query % params) + # print(ch_query % params) print("---------------------") all_user_count = ch.execute(ch_query, params) if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: @@ -514,8 +513,8 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da ORDER BY count DESC LIMIT 7;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") popularity = ch.execute(ch_query, params) ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count @@ -523,8 +522,8 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da WHERE {" AND ".join(ch_sub_query)} GROUP BY value;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") frequencies = ch.execute(ch_query, params) total_usage = sum([f["count"] for f in frequencies]) frequencies = {f["value"]: f["count"] for f in frequencies} @@ -568,8 +567,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") + # print(ch_query%params) all_user_count = ch.execute(ch_query, params) if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, @@ -653,7 +651,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days LIMIT 10;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(pg_query%params) + # print(ch_query%params) rows = ch.execute(ch_query, params) return {"users": helper.list_to_camel_case(rows), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} @@ -800,7 +798,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime GROUP BY period ORDER BY period) AS raw_results""" - # print(pg_query%params) + # print(ch_query%params) # print("---------------------") rows = ch.execute(ch_query, params) return {"avg": avg, "chart": rows} @@ -936,17 +934,16 @@ def search(text, feature_type, project_id, platform=None): WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 LIMIT 10)""") ch_query = "UNION ALL".join(sub_queries) - # print(cur.mogrify(pg_query, params)) + # print(ch_query, params) rows = ch.execute(ch_query, params) elif JOURNEY_TYPES.get(feature_type) is not None: - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature INNER JOIN sessions_metadata USING(session_id) WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 LIMIT 10;""" - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() + # print(ch_query, params) + rows = ch.execute(ch_query, params) else: return [] return 
[helper.dict_to_camel_case(row) for row in rows] From ae413710f272fd2d0bc0a5bf6b28e464f3ed1a4c Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Sat, 9 Oct 2021 00:39:09 +0530 Subject: [PATCH 054/218] chore(install): replace docker with cri Signed-off-by: Rajesh Rajendran --- scripts/helm/install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helm/install.sh b/scripts/helm/install.sh index 881d98808..529d08a8c 100755 --- a/scripts/helm/install.sh +++ b/scripts/helm/install.sh @@ -31,7 +31,7 @@ which docker &> /dev/null || { # response {"data":{"valid": TRUE|FALSE, "expiration": expiration date in ms}} # Installing k3s -curl -sL https://get.k3s.io | sudo K3S_KUBECONFIG_MODE="644" INSTALL_K3S_VERSION='v1.19.5+k3s2' INSTALL_K3S_EXEC="--no-deploy=traefik --docker" sh - +curl -sL https://get.k3s.io | sudo K3S_KUBECONFIG_MODE="644" INSTALL_K3S_VERSION='v1.19.5+k3s2' INSTALL_K3S_EXEC="--no-deploy=traefik" sh - mkdir ~/.kube sudo cp /etc/rancher/k3s/k3s.yaml ~/.kube/config sudo chown $(whoami) ~/.kube/config From 95acdf90d3f29fa3938f6f6f6ec8897e6d38ba48 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Sat, 9 Oct 2021 11:14:48 +0530 Subject: [PATCH 055/218] feat(kubernetes): use host volume if nfs is not explicitly specified. Note: sink/storage can't scale beyond one pod on a single machine --- .../helm/app/openreplay/templates/deployment.yaml | 12 ++++++++++++ scripts/helm/app/openreplay/templates/pvc.yaml | 2 +- scripts/helm/app/sink.yaml | 3 ++- scripts/helm/app/storage.yaml | 3 ++- 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/scripts/helm/app/openreplay/templates/deployment.yaml b/scripts/helm/app/openreplay/templates/deployment.yaml index a2259a852..187025b74 100644 --- a/scripts/helm/app/openreplay/templates/deployment.yaml +++ b/scripts/helm/app/openreplay/templates/deployment.yaml @@ -47,6 +47,17 @@ spec: resources: {{- toYaml .Values.resources | nindent 12 }} {{- if .Values.pvc }} + {{- if eq .Values.pvc.name "hostPath" }} + volumeMounts: + - mountPath: {{ .Values.pvc.mountPath }} + name: {{ .Values.pvc.name }} + volumes: + - name: {{ .Values.pvc.name }} + hostPath: + # Ensure the file directory is created. + path: {{ .Values.pvc.hostMountPath }} + type: DirectoryOrCreate + {{- else }} volumeMounts: - name: {{ .Values.pvc.name }} mountPath: {{ .Values.pvc.mountPath }} @@ -55,6 +66,7 @@ spec: persistentVolumeClaim: claimName: {{ .Values.pvc.volumeName }} {{- end }} + {{- end }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . 
| nindent 8 }} diff --git a/scripts/helm/app/openreplay/templates/pvc.yaml b/scripts/helm/app/openreplay/templates/pvc.yaml index 2180024e6..fc02fa6c3 100644 --- a/scripts/helm/app/openreplay/templates/pvc.yaml +++ b/scripts/helm/app/openreplay/templates/pvc.yaml @@ -1,5 +1,5 @@ {{- if .Values.pvc }} -{{- if .Values.pvc.create }} +{{- if and (.Values.pvc.create) (ne .Values.pvc.name "hostPath") }} apiVersion: v1 kind: PersistentVolumeClaim metadata: diff --git a/scripts/helm/app/sink.yaml b/scripts/helm/app/sink.yaml index 51113fdd0..8d6239d3e 100644 --- a/scripts/helm/app/sink.yaml +++ b/scripts/helm/app/sink.yaml @@ -23,10 +23,11 @@ resources: pvc: create: true - name: nfs + name: hostPath storageClassName: nfs volumeName: nfs mountPath: /mnt/efs + hostMountPath: /openreplay/storage/nfs storageSize: 5Gi env: diff --git a/scripts/helm/app/storage.yaml b/scripts/helm/app/storage.yaml index 39cca7f6b..ef839210e 100644 --- a/scripts/helm/app/storage.yaml +++ b/scripts/helm/app/storage.yaml @@ -24,10 +24,11 @@ resources: pvc: # PVC Created from filesink.yaml create: false - name: nfs + name: hostPath storageClassName: nfs volumeName: nfs mountPath: /mnt/efs + hostMountPath: /openreplay/storage/nfs storageSize: 5Gi env: From 85ac7884dc95c0efd5db4a7deb1720a5bfc8ec1b Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Sat, 9 Oct 2021 11:33:38 +0530 Subject: [PATCH 056/218] feat(kubernetes): remove nfs-server-provisioner Signed-off-by: Rajesh Rajendran --- scripts/helm/roles/openreplay/defaults/main.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helm/roles/openreplay/defaults/main.yaml b/scripts/helm/roles/openreplay/defaults/main.yaml index bcd303672..e706891fb 100644 --- a/scripts/helm/roles/openreplay/defaults/main.yaml +++ b/scripts/helm/roles/openreplay/defaults/main.yaml @@ -4,6 +4,6 @@ db_name: "" app_name: "" db_list: - "minio" - - "nfs-server-provisioner" - "postgresql" - "redis" + # - "nfs-server-provisioner" From c2a61a9c887bd42488c2be46d4b20bd5a7bcedf1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 11 Oct 2021 20:00:30 +0200 Subject: [PATCH 057/218] feat(db): changed CH-structure to support insights --- .gitignore | 11 ++++- ee/api/_clickhouse_upgrade.sh | 10 ++++ .../db/init_dbs/clickhouse/1.3.6/1.3.6.sql | 13 +++++ .../db/init_dbs/clickhouse/create/customs.sql | 22 +++++++++ .../clickhouse/create/sessions_metadata.sql | 48 ++++++++++++------- 5 files changed, 85 insertions(+), 19 deletions(-) create mode 100644 ee/api/_clickhouse_upgrade.sh create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql diff --git a/.gitignore b/.gitignore index ee79ca544..309d70038 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,13 @@ public node_modules *DS_Store *.env -.idea \ No newline at end of file +.idea +/ee/api/chalicelib/core/heatmaps.py +/ee/api/entrypoint.bundle.sh +/ee/api/entrypoint.sh +/ee/api/env_handler.py +/ee/api/chalicelib/blueprints/app/v1_api.py +/ee/api/build.sh +/ee/api/chalicelib/core/assist.py +/ee/api/chalicelib/blueprints/app/__init__.py +/ee/api/Dockerfile.bundle diff --git a/ee/api/_clickhouse_upgrade.sh b/ee/api/_clickhouse_upgrade.sh new file mode 100644 index 000000000..9b656a584 --- /dev/null +++ b/ee/api/_clickhouse_upgrade.sh @@ -0,0 +1,10 @@ +sudo yum update +sudo yum install yum-utils +sudo rpm --import https://repo.clickhouse.com/CLICKHOUSE-KEY.GPG +sudo yum-config-manager --add-repo 
https://repo.clickhouse.com/rpm/stable/x86_64 +sudo yum update +sudo service clickhouse-server restart + + +#later mus use in clickhouse-client: +#SET allow_experimental_window_functions = 1; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql new file mode 100644 index 000000000..2d7a14020 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql @@ -0,0 +1,13 @@ + +ALTER TABLE sessions_metadata + ADD COLUMN project_id UInt32, + ADD COLUMN tracker_version String, + ADD COLUMN rev_id Nullable(String), + ADD COLUMN user_uuid UUID, + ADD COLUMN user_os String, + ADD COLUMN user_os_version Nullable(String), + ADD COLUMN user_browser String, + ADD COLUMN user_browser_version Nullable(String), + ADD COLUMN user_device Nullable(String), + ADD COLUMN user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + ADD COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122); \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql new file 
mode 100644 index 000000000..6d466a7a0 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/customs.sql @@ -0,0 +1,22 @@ +CREATE TABLE customs +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + name Nullable(String), + payload Nullable(String), + level Enum8('info'=0, 'error'=1) DEFAULT 'info' +) ENGINE = MergeTree + PARTITION BY toDate(datetime) + ORDER BY (project_id, datetime) + TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql index 6ad10f8a3..a6d0382e6 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/sessions_metadata.sql @@ -1,19 +1,31 @@ 
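-- A sketch of the query shape this denormalization is meant to serve (the
-- project_id and timestamp values are illustrative, not part of the schema):
-- with project_id and datetime carried on the table itself, insight queries
-- can filter sessions_metadata directly instead of joining sessions
-- USING (session_id), e.g.
--   SELECT COUNT(DISTINCT user_id) AS count
--   FROM sessions_metadata
--   WHERE project_id = 1
--     AND datetime >= toDateTime(1633046400)
--     AND datetime < toDateTime(1633651200)
--     AND not empty(user_id);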
-CREATE TABLE sessions_metadata ( - session_id UInt64, - user_id Nullable(String), - user_anonymous_id Nullable(String), - metadata_1 Nullable(String), - metadata_2 Nullable(String), - metadata_3 Nullable(String), - metadata_4 Nullable(String), - metadata_5 Nullable(String), - metadata_6 Nullable(String), - metadata_7 Nullable(String), - metadata_8 Nullable(String), - metadata_9 Nullable(String), - metadata_10 Nullable(String), - datetime DateTime +CREATE TABLE sessions_metadata +( + session_id UInt64, + project_id UInt32, + tracker_version String, + rev_id Nullable(String), + user_uuid UUID, + user_os String, + user_os_version Nullable(String), + user_browser String, + user_browser_version Nullable(String), + user_device Nullable(String), + user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2), + user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122), + datetime DateTime, + user_id Nullable(String), + user_anonymous_id Nullable(String), + metadata_1 Nullable(String), + metadata_2 Nullable(String), + metadata_3 Nullable(String), + metadata_4 Nullable(String), + metadata_5 Nullable(String), + metadata_6 Nullable(String), + metadata_7 
Nullable(String), + metadata_8 Nullable(String), + metadata_9 Nullable(String), + metadata_10 Nullable(String) ) ENGINE = MergeTree -PARTITION BY toDate(datetime) -ORDER BY (session_id) -TTL datetime + INTERVAL 1 MONTH; + PARTITION BY toDate(datetime) + ORDER BY (session_id) + TTL datetime + INTERVAL 1 MONTH; \ No newline at end of file From 4b199abe2d2a50079d04f95ead21c6a0077b8987 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Tue, 12 Oct 2021 18:28:58 +0530 Subject: [PATCH 058/218] fix(ee): define variable Signed-off-by: Rajesh Rajendran --- ee/scripts/helm/roles/openreplay/defaults/main.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/scripts/helm/roles/openreplay/defaults/main.yaml b/ee/scripts/helm/roles/openreplay/defaults/main.yaml index 97d11429b..eb9071ff3 100644 --- a/ee/scripts/helm/roles/openreplay/defaults/main.yaml +++ b/ee/scripts/helm/roles/openreplay/defaults/main.yaml @@ -1,9 +1,9 @@ --- # defaults file for openreplay app_name: "" +db_name: "" db_list: - "minio" - - "nfs-server-provisioner" - "postgresql" - "redis" - "clickhouse" From 53326cce3a123ad8603be8520e3d42e3daf4ad8d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 12 Oct 2021 19:19:50 +0200 Subject: [PATCH 059/218] feat(api): ee insights small optimizations feat(api): ee insights fix materialized column-join issue --- ee/api/app.py | 6 +- ee/api/chalicelib/core/insights.py | 136 +++++++++++++++++++---------- 2 files changed, 93 insertions(+), 49 deletions(-) diff --git a/ee/api/app.py b/ee/api/app.py index 222e37a39..1cde5efe4 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -5,8 +5,9 @@ from sentry_sdk import configure_scope from chalicelib import _overrides from chalicelib.blueprints import bp_authorizers from chalicelib.blueprints import bp_core, bp_core_crons +from chalicelib.blueprints.app import v1_api from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons -from chalicelib.blueprints.subs import bp_dashboard +from chalicelib.blueprints.subs import bp_dashboard,bp_insights from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.helper import environ @@ -121,7 +122,8 @@ app.register_blueprint(bp_core_crons.app) app.register_blueprint(bp_core_dynamic.app) app.register_blueprint(bp_core_dynamic_crons.app) app.register_blueprint(bp_dashboard.app) - +app.register_blueprint(bp_insights.app) +app.register_blueprint(v1_api.app) # Enterprise app.register_blueprint(bp_ee.app) app.register_blueprint(bp_ee_crons.app) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index b7f2b7092..1d8503f3f 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -25,7 +25,7 @@ JOURNEY_TYPES = { "PAGES": {"table": "pages", "column": "url_path"}, "CLICK": {"table": "clicks", "column": "label"}, # "VIEW": {"table": "events_ios.views", "column": "name"}, TODO: enable this for SAAS only - # "EVENT": {"table": "events_common.customs", "column": "name"} + "EVENT": {"table": "customs", "column": "name"} } @@ -200,31 +200,54 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT ch_sub_query.append("sessions.duration>0") ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - 
groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM sessions_metadata INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM sessions_metadata INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - GROUP BY connexion_week - ORDER BY connexion_week;""" + # current optimization: from 6s to 4-5s + ch_query = f"""SELECT toInt8((toStartOfWeek(datetime, 1) - toDate(1630886400000 / 1000)) / 7) AS week, + COUNT(DISTINCT user_id) AS users_count + FROM sessions_metadata INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + AND user_id IN (SELECT DISTINCT user_id + FROM sessions_metadata + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess + INNER JOIN sessions AS bsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + ) + GROUP BY week;""" + # THIS IS THE ORIGINAL QUERY, PROBABLY WILL BE REUSED AGAIN WHEN CH-STRUCTURE CHANGES + # ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + # COUNT(all_connexions.user_id) AS users_count, + # groupArray(100)(all_connexions.user_id) AS connected_users + # FROM (SELECT DISTINCT user_id + # FROM sessions_metadata INNER JOIN sessions USING (session_id) + # WHERE {" AND ".join(ch_sub_query)} + # AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) + # AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + # AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + # AND isNull((SELECT 1 + # FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) + # WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + # AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + # AND bsess.project_id = %(project_id)s + # AND bmsess.user_id = sessions_metadata.user_id + # LIMIT 1)) + # ) AS users_list + # INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week + # FROM sessions_metadata INNER JOIN sessions USING (session_id) + # WHERE {" AND ".join(ch_sub_query)} + # AND 
sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) + # ) AS all_connexions USING (user_id) + # GROUP BY connexion_week + # ORDER BY connexion_week;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} # print(ch_query % params) @@ -249,6 +272,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en ch_sub_query.append("sessions.duration>0") ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") with ch_client.ClickHouseClient() as ch: + # TODO: optimize after DB structure change, optimization from 6s to 4s ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, week, users_count, @@ -284,7 +308,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query%params) + print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -342,13 +366,13 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en COUNT(DISTINCT all_connexions.user_id) AS users_count, groupArray(100)(all_connexions.user_id) AS connected_users FROM (SELECT DISTINCT user_id - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess INNER JOIN {event_table} AS bsess USING (session_id) + FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bsess.project_id = %(project_id)s @@ -357,7 +381,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en LIMIT 1)) ) AS users_list INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) ORDER BY connexion_week, user_id @@ -367,7 +391,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) + print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -490,6 +514,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da extra_values["user_id"] = f["value"] with ch_client.ClickHouseClient() as ch: + # TODO: change this query to not use join, optimization from 5s to 1s ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count FROM sessions AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND 
".join(ch_sub_query)} @@ -497,7 +522,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da AND not empty(user_id);""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) + print(ch_query % params) print("---------------------") all_user_count = ch.execute(ch_query, params) if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: @@ -513,17 +538,19 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da ORDER BY count DESC LIMIT 7;""" - # print(ch_query % params) - # print("---------------------") + print(ch_query % params) + print("---------------------") popularity = ch.execute(ch_query, params) + params["values"] = [p["value"] for p in popularity] ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} + AND {event_column} IN %(values)s GROUP BY value;""" - # print(ch_query % params) - # print("---------------------") + print(ch_query % params) + print("---------------------") frequencies = ch.execute(ch_query, params) total_usage = sum([f["count"] for f in frequencies]) frequencies = {f["value"]: f["count"] for f in frequencies} @@ -562,12 +589,14 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end ch_sub_query.append("user_id IS NOT NULL") ch_sub_query.append("not empty(user_id)") with ch_client.ClickHouseClient() as ch: + # TODO: optimize this when DB structure changes, optimization from 3s to 1s ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count FROM sessions_metadata INNER JOIN sessions AS feature USING(session_id) WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query%params) + print(ch_query % params) + print("---------------------") all_user_count = ch.execute(ch_query, params) if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, @@ -583,6 +612,8 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(ch_query % params) + print("---------------------") row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] @@ -593,8 +624,8 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) - # print("---------------------") + print(ch_query % params) + print("---------------------") adoption = ch.execute(ch_query, params) adoption = adoption[0]["count"] / all_user_count return {"target": all_user_count, "adoption": adoption, @@ -643,6 +674,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days event_value = row[0]["value"] extra_values["value"] = event_value ch_sub_query.append(f"feature.{event_column} = %(value)s") + # TODO: no possible optimization right now ch_query = f"""SELECT user_id, 
COUNT(DISTINCT session_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} @@ -651,7 +683,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days LIMIT 10;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query%params) + print(ch_query % params) rows = ch.execute(ch_query, params) return {"users": helper.list_to_camel_case(rows), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} @@ -700,6 +732,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da event_value = row[0]["value"] extra_values["value"] = event_value ch_sub_query.append(f"feature.{event_column} = %(value)s") + # optimal ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} @@ -777,6 +810,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") with ch_client.ClickHouseClient() as ch: + # TODO: optimize this when DB structure changes, optimization from 3s to 1s ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count FROM sessions_metadata INNER JOIN sessions USING (session_id) @@ -787,19 +821,21 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(ch_query % params) + print("---------------------") avg = ch.execute(ch_query, params) if len(avg) == 0 or avg[0]["avg"] == 0: return {"avg": 0, "chart": []} avg = avg[0]["avg"] + # TODO: optimize this when DB structure changes, optimization from 3s to 1s ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count FROM sessions_metadata INNER JOIN sessions USING (session_id) WHERE {" AND ".join(ch_sub_query)} GROUP BY period - ORDER BY period) AS raw_results""" - - # print(ch_query%params) - # print("---------------------") + ORDER BY period) AS raw_results;""" + print(ch_query % params) + print("---------------------") rows = ch.execute(ch_query, params) return {"avg": avg, "chart": rows} @@ -813,6 +849,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes ch_sub_query.append("not empty(user_id)") with ch_client.ClickHouseClient() as ch: + # TODO: optimize this when DB structure changes, optimization from 4s to 1s ch_query = f"""SELECT AVG(count) AS avg FROM(SELECT COUNT(user_id) AS count FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days @@ -823,10 +860,13 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes ORDER BY number_of_days) AS results;""" params = {"project_id": project_id, 
"startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(ch_query % params) + print("---------------------") avg = ch.execute(ch_query, params) if len(avg) == 0 or avg[0]["avg"] == 0: return {"avg": 0, "partition": []} avg = avg[0]["avg"] + # TODO: optimize this when DB structure changes, optimization from 4s to 1s ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days FROM sessions_metadata INNER JOIN sessions USING (session_id) @@ -835,8 +875,8 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes GROUP BY number_of_days ORDER BY number_of_days;""" - # print(ch_query%params) - # print("---------------------") + print(ch_query % params) + print("---------------------") rows = ch.execute(ch_query, params) return {"avg": avg, "partition": helper.list_to_camel_case(rows)} @@ -880,11 +920,13 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(ch_query % params) row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] extra_values["value"] = event_value ch_sub_query.append(f"feature.{event_column} = %(value)s") + # TODO: no possible optimization right now ch_query = f"""SELECT user_id, toUnixTimestamp(last_time)*1000 AS last_time, interactions_count, @@ -899,7 +941,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi GROUP BY user_id, last_time, interactions_count) AS raw_results;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query, params) + print(ch_query % params) rows = ch.execute(ch_query, params) return { "startTimestamp": startTimestamp, From ebfa2b381596af1d028eaa5e369f42c2f5e18284 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 13 Oct 2021 14:39:49 +0530 Subject: [PATCH 060/218] fix(certbot): updating letsencrypt certificates. 
Signed-off-by: Rajesh Rajendran --- scripts/certbot.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/certbot.sh b/scripts/certbot.sh index 42cb00015..da5357ff8 100755 --- a/scripts/certbot.sh +++ b/scripts/certbot.sh @@ -14,7 +14,7 @@ read dns_name echo please enter your email id: read emai_id ssh_ansible_user=$(whoami) -certbot_home=/etc/letsencrypt/archive/$dns_name +certbot_home=/etc/letsencrypt/live/$dns_name #Check certbot installed or not @@ -26,8 +26,8 @@ fi sudo certbot certonly --non-interactive --agree-tos -m $emai_id -d $dns_name --standalone -sudo cp $certbot_home/privkey1.pem ${homedir}/site.key -sudo cp $certbot_home/fullchain1.pem ${homedir}/site.crt +sudo cp $certbot_home/privkey.pem ${homedir}/site.key +sudo cp $certbot_home/fullchain.pem ${homedir}/site.crt sudo chown -R $ssh_ansible_user:$ssh_ansible_user ${homedir}/site.key ${homedir}/site.crt sudo chmod 775 ${homedir}/site.crt ${homedir}/site.key From 6cab9599687b81f7d052a4b2741dd5fc66be881c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 13 Oct 2021 11:44:24 +0200 Subject: [PATCH 061/218] feat(api): changed .gitignore --- .gitignore | 11 +---------- ee/api/.gitignore | 9 +++++++++ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 309d70038..ee79ca544 100644 --- a/.gitignore +++ b/.gitignore @@ -3,13 +3,4 @@ public node_modules *DS_Store *.env -.idea -/ee/api/chalicelib/core/heatmaps.py -/ee/api/entrypoint.bundle.sh -/ee/api/entrypoint.sh -/ee/api/env_handler.py -/ee/api/chalicelib/blueprints/app/v1_api.py -/ee/api/build.sh -/ee/api/chalicelib/core/assist.py -/ee/api/chalicelib/blueprints/app/__init__.py -/ee/api/Dockerfile.bundle +.idea \ No newline at end of file diff --git a/ee/api/.gitignore b/ee/api/.gitignore index a526e1c21..d5e957053 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -232,3 +232,12 @@ Pipfile /chalicelib/utils/smtp.py /chalicelib/utils/strings.py /chalicelib/utils/TimeUTC.py +/chalicelib/core/heatmaps.py +/entrypoint.bundle.sh +/entrypoint.sh +/env_handler.py +/chalicelib/blueprints/app/v1_api.py +/build.sh +/chalicelib/core/assist.py +/chalicelib/blueprints/app/__init__.py +/Dockerfile.bundle From f537a77e5e2c2c47c0ddc20a835936e3b5af14b6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 13 Oct 2021 11:48:06 +0200 Subject: [PATCH 062/218] feat(db): changed index --- scripts/helm/db/init_dbs/postgresql/init_schema.sql | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 586f84e74..a2b0c72bd 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -172,7 +172,6 @@ CREATE TABLE projects "defaultInputMode": "plain" }'::jsonb -- ?????? 
); -CREATE INDEX projects_tenant_id_idx ON projects (tenant_id); CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS $$ @@ -248,7 +247,6 @@ create table webhooks index integer default 0 not null, name varchar(100) ); -CREATE INDEX webhooks_tenant_id_idx ON webhooks (tenant_id); -- --- notifications.sql --- From 161d40d07810308db4aec5504651406bca0defc4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 13 Oct 2021 18:46:49 +0200 Subject: [PATCH 063/218] feat(api): ee insights optimized with new structure --- ee/api/chalicelib/core/insights.py | 424 ++++++++++++++++------------- 1 file changed, 240 insertions(+), 184 deletions(-) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 1d8503f3f..2a8febe5d 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -44,6 +44,9 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.project_id = %(project_id)s") + meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + meta_condition.append(f"sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000)") extra_values["user_id"] = f["value"] ch_sub_query = __get_basic_constraints(table_name=event_table, data=args) meta_condition += __get_meta_constraint(args) @@ -192,62 +195,33 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions', data=args) + ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions.duration>0") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") with ch_client.ClickHouseClient() as ch: - # current optimization: from 6s to 4-5s - ch_query = f"""SELECT toInt8((toStartOfWeek(datetime, 1) - toDate(1630886400000 / 1000)) / 7) AS week, - COUNT(DISTINCT user_id) AS users_count - FROM sessions_metadata INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) - AND user_id IN (SELECT DISTINCT user_id - FROM sessions_metadata - INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - INNER JOIN sessions AS bsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - ) - GROUP BY week;""" - # THIS IS 
THE ORIGINAL QUERY, PROBABLY WILL BE REUSED AGAIN WHEN CH-STRUCTURE CHANGES - # ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - # COUNT(all_connexions.user_id) AS users_count, - # groupArray(100)(all_connexions.user_id) AS connected_users - # FROM (SELECT DISTINCT user_id - # FROM sessions_metadata INNER JOIN sessions USING (session_id) - # WHERE {" AND ".join(ch_sub_query)} - # AND toStartOfWeek(sessions.datetime,1) = toDate(%(startTimestamp)s / 1000) - # AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - # AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - # AND isNull((SELECT 1 - # FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) - # WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - # AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - # AND bsess.project_id = %(project_id)s - # AND bmsess.user_id = sessions_metadata.user_id - # LIMIT 1)) - # ) AS users_list - # INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - # FROM sessions_metadata INNER JOIN sessions USING (session_id) - # WHERE {" AND ".join(ch_sub_query)} - # AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) - # ) AS all_connexions USING (user_id) - # GROUP BY connexion_week - # ORDER BY connexion_week;""" + ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + COUNT(all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess + WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + ) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + ) AS all_connexions USING (user_id) + GROUP BY connexion_week + ORDER BY connexion_week;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} # print(ch_query % params) @@ -264,15 +238,13 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions', data=args) + ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions.duration>0") + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") with ch_client.ClickHouseClient() as ch: - # TODO: optimize after DB structure change, optimization from 6s to 4s ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS 
first_connexion_week, week, users_count, @@ -281,24 +253,21 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en SELECT first_connexion_week, toInt8((connexion_week - first_connexion_week) / 7) AS week, COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT user_id, MIN(toStartOfWeek(sessions.datetime, 1)) AS first_connexion_week - FROM sessions_metadata INNER JOIN sessions USING (session_id) + groupArray(20)(all_connexions.user_id) AS connected_users + FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND sessions.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s + FROM sessions_metadata AS bmsess + WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.project_id = %(project_id)s AND bmsess.user_id = sessions_metadata.user_id LIMIT 1)) GROUP BY user_id) AS users_list INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week - FROM sessions_metadata INNER JOIN sessions USING (session_id) + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) ORDER BY connexion_week, user_id ) AS all_connexions USING (user_id) WHERE first_connexion_week <= connexion_week @@ -308,7 +277,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(ch_query % params) + # print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -324,10 +293,6 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK ch_sub_query = __get_basic_constraints(table_name='feature', data=args) meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") event_type = "PAGES" event_value = "/" extra_values = {} @@ -339,7 +304,12 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] event_table = 
JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] @@ -348,9 +318,9 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en if default: # get most used value ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} - AND length({event_column}) > 2 GROUP BY value ORDER BY count DESC LIMIT 1;""" @@ -360,7 +330,22 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } extra_values["value"] = event_value + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query += meta_condition ch_sub_query.append(f"feature.{event_column} = %(value)s") ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, COUNT(DISTINCT all_connexions.user_id) AS users_count, @@ -376,6 +361,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bsess.project_id = %(project_id)s + AND bmsess.project_id = %(project_id)s AND bmsess.user_id = sessions_metadata.user_id AND bsess.{event_column}=%(value)s LIMIT 1)) @@ -383,7 +369,6 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) ORDER BY connexion_week, user_id ) AS all_connexions USING (user_id) GROUP BY connexion_week @@ -408,10 +393,6 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK ch_sub_query = __get_basic_constraints(table_name='feature', data=args) meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") event_type = "PAGES" event_value = "/" @@ -424,7 +405,13 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), event_value = f["value"] default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + 
meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] @@ -432,9 +419,9 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), if default: # get most used value ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} - AND length({event_column}) > 2 GROUP BY value ORDER BY count DESC LIMIT 1;""" @@ -444,7 +431,24 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } extra_values["value"] = event_value + + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + + ch_sub_query += meta_condition ch_sub_query.append(f"feature.{event_column} = %(value)s") ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, week, @@ -461,17 +465,19 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess INNER JOIN sessions AS bsess USING (session_id) + FROM sessions_metadata AS bmsess + INNER JOIN {event_table} AS bsess USING (session_id) WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) AND bsess.project_id = %(project_id)s + AND bmsess.project_id = %(project_id)s AND bmsess.user_id = sessions_metadata.user_id + AND bsess.{event_column} = %(value)s LIMIT 1)) GROUP BY user_id) AS users_list INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000) ORDER BY connexion_week, user_id ) AS all_connexions USING (user_id) WHERE first_connexion_week <= connexion_week @@ -481,7 +487,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, 
**__get_constraint_values(args), **extra_values} - # print(ch_query % params) + print(ch_query % params) rows = ch.execute(ch_query, params) rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) return { @@ -498,9 +504,6 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK ch_sub_query = __get_basic_constraints(table_name='feature', data=args) meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") event_table = JOURNEY_TYPES["CLICK"]["table"] event_column = JOURNEY_TYPES["CLICK"]["column"] @@ -510,47 +513,55 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - ch_sub_query.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] with ch_client.ClickHouseClient() as ch: - # TODO: change this query to not use join, optimization from 5s to 1s - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND user_id IS NOT NULL - AND not empty(user_id);""" + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query += meta_condition + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions_metadata + WHERE {" AND ".join(meta_condition)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") all_user_count = ch.execute(ch_query, params) if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: return [] all_user_count = all_user_count[0]["count"] - ch_sub_query.append(f"length({event_column})>2") ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} - AND user_id IS NOT NULL - AND not empty(user_id) + AND length({event_column})>2 GROUP BY value ORDER BY count DESC LIMIT 7;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") popularity = ch.execute(ch_query, params) params["values"] = 
[p["value"] for p in popularity] - + if len(params["values"]) == 0: + return [] ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} AND {event_column} IN %(values)s GROUP BY value;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") frequencies = ch.execute(ch_query, params) total_usage = sum([f["count"] for f in frequencies]) frequencies = {f["value"]: f["count"] for f in frequencies} @@ -577,6 +588,11 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] @@ -584,48 +600,58 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end ch_sub_query = __get_basic_constraints(table_name='feature', data=args) meta_condition += __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") with ch_client.ClickHouseClient() as ch: - # TODO: optimize this when DB structure changes, optimization from 3s to 1s - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions_metadata INNER JOIN sessions AS feature USING(session_id) - WHERE {" AND ".join(ch_sub_query)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - print("---------------------") - all_user_count = ch.execute(ch_query, params) - if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: - return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": event_value}], } - all_user_count = all_user_count[0]["count"] if default: # get most used value ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY count DESC LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + # else: + # print(f"no {event_table} most used value") + # return {"target": 0, 
"adoption": 0, + # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} + extra_values["value"] = event_value + + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + ch_sub_query += meta_condition + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions_metadata + WHERE {" AND ".join(meta_condition)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + # print("---------------------") + all_user_count = ch.execute(ch_query, params) + if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + all_user_count = all_user_count[0]["count"] + ch_sub_query.append(f"feature.{event_column} = %(value)s") ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") adoption = ch.execute(ch_query, params) adoption = adoption[0]["count"] / all_user_count return {"target": all_user_count, "adoption": adoption, @@ -648,21 +674,24 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] ch_sub_query = __get_basic_constraints(table_name='feature', data=args) meta_condition += __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") + with ch_client.ClickHouseClient() as ch: if default: # get most used value ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY count DESC @@ -672,9 +701,19 @@ def 
feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return {"users": [], + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} + extra_values["value"] = event_value + if len(meta_condition) == 0: + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") ch_sub_query.append(f"feature.{event_column} = %(value)s") - # TODO: no possible optimization right now ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) WHERE {" AND ".join(ch_sub_query)} @@ -683,7 +722,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days LIMIT 10;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) + # print(ch_query % params) rows = ch.execute(ch_query, params) return {"users": helper.list_to_camel_case(rows), "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} @@ -705,8 +744,9 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.datetime >= %(startTimestamp)s") - meta_condition.append("sessions_metadata.datetime < %(endTimestamp)s") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] @@ -726,13 +766,20 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da LIMIT 1;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) + # print(ch_query % params) row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } extra_values["value"] = event_value ch_sub_query.append(f"feature.{event_column} = %(value)s") - # optimal ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} @@ -762,15 +809,17 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en event_column = JOURNEY_TYPES[f["value"]]["column"] 
elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] ch_sub_query = __get_basic_constraints(table_name="feature", data=args) meta_condition += __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY avg DESC @@ -792,7 +841,7 @@ PERIOD_TO_FUNCTION = { @dev.timed def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - meta_condition = [] + meta_condition = __get_meta_constraint(args) period = "DAY" extra_values = {} for f in filters: @@ -802,18 +851,14 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] period_function = PERIOD_TO_FUNCTION[period] - ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) - meta_condition += __get_meta_constraint(args) + ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") with ch_client.ClickHouseClient() as ch: - # TODO: optimize this when DB structure changes, optimization from 3s to 1s ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata INNER JOIN sessions USING (session_id) + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} GROUP BY period) AS daily_users;""" params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, @@ -821,8 +866,8 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") avg = ch.execute(ch_query, params) if len(avg) == 0 or avg[0]["avg"] == 0: return {"avg": 0, "chart": []} @@ -830,53 +875,51 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime # TODO: optimize 
this when DB structure changes, optimization from 3s to 1s ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata INNER JOIN sessions USING (session_id) + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} GROUP BY period ORDER BY period) AS raw_results;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") rows = ch.execute(ch_query, params) return {"avg": avg, "chart": rows} @dev.timed def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") with ch_client.ClickHouseClient() as ch: - # TODO: optimize this when DB structure changes, optimization from 4s to 1s - ch_query = f"""SELECT AVG(count) AS avg + ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg FROM(SELECT COUNT(user_id) AS count FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata INNER JOIN sessions USING (session_id) + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} GROUP BY user_id) AS users_connexions GROUP BY number_of_days ORDER BY number_of_days) AS results;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") avg = ch.execute(ch_query, params) if len(avg) == 0 or avg[0]["avg"] == 0: return {"avg": 0, "partition": []} avg = avg[0]["avg"] - # TODO: optimize this when DB structure changes, optimization from 4s to 1s ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata INNER JOIN sessions USING (session_id) + FROM sessions_metadata WHERE {" AND ".join(ch_sub_query)} GROUP BY user_id) AS users_connexions GROUP BY number_of_days ORDER BY number_of_days;""" - print(ch_query % params) - print("---------------------") + # print(ch_query % params) + # print("---------------------") rows = ch.execute(ch_query, params) return {"avg": avg, "partition": helper.list_to_camel_case(rows)} @@ -885,6 +928,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes @dev.timed def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) event_type = "PAGES" event_value = "/" extra_values = {} @@ -898,22 +942,21 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi default = False elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + 
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") extra_values["user_id"] = f["value"] event_table = JOURNEY_TYPES[event_type]["table"] event_column = JOURNEY_TYPES[event_type]["column"] - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) meta_condition += __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") with ch_client.ClickHouseClient() as ch: if default: # get most used value ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY value ORDER BY count DESC @@ -924,9 +967,22 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi row = ch.execute(ch_query, params) if len(row) > 0: event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "list": [] + } extra_values["value"] = event_value + if len(meta_condition) == 0: + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") ch_sub_query.append(f"feature.{event_column} = %(value)s") - # TODO: no possible optimization right now ch_query = f"""SELECT user_id, toUnixTimestamp(last_time)*1000 AS last_time, interactions_count, @@ -938,7 +994,9 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi WHERE {" AND ".join(ch_sub_query)} GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) WHERE now() - last_time > 7 - GROUP BY user_id, last_time, interactions_count) AS raw_results;""" + GROUP BY user_id, last_time, interactions_count + ORDER BY interactions_count DESC, last_time DESC + LIMIT 50) AS raw_results;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} print(ch_query % params) @@ -960,31 +1018,29 @@ def search(text, feature_type, project_id, platform=None): ch_sub_query = __get_basic_constraints(table_name="feature", data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") params = {"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, "endTimestamp": TimeUTC.now(), "project_id": project_id, - "value": helper.string_to_sql_like(text.lower()), + "value": text.lower(), "platform_0": platform} if feature_type == "ALL": with ch_client.ClickHouseClient() as ch: sub_queries = [] for e in 
JOURNEY_TYPES:
                sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type"
-                FROM {JOURNEY_TYPES[e]["table"]} AS feature INNER JOIN sessions_metadata USING(session_id)
+                FROM {JOURNEY_TYPES[e]["table"]} AS feature
                 WHERE {" AND ".join(ch_sub_query)}
                 AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0
                 LIMIT 10)""")
             ch_query = "UNION ALL".join(sub_queries)
-            # print(ch_query, params)
+            print(ch_query % params)
             rows = ch.execute(ch_query, params)
     elif JOURNEY_TYPES.get(feature_type) is not None:
         with ch_client.ClickHouseClient() as ch:
             ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type"
-                FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature INNER JOIN sessions_metadata USING(session_id)
+                FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature
                 WHERE {" AND ".join(ch_sub_query)}
                 AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0
                 LIMIT 10;"""
-            # print(ch_query, params)
+            print(ch_query % params)
             rows = ch.execute(ch_query, params)
     else:
         return []

From 5aa267b5639207cb35d318e948e2d587d3c0c893 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 14 Oct 2021 16:37:34 +0200
Subject: [PATCH 064/218] feat(db): changed indexes

---
 ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 +
 scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql | 1 +
 scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 +
 3 files changed, 3 insertions(+)

diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index e4a711117..64454f9c7 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -427,6 +427,7 @@ CREATE TABLE errors
     stacktrace jsonb, --to save the stacktrace and not query S3 another time
     stacktrace_parsed_at timestamp
 );
+CREATE INDEX errors_error_id_idx ON errors (error_id);
 CREATE INDEX ON errors (project_id, source);
 CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops);
 CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops);
diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
index bcbece5c9..1d68b59c6 100644
--- a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
+++ b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
@@ -5,4 +5,5 @@ CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessio
 CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
 CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
+CREATE INDEX errors_error_id_idx ON errors (error_id);
 COMMIT;
\ No newline at end of file
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 6c3dd1f23..34b455a1e 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -407,6 +407,7 @@ CREATE TABLE errors
     stacktrace jsonb, --to save the stacktrace and not query S3 another time
     stacktrace_parsed_at timestamp
 );
+CREATE INDEX errors_error_id_idx ON errors (error_id);
 CREATE INDEX ON errors (project_id, source);
 CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops);
 CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops);

From 6db13febcbd409ea8d15247705c6866f62eca32f
Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 14 Oct 2021 16:40:43 +0200
Subject: [PATCH 065/218] feat(db): changed indexes

---
 ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 +
 scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql | 1 +
 scripts/helm/db/init_dbs/postgresql/init_schema.sql | 1 +
 3 files changed, 3 insertions(+)

diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 64454f9c7..7bbcf4e80 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -732,6 +732,7 @@ CREATE TABLE events.errors
 );
 CREATE INDEX ON events.errors (session_id);
 CREATE INDEX ON events.errors (timestamp);
+CREATE INDEX errors_error_id_idx ON events.errors (error_id);
 CREATE TABLE events.graphql
diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
index 1d68b59c6..50ce4bb0c 100644
--- a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
+++ b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql
@@ -6,4 +6,5 @@ CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHER
 CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
 CREATE INDEX errors_error_id_idx ON errors (error_id);
+CREATE INDEX errors_error_id_idx ON events.errors (error_id);
 COMMIT;
\ No newline at end of file
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 34b455a1e..def953e2f 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -731,6 +731,7 @@ CREATE TABLE events.errors
 PRIMARY KEY (session_id, message_id)
 );
 CREATE INDEX ON events.errors (session_id);
+CREATE INDEX errors_error_id_idx ON events.errors (error_id);
 CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id);
 CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp);
 CREATE INDEX errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id);

From 54d036b45e2dc4620167ef8ba79fccefe04aea69 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 15 Oct 2021 06:55:01 +0200
Subject: [PATCH 066/218] feat(api): errors-queries fixes

---
 ee/api/chalicelib/core/errors.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
index a62e900bc..90738df9b 100644
--- a/ee/api/chalicelib/core/errors.py
+++ b/ee/api/chalicelib/core/errors.py
@@ -321,7 +321,7 @@ def get_details_chart(project_id, error_id, user_id, **data):
                "error_id": error_id}
     main_ch_query = f"""\
-    SELECT error_id,
+    SELECT browser_details.error_id,
            browsers_partition,
           os_partition,
           device_partition,
@@ -516,7 +516,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
                FROM errors
               WHERE {" AND ".join(ch_sub_query)}
              GROUP BY error_id, timestamp
-             ORDER BY timestamp)
+             ORDER BY timestamp) AS sub_table
          GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
     # print("--------------------")

From dd5a84a18bc13e89b7557344138665a6d891dbe8 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 15 Oct 2021 09:11:41 +0200
Subject: [PATCH 067/218] feat(api): errors-queries aliases fixes

---
 ee/api/chalicelib/core/errors.py | 2 +-
 1 file changed, 1
insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 90738df9b..98b5620af 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -321,7 +321,7 @@ def get_details_chart(project_id, error_id, user_id, **data): "error_id": error_id} main_ch_query = f"""\ - SELECT browser_details.error_id, + SELECT browser_details.error_id AS error_id, browsers_partition, os_partition, device_partition, From 240ce27681f6dddb7be76a8bf9ec8588ebe250a5 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Sat, 16 Oct 2021 02:37:08 +0200 Subject: [PATCH 068/218] feat(tracker-assist): greenlight fix & no video by default --- .../Assist/ChatControls/ChatControls.tsx | 15 +- .../Assist/ChatWindow/ChatWindow.tsx | 6 +- .../AssistActions/AssistActions.tsx | 31 +- .../managers/AssistManager.ts | 20 +- .../managers/LocalStream.ts | 85 ++++++ tracker/tracker-assist/.npmignore | 1 + .../layout/css/bootstrap.min.css | 7 + tracker/tracker-assist/layout/css/styles.css | 86 ++++++ tracker/tracker-assist/layout/index.html | 168 +++++++++++ tracker/tracker-assist/package.json | 2 +- tracker/tracker-assist/src/CallWindow.ts | 279 ++++++++++-------- tracker/tracker-assist/src/LocalStream.ts | 85 ++++++ tracker/tracker-assist/src/_slim.ts | 4 +- tracker/tracker-assist/src/index.ts | 115 ++++---- 14 files changed, 694 insertions(+), 210 deletions(-) create mode 100644 frontend/app/player/MessageDistributor/managers/LocalStream.ts create mode 100644 tracker/tracker-assist/layout/css/bootstrap.min.css create mode 100644 tracker/tracker-assist/layout/css/styles.css create mode 100644 tracker/tracker-assist/layout/index.html create mode 100644 tracker/tracker-assist/src/LocalStream.ts diff --git a/frontend/app/components/Assist/ChatControls/ChatControls.tsx b/frontend/app/components/Assist/ChatControls/ChatControls.tsx index 6ca747455..f4508e9f2 100644 --- a/frontend/app/components/Assist/ChatControls/ChatControls.tsx +++ b/frontend/app/components/Assist/ChatControls/ChatControls.tsx @@ -2,27 +2,26 @@ import React, { useState } from 'react' import stl from './ChatControls.css' import cn from 'classnames' import { Button, Icon } from 'UI' +import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream'; + interface Props { - stream: MediaStream | null, + stream: LocalStream | null, endCall: () => void } function ChatControls({ stream, endCall } : Props) { const [audioEnabled, setAudioEnabled] = useState(true) - const [videoEnabled, setVideoEnabled] = useState(true) + const [videoEnabled, setVideoEnabled] = useState(false) const toggleAudio = () => { if (!stream) { return; } - const aEn = !audioEnabled - stream.getAudioTracks().forEach(track => track.enabled = aEn); - setAudioEnabled(aEn); + setAudioEnabled(stream.toggleAudio()); } const toggleVideo = () => { if (!stream) { return; } - const vEn = !videoEnabled; - stream.getVideoTracks().forEach(track => track.enabled = vEn); - setVideoEnabled(vEn) + stream.toggleVideo() + .then(setVideoEnabled) } return ( diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx index e1e5ba1a6..ff0767ab9 100644 --- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx +++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx @@ -6,10 +6,12 @@ import Counter from 'App/components/shared/SessionItem/Counter' import stl from './chatWindow.css' import ChatControls from '../ChatControls/ChatControls' import Draggable from 
'react-draggable';
+import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
+
 export interface Props {
   incomeStream: MediaStream | null,
-  localStream: MediaStream | null,
+  localStream: LocalStream | null,
   userId: String,
   endCall: () => void
 }
@@ -30,7 +32,7 @@ const ChatWindow: FC<Props> = function ChatWindow({ userId, incomeStream, localS
- +
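The three diffs that follow (AssistActions.tsx, AssistManager.ts and the new LocalStream.ts) implement a single idea: the call now starts audio-only, but the outgoing stream ships a tiny canvas-generated placeholder video track, so video can be enabled later by swapping the real camera track into the already-negotiated sender instead of renegotiating the call. The following is a minimal standalone sketch of that technique in browser TypeScript; the function names and the bare RTCPeerConnection (pc) are illustrative assumptions, since in the patch itself PeerJS owns the connection and the swap is driven through LocalStream.onVideoTrack.

// Sketch only: start a call audio-first, upgrade to camera video later.
function placeholderVideoTrack(): MediaStreamTrack {
  // A tiny canvas repainted on every animation frame yields a cheap,
  // always-live video track (2x2 because, as LocalStream.ts notes, 1x1 doesn't work).
  const canvas = document.createElement("canvas");
  canvas.width = canvas.height = 2;
  const ctx = canvas.getContext("2d");
  requestAnimationFrame(function draw() {
    ctx?.fillRect(0, 0, canvas.width, canvas.height);
    requestAnimationFrame(draw);
  });
  return canvas.captureStream(30).getVideoTracks()[0];
}

async function startAudioOnlyCall(pc: RTCPeerConnection): Promise<MediaStream> {
  const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
  // Sending the placeholder up front puts a video m-line into the SDP,
  // so the later upgrade needs no renegotiation.
  const stream = new MediaStream([mic.getAudioTracks()[0], placeholderVideoTrack()]);
  stream.getTracks().forEach(track => pc.addTrack(track, stream));
  return stream;
}

async function upgradeToVideo(pc: RTCPeerConnection): Promise<void> {
  const cam = await navigator.mediaDevices.getUserMedia({ video: true });
  const sender = pc.getSenders().find(s => s.track?.kind === "video");
  // replaceTrack() swaps the placeholder for the real camera track in place.
  await sender?.replaceTrack(cam.getVideoTracks()[0]);
}

This is the same move the AssistManager hunk below makes when it finds the video sender and calls sender.replaceTrack(vTrack) once LocalStream reports a real camera track.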
diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index 1c5af3090..493ed3cfe 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -7,6 +7,9 @@ import { connectPlayer } from 'Player/store'; import ChatWindow from '../../ChatWindow'; import { callPeer } from 'Player' import { CallingState, ConnectionStatus } from 'Player/MessageDistributor/managers/AssistManager'; +import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream'; +import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream'; + import { toast } from 'react-toastify'; import stl from './AassistActions.css' @@ -32,7 +35,7 @@ interface Props { function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus }: Props) { const [ incomeStream, setIncomeStream ] = useState(null); - const [ localStream, setLocalStream ] = useState(null); + const [ localStream, setLocalStream ] = useState(null); const [ endCall, setEndCall ] = useState<()=>void>(()=>{}); useEffect(() => { @@ -45,24 +48,18 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus } }, [peerConnectionStatus]) - function onCallConnect(lStream) { - setLocalStream(lStream); - setEndCall(() => callPeer( - lStream, - setIncomeStream, - onClose.bind(null, lStream), - onReject, - onError - )); - } function call() { - navigator.mediaDevices.getUserMedia({video:true, audio:true}) - .then(onCallConnect).catch(error => { // TODO retry only if specific error - navigator.mediaDevices.getUserMedia({audio:true}) - .then(onCallConnect) - .catch(onError) - }); + RequestLocalStream().then(lStream => { + setLocalStream(lStream); + setEndCall(() => callPeer( + lStream, + setIncomeStream, + lStream.stop.bind(lStream), + onReject, + onError + )); + }).catch(onError) } const inCall = calling !== CallingState.False; diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index 7e6dce18d..9e508de25 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -5,6 +5,7 @@ import type { TimedMessage } from '../Timed'; import type { Message } from '../messages' import { ID_TP_MAP } from '../messages'; import store from 'App/store'; +import type { LocalStream } from './LocalStream'; import { update, getState } from '../../store'; @@ -349,13 +350,14 @@ export default class AssistManager { private localCallData: { - localStream: MediaStream, + localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void } | null = null - call(localStream: MediaStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function { + + call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function { this.localCallData = { localStream, onStream, @@ -379,7 +381,17 @@ export default class AssistManager { //console.log('calling...', this.localCallData.localStream) - const call = this.peer.call(this.peerID, this.localCallData.localStream); + const call = this.peer.call(this.peerID, this.localCallData.localStream.stream); + 
this.localCallData.localStream.onVideoTrack(vTrack => { + const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") + if (!sender) { + //logger.warn("No video sender found") + return + } + //logger.log("sender found:", sender) + sender.replaceTrack(vTrack) + }) + call.on('stream', stream => { update({ calling: CallingState.True }); this.localCallData && this.localCallData.onStream(stream); @@ -388,7 +400,9 @@ export default class AssistManager { }); this.md.overlay.addEventListener("mousemove", this.onMouseMove) + }); + //call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track)) call.on("close", this.localCallData.onCallEnd); call.on("error", (e) => { diff --git a/frontend/app/player/MessageDistributor/managers/LocalStream.ts b/frontend/app/player/MessageDistributor/managers/LocalStream.ts new file mode 100644 index 000000000..63f01ad58 --- /dev/null +++ b/frontend/app/player/MessageDistributor/managers/LocalStream.ts @@ -0,0 +1,85 @@ +declare global { + interface HTMLCanvasElement { + captureStream(frameRate?: number): MediaStream; + } +} + +function dummyTrack(): MediaStreamTrack { + const canvas = document.createElement("canvas")//, { width: 0, height: 0}) + canvas.width=canvas.height=2 // Doesn't work when 1 (?!) + const ctx = canvas.getContext('2d'); + ctx?.fillRect(0, 0, canvas.width, canvas.height); + requestAnimationFrame(function draw(){ + ctx?.fillRect(0,0, canvas.width, canvas.height) + requestAnimationFrame(draw); + }); + // Also works. Probably it should be done once connected. + //setTimeout(() => { ctx?.fillRect(0,0, canvas.width, canvas.height) }, 4000) + return canvas.captureStream(60).getTracks()[0]; +} + +export default function RequestLocalStream(): Promise { + return navigator.mediaDevices.getUserMedia({ audio:true }) + .then(aStream => { + const aTrack = aStream.getAudioTracks()[0] + if (!aTrack) { throw new Error("No audio tracks provided") } + return new _LocalStream(aTrack) + }) +} + +class _LocalStream { + private mediaRequested: boolean = false + readonly stream: MediaStream + private readonly vdTrack: MediaStreamTrack + constructor(aTrack: MediaStreamTrack) { + this.vdTrack = dummyTrack() + this.stream = new MediaStream([ aTrack, this.vdTrack ]) + } + + toggleVideo(): Promise { + if (!this.mediaRequested) { + return navigator.mediaDevices.getUserMedia({video:true}) + .then(vStream => { + const vTrack = vStream.getVideoTracks()[0] + if (!vTrack) { + throw new Error("No video track provided") + } + this.stream.addTrack(vTrack) + this.stream.removeTrack(this.vdTrack) + this.mediaRequested = true + if (this.onVideoTrackCb) { + this.onVideoTrackCb(vTrack) + } + return true + }) + .catch(e => { + // TODO: log + return false + }) + } + let enabled = true + this.stream.getVideoTracks().forEach(track => { + track.enabled = enabled = enabled && !track.enabled + }) + return Promise.resolve(enabled) + } + + toggleAudio(): boolean { + let enabled = true + this.stream.getAudioTracks().forEach(track => { + track.enabled = enabled = enabled && !track.enabled + }) + return enabled + } + + private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null + onVideoTrack(cb: (t: MediaStreamTrack) => void) { + this.onVideoTrackCb = cb + } + + stop() { + this.stream.getTracks().forEach(t => t.stop()) + } +} + +export type LocalStream = InstanceType diff --git a/tracker/tracker-assist/.npmignore b/tracker/tracker-assist/.npmignore index a3e81897a..038eacb24 100644 --- a/tracker/tracker-assist/.npmignore +++ 
b/tracker/tracker-assist/.npmignore @@ -3,3 +3,4 @@ tsconfig-cjs.json tsconfig.json .prettierrc.json .cache +layout diff --git a/tracker/tracker-assist/layout/css/bootstrap.min.css b/tracker/tracker-assist/layout/css/bootstrap.min.css new file mode 100644 index 000000000..376294960 --- /dev/null +++ b/tracker/tracker-assist/layout/css/bootstrap.min.css @@ -0,0 +1,7 @@ +@charset "UTF-8";/*! + * Bootstrap v5.0.0-beta3 (https://getbootstrap.com/) + * Copyright 2011-2021 The Bootstrap Authors + * Copyright 2011-2021 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */:root{--bs-blue:#0d6efd;--bs-indigo:#6610f2;--bs-purple:#6f42c1;--bs-pink:#d63384;--bs-red:#dc3545;--bs-orange:#fd7e14;--bs-yellow:#ffc107;--bs-green:#198754;--bs-teal:#20c997;--bs-cyan:#0dcaf0;--bs-white:#fff;--bs-gray:#6c757d;--bs-gray-dark:#343a40;--bs-primary:#0d6efd;--bs-secondary:#6c757d;--bs-success:#198754;--bs-info:#0dcaf0;--bs-warning:#ffc107;--bs-danger:#dc3545;--bs-light:#f8f9fa;--bs-dark:#212529;--bs-font-sans-serif:system-ui,-apple-system,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans","Liberation Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--bs-font-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--bs-gradient:linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0))}*,::after,::before{box-sizing:border-box}@media (prefers-reduced-motion:no-preference){:root{scroll-behavior:smooth}}body{margin:0;font-family:var(--bs-font-sans-serif);font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:transparent}hr{margin:1rem 0;color:inherit;background-color:currentColor;border:0;opacity:.25}hr:not([size]){height:1px}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem;font-weight:500;line-height:1.2}.h1,h1{font-size:calc(1.375rem + 1.5vw)}@media (min-width:1200px){.h1,h1{font-size:2.5rem}}.h2,h2{font-size:calc(1.325rem + .9vw)}@media (min-width:1200px){.h2,h2{font-size:2rem}}.h3,h3{font-size:calc(1.3rem + .6vw)}@media (min-width:1200px){.h3,h3{font-size:1.75rem}}.h4,h4{font-size:calc(1.275rem + .3vw)}@media (min-width:1200px){.h4,h4{font-size:1.5rem}}.h5,h5{font-size:1.25rem}.h6,h6{font-size:1rem}p{margin-top:0;margin-bottom:1rem}abbr[data-bs-original-title],abbr[title]{-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul{padding-left:2rem}dl,ol,ul{margin-top:0;margin-bottom:1rem}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}b,strong{font-weight:bolder}.small,small{font-size:.875em}.mark,mark{padding:.2em;background-color:#fcf8e3}sub,sup{position:relative;font-size:.75em;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#0d6efd;text-decoration:underline}a:hover{color:#0a58ca}a:not([href]):not([class]),a:not([href]):not([class]):hover{color:inherit;text-decoration:none}code,kbd,pre,samp{font-family:var(--bs-font-monospace);font-size:1em;direction:ltr;unicode-bidi:bidi-override}pre{display:block;margin-top:0;margin-bottom:1rem;overflow:auto;font-size:.875em}pre 
code{font-size:inherit;color:inherit;word-break:normal}code{font-size:.875em;color:#d63384;word-wrap:break-word}a>code{color:inherit}kbd{padding:.2rem .4rem;font-size:.875em;color:#fff;background-color:#212529;border-radius:.2rem}kbd kbd{padding:0;font-size:1em;font-weight:700}figure{margin:0 0 1rem}img,svg{vertical-align:middle}table{caption-side:bottom;border-collapse:collapse}caption{padding-top:.5rem;padding-bottom:.5rem;color:#6c757d;text-align:left}th{text-align:inherit;text-align:-webkit-match-parent}tbody,td,tfoot,th,thead,tr{border-color:inherit;border-style:solid;border-width:0}label{display:inline-block}button{border-radius:0}button:focus:not(:focus-visible){outline:0}button,input,optgroup,select,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,select{text-transform:none}[role=button]{cursor:pointer}select{word-wrap:normal}select:disabled{opacity:1}[list]::-webkit-calendar-picker-indicator{display:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled),button:not(:disabled){cursor:pointer}::-moz-focus-inner{padding:0;border-style:none}textarea{resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{float:left;width:100%;padding:0;margin-bottom:.5rem;font-size:calc(1.275rem + .3vw);line-height:inherit}@media (min-width:1200px){legend{font-size:1.5rem}}legend+*{clear:left}::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-fields-wrapper,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-minute,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-text,::-webkit-datetime-edit-year-field{padding:0}::-webkit-inner-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:textfield}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-color-swatch-wrapper{padding:0}::file-selector-button{font:inherit}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}iframe{border:0}summary{display:list-item;cursor:pointer}progress{vertical-align:baseline}[hidden]{display:none!important}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:calc(1.625rem + 4.5vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-1{font-size:5rem}}.display-2{font-size:calc(1.575rem + 3.9vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-2{font-size:4.5rem}}.display-3{font-size:calc(1.525rem + 3.3vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-3{font-size:4rem}}.display-4{font-size:calc(1.475rem + 2.7vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-4{font-size:3.5rem}}.display-5{font-size:calc(1.425rem + 2.1vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-5{font-size:3rem}}.display-6{font-size:calc(1.375rem + 1.5vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-6{font-size:2.5rem}}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:.875em;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote>:last-child{margin-bottom:0}.blockquote-footer{margin-top:-1rem;margin-bottom:1rem;font-size:.875em;color:#6c757d}.blockquote-footer::before{content:"— "}.img-fluid{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid 
#dee2e6;border-radius:.25rem;max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:.875em;color:#6c757d}.container,.container-fluid,.container-lg,.container-md,.container-sm,.container-xl,.container-xxl{width:100%;padding-right:var(--bs-gutter-x,.75rem);padding-left:var(--bs-gutter-x,.75rem);margin-right:auto;margin-left:auto}@media (min-width:576px){.container,.container-sm{max-width:540px}}@media (min-width:768px){.container,.container-md,.container-sm{max-width:720px}}@media (min-width:992px){.container,.container-lg,.container-md,.container-sm{max-width:960px}}@media (min-width:1200px){.container,.container-lg,.container-md,.container-sm,.container-xl{max-width:1140px}}@media (min-width:1400px){.container,.container-lg,.container-md,.container-sm,.container-xl,.container-xxl{max-width:1320px}}.row{--bs-gutter-x:1.5rem;--bs-gutter-y:0;display:flex;flex-wrap:wrap;margin-top:calc(var(--bs-gutter-y) * -1);margin-right:calc(var(--bs-gutter-x)/ -2);margin-left:calc(var(--bs-gutter-x)/ -2)}.row>*{flex-shrink:0;width:100%;max-width:100%;padding-right:calc(var(--bs-gutter-x)/ 2);padding-left:calc(var(--bs-gutter-x)/ 2);margin-top:var(--bs-gutter-y)}.col{flex:1 0 0%}.row-cols-auto>*{flex:0 0 auto;width:auto}.row-cols-1>*{flex:0 0 auto;width:100%}.row-cols-2>*{flex:0 0 auto;width:50%}.row-cols-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-4>*{flex:0 0 auto;width:25%}.row-cols-5>*{flex:0 0 auto;width:20%}.row-cols-6>*{flex:0 0 auto;width:16.6666666667%}.col-auto{flex:0 0 auto;width:auto}.col-1{flex:0 0 auto;width:8.3333333333%}.col-2{flex:0 0 auto;width:16.6666666667%}.col-3{flex:0 0 auto;width:25%}.col-4{flex:0 0 auto;width:33.3333333333%}.col-5{flex:0 0 auto;width:41.6666666667%}.col-6{flex:0 0 auto;width:50%}.col-7{flex:0 0 auto;width:58.3333333333%}.col-8{flex:0 0 auto;width:66.6666666667%}.col-9{flex:0 0 auto;width:75%}.col-10{flex:0 0 auto;width:83.3333333333%}.col-11{flex:0 0 auto;width:91.6666666667%}.col-12{flex:0 0 auto;width:100%}.offset-1{margin-left:8.3333333333%}.offset-2{margin-left:16.6666666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.3333333333%}.offset-5{margin-left:41.6666666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.3333333333%}.offset-8{margin-left:66.6666666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.3333333333%}.offset-11{margin-left:91.6666666667%}.g-0,.gx-0{--bs-gutter-x:0}.g-0,.gy-0{--bs-gutter-y:0}.g-1,.gx-1{--bs-gutter-x:0.25rem}.g-1,.gy-1{--bs-gutter-y:0.25rem}.g-2,.gx-2{--bs-gutter-x:0.5rem}.g-2,.gy-2{--bs-gutter-y:0.5rem}.g-3,.gx-3{--bs-gutter-x:1rem}.g-3,.gy-3{--bs-gutter-y:1rem}.g-4,.gx-4{--bs-gutter-x:1.5rem}.g-4,.gy-4{--bs-gutter-y:1.5rem}.g-5,.gx-5{--bs-gutter-x:3rem}.g-5,.gy-5{--bs-gutter-y:3rem}@media (min-width:576px){.col-sm{flex:1 0 0%}.row-cols-sm-auto>*{flex:0 0 auto;width:auto}.row-cols-sm-1>*{flex:0 0 auto;width:100%}.row-cols-sm-2>*{flex:0 0 auto;width:50%}.row-cols-sm-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-sm-4>*{flex:0 0 auto;width:25%}.row-cols-sm-5>*{flex:0 0 auto;width:20%}.row-cols-sm-6>*{flex:0 0 auto;width:16.6666666667%}.col-sm-auto{flex:0 0 auto;width:auto}.col-sm-1{flex:0 0 auto;width:8.3333333333%}.col-sm-2{flex:0 0 auto;width:16.6666666667%}.col-sm-3{flex:0 0 auto;width:25%}.col-sm-4{flex:0 0 auto;width:33.3333333333%}.col-sm-5{flex:0 0 auto;width:41.6666666667%}.col-sm-6{flex:0 0 auto;width:50%}.col-sm-7{flex:0 0 auto;width:58.3333333333%}.col-sm-8{flex:0 0 auto;width:66.6666666667%}.col-sm-9{flex:0 0 
auto;width:75%}.col-sm-10{flex:0 0 auto;width:83.3333333333%}.col-sm-11{flex:0 0 auto;width:91.6666666667%}.col-sm-12{flex:0 0 auto;width:100%}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.3333333333%}.offset-sm-2{margin-left:16.6666666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.3333333333%}.offset-sm-5{margin-left:41.6666666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.3333333333%}.offset-sm-8{margin-left:66.6666666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.3333333333%}.offset-sm-11{margin-left:91.6666666667%}.g-sm-0,.gx-sm-0{--bs-gutter-x:0}.g-sm-0,.gy-sm-0{--bs-gutter-y:0}.g-sm-1,.gx-sm-1{--bs-gutter-x:0.25rem}.g-sm-1,.gy-sm-1{--bs-gutter-y:0.25rem}.g-sm-2,.gx-sm-2{--bs-gutter-x:0.5rem}.g-sm-2,.gy-sm-2{--bs-gutter-y:0.5rem}.g-sm-3,.gx-sm-3{--bs-gutter-x:1rem}.g-sm-3,.gy-sm-3{--bs-gutter-y:1rem}.g-sm-4,.gx-sm-4{--bs-gutter-x:1.5rem}.g-sm-4,.gy-sm-4{--bs-gutter-y:1.5rem}.g-sm-5,.gx-sm-5{--bs-gutter-x:3rem}.g-sm-5,.gy-sm-5{--bs-gutter-y:3rem}}@media (min-width:768px){.col-md{flex:1 0 0%}.row-cols-md-auto>*{flex:0 0 auto;width:auto}.row-cols-md-1>*{flex:0 0 auto;width:100%}.row-cols-md-2>*{flex:0 0 auto;width:50%}.row-cols-md-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-md-4>*{flex:0 0 auto;width:25%}.row-cols-md-5>*{flex:0 0 auto;width:20%}.row-cols-md-6>*{flex:0 0 auto;width:16.6666666667%}.col-md-auto{flex:0 0 auto;width:auto}.col-md-1{flex:0 0 auto;width:8.3333333333%}.col-md-2{flex:0 0 auto;width:16.6666666667%}.col-md-3{flex:0 0 auto;width:25%}.col-md-4{flex:0 0 auto;width:33.3333333333%}.col-md-5{flex:0 0 auto;width:41.6666666667%}.col-md-6{flex:0 0 auto;width:50%}.col-md-7{flex:0 0 auto;width:58.3333333333%}.col-md-8{flex:0 0 auto;width:66.6666666667%}.col-md-9{flex:0 0 auto;width:75%}.col-md-10{flex:0 0 auto;width:83.3333333333%}.col-md-11{flex:0 0 auto;width:91.6666666667%}.col-md-12{flex:0 0 auto;width:100%}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.3333333333%}.offset-md-2{margin-left:16.6666666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.3333333333%}.offset-md-5{margin-left:41.6666666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.3333333333%}.offset-md-8{margin-left:66.6666666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.3333333333%}.offset-md-11{margin-left:91.6666666667%}.g-md-0,.gx-md-0{--bs-gutter-x:0}.g-md-0,.gy-md-0{--bs-gutter-y:0}.g-md-1,.gx-md-1{--bs-gutter-x:0.25rem}.g-md-1,.gy-md-1{--bs-gutter-y:0.25rem}.g-md-2,.gx-md-2{--bs-gutter-x:0.5rem}.g-md-2,.gy-md-2{--bs-gutter-y:0.5rem}.g-md-3,.gx-md-3{--bs-gutter-x:1rem}.g-md-3,.gy-md-3{--bs-gutter-y:1rem}.g-md-4,.gx-md-4{--bs-gutter-x:1.5rem}.g-md-4,.gy-md-4{--bs-gutter-y:1.5rem}.g-md-5,.gx-md-5{--bs-gutter-x:3rem}.g-md-5,.gy-md-5{--bs-gutter-y:3rem}}@media (min-width:992px){.col-lg{flex:1 0 0%}.row-cols-lg-auto>*{flex:0 0 auto;width:auto}.row-cols-lg-1>*{flex:0 0 auto;width:100%}.row-cols-lg-2>*{flex:0 0 auto;width:50%}.row-cols-lg-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-lg-4>*{flex:0 0 auto;width:25%}.row-cols-lg-5>*{flex:0 0 auto;width:20%}.row-cols-lg-6>*{flex:0 0 auto;width:16.6666666667%}.col-lg-auto{flex:0 0 auto;width:auto}.col-lg-1{flex:0 0 auto;width:8.3333333333%}.col-lg-2{flex:0 0 auto;width:16.6666666667%}.col-lg-3{flex:0 0 auto;width:25%}.col-lg-4{flex:0 0 auto;width:33.3333333333%}.col-lg-5{flex:0 0 auto;width:41.6666666667%}.col-lg-6{flex:0 0 auto;width:50%}.col-lg-7{flex:0 0 auto;width:58.3333333333%}.col-lg-8{flex:0 0 auto;width:66.6666666667%}.col-lg-9{flex:0 0 
auto;width:75%}.col-lg-10{flex:0 0 auto;width:83.3333333333%}.col-lg-11{flex:0 0 auto;width:91.6666666667%}.col-lg-12{flex:0 0 auto;width:100%}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.3333333333%}.offset-lg-2{margin-left:16.6666666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.3333333333%}.offset-lg-5{margin-left:41.6666666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.3333333333%}.offset-lg-8{margin-left:66.6666666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.3333333333%}.offset-lg-11{margin-left:91.6666666667%}.g-lg-0,.gx-lg-0{--bs-gutter-x:0}.g-lg-0,.gy-lg-0{--bs-gutter-y:0}.g-lg-1,.gx-lg-1{--bs-gutter-x:0.25rem}.g-lg-1,.gy-lg-1{--bs-gutter-y:0.25rem}.g-lg-2,.gx-lg-2{--bs-gutter-x:0.5rem}.g-lg-2,.gy-lg-2{--bs-gutter-y:0.5rem}.g-lg-3,.gx-lg-3{--bs-gutter-x:1rem}.g-lg-3,.gy-lg-3{--bs-gutter-y:1rem}.g-lg-4,.gx-lg-4{--bs-gutter-x:1.5rem}.g-lg-4,.gy-lg-4{--bs-gutter-y:1.5rem}.g-lg-5,.gx-lg-5{--bs-gutter-x:3rem}.g-lg-5,.gy-lg-5{--bs-gutter-y:3rem}}@media (min-width:1200px){.col-xl{flex:1 0 0%}.row-cols-xl-auto>*{flex:0 0 auto;width:auto}.row-cols-xl-1>*{flex:0 0 auto;width:100%}.row-cols-xl-2>*{flex:0 0 auto;width:50%}.row-cols-xl-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-xl-4>*{flex:0 0 auto;width:25%}.row-cols-xl-5>*{flex:0 0 auto;width:20%}.row-cols-xl-6>*{flex:0 0 auto;width:16.6666666667%}.col-xl-auto{flex:0 0 auto;width:auto}.col-xl-1{flex:0 0 auto;width:8.3333333333%}.col-xl-2{flex:0 0 auto;width:16.6666666667%}.col-xl-3{flex:0 0 auto;width:25%}.col-xl-4{flex:0 0 auto;width:33.3333333333%}.col-xl-5{flex:0 0 auto;width:41.6666666667%}.col-xl-6{flex:0 0 auto;width:50%}.col-xl-7{flex:0 0 auto;width:58.3333333333%}.col-xl-8{flex:0 0 auto;width:66.6666666667%}.col-xl-9{flex:0 0 auto;width:75%}.col-xl-10{flex:0 0 auto;width:83.3333333333%}.col-xl-11{flex:0 0 auto;width:91.6666666667%}.col-xl-12{flex:0 0 auto;width:100%}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.3333333333%}.offset-xl-2{margin-left:16.6666666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.3333333333%}.offset-xl-5{margin-left:41.6666666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.3333333333%}.offset-xl-8{margin-left:66.6666666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.3333333333%}.offset-xl-11{margin-left:91.6666666667%}.g-xl-0,.gx-xl-0{--bs-gutter-x:0}.g-xl-0,.gy-xl-0{--bs-gutter-y:0}.g-xl-1,.gx-xl-1{--bs-gutter-x:0.25rem}.g-xl-1,.gy-xl-1{--bs-gutter-y:0.25rem}.g-xl-2,.gx-xl-2{--bs-gutter-x:0.5rem}.g-xl-2,.gy-xl-2{--bs-gutter-y:0.5rem}.g-xl-3,.gx-xl-3{--bs-gutter-x:1rem}.g-xl-3,.gy-xl-3{--bs-gutter-y:1rem}.g-xl-4,.gx-xl-4{--bs-gutter-x:1.5rem}.g-xl-4,.gy-xl-4{--bs-gutter-y:1.5rem}.g-xl-5,.gx-xl-5{--bs-gutter-x:3rem}.g-xl-5,.gy-xl-5{--bs-gutter-y:3rem}}@media (min-width:1400px){.col-xxl{flex:1 0 0%}.row-cols-xxl-auto>*{flex:0 0 auto;width:auto}.row-cols-xxl-1>*{flex:0 0 auto;width:100%}.row-cols-xxl-2>*{flex:0 0 auto;width:50%}.row-cols-xxl-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-xxl-4>*{flex:0 0 auto;width:25%}.row-cols-xxl-5>*{flex:0 0 auto;width:20%}.row-cols-xxl-6>*{flex:0 0 auto;width:16.6666666667%}.col-xxl-auto{flex:0 0 auto;width:auto}.col-xxl-1{flex:0 0 auto;width:8.3333333333%}.col-xxl-2{flex:0 0 auto;width:16.6666666667%}.col-xxl-3{flex:0 0 auto;width:25%}.col-xxl-4{flex:0 0 auto;width:33.3333333333%}.col-xxl-5{flex:0 0 auto;width:41.6666666667%}.col-xxl-6{flex:0 0 auto;width:50%}.col-xxl-7{flex:0 0 auto;width:58.3333333333%}.col-xxl-8{flex:0 0 
auto;width:66.6666666667%}.col-xxl-9{flex:0 0 auto;width:75%}.col-xxl-10{flex:0 0 auto;width:83.3333333333%}.col-xxl-11{flex:0 0 auto;width:91.6666666667%}.col-xxl-12{flex:0 0 auto;width:100%}.offset-xxl-0{margin-left:0}.offset-xxl-1{margin-left:8.3333333333%}.offset-xxl-2{margin-left:16.6666666667%}.offset-xxl-3{margin-left:25%}.offset-xxl-4{margin-left:33.3333333333%}.offset-xxl-5{margin-left:41.6666666667%}.offset-xxl-6{margin-left:50%}.offset-xxl-7{margin-left:58.3333333333%}.offset-xxl-8{margin-left:66.6666666667%}.offset-xxl-9{margin-left:75%}.offset-xxl-10{margin-left:83.3333333333%}.offset-xxl-11{margin-left:91.6666666667%}.g-xxl-0,.gx-xxl-0{--bs-gutter-x:0}.g-xxl-0,.gy-xxl-0{--bs-gutter-y:0}.g-xxl-1,.gx-xxl-1{--bs-gutter-x:0.25rem}.g-xxl-1,.gy-xxl-1{--bs-gutter-y:0.25rem}.g-xxl-2,.gx-xxl-2{--bs-gutter-x:0.5rem}.g-xxl-2,.gy-xxl-2{--bs-gutter-y:0.5rem}.g-xxl-3,.gx-xxl-3{--bs-gutter-x:1rem}.g-xxl-3,.gy-xxl-3{--bs-gutter-y:1rem}.g-xxl-4,.gx-xxl-4{--bs-gutter-x:1.5rem}.g-xxl-4,.gy-xxl-4{--bs-gutter-y:1.5rem}.g-xxl-5,.gx-xxl-5{--bs-gutter-x:3rem}.g-xxl-5,.gy-xxl-5{--bs-gutter-y:3rem}}.table{--bs-table-bg:transparent;--bs-table-striped-color:#212529;--bs-table-striped-bg:rgba(0, 0, 0, 0.05);--bs-table-active-color:#212529;--bs-table-active-bg:rgba(0, 0, 0, 0.1);--bs-table-hover-color:#212529;--bs-table-hover-bg:rgba(0, 0, 0, 0.075);width:100%;margin-bottom:1rem;color:#212529;vertical-align:top;border-color:#dee2e6}.table>:not(caption)>*>*{padding:.5rem .5rem;background-color:var(--bs-table-bg);border-bottom-width:1px;box-shadow:inset 0 0 0 9999px var(--bs-table-accent-bg)}.table>tbody{vertical-align:inherit}.table>thead{vertical-align:bottom}.table>:not(:last-child)>:last-child>*{border-bottom-color:currentColor}.caption-top{caption-side:top}.table-sm>:not(caption)>*>*{padding:.25rem .25rem}.table-bordered>:not(caption)>*{border-width:1px 0}.table-bordered>:not(caption)>*>*{border-width:0 
1px}.table-borderless>:not(caption)>*>*{border-bottom-width:0}.table-striped>tbody>tr:nth-of-type(odd){--bs-table-accent-bg:var(--bs-table-striped-bg);color:var(--bs-table-striped-color)}.table-active{--bs-table-accent-bg:var(--bs-table-active-bg);color:var(--bs-table-active-color)}.table-hover>tbody>tr:hover{--bs-table-accent-bg:var(--bs-table-hover-bg);color:var(--bs-table-hover-color)}.table-primary{--bs-table-bg:#cfe2ff;--bs-table-striped-bg:#c5d7f2;--bs-table-striped-color:#000;--bs-table-active-bg:#bacbe6;--bs-table-active-color:#000;--bs-table-hover-bg:#bfd1ec;--bs-table-hover-color:#000;color:#000;border-color:#bacbe6}.table-secondary{--bs-table-bg:#e2e3e5;--bs-table-striped-bg:#d7d8da;--bs-table-striped-color:#000;--bs-table-active-bg:#cbccce;--bs-table-active-color:#000;--bs-table-hover-bg:#d1d2d4;--bs-table-hover-color:#000;color:#000;border-color:#cbccce}.table-success{--bs-table-bg:#d1e7dd;--bs-table-striped-bg:#c7dbd2;--bs-table-striped-color:#000;--bs-table-active-bg:#bcd0c7;--bs-table-active-color:#000;--bs-table-hover-bg:#c1d6cc;--bs-table-hover-color:#000;color:#000;border-color:#bcd0c7}.table-info{--bs-table-bg:#cff4fc;--bs-table-striped-bg:#c5e8ef;--bs-table-striped-color:#000;--bs-table-active-bg:#badce3;--bs-table-active-color:#000;--bs-table-hover-bg:#bfe2e9;--bs-table-hover-color:#000;color:#000;border-color:#badce3}.table-warning{--bs-table-bg:#fff3cd;--bs-table-striped-bg:#f2e7c3;--bs-table-striped-color:#000;--bs-table-active-bg:#e6dbb9;--bs-table-active-color:#000;--bs-table-hover-bg:#ece1be;--bs-table-hover-color:#000;color:#000;border-color:#e6dbb9}.table-danger{--bs-table-bg:#f8d7da;--bs-table-striped-bg:#eccccf;--bs-table-striped-color:#000;--bs-table-active-bg:#dfc2c4;--bs-table-active-color:#000;--bs-table-hover-bg:#e5c7ca;--bs-table-hover-color:#000;color:#000;border-color:#dfc2c4}.table-light{--bs-table-bg:#f8f9fa;--bs-table-striped-bg:#ecedee;--bs-table-striped-color:#000;--bs-table-active-bg:#dfe0e1;--bs-table-active-color:#000;--bs-table-hover-bg:#e5e6e7;--bs-table-hover-color:#000;color:#000;border-color:#dfe0e1}.table-dark{--bs-table-bg:#212529;--bs-table-striped-bg:#2c3034;--bs-table-striped-color:#fff;--bs-table-active-bg:#373b3e;--bs-table-active-color:#fff;--bs-table-hover-bg:#323539;--bs-table-hover-color:#fff;color:#fff;border-color:#373b3e}.table-responsive{overflow-x:auto;-webkit-overflow-scrolling:touch}@media (max-width:575.98px){.table-responsive-sm{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:767.98px){.table-responsive-md{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:991.98px){.table-responsive-lg{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:1199.98px){.table-responsive-xl{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:1399.98px){.table-responsive-xxl{overflow-x:auto;-webkit-overflow-scrolling:touch}}.form-label{margin-bottom:.5rem}.col-form-label{padding-top:calc(.375rem + 1px);padding-bottom:calc(.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:calc(.5rem + 1px);padding-bottom:calc(.5rem + 1px);font-size:1.25rem}.col-form-label-sm{padding-top:calc(.25rem + 1px);padding-bottom:calc(.25rem + 1px);font-size:.875rem}.form-text{margin-top:.25rem;font-size:.875em;color:#6c757d}.form-control{display:block;width:100%;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-clip:padding-box;border:1px solid 
#ced4da;-webkit-appearance:none;-moz-appearance:none;appearance:none;border-radius:.25rem;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control{transition:none}}.form-control[type=file]{overflow:hidden}.form-control[type=file]:not(:disabled):not([readonly]){cursor:pointer}.form-control:focus{color:#212529;background-color:#fff;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-control::-webkit-date-and-time-value{height:1.5em}.form-control::-moz-placeholder{color:#6c757d;opacity:1}.form-control::placeholder{color:#6c757d;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#e9ecef;opacity:1}.form-control::file-selector-button{padding:.375rem .75rem;margin:-.375rem -.75rem;-webkit-margin-end:.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control::file-selector-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::file-selector-button{background-color:#dde0e3}.form-control::-webkit-file-upload-button{padding:.375rem .75rem;margin:-.375rem -.75rem;-webkit-margin-end:.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;-webkit-transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control::-webkit-file-upload-button{-webkit-transition:none;transition:none}}.form-control:hover:not(:disabled):not([readonly])::-webkit-file-upload-button{background-color:#dde0e3}.form-control-plaintext{display:block;width:100%;padding:.375rem 0;margin-bottom:0;line-height:1.5;color:#212529;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-lg,.form-control-plaintext.form-control-sm{padding-right:0;padding-left:0}.form-control-sm{min-height:calc(1.5em + .5rem + 2px);padding:.25rem .5rem;font-size:.875rem;border-radius:.2rem}.form-control-sm::file-selector-button{padding:.25rem .5rem;margin:-.25rem -.5rem;-webkit-margin-end:.5rem;margin-inline-end:.5rem}.form-control-sm::-webkit-file-upload-button{padding:.25rem .5rem;margin:-.25rem -.5rem;-webkit-margin-end:.5rem;margin-inline-end:.5rem}.form-control-lg{min-height:calc(1.5em + 1rem + 2px);padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.form-control-lg::file-selector-button{padding:.5rem 1rem;margin:-.5rem -1rem;-webkit-margin-end:1rem;margin-inline-end:1rem}.form-control-lg::-webkit-file-upload-button{padding:.5rem 1rem;margin:-.5rem -1rem;-webkit-margin-end:1rem;margin-inline-end:1rem}textarea.form-control{min-height:calc(1.5em + .75rem + 2px)}textarea.form-control-sm{min-height:calc(1.5em + .5rem + 2px)}textarea.form-control-lg{min-height:calc(1.5em + 1rem + 
2px)}.form-control-color{max-width:3rem;height:auto;padding:.375rem}.form-control-color:not(:disabled):not([readonly]){cursor:pointer}.form-control-color::-moz-color-swatch{height:1.5em;border-radius:.25rem}.form-control-color::-webkit-color-swatch{height:1.5em;border-radius:.25rem}.form-select{display:block;width:100%;padding:.375rem 2.25rem .375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right .75rem center;background-size:16px 12px;border:1px solid #ced4da;border-radius:.25rem;-webkit-appearance:none;-moz-appearance:none;appearance:none}.form-select:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-select[multiple],.form-select[size]:not([size="1"]){padding-right:.75rem;background-image:none}.form-select:disabled{background-color:#e9ecef}.form-select:-moz-focusring{color:transparent;text-shadow:0 0 0 #212529}.form-select-sm{padding-top:.25rem;padding-bottom:.25rem;padding-left:.5rem;font-size:.875rem}.form-select-lg{padding-top:.5rem;padding-bottom:.5rem;padding-left:1rem;font-size:1.25rem}.form-check{display:block;min-height:1.5rem;padding-left:1.5em;margin-bottom:.125rem}.form-check .form-check-input{float:left;margin-left:-1.5em}.form-check-input{width:1em;height:1em;margin-top:.25em;vertical-align:top;background-color:#fff;background-repeat:no-repeat;background-position:center;background-size:contain;border:1px solid rgba(0,0,0,.25);-webkit-appearance:none;-moz-appearance:none;appearance:none;-webkit-print-color-adjust:exact;color-adjust:exact}.form-check-input[type=checkbox]{border-radius:.25em}.form-check-input[type=radio]{border-radius:50%}.form-check-input:active{filter:brightness(90%)}.form-check-input:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-check-input:checked{background-color:#0d6efd;border-color:#0d6efd}.form-check-input:checked[type=checkbox]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10l3 3l6-6'/%3e%3c/svg%3e")}.form-check-input:checked[type=radio]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='2' fill='%23fff'/%3e%3c/svg%3e")}.form-check-input[type=checkbox]:indeterminate{background-color:#0d6efd;border-color:#0d6efd;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10h8'/%3e%3c/svg%3e")}.form-check-input:disabled{pointer-events:none;filter:none;opacity:.5}.form-check-input:disabled~.form-check-label,.form-check-input[disabled]~.form-check-label{opacity:.5}.form-switch{padding-left:2.5em}.form-switch .form-check-input{width:2em;margin-left:-2.5em;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%280, 0, 0, 0.25%29'/%3e%3c/svg%3e");background-position:left center;border-radius:2em;transition:background-position .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-switch 
.form-check-input{transition:none}}.form-switch .form-check-input:focus{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2386b7fe'/%3e%3c/svg%3e")}.form-switch .form-check-input:checked{background-position:right center;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23fff'/%3e%3c/svg%3e")}.form-check-inline{display:inline-block;margin-right:1rem}.btn-check{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.btn-check:disabled+.btn,.btn-check[disabled]+.btn{pointer-events:none;filter:none;opacity:.65}.form-range{width:100%;height:1.5rem;padding:0;background-color:transparent;-webkit-appearance:none;-moz-appearance:none;appearance:none}.form-range:focus{outline:0}.form-range:focus::-webkit-slider-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range::-moz-focus-outer{border:0}.form-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-.25rem;background-color:#0d6efd;border:0;border-radius:1rem;-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;-webkit-appearance:none;appearance:none}@media (prefers-reduced-motion:reduce){.form-range::-webkit-slider-thumb{-webkit-transition:none;transition:none}}.form-range::-webkit-slider-thumb:active{background-color:#b6d4fe}.form-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range::-moz-range-thumb{width:1rem;height:1rem;background-color:#0d6efd;border:0;border-radius:1rem;-moz-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;-moz-appearance:none;appearance:none}@media (prefers-reduced-motion:reduce){.form-range::-moz-range-thumb{-moz-transition:none;transition:none}}.form-range::-moz-range-thumb:active{background-color:#b6d4fe}.form-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range:disabled{pointer-events:none}.form-range:disabled::-webkit-slider-thumb{background-color:#adb5bd}.form-range:disabled::-moz-range-thumb{background-color:#adb5bd}.form-floating{position:relative}.form-floating>.form-control,.form-floating>.form-select{height:calc(3.5rem + 2px);padding:1rem .75rem}.form-floating>label{position:absolute;top:0;left:0;height:100%;padding:1rem .75rem;pointer-events:none;border:1px solid transparent;transform-origin:0 0;transition:opacity .1s ease-in-out,transform .1s ease-in-out}@media 
(prefers-reduced-motion:reduce){.form-floating>label{transition:none}}.form-floating>.form-control::-moz-placeholder{color:transparent}.form-floating>.form-control::placeholder{color:transparent}.form-floating>.form-control:not(:-moz-placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:focus,.form-floating>.form-control:not(:placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:-webkit-autofill{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-select{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:not(:-moz-placeholder-shown)~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.form-floating>.form-control:focus~label,.form-floating>.form-control:not(:placeholder-shown)~label,.form-floating>.form-select~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.form-floating>.form-control:-webkit-autofill~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.input-group{position:relative;display:flex;flex-wrap:wrap;align-items:stretch;width:100%}.input-group>.form-control,.input-group>.form-select{position:relative;flex:1 1 auto;width:1%;min-width:0}.input-group>.form-control:focus,.input-group>.form-select:focus{z-index:3}.input-group .btn{position:relative;z-index:2}.input-group .btn:focus{z-index:3}.input-group-text{display:flex;align-items:center;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:center;white-space:nowrap;background-color:#e9ecef;border:1px solid #ced4da;border-radius:.25rem}.input-group-lg>.btn,.input-group-lg>.form-control,.input-group-lg>.form-select,.input-group-lg>.input-group-text{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.input-group-sm>.btn,.input-group-sm>.form-control,.input-group-sm>.form-select,.input-group-sm>.input-group-text{padding:.25rem .5rem;font-size:.875rem;border-radius:.2rem}.input-group-lg>.form-select,.input-group-sm>.form-select{padding-right:3rem}.input-group:not(.has-validation)>.dropdown-toggle:nth-last-child(n+3),.input-group:not(.has-validation)>:not(:last-child):not(.dropdown-toggle):not(.dropdown-menu){border-top-right-radius:0;border-bottom-right-radius:0}.input-group.has-validation>.dropdown-toggle:nth-last-child(n+4),.input-group.has-validation>:nth-last-child(n+3):not(.dropdown-toggle):not(.dropdown-menu){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>:not(:first-child):not(.dropdown-menu):not(.valid-tooltip):not(.valid-feedback):not(.invalid-tooltip):not(.invalid-feedback){margin-left:-1px;border-top-left-radius:0;border-bottom-left-radius:0}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:.875em;color:#198754}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;color:#fff;background-color:rgba(25,135,84,.9);border-radius:.25rem}.is-valid~.valid-feedback,.is-valid~.valid-tooltip,.was-validated :valid~.valid-feedback,.was-validated :valid~.valid-tooltip{display:block}.form-control.is-valid,.was-validated .form-control:valid{border-color:#198754;padding-right:calc(1.5em + .75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 
4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(.375em + .1875rem) center;background-size:calc(.75em + .375rem) calc(.75em + .375rem)}.form-control.is-valid:focus,.was-validated .form-control:valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.was-validated textarea.form-control:valid,textarea.form-control.is-valid{padding-right:calc(1.5em + .75rem);background-position:top calc(.375em + .1875rem) right calc(.375em + .1875rem)}.form-select.is-valid,.was-validated .form-select:valid{border-color:#198754;padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(.75em + .375rem) calc(.75em + .375rem)}.form-select.is-valid:focus,.was-validated .form-select:valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.form-check-input.is-valid,.was-validated .form-check-input:valid{border-color:#198754}.form-check-input.is-valid:checked,.was-validated .form-check-input:valid:checked{background-color:#198754}.form-check-input.is-valid:focus,.was-validated .form-check-input:valid:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.form-check-input.is-valid~.form-check-label,.was-validated .form-check-input:valid~.form-check-label{color:#198754}.form-check-inline .form-check-input~.valid-feedback{margin-left:.5em}.input-group .form-control.is-valid,.input-group .form-select.is-valid,.was-validated .input-group .form-control:valid,.was-validated .input-group .form-select:valid{z-index:3}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:.875em;color:#dc3545}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;color:#fff;background-color:rgba(220,53,69,.9);border-radius:.25rem}.is-invalid~.invalid-feedback,.is-invalid~.invalid-tooltip,.was-validated :invalid~.invalid-feedback,.was-validated :invalid~.invalid-tooltip{display:block}.form-control.is-invalid,.was-validated .form-control:invalid{border-color:#dc3545;padding-right:calc(1.5em + .75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(.375em + .1875rem) center;background-size:calc(.75em + .375rem) calc(.75em + .375rem)}.form-control.is-invalid:focus,.was-validated .form-control:invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.was-validated textarea.form-control:invalid,textarea.form-control.is-invalid{padding-right:calc(1.5em + .75rem);background-position:top calc(.375em + .1875rem) right calc(.375em + .1875rem)}.form-select.is-invalid,.was-validated .form-select:invalid{border-color:#dc3545;padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg 
xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(.75em + .375rem) calc(.75em + .375rem)}.form-select.is-invalid:focus,.was-validated .form-select:invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.form-check-input.is-invalid,.was-validated .form-check-input:invalid{border-color:#dc3545}.form-check-input.is-invalid:checked,.was-validated .form-check-input:invalid:checked{background-color:#dc3545}.form-check-input.is-invalid:focus,.was-validated .form-check-input:invalid:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.form-check-input.is-invalid~.form-check-label,.was-validated .form-check-input:invalid~.form-check-label{color:#dc3545}.form-check-inline .form-check-input~.invalid-feedback{margin-left:.5em}.input-group .form-control.is-invalid,.input-group .form-select.is-invalid,.was-validated .input-group .form-control:invalid,.was-validated .input-group .form-select:invalid{z-index:3}.btn{display:inline-block;font-weight:400;line-height:1.5;color:#212529;text-align:center;text-decoration:none;vertical-align:middle;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;user-select:none;background-color:transparent;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.btn{transition:none}}.btn:hover{color:#212529}.btn-check:focus+.btn,.btn:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.btn.disabled,.btn:disabled,fieldset:disabled .btn{pointer-events:none;opacity:.65}.btn-primary{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-primary:hover{color:#fff;background-color:#0b5ed7;border-color:#0a58ca}.btn-check:focus+.btn-primary,.btn-primary:focus{color:#fff;background-color:#0b5ed7;border-color:#0a58ca;box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-check:active+.btn-primary,.btn-check:checked+.btn-primary,.btn-primary.active,.btn-primary:active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#0a58ca;border-color:#0a53be}.btn-check:active+.btn-primary:focus,.btn-check:checked+.btn-primary:focus,.btn-primary.active:focus,.btn-primary:active:focus,.show>.btn-primary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-primary.disabled,.btn-primary:disabled{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-secondary{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-secondary:hover{color:#fff;background-color:#5c636a;border-color:#565e64}.btn-check:focus+.btn-secondary,.btn-secondary:focus{color:#fff;background-color:#5c636a;border-color:#565e64;box-shadow:0 0 0 .25rem 
rgba(130,138,145,.5)}.btn-check:active+.btn-secondary,.btn-check:checked+.btn-secondary,.btn-secondary.active,.btn-secondary:active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#565e64;border-color:#51585e}.btn-check:active+.btn-secondary:focus,.btn-check:checked+.btn-secondary:focus,.btn-secondary.active:focus,.btn-secondary:active:focus,.show>.btn-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(130,138,145,.5)}.btn-secondary.disabled,.btn-secondary:disabled{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-success{color:#fff;background-color:#198754;border-color:#198754}.btn-success:hover{color:#fff;background-color:#157347;border-color:#146c43}.btn-check:focus+.btn-success,.btn-success:focus{color:#fff;background-color:#157347;border-color:#146c43;box-shadow:0 0 0 .25rem rgba(60,153,110,.5)}.btn-check:active+.btn-success,.btn-check:checked+.btn-success,.btn-success.active,.btn-success:active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#146c43;border-color:#13653f}.btn-check:active+.btn-success:focus,.btn-check:checked+.btn-success:focus,.btn-success.active:focus,.btn-success:active:focus,.show>.btn-success.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(60,153,110,.5)}.btn-success.disabled,.btn-success:disabled{color:#fff;background-color:#198754;border-color:#198754}.btn-info{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-info:hover{color:#000;background-color:#31d2f2;border-color:#25cff2}.btn-check:focus+.btn-info,.btn-info:focus{color:#000;background-color:#31d2f2;border-color:#25cff2;box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-check:active+.btn-info,.btn-check:checked+.btn-info,.btn-info.active,.btn-info:active,.show>.btn-info.dropdown-toggle{color:#000;background-color:#3dd5f3;border-color:#25cff2}.btn-check:active+.btn-info:focus,.btn-check:checked+.btn-info:focus,.btn-info.active:focus,.btn-info:active:focus,.show>.btn-info.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-info.disabled,.btn-info:disabled{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-warning{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-warning:hover{color:#000;background-color:#ffca2c;border-color:#ffc720}.btn-check:focus+.btn-warning,.btn-warning:focus{color:#000;background-color:#ffca2c;border-color:#ffc720;box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-check:active+.btn-warning,.btn-check:checked+.btn-warning,.btn-warning.active,.btn-warning:active,.show>.btn-warning.dropdown-toggle{color:#000;background-color:#ffcd39;border-color:#ffc720}.btn-check:active+.btn-warning:focus,.btn-check:checked+.btn-warning:focus,.btn-warning.active:focus,.btn-warning:active:focus,.show>.btn-warning.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-warning.disabled,.btn-warning:disabled{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-danger{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-danger:hover{color:#fff;background-color:#bb2d3b;border-color:#b02a37}.btn-check:focus+.btn-danger,.btn-danger:focus{color:#fff;background-color:#bb2d3b;border-color:#b02a37;box-shadow:0 0 0 .25rem 
rgba(225,83,97,.5)}.btn-check:active+.btn-danger,.btn-check:checked+.btn-danger,.btn-danger.active,.btn-danger:active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#b02a37;border-color:#a52834}.btn-check:active+.btn-danger:focus,.btn-check:checked+.btn-danger:focus,.btn-danger.active:focus,.btn-danger:active:focus,.show>.btn-danger.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(225,83,97,.5)}.btn-danger.disabled,.btn-danger:disabled{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-light{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-light:hover{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:focus+.btn-light,.btn-light:focus{color:#000;background-color:#f9fafb;border-color:#f9fafb;box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-check:active+.btn-light,.btn-check:checked+.btn-light,.btn-light.active,.btn-light:active,.show>.btn-light.dropdown-toggle{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:active+.btn-light:focus,.btn-check:checked+.btn-light:focus,.btn-light.active:focus,.btn-light:active:focus,.show>.btn-light.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-light.disabled,.btn-light:disabled{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-dark{color:#fff;background-color:#212529;border-color:#212529}.btn-dark:hover{color:#fff;background-color:#1c1f23;border-color:#1a1e21}.btn-check:focus+.btn-dark,.btn-dark:focus{color:#fff;background-color:#1c1f23;border-color:#1a1e21;box-shadow:0 0 0 .25rem rgba(66,70,73,.5)}.btn-check:active+.btn-dark,.btn-check:checked+.btn-dark,.btn-dark.active,.btn-dark:active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#1a1e21;border-color:#191c1f}.btn-check:active+.btn-dark:focus,.btn-check:checked+.btn-dark:focus,.btn-dark.active:focus,.btn-dark:active:focus,.show>.btn-dark.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(66,70,73,.5)}.btn-dark.disabled,.btn-dark:disabled{color:#fff;background-color:#212529;border-color:#212529}.btn-outline-primary{color:#0d6efd;border-color:#0d6efd}.btn-outline-primary:hover{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:focus+.btn-outline-primary,.btn-outline-primary:focus{box-shadow:0 0 0 .25rem rgba(13,110,253,.5)}.btn-check:active+.btn-outline-primary,.btn-check:checked+.btn-outline-primary,.btn-outline-primary.active,.btn-outline-primary.dropdown-toggle.show,.btn-outline-primary:active{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:active+.btn-outline-primary:focus,.btn-check:checked+.btn-outline-primary:focus,.btn-outline-primary.active:focus,.btn-outline-primary.dropdown-toggle.show:focus,.btn-outline-primary:active:focus{box-shadow:0 0 0 .25rem rgba(13,110,253,.5)}.btn-outline-primary.disabled,.btn-outline-primary:disabled{color:#0d6efd;background-color:transparent}.btn-outline-secondary{color:#6c757d;border-color:#6c757d}.btn-outline-secondary:hover{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:focus+.btn-outline-secondary,.btn-outline-secondary:focus{box-shadow:0 0 0 .25rem 
rgba(108,117,125,.5)}.btn-check:active+.btn-outline-secondary,.btn-check:checked+.btn-outline-secondary,.btn-outline-secondary.active,.btn-outline-secondary.dropdown-toggle.show,.btn-outline-secondary:active{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:active+.btn-outline-secondary:focus,.btn-check:checked+.btn-outline-secondary:focus,.btn-outline-secondary.active:focus,.btn-outline-secondary.dropdown-toggle.show:focus,.btn-outline-secondary:active:focus{box-shadow:0 0 0 .25rem rgba(108,117,125,.5)}.btn-outline-secondary.disabled,.btn-outline-secondary:disabled{color:#6c757d;background-color:transparent}.btn-outline-success{color:#198754;border-color:#198754}.btn-outline-success:hover{color:#fff;background-color:#198754;border-color:#198754}.btn-check:focus+.btn-outline-success,.btn-outline-success:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-check:active+.btn-outline-success,.btn-check:checked+.btn-outline-success,.btn-outline-success.active,.btn-outline-success.dropdown-toggle.show,.btn-outline-success:active{color:#fff;background-color:#198754;border-color:#198754}.btn-check:active+.btn-outline-success:focus,.btn-check:checked+.btn-outline-success:focus,.btn-outline-success.active:focus,.btn-outline-success.dropdown-toggle.show:focus,.btn-outline-success:active:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-outline-success.disabled,.btn-outline-success:disabled{color:#198754;background-color:transparent}.btn-outline-info{color:#0dcaf0;border-color:#0dcaf0}.btn-outline-info:hover{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:focus+.btn-outline-info,.btn-outline-info:focus{box-shadow:0 0 0 .25rem rgba(13,202,240,.5)}.btn-check:active+.btn-outline-info,.btn-check:checked+.btn-outline-info,.btn-outline-info.active,.btn-outline-info.dropdown-toggle.show,.btn-outline-info:active{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:active+.btn-outline-info:focus,.btn-check:checked+.btn-outline-info:focus,.btn-outline-info.active:focus,.btn-outline-info.dropdown-toggle.show:focus,.btn-outline-info:active:focus{box-shadow:0 0 0 .25rem rgba(13,202,240,.5)}.btn-outline-info.disabled,.btn-outline-info:disabled{color:#0dcaf0;background-color:transparent}.btn-outline-warning{color:#ffc107;border-color:#ffc107}.btn-outline-warning:hover{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:focus+.btn-outline-warning,.btn-outline-warning:focus{box-shadow:0 0 0 .25rem rgba(255,193,7,.5)}.btn-check:active+.btn-outline-warning,.btn-check:checked+.btn-outline-warning,.btn-outline-warning.active,.btn-outline-warning.dropdown-toggle.show,.btn-outline-warning:active{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:active+.btn-outline-warning:focus,.btn-check:checked+.btn-outline-warning:focus,.btn-outline-warning.active:focus,.btn-outline-warning.dropdown-toggle.show:focus,.btn-outline-warning:active:focus{box-shadow:0 0 0 .25rem rgba(255,193,7,.5)}.btn-outline-warning.disabled,.btn-outline-warning:disabled{color:#ffc107;background-color:transparent}.btn-outline-danger{color:#dc3545;border-color:#dc3545}.btn-outline-danger:hover{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:focus+.btn-outline-danger,.btn-outline-danger:focus{box-shadow:0 0 0 .25rem 
rgba(220,53,69,.5)}.btn-check:active+.btn-outline-danger,.btn-check:checked+.btn-outline-danger,.btn-outline-danger.active,.btn-outline-danger.dropdown-toggle.show,.btn-outline-danger:active{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:active+.btn-outline-danger:focus,.btn-check:checked+.btn-outline-danger:focus,.btn-outline-danger.active:focus,.btn-outline-danger.dropdown-toggle.show:focus,.btn-outline-danger:active:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.5)}.btn-outline-danger.disabled,.btn-outline-danger:disabled{color:#dc3545;background-color:transparent}.btn-outline-light{color:#f8f9fa;border-color:#f8f9fa}.btn-outline-light:hover{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:focus+.btn-outline-light,.btn-outline-light:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-check:active+.btn-outline-light,.btn-check:checked+.btn-outline-light,.btn-outline-light.active,.btn-outline-light.dropdown-toggle.show,.btn-outline-light:active{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:active+.btn-outline-light:focus,.btn-check:checked+.btn-outline-light:focus,.btn-outline-light.active:focus,.btn-outline-light.dropdown-toggle.show:focus,.btn-outline-light:active:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-outline-light.disabled,.btn-outline-light:disabled{color:#f8f9fa;background-color:transparent}.btn-outline-dark{color:#212529;border-color:#212529}.btn-outline-dark:hover{color:#fff;background-color:#212529;border-color:#212529}.btn-check:focus+.btn-outline-dark,.btn-outline-dark:focus{box-shadow:0 0 0 .25rem rgba(33,37,41,.5)}.btn-check:active+.btn-outline-dark,.btn-check:checked+.btn-outline-dark,.btn-outline-dark.active,.btn-outline-dark.dropdown-toggle.show,.btn-outline-dark:active{color:#fff;background-color:#212529;border-color:#212529}.btn-check:active+.btn-outline-dark:focus,.btn-check:checked+.btn-outline-dark:focus,.btn-outline-dark.active:focus,.btn-outline-dark.dropdown-toggle.show:focus,.btn-outline-dark:active:focus{box-shadow:0 0 0 .25rem rgba(33,37,41,.5)}.btn-outline-dark.disabled,.btn-outline-dark:disabled{color:#212529;background-color:transparent}.btn-link{font-weight:400;color:#0d6efd;text-decoration:underline}.btn-link:hover{color:#0a58ca}.btn-link.disabled,.btn-link:disabled{color:#6c757d}.btn-group-lg>.btn,.btn-lg{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.btn-group-sm>.btn,.btn-sm{padding:.25rem .5rem;font-size:.875rem;border-radius:.2rem}.fade{transition:opacity .15s linear}@media (prefers-reduced-motion:reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{height:0;overflow:hidden;transition:height .35s ease}@media (prefers-reduced-motion:reduce){.collapsing{transition:none}}.dropdown,.dropend,.dropstart,.dropup{position:relative}.dropdown-toggle{white-space:nowrap}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;top:100%;z-index:1000;display:none;min-width:10rem;padding:.5rem 0;margin:0;font-size:1rem;color:#212529;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid 
rgba(0,0,0,.15);border-radius:.25rem}.dropdown-menu[data-bs-popper]{left:0;margin-top:.125rem}.dropdown-menu-start{--bs-position:start}.dropdown-menu-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-end{--bs-position:end}.dropdown-menu-end[data-bs-popper]{right:0;left:auto}@media (min-width:576px){.dropdown-menu-sm-start{--bs-position:start}.dropdown-menu-sm-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-sm-end{--bs-position:end}.dropdown-menu-sm-end[data-bs-popper]{right:0;left:auto}}@media (min-width:768px){.dropdown-menu-md-start{--bs-position:start}.dropdown-menu-md-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-md-end{--bs-position:end}.dropdown-menu-md-end[data-bs-popper]{right:0;left:auto}}@media (min-width:992px){.dropdown-menu-lg-start{--bs-position:start}.dropdown-menu-lg-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-lg-end{--bs-position:end}.dropdown-menu-lg-end[data-bs-popper]{right:0;left:auto}}@media (min-width:1200px){.dropdown-menu-xl-start{--bs-position:start}.dropdown-menu-xl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xl-end{--bs-position:end}.dropdown-menu-xl-end[data-bs-popper]{right:0;left:auto}}@media (min-width:1400px){.dropdown-menu-xxl-start{--bs-position:start}.dropdown-menu-xxl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xxl-end{--bs-position:end}.dropdown-menu-xxl-end[data-bs-popper]{right:0;left:auto}}.dropup .dropdown-menu[data-bs-popper]{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-menu{top:0;right:auto;left:100%}.dropend .dropdown-menu[data-bs-popper]{margin-top:0;margin-left:.125rem}.dropend .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropend .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-toggle::after{vertical-align:0}.dropstart .dropdown-menu{top:0;right:100%;left:auto}.dropstart .dropdown-menu[data-bs-popper]{margin-top:0;margin-right:.125rem}.dropstart .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:""}.dropstart .dropdown-toggle::after{display:none}.dropstart .dropdown-toggle::before{display:inline-block;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropstart .dropdown-toggle:empty::after{margin-left:0}.dropstart .dropdown-toggle::before{vertical-align:0}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid rgba(0,0,0,.15)}.dropdown-item{display:block;width:100%;padding:.25rem 1rem;clear:both;font-weight:400;color:#212529;text-align:inherit;text-decoration:none;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:focus,.dropdown-item:hover{color:#1e2125;background-color:#e9ecef}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background-color:#0d6efd}.dropdown-item.disabled,.dropdown-item:disabled{color:#adb5bd;pointer-events:none;background-color:transparent}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 
1rem;margin-bottom:0;font-size:.875rem;color:#6c757d;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1rem;color:#212529}.dropdown-menu-dark{color:#dee2e6;background-color:#343a40;border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item{color:#dee2e6}.dropdown-menu-dark .dropdown-item:focus,.dropdown-menu-dark .dropdown-item:hover{color:#fff;background-color:rgba(255,255,255,.15)}.dropdown-menu-dark .dropdown-item.active,.dropdown-menu-dark .dropdown-item:active{color:#fff;background-color:#0d6efd}.dropdown-menu-dark .dropdown-item.disabled,.dropdown-menu-dark .dropdown-item:disabled{color:#adb5bd}.dropdown-menu-dark .dropdown-divider{border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item-text{color:#dee2e6}.dropdown-menu-dark .dropdown-header{color:#adb5bd}.btn-group,.btn-group-vertical{position:relative;display:inline-flex;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;flex:1 1 auto}.btn-group-vertical>.btn-check:checked+.btn,.btn-group-vertical>.btn-check:focus+.btn,.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn-check:checked+.btn,.btn-group>.btn-check:focus+.btn,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:1}.btn-toolbar{display:flex;flex-wrap:wrap;justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>.btn-group:not(:first-child),.btn-group>.btn:not(:first-child){margin-left:-1px}.btn-group>.btn-group:not(:last-child)>.btn,.btn-group>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:not(:first-child)>.btn,.btn-group>.btn:nth-child(n+3),.btn-group>:not(.btn-check)+.btn{border-top-left-radius:0;border-bottom-left-radius:0}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropend .dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after{margin-left:0}.dropstart .dropdown-toggle-split::before{margin-right:0}.btn-group-sm>.btn+.dropdown-toggle-split,.btn-sm+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-group-lg>.btn+.dropdown-toggle-split,.btn-lg+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group-vertical{flex-direction:column;align-items:flex-start;justify-content:center}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group{width:100%}.btn-group-vertical>.btn-group:not(:first-child),.btn-group-vertical>.btn:not(:first-child){margin-top:-1px}.btn-group-vertical>.btn-group:not(:last-child)>.btn,.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child)>.btn,.btn-group-vertical>.btn~.btn{border-top-left-radius:0;border-top-right-radius:0}.nav{display:flex;flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem;text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out}@media (prefers-reduced-motion:reduce){.nav-link{transition:none}}.nav-link.disabled{color:#6c757d;pointer-events:none;cursor:default}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-link{margin-bottom:-1px;background:0 0;border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:focus,.nav-tabs .nav-link:hover{border-color:#e9ecef #e9ecef 
#dee2e6;isolation:isolate}.nav-tabs .nav-link.disabled{color:#6c757d;background-color:transparent;border-color:transparent}.nav-tabs .nav-item.show .nav-link,.nav-tabs .nav-link.active{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{background:0 0;border:0;border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#0d6efd}.nav-fill .nav-item,.nav-fill>.nav-link{flex:1 1 auto;text-align:center}.nav-justified .nav-item,.nav-justified>.nav-link{flex-basis:0;flex-grow:1;text-align:center}.nav-fill .nav-item .nav-link,.nav-justified .nav-item .nav-link{width:100%}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:flex;flex-wrap:wrap;align-items:center;justify-content:space-between;padding-top:.5rem;padding-bottom:.5rem}.navbar>.container,.navbar>.container-fluid,.navbar>.container-lg,.navbar>.container-md,.navbar>.container-sm,.navbar>.container-xl,.navbar>.container-xxl{display:flex;flex-wrap:inherit;align-items:center;justify-content:space-between}.navbar-brand{padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;text-decoration:none;white-space:nowrap}.navbar-nav{display:flex;flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static}.navbar-text{padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{flex-basis:100%;flex-grow:1;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem;transition:box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.navbar-toggler{transition:none}}.navbar-toggler:hover{text-decoration:none}.navbar-toggler:focus{text-decoration:none;outline:0;box-shadow:0 0 0 .25rem}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;background-repeat:no-repeat;background-position:center;background-size:100%}.navbar-nav-scroll{max-height:var(--bs-scroll-height,75vh);overflow-y:auto}@media (min-width:576px){.navbar-expand-sm{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm .navbar-nav-scroll{overflow:visible}.navbar-expand-sm .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}}@media (min-width:768px){.navbar-expand-md{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md .navbar-nav-scroll{overflow:visible}.navbar-expand-md .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}}@media (min-width:992px){.navbar-expand-lg{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg .navbar-nav-scroll{overflow:visible}.navbar-expand-lg 
.navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}}@media (min-width:1200px){.navbar-expand-xl{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl .navbar-nav-scroll{overflow:visible}.navbar-expand-xl .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}}@media (min-width:1400px){.navbar-expand-xxl{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-xxl .navbar-nav{flex-direction:row}.navbar-expand-xxl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xxl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xxl .navbar-nav-scroll{overflow:visible}.navbar-expand-xxl .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-xxl .navbar-toggler{display:none}}.navbar-expand{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand .navbar-nav{flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand .navbar-nav-scroll{overflow:visible}.navbar-expand .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-light .navbar-brand{color:rgba(0,0,0,.9)}.navbar-light .navbar-brand:focus,.navbar-light .navbar-brand:hover{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.55)}.navbar-light .navbar-nav .nav-link:focus,.navbar-light .navbar-nav .nav-link:hover{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .nav-link.active,.navbar-light .navbar-nav .show>.nav-link{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.55);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%280, 0, 0, 0.55%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-light .navbar-text{color:rgba(0,0,0,.55)}.navbar-light .navbar-text a,.navbar-light .navbar-text a:focus,.navbar-light .navbar-text a:hover{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand{color:#fff}.navbar-dark .navbar-brand:focus,.navbar-dark .navbar-brand:hover{color:#fff}.navbar-dark .navbar-nav .nav-link{color:rgba(255,255,255,.55)}.navbar-dark .navbar-nav .nav-link:focus,.navbar-dark .navbar-nav .nav-link:hover{color:rgba(255,255,255,.75)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(255,255,255,.25)}.navbar-dark .navbar-nav .nav-link.active,.navbar-dark .navbar-nav .show>.nav-link{color:#fff}.navbar-dark .navbar-toggler{color:rgba(255,255,255,.55);border-color:rgba(255,255,255,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%28255, 255, 255, 0.55%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-dark .navbar-text{color:rgba(255,255,255,.55)}.navbar-dark .navbar-text a,.navbar-dark .navbar-text a:focus,.navbar-dark .navbar-text 
a:hover{color:#fff}.card{position:relative;display:flex;flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0;border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card>.list-group:last-child{border-bottom-width:0;border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{flex:1 1 auto;padding:1rem 1rem}.card-title{margin-bottom:.5rem}.card-subtitle{margin-top:-.25rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link:hover{text-decoration:none}.card-link+.card-link{margin-left:1rem}.card-header{padding:.5rem 1rem;margin-bottom:0;background-color:rgba(0,0,0,.03);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:calc(.25rem - 1px) calc(.25rem - 1px) 0 0}.card-footer{padding:.5rem 1rem;background-color:rgba(0,0,0,.03);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 calc(.25rem - 1px) calc(.25rem - 1px)}.card-header-tabs{margin-right:-.5rem;margin-bottom:-.5rem;margin-left:-.5rem;border-bottom:0}.card-header-pills{margin-right:-.5rem;margin-left:-.5rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem;border-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom,.card-img-top{width:100%}.card-img,.card-img-top{border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom{border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card-group>.card{margin-bottom:.75rem}@media (min-width:576px){.card-group{display:flex;flex-flow:row wrap}.card-group>.card{flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}.card-group>.card:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:not(:last-child) .card-header,.card-group>.card:not(:last-child) .card-img-top{border-top-right-radius:0}.card-group>.card:not(:last-child) .card-footer,.card-group>.card:not(:last-child) .card-img-bottom{border-bottom-right-radius:0}.card-group>.card:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:not(:first-child) .card-header,.card-group>.card:not(:first-child) .card-img-top{border-top-left-radius:0}.card-group>.card:not(:first-child) .card-footer,.card-group>.card:not(:first-child) .card-img-bottom{border-bottom-left-radius:0}}.accordion-button{position:relative;display:flex;align-items:center;width:100%;padding:1rem 1.25rem;font-size:1rem;color:#212529;text-align:left;background-color:#fff;border:0;border-radius:0;overflow-anchor:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,border-radius .15s ease}@media (prefers-reduced-motion:reduce){.accordion-button{transition:none}}.accordion-button:not(.collapsed){color:#0c63e4;background-color:#e7f1ff;box-shadow:inset 0 -1px 0 rgba(0,0,0,.125)}.accordion-button:not(.collapsed)::after{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%230c63e4'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 
0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");transform:rotate(180deg)}.accordion-button::after{flex-shrink:0;width:1.25rem;height:1.25rem;margin-left:auto;content:"";background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23212529'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-size:1.25rem;transition:transform .2s ease-in-out}@media (prefers-reduced-motion:reduce){.accordion-button::after{transition:none}}.accordion-button:hover{z-index:2}.accordion-button:focus{z-index:3;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.accordion-header{margin-bottom:0}.accordion-item{margin-bottom:-1px;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.accordion-item:first-of-type{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.accordion-item:first-of-type .accordion-button{border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.accordion-item:last-of-type{margin-bottom:0;border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-item:last-of-type .accordion-button.collapsed{border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.accordion-item:last-of-type .accordion-collapse{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-body{padding:1rem 1.25rem}.accordion-flush .accordion-collapse{border-width:0}.accordion-flush .accordion-item{border-right:0;border-left:0;border-radius:0}.accordion-flush .accordion-item:first-child{border-top:0}.accordion-flush .accordion-item:last-child{border-bottom:0}.accordion-flush .accordion-item .accordion-button{border-radius:0}.breadcrumb{display:flex;flex-wrap:wrap;padding:0 0;margin-bottom:1rem;list-style:none}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{float:left;padding-right:.5rem;color:#6c757d;content:var(--bs-breadcrumb-divider, "/")}.breadcrumb-item.active{color:#6c757d}.pagination{display:flex;padding-left:0;list-style:none}.page-link{position:relative;display:block;color:#0d6efd;text-decoration:none;background-color:#fff;border:1px solid #dee2e6;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.page-link{transition:none}}.page-link:hover{z-index:2;color:#0a58ca;background-color:#e9ecef;border-color:#dee2e6}.page-link:focus{z-index:3;color:#0a58ca;background-color:#e9ecef;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.page-item:not(:first-child) .page-link{margin-left:-1px}.page-item.active .page-link{z-index:3;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.page-item.disabled .page-link{color:#6c757d;pointer-events:none;background-color:#fff;border-color:#dee2e6}.page-link{padding:.375rem .75rem}.page-item:first-child .page-link{border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm 
.page-link{padding:.25rem .5rem;font-size:.875rem}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.35em .65em;font-size:.75em;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.alert{position:relative;padding:1rem 1rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:3rem}.alert-dismissible .btn-close{position:absolute;top:0;right:0;z-index:2;padding:1.25rem 1rem}.alert-primary{color:#084298;background-color:#cfe2ff;border-color:#b6d4fe}.alert-primary .alert-link{color:#06357a}.alert-secondary{color:#41464b;background-color:#e2e3e5;border-color:#d3d6d8}.alert-secondary .alert-link{color:#34383c}.alert-success{color:#0f5132;background-color:#d1e7dd;border-color:#badbcc}.alert-success .alert-link{color:#0c4128}.alert-info{color:#055160;background-color:#cff4fc;border-color:#b6effb}.alert-info .alert-link{color:#04414d}.alert-warning{color:#664d03;background-color:#fff3cd;border-color:#ffecb5}.alert-warning .alert-link{color:#523e02}.alert-danger{color:#842029;background-color:#f8d7da;border-color:#f5c2c7}.alert-danger .alert-link{color:#6a1a21}.alert-light{color:#636464;background-color:#fefefe;border-color:#fdfdfe}.alert-light .alert-link{color:#4f5050}.alert-dark{color:#141619;background-color:#d3d3d4;border-color:#bcbebf}.alert-dark .alert-link{color:#101214}@-webkit-keyframes progress-bar-stripes{0%{background-position-x:1rem}}@keyframes progress-bar-stripes{0%{background-position-x:1rem}}.progress{display:flex;height:1rem;overflow:hidden;font-size:.75rem;background-color:#e9ecef;border-radius:.25rem}.progress-bar{display:flex;flex-direction:column;justify-content:center;overflow:hidden;color:#fff;text-align:center;white-space:nowrap;background-color:#0d6efd;transition:width .6s ease}@media (prefers-reduced-motion:reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-size:1rem 1rem}.progress-bar-animated{-webkit-animation:1s linear infinite progress-bar-stripes;animation:1s linear infinite progress-bar-stripes}@media (prefers-reduced-motion:reduce){.progress-bar-animated{-webkit-animation:none;animation:none}}.list-group{display:flex;flex-direction:column;padding-left:0;margin-bottom:0;border-radius:.25rem}.list-group-numbered{list-style-type:none;counter-reset:section}.list-group-numbered>li::before{content:counters(section, ".") ". 
";counter-increment:section}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:focus,.list-group-item-action:hover{z-index:1;color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#212529;background-color:#e9ecef}.list-group-item{position:relative;display:block;padding:.5rem 1rem;color:#212529;text-decoration:none;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:inherit;border-top-right-radius:inherit}.list-group-item:last-child{border-bottom-right-radius:inherit;border-bottom-left-radius:inherit}.list-group-item.disabled,.list-group-item:disabled{color:#6c757d;pointer-events:none;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:-1px;border-top-width:1px}.list-group-horizontal{flex-direction:row}.list-group-horizontal>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}@media (min-width:576px){.list-group-horizontal-sm{flex-direction:row}.list-group-horizontal-sm>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-sm>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:768px){.list-group-horizontal-md{flex-direction:row}.list-group-horizontal-md>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-md>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:992px){.list-group-horizontal-lg{flex-direction:row}.list-group-horizontal-lg>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-lg>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media 
(min-width:1200px){.list-group-horizontal-xl{flex-direction:row}.list-group-horizontal-xl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:1400px){.list-group-horizontal-xxl{flex-direction:row}.list-group-horizontal-xxl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xxl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xxl>.list-group-item.active{margin-top:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}.list-group-flush{border-radius:0}.list-group-flush>.list-group-item{border-width:0 0 1px}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-primary{color:#084298;background-color:#cfe2ff}.list-group-item-primary.list-group-item-action:focus,.list-group-item-primary.list-group-item-action:hover{color:#084298;background-color:#bacbe6}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#084298;border-color:#084298}.list-group-item-secondary{color:#41464b;background-color:#e2e3e5}.list-group-item-secondary.list-group-item-action:focus,.list-group-item-secondary.list-group-item-action:hover{color:#41464b;background-color:#cbccce}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#41464b;border-color:#41464b}.list-group-item-success{color:#0f5132;background-color:#d1e7dd}.list-group-item-success.list-group-item-action:focus,.list-group-item-success.list-group-item-action:hover{color:#0f5132;background-color:#bcd0c7}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#0f5132;border-color:#0f5132}.list-group-item-info{color:#055160;background-color:#cff4fc}.list-group-item-info.list-group-item-action:focus,.list-group-item-info.list-group-item-action:hover{color:#055160;background-color:#badce3}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#055160;border-color:#055160}.list-group-item-warning{color:#664d03;background-color:#fff3cd}.list-group-item-warning.list-group-item-action:focus,.list-group-item-warning.list-group-item-action:hover{color:#664d03;background-color:#e6dbb9}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#664d03;border-color:#664d03}.list-group-item-danger{color:#842029;background-color:#f8d7da}.list-group-item-danger.list-group-item-action:focus,.list-group-item-danger.list-group-item-action:hover{color:#842029;background-color:#dfc2c4}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#842029;border-color:#842029}.list-group-item-light{color:#636464;background-color:#fefefe}.list-group-item-light.list-group-item-action:focus,.list-group-item-light.list-group-item-action:hover{color:#636464;background-color:#e5e5e5}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#636464;border-color:#636464}.list-group-ite
m-dark{color:#141619;background-color:#d3d3d4}.list-group-item-dark.list-group-item-action:focus,.list-group-item-dark.list-group-item-action:hover{color:#141619;background-color:#bebebf}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#141619;border-color:#141619}.btn-close{box-sizing:content-box;width:1em;height:1em;padding:.25em .25em;color:#000;background:transparent url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23000'%3e%3cpath d='M.293.293a1 1 0 011.414 0L8 6.586 14.293.293a1 1 0 111.414 1.414L9.414 8l6.293 6.293a1 1 0 01-1.414 1.414L8 9.414l-6.293 6.293a1 1 0 01-1.414-1.414L6.586 8 .293 1.707a1 1 0 010-1.414z'/%3e%3c/svg%3e") center/1em auto no-repeat;border:0;border-radius:.25rem;opacity:.5}.btn-close:hover{color:#000;text-decoration:none;opacity:.75}.btn-close:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25);opacity:1}.btn-close.disabled,.btn-close:disabled{pointer-events:none;-webkit-user-select:none;-moz-user-select:none;user-select:none;opacity:.25}.btn-close-white{filter:invert(1) grayscale(100%) brightness(200%)}.toast{width:350px;max-width:100%;font-size:.875rem;pointer-events:auto;background-color:rgba(255,255,255,.85);background-clip:padding-box;border:1px solid rgba(0,0,0,.1);box-shadow:0 .5rem 1rem rgba(0,0,0,.15);border-radius:.25rem}.toast:not(.showing):not(.show){opacity:0}.toast.hide{display:none}.toast-container{width:-webkit-max-content;width:-moz-max-content;width:max-content;max-width:100%;pointer-events:none}.toast-container>:not(:last-child){margin-bottom:.75rem}.toast-header{display:flex;align-items:center;padding:.5rem .75rem;color:#6c757d;background-color:rgba(255,255,255,.85);background-clip:padding-box;border-bottom:1px solid rgba(0,0,0,.05);border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.toast-header .btn-close{margin-right:-.375rem;margin-left:.75rem}.toast-body{padding:.75rem;word-wrap:break-word}.modal-open{overflow:hidden}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal{position:fixed;top:0;left:0;z-index:1060;display:none;width:100%;height:100%;overflow:hidden;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{transition:transform .3s ease-out;transform:translate(0,-50px)}@media (prefers-reduced-motion:reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{transform:none}.modal.modal-static .modal-dialog{transform:scale(1.02)}.modal-dialog-scrollable{height:calc(100% - 1rem)}.modal-dialog-scrollable .modal-content{max-height:100%;overflow:hidden}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:flex;align-items:center;min-height:calc(100% - 1rem)}.modal-content{position:relative;display:flex;flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;outline:0}.modal-backdrop{position:fixed;top:0;left:0;z-index:1050;width:100vw;height:100vh;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:flex;flex-shrink:0;align-items:center;justify-content:space-between;padding:1rem 1rem;border-bottom:1px solid #dee2e6;border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.modal-header .btn-close{padding:.5rem .5rem;margin:-.5rem -.5rem -.5rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;flex:1 1 
auto;padding:1rem}.modal-footer{display:flex;flex-wrap:wrap;flex-shrink:0;align-items:center;justify-content:flex-end;padding:.75rem;border-top:1px solid #dee2e6;border-bottom-right-radius:calc(.3rem - 1px);border-bottom-left-radius:calc(.3rem - 1px)}.modal-footer>*{margin:.25rem}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-scrollable{height:calc(100% - 3.5rem)}.modal-dialog-centered{min-height:calc(100% - 3.5rem)}.modal-sm{max-width:300px}}@media (min-width:992px){.modal-lg,.modal-xl{max-width:800px}}@media (min-width:1200px){.modal-xl{max-width:1140px}}.modal-fullscreen{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen .modal-header{border-radius:0}.modal-fullscreen .modal-body{overflow-y:auto}.modal-fullscreen .modal-footer{border-radius:0}@media (max-width:575.98px){.modal-fullscreen-sm-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-sm-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-sm-down .modal-header{border-radius:0}.modal-fullscreen-sm-down .modal-body{overflow-y:auto}.modal-fullscreen-sm-down .modal-footer{border-radius:0}}@media (max-width:767.98px){.modal-fullscreen-md-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-md-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-md-down .modal-header{border-radius:0}.modal-fullscreen-md-down .modal-body{overflow-y:auto}.modal-fullscreen-md-down .modal-footer{border-radius:0}}@media (max-width:991.98px){.modal-fullscreen-lg-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-lg-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-lg-down .modal-header{border-radius:0}.modal-fullscreen-lg-down .modal-body{overflow-y:auto}.modal-fullscreen-lg-down .modal-footer{border-radius:0}}@media (max-width:1199.98px){.modal-fullscreen-xl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xl-down .modal-header{border-radius:0}.modal-fullscreen-xl-down .modal-body{overflow-y:auto}.modal-fullscreen-xl-down .modal-footer{border-radius:0}}@media (max-width:1399.98px){.modal-fullscreen-xxl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xxl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xxl-down .modal-header{border-radius:0}.modal-fullscreen-xxl-down .modal-body{overflow-y:auto}.modal-fullscreen-xxl-down .modal-footer{border-radius:0}}.tooltip{position:absolute;z-index:1080;display:block;margin:0;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .tooltip-arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .tooltip-arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-auto[data-popper-placement^=top],.bs-tooltip-top{padding:.4rem 0}.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow,.bs-tooltip-top .tooltip-arrow{bottom:0}.bs-tooltip-auto[data-popper-placement^=top] 
.tooltip-arrow::before,.bs-tooltip-top .tooltip-arrow::before{top:-1px;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-auto[data-popper-placement^=right],.bs-tooltip-end{padding:0 .4rem}.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow,.bs-tooltip-end .tooltip-arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow::before,.bs-tooltip-end .tooltip-arrow::before{right:-1px;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-auto[data-popper-placement^=bottom],.bs-tooltip-bottom{padding:.4rem 0}.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow,.bs-tooltip-bottom .tooltip-arrow{top:0}.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow::before,.bs-tooltip-bottom .tooltip-arrow::before{bottom:-1px;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-auto[data-popper-placement^=left],.bs-tooltip-start{padding:0 .4rem}.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow,.bs-tooltip-start .tooltip-arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow::before,.bs-tooltip-start .tooltip-arrow::before{left:-1px;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0;z-index:1070;display:block;max-width:276px;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem}.popover .popover-arrow{position:absolute;display:block;width:1rem;height:.5rem}.popover .popover-arrow::after,.popover .popover-arrow::before{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow,.bs-popover-top>.popover-arrow{bottom:calc(-.5rem - 1px)}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before,.bs-popover-top>.popover-arrow::before{bottom:0;border-width:.5rem .5rem 0;border-top-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after,.bs-popover-top>.popover-arrow::after{bottom:1px;border-width:.5rem .5rem 0;border-top-color:#fff}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow,.bs-popover-end>.popover-arrow{left:calc(-.5rem - 1px);width:.5rem;height:1rem}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before,.bs-popover-end>.popover-arrow::before{left:0;border-width:.5rem .5rem .5rem 0;border-right-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after,.bs-popover-end>.popover-arrow::after{left:1px;border-width:.5rem .5rem .5rem 0;border-right-color:#fff}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow,.bs-popover-bottom>.popover-arrow{top:calc(-.5rem - 1px)}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before,.bs-popover-bottom>.popover-arrow::before{top:0;border-width:0 .5rem .5rem .5rem;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after,.bs-popover-bottom>.popover-arrow::after{top:1px;border-width:0 .5rem .5rem 
.5rem;border-bottom-color:#fff}.bs-popover-auto[data-popper-placement^=bottom] .popover-header::before,.bs-popover-bottom .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-.5rem;content:"";border-bottom:1px solid #f0f0f0}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow,.bs-popover-start>.popover-arrow{right:calc(-.5rem - 1px);width:.5rem;height:1rem}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before,.bs-popover-start>.popover-arrow::before{right:0;border-width:.5rem 0 .5rem .5rem;border-left-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after,.bs-popover-start>.popover-arrow::after{right:1px;border-width:.5rem 0 .5rem .5rem;border-left-color:#fff}.popover-header{padding:.5rem 1rem;margin-bottom:0;font-size:1rem;background-color:#f0f0f0;border-bottom:1px solid #d8d8d8;border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:1rem 1rem;color:#212529}.carousel{position:relative}.carousel.pointer-event{touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;-webkit-backface-visibility:hidden;backface-visibility:hidden;transition:transform .6s ease-in-out}@media (prefers-reduced-motion:reduce){.carousel-item{transition:none}}.carousel-item-next,.carousel-item-prev,.carousel-item.active{display:block}.active.carousel-item-end,.carousel-item-next:not(.carousel-item-start){transform:translateX(100%)}.active.carousel-item-start,.carousel-item-prev:not(.carousel-item-end){transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;transform:none}.carousel-fade .carousel-item-next.carousel-item-start,.carousel-fade .carousel-item-prev.carousel-item-end,.carousel-fade .carousel-item.active{z-index:1;opacity:1}.carousel-fade .active.carousel-item-end,.carousel-fade .active.carousel-item-start{z-index:0;opacity:0;transition:opacity 0s .6s}@media (prefers-reduced-motion:reduce){.carousel-fade .active.carousel-item-end,.carousel-fade .active.carousel-item-start{transition:none}}.carousel-control-next,.carousel-control-prev{position:absolute;top:0;bottom:0;z-index:1;display:flex;align-items:center;justify-content:center;width:15%;padding:0;color:#fff;text-align:center;background:0 0;border:0;opacity:.5;transition:opacity .15s ease}@media (prefers-reduced-motion:reduce){.carousel-control-next,.carousel-control-prev{transition:none}}.carousel-control-next:focus,.carousel-control-next:hover,.carousel-control-prev:focus,.carousel-control-prev:hover{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-next-icon,.carousel-control-prev-icon{display:inline-block;width:2rem;height:2rem;background-repeat:no-repeat;background-position:50%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 
0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:2;display:flex;justify-content:center;padding:0;margin-right:15%;margin-bottom:1rem;margin-left:15%;list-style:none}.carousel-indicators [data-bs-target]{box-sizing:content-box;flex:0 1 auto;width:30px;height:3px;padding:0;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border:0;border-top:10px solid transparent;border-bottom:10px solid transparent;opacity:.5;transition:opacity .6s ease}@media (prefers-reduced-motion:reduce){.carousel-indicators [data-bs-target]{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:1.25rem;left:15%;padding-top:1.25rem;padding-bottom:1.25rem;color:#fff;text-align:center}.carousel-dark .carousel-control-next-icon,.carousel-dark .carousel-control-prev-icon{filter:invert(1) grayscale(100)}.carousel-dark .carousel-indicators [data-bs-target]{background-color:#000}.carousel-dark .carousel-caption{color:#000}@-webkit-keyframes spinner-border{to{transform:rotate(360deg)}}@keyframes spinner-border{to{transform:rotate(360deg)}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;border:.25em solid currentColor;border-right-color:transparent;border-radius:50%;-webkit-animation:.75s linear infinite spinner-border;animation:.75s linear infinite spinner-border}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@-webkit-keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;background-color:currentColor;border-radius:50%;opacity:0;-webkit-animation:.75s linear infinite spinner-grow;animation:.75s linear infinite spinner-grow}.spinner-grow-sm{width:1rem;height:1rem}@media (prefers-reduced-motion:reduce){.spinner-border,.spinner-grow{-webkit-animation-duration:1.5s;animation-duration:1.5s}}.offcanvas{position:fixed;bottom:0;z-index:1040;display:flex;flex-direction:column;max-width:100%;visibility:hidden;background-color:#fff;background-clip:padding-box;outline:0;transition:transform .3s ease-in-out}@media (prefers-reduced-motion:reduce){.offcanvas{transition:none}}.offcanvas-header{display:flex;justify-content:space-between;padding:1rem 1rem}.offcanvas-header .btn-close{padding:.5rem .5rem;margin:-.5rem -.5rem -.5rem auto}.offcanvas-title{margin-bottom:0;line-height:1.5}.offcanvas-body{flex-grow:1;padding:1rem 1rem;overflow-y:auto}.offcanvas-start{top:0;left:0;width:400px;border-right:1px solid rgba(0,0,0,.2);transform:translateX(-100%)}.offcanvas-end{top:0;right:0;width:400px;border-left:1px solid rgba(0,0,0,.2);transform:translateX(100%)}.offcanvas-bottom{right:0;left:0;height:30vh;max-height:100%;border-top:1px solid 
rgba(0,0,0,.2);transform:translateY(100%)}.offcanvas.show{transform:none}.offcanvas-backdrop::before{position:fixed;top:0;left:0;z-index:1039;width:100vw;height:100vh;content:"";background-color:rgba(0,0,0,.5)}.clearfix::after{display:block;clear:both;content:""}.link-primary{color:#0d6efd}.link-primary:focus,.link-primary:hover{color:#0a58ca}.link-secondary{color:#6c757d}.link-secondary:focus,.link-secondary:hover{color:#565e64}.link-success{color:#198754}.link-success:focus,.link-success:hover{color:#146c43}.link-info{color:#0dcaf0}.link-info:focus,.link-info:hover{color:#3dd5f3}.link-warning{color:#ffc107}.link-warning:focus,.link-warning:hover{color:#ffcd39}.link-danger{color:#dc3545}.link-danger:focus,.link-danger:hover{color:#b02a37}.link-light{color:#f8f9fa}.link-light:focus,.link-light:hover{color:#f9fafb}.link-dark{color:#212529}.link-dark:focus,.link-dark:hover{color:#1a1e21}.ratio{position:relative;width:100%}.ratio::before{display:block;padding-top:var(--bs-aspect-ratio);content:""}.ratio>*{position:absolute;top:0;left:0;width:100%;height:100%}.ratio-1x1{--bs-aspect-ratio:100%}.ratio-4x3{--bs-aspect-ratio:calc(3 / 4 * 100%)}.ratio-16x9{--bs-aspect-ratio:calc(9 / 16 * 100%)}.ratio-21x9{--bs-aspect-ratio:calc(9 / 21 * 100%)}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}@media (min-width:576px){.sticky-sm-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:768px){.sticky-md-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:992px){.sticky-lg-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:1200px){.sticky-xl-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:1400px){.sticky-xxl-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.visually-hidden,.visually-hidden-focusable:not(:focus):not(:focus-within){position:absolute!important;width:1px!important;height:1px!important;padding:0!important;margin:-1px!important;overflow:hidden!important;clip:rect(0,0,0,0)!important;white-space:nowrap!important;border:0!important}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.float-start{float:left!important}.float-end{float:right!important}.float-none{float:none!important}.overflow-auto{overflow:auto!important}.overflow-hidden{overflow:hidden!important}.overflow-visible{overflow:visible!important}.overflow-scroll{overflow:scroll!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-grid{display:grid!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:flex!important}.d-inline-flex{display:inline-flex!important}.d-none{display:none!important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow-lg{box-shadow:0 1rem 3rem 
rgba(0,0,0,.175)!important}.shadow-none{box-shadow:none!important}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.top-0{top:0!important}.top-50{top:50%!important}.top-100{top:100%!important}.bottom-0{bottom:0!important}.bottom-50{bottom:50%!important}.bottom-100{bottom:100%!important}.start-0{left:0!important}.start-50{left:50%!important}.start-100{left:100%!important}.end-0{right:0!important}.end-50{right:50%!important}.end-100{right:100%!important}.translate-middle{transform:translate(-50%,-50%)!important}.translate-middle-x{transform:translateX(-50%)!important}.translate-middle-y{transform:translateY(-50%)!important}.border{border:1px solid #dee2e6!important}.border-0{border:0!important}.border-top{border-top:1px solid #dee2e6!important}.border-top-0{border-top:0!important}.border-end{border-right:1px solid #dee2e6!important}.border-end-0{border-right:0!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-bottom-0{border-bottom:0!important}.border-start{border-left:1px solid #dee2e6!important}.border-start-0{border-left:0!important}.border-primary{border-color:#0d6efd!important}.border-secondary{border-color:#6c757d!important}.border-success{border-color:#198754!important}.border-info{border-color:#0dcaf0!important}.border-warning{border-color:#ffc107!important}.border-danger{border-color:#dc3545!important}.border-light{border-color:#f8f9fa!important}.border-dark{border-color:#212529!important}.border-white{border-color:#fff!important}.border-1{border-width:1px!important}.border-2{border-width:2px!important}.border-3{border-width:3px!important}.border-4{border-width:4px!important}.border-5{border-width:5px!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.mw-100{max-width:100%!important}.vw-100{width:100vw!important}.min-vw-100{min-width:100vw!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mh-100{max-height:100%!important}.vh-100{height:100vh!important}.min-vh-100{min-height:100vh!important}.flex-fill{flex:1 1 
auto!important}.flex-row{flex-direction:row!important}.flex-column{flex-direction:column!important}.flex-row-reverse{flex-direction:row-reverse!important}.flex-column-reverse{flex-direction:column-reverse!important}.flex-grow-0{flex-grow:0!important}.flex-grow-1{flex-grow:1!important}.flex-shrink-0{flex-shrink:0!important}.flex-shrink-1{flex-shrink:1!important}.flex-wrap{flex-wrap:wrap!important}.flex-nowrap{flex-wrap:nowrap!important}.flex-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-0{gap:0!important}.gap-1{gap:.25rem!important}.gap-2{gap:.5rem!important}.gap-3{gap:1rem!important}.gap-4{gap:1.5rem!important}.gap-5{gap:3rem!important}.justify-content-start{justify-content:flex-start!important}.justify-content-end{justify-content:flex-end!important}.justify-content-center{justify-content:center!important}.justify-content-between{justify-content:space-between!important}.justify-content-around{justify-content:space-around!important}.justify-content-evenly{justify-content:space-evenly!important}.align-items-start{align-items:flex-start!important}.align-items-end{align-items:flex-end!important}.align-items-center{align-items:center!important}.align-items-baseline{align-items:baseline!important}.align-items-stretch{align-items:stretch!important}.align-content-start{align-content:flex-start!important}.align-content-end{align-content:flex-end!important}.align-content-center{align-content:center!important}.align-content-between{align-content:space-between!important}.align-content-around{align-content:space-around!important}.align-content-stretch{align-content:stretch!important}.align-self-auto{align-self:auto!important}.align-self-start{align-self:flex-start!important}.align-self-end{align-self:flex-end!important}.align-self-center{align-self:center!important}.align-self-baseline{align-self:baseline!important}.align-self-stretch{align-self:stretch!important}.order-first{order:-1!important}.order-0{order:0!important}.order-1{order:1!important}.order-2{order:2!important}.order-3{order:3!important}.order-4{order:4!important}.order-5{order:5!important}.order-last{order:6!important}.m-0{margin:0!important}.m-1{margin:.25rem!important}.m-2{margin:.5rem!important}.m-3{margin:1rem!important}.m-4{margin:1.5rem!important}.m-5{margin:3rem!important}.m-auto{margin:auto!important}.mx-0{margin-right:0!important;margin-left:0!important}.mx-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-3{margin-right:1rem!important;margin-left:1rem!important}.mx-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-5{margin-right:3rem!important;margin-left:3rem!important}.mx-auto{margin-right:auto!important;margin-left:auto!important}.my-0{margin-top:0!important;margin-bottom:0!important}.my-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-0{margin-top:0!important}.mt-1{margin-top:.25rem!important}.mt-2{margin-top:.5rem!important}.mt-3{margin-top:1rem!important}.mt-4{margin-top:1.5rem!important}.mt-5{margin-top:3rem!important}.mt-auto{margin-top:auto!important}.me-0{margin-right:0!important}.me-1{margin-right:.25rem!important}.me-2{margin-right:.5rem!important}.me-3{margin-right:1rem!important}.me-4{margin-rig
ht:1.5rem!important}.me-5{margin-right:3rem!important}.me-auto{margin-right:auto!important}.mb-0{margin-bottom:0!important}.mb-1{margin-bottom:.25rem!important}.mb-2{margin-bottom:.5rem!important}.mb-3{margin-bottom:1rem!important}.mb-4{margin-bottom:1.5rem!important}.mb-5{margin-bottom:3rem!important}.mb-auto{margin-bottom:auto!important}.ms-0{margin-left:0!important}.ms-1{margin-left:.25rem!important}.ms-2{margin-left:.5rem!important}.ms-3{margin-left:1rem!important}.ms-4{margin-left:1.5rem!important}.ms-5{margin-left:3rem!important}.ms-auto{margin-left:auto!important}.p-0{padding:0!important}.p-1{padding:.25rem!important}.p-2{padding:.5rem!important}.p-3{padding:1rem!important}.p-4{padding:1.5rem!important}.p-5{padding:3rem!important}.px-0{padding-right:0!important;padding-left:0!important}.px-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-3{padding-right:1rem!important;padding-left:1rem!important}.px-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-5{padding-right:3rem!important;padding-left:3rem!important}.py-0{padding-top:0!important;padding-bottom:0!important}.py-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-0{padding-top:0!important}.pt-1{padding-top:.25rem!important}.pt-2{padding-top:.5rem!important}.pt-3{padding-top:1rem!important}.pt-4{padding-top:1.5rem!important}.pt-5{padding-top:3rem!important}.pe-0{padding-right:0!important}.pe-1{padding-right:.25rem!important}.pe-2{padding-right:.5rem!important}.pe-3{padding-right:1rem!important}.pe-4{padding-right:1.5rem!important}.pe-5{padding-right:3rem!important}.pb-0{padding-bottom:0!important}.pb-1{padding-bottom:.25rem!important}.pb-2{padding-bottom:.5rem!important}.pb-3{padding-bottom:1rem!important}.pb-4{padding-bottom:1.5rem!important}.pb-5{padding-bottom:3rem!important}.ps-0{padding-left:0!important}.ps-1{padding-left:.25rem!important}.ps-2{padding-left:.5rem!important}.ps-3{padding-left:1rem!important}.ps-4{padding-left:1.5rem!important}.ps-5{padding-left:3rem!important}.font-monospace{font-family:var(--bs-font-monospace)!important}.fs-1{font-size:calc(1.375rem + 1.5vw)!important}.fs-2{font-size:calc(1.325rem + .9vw)!important}.fs-3{font-size:calc(1.3rem + .6vw)!important}.fs-4{font-size:calc(1.275rem + 
.3vw)!important}.fs-5{font-size:1.25rem!important}.fs-6{font-size:1rem!important}.fst-italic{font-style:italic!important}.fst-normal{font-style:normal!important}.fw-light{font-weight:300!important}.fw-lighter{font-weight:lighter!important}.fw-normal{font-weight:400!important}.fw-bold{font-weight:700!important}.fw-bolder{font-weight:bolder!important}.lh-1{line-height:1!important}.lh-sm{line-height:1.25!important}.lh-base{line-height:1.5!important}.lh-lg{line-height:2!important}.text-start{text-align:left!important}.text-end{text-align:right!important}.text-center{text-align:center!important}.text-decoration-none{text-decoration:none!important}.text-decoration-underline{text-decoration:underline!important}.text-decoration-line-through{text-decoration:line-through!important}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.text-wrap{white-space:normal!important}.text-nowrap{white-space:nowrap!important}.text-break{word-wrap:break-word!important;word-break:break-word!important}.text-primary{color:#0d6efd!important}.text-secondary{color:#6c757d!important}.text-success{color:#198754!important}.text-info{color:#0dcaf0!important}.text-warning{color:#ffc107!important}.text-danger{color:#dc3545!important}.text-light{color:#f8f9fa!important}.text-dark{color:#212529!important}.text-white{color:#fff!important}.text-body{color:#212529!important}.text-muted{color:#6c757d!important}.text-black-50{color:rgba(0,0,0,.5)!important}.text-white-50{color:rgba(255,255,255,.5)!important}.text-reset{color:inherit!important}.bg-primary{background-color:#0d6efd!important}.bg-secondary{background-color:#6c757d!important}.bg-success{background-color:#198754!important}.bg-info{background-color:#0dcaf0!important}.bg-warning{background-color:#ffc107!important}.bg-danger{background-color:#dc3545!important}.bg-light{background-color:#f8f9fa!important}.bg-dark{background-color:#212529!important}.bg-body{background-color:#fff!important}.bg-white{background-color:#fff!important}.bg-transparent{background-color:transparent!important}.bg-gradient{background-image:var(--bs-gradient)!important}.user-select-all{-webkit-user-select:all!important;-moz-user-select:all!important;user-select:all!important}.user-select-auto{-webkit-user-select:auto!important;-moz-user-select:auto!important;user-select:auto!important}.user-select-none{-webkit-user-select:none!important;-moz-user-select:none!important;user-select:none!important}.pe-none{pointer-events:none!important}.pe-auto{pointer-events:auto!important}.rounded{border-radius:.25rem!important}.rounded-0{border-radius:0!important}.rounded-1{border-radius:.2rem!important}.rounded-2{border-radius:.25rem!important}.rounded-3{border-radius:.3rem!important}.rounded-circle{border-radius:50%!important}.rounded-pill{border-radius:50rem!important}.rounded-top{border-top-left-radius:.25rem!important;border-top-right-radius:.25rem!important}.rounded-end{border-top-right-radius:.25rem!important;border-bottom-right-radius:.25rem!important}.rounded-bottom{border-bottom-right-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-start{border-bottom-left-radius:.25rem!important;border-top-left-radius:.25rem!important}.visible{visibility:visible!important}.invisible{visibility:hidden!important}@media 
(min-width:576px){.float-sm-start{float:left!important}.float-sm-end{float:right!important}.float-sm-none{float:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-grid{display:grid!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:flex!important}.d-sm-inline-flex{display:inline-flex!important}.d-sm-none{display:none!important}.flex-sm-fill{flex:1 1 auto!important}.flex-sm-row{flex-direction:row!important}.flex-sm-column{flex-direction:column!important}.flex-sm-row-reverse{flex-direction:row-reverse!important}.flex-sm-column-reverse{flex-direction:column-reverse!important}.flex-sm-grow-0{flex-grow:0!important}.flex-sm-grow-1{flex-grow:1!important}.flex-sm-shrink-0{flex-shrink:0!important}.flex-sm-shrink-1{flex-shrink:1!important}.flex-sm-wrap{flex-wrap:wrap!important}.flex-sm-nowrap{flex-wrap:nowrap!important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-sm-0{gap:0!important}.gap-sm-1{gap:.25rem!important}.gap-sm-2{gap:.5rem!important}.gap-sm-3{gap:1rem!important}.gap-sm-4{gap:1.5rem!important}.gap-sm-5{gap:3rem!important}.justify-content-sm-start{justify-content:flex-start!important}.justify-content-sm-end{justify-content:flex-end!important}.justify-content-sm-center{justify-content:center!important}.justify-content-sm-between{justify-content:space-between!important}.justify-content-sm-around{justify-content:space-around!important}.justify-content-sm-evenly{justify-content:space-evenly!important}.align-items-sm-start{align-items:flex-start!important}.align-items-sm-end{align-items:flex-end!important}.align-items-sm-center{align-items:center!important}.align-items-sm-baseline{align-items:baseline!important}.align-items-sm-stretch{align-items:stretch!important}.align-content-sm-start{align-content:flex-start!important}.align-content-sm-end{align-content:flex-end!important}.align-content-sm-center{align-content:center!important}.align-content-sm-between{align-content:space-between!important}.align-content-sm-around{align-content:space-around!important}.align-content-sm-stretch{align-content:stretch!important}.align-self-sm-auto{align-self:auto!important}.align-self-sm-start{align-self:flex-start!important}.align-self-sm-end{align-self:flex-end!important}.align-self-sm-center{align-self:center!important}.align-self-sm-baseline{align-self:baseline!important}.align-self-sm-stretch{align-self:stretch!important}.order-sm-first{order:-1!important}.order-sm-0{order:0!important}.order-sm-1{order:1!important}.order-sm-2{order:2!important}.order-sm-3{order:3!important}.order-sm-4{order:4!important}.order-sm-5{order:5!important}.order-sm-last{order:6!important}.m-sm-0{margin:0!important}.m-sm-1{margin:.25rem!important}.m-sm-2{margin:.5rem!important}.m-sm-3{margin:1rem!important}.m-sm-4{margin:1.5rem!important}.m-sm-5{margin:3rem!important}.m-sm-auto{margin:auto!important}.mx-sm-0{margin-right:0!important;margin-left:0!important}.mx-sm-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-sm-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-sm-3{margin-right:1rem!important;margin-left:1rem!important}.mx-sm-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-sm-5{margin-right:3rem!important;margin-left:3rem!important}.mx-sm-auto{margin-right:auto!important;margin-left:auto!important}.my-sm-0{margin-top:0!important;margin-bottom:0!important}.my-sm-1{margin-top:.
25rem!important;margin-bottom:.25rem!important}.my-sm-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-sm-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-sm-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-sm-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-sm-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-sm-0{margin-top:0!important}.mt-sm-1{margin-top:.25rem!important}.mt-sm-2{margin-top:.5rem!important}.mt-sm-3{margin-top:1rem!important}.mt-sm-4{margin-top:1.5rem!important}.mt-sm-5{margin-top:3rem!important}.mt-sm-auto{margin-top:auto!important}.me-sm-0{margin-right:0!important}.me-sm-1{margin-right:.25rem!important}.me-sm-2{margin-right:.5rem!important}.me-sm-3{margin-right:1rem!important}.me-sm-4{margin-right:1.5rem!important}.me-sm-5{margin-right:3rem!important}.me-sm-auto{margin-right:auto!important}.mb-sm-0{margin-bottom:0!important}.mb-sm-1{margin-bottom:.25rem!important}.mb-sm-2{margin-bottom:.5rem!important}.mb-sm-3{margin-bottom:1rem!important}.mb-sm-4{margin-bottom:1.5rem!important}.mb-sm-5{margin-bottom:3rem!important}.mb-sm-auto{margin-bottom:auto!important}.ms-sm-0{margin-left:0!important}.ms-sm-1{margin-left:.25rem!important}.ms-sm-2{margin-left:.5rem!important}.ms-sm-3{margin-left:1rem!important}.ms-sm-4{margin-left:1.5rem!important}.ms-sm-5{margin-left:3rem!important}.ms-sm-auto{margin-left:auto!important}.p-sm-0{padding:0!important}.p-sm-1{padding:.25rem!important}.p-sm-2{padding:.5rem!important}.p-sm-3{padding:1rem!important}.p-sm-4{padding:1.5rem!important}.p-sm-5{padding:3rem!important}.px-sm-0{padding-right:0!important;padding-left:0!important}.px-sm-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-sm-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-sm-3{padding-right:1rem!important;padding-left:1rem!important}.px-sm-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-sm-5{padding-right:3rem!important;padding-left:3rem!important}.py-sm-0{padding-top:0!important;padding-bottom:0!important}.py-sm-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-sm-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-sm-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-sm-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-sm-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-sm-0{padding-top:0!important}.pt-sm-1{padding-top:.25rem!important}.pt-sm-2{padding-top:.5rem!important}.pt-sm-3{padding-top:1rem!important}.pt-sm-4{padding-top:1.5rem!important}.pt-sm-5{padding-top:3rem!important}.pe-sm-0{padding-right:0!important}.pe-sm-1{padding-right:.25rem!important}.pe-sm-2{padding-right:.5rem!important}.pe-sm-3{padding-right:1rem!important}.pe-sm-4{padding-right:1.5rem!important}.pe-sm-5{padding-right:3rem!important}.pb-sm-0{padding-bottom:0!important}.pb-sm-1{padding-bottom:.25rem!important}.pb-sm-2{padding-bottom:.5rem!important}.pb-sm-3{padding-bottom:1rem!important}.pb-sm-4{padding-bottom:1.5rem!important}.pb-sm-5{padding-bottom:3rem!important}.ps-sm-0{padding-left:0!important}.ps-sm-1{padding-left:.25rem!important}.ps-sm-2{padding-left:.5rem!important}.ps-sm-3{padding-left:1rem!important}.ps-sm-4{padding-left:1.5rem!important}.ps-sm-5{padding-left:3rem!important}.text-sm-start{text-align:left!important}.text-sm-end{text-align:right!important}.text-sm-center{text-align:center!important}}@media 
(min-width:768px){.float-md-start{float:left!important}.float-md-end{float:right!important}.float-md-none{float:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-grid{display:grid!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:flex!important}.d-md-inline-flex{display:inline-flex!important}.d-md-none{display:none!important}.flex-md-fill{flex:1 1 auto!important}.flex-md-row{flex-direction:row!important}.flex-md-column{flex-direction:column!important}.flex-md-row-reverse{flex-direction:row-reverse!important}.flex-md-column-reverse{flex-direction:column-reverse!important}.flex-md-grow-0{flex-grow:0!important}.flex-md-grow-1{flex-grow:1!important}.flex-md-shrink-0{flex-shrink:0!important}.flex-md-shrink-1{flex-shrink:1!important}.flex-md-wrap{flex-wrap:wrap!important}.flex-md-nowrap{flex-wrap:nowrap!important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-md-0{gap:0!important}.gap-md-1{gap:.25rem!important}.gap-md-2{gap:.5rem!important}.gap-md-3{gap:1rem!important}.gap-md-4{gap:1.5rem!important}.gap-md-5{gap:3rem!important}.justify-content-md-start{justify-content:flex-start!important}.justify-content-md-end{justify-content:flex-end!important}.justify-content-md-center{justify-content:center!important}.justify-content-md-between{justify-content:space-between!important}.justify-content-md-around{justify-content:space-around!important}.justify-content-md-evenly{justify-content:space-evenly!important}.align-items-md-start{align-items:flex-start!important}.align-items-md-end{align-items:flex-end!important}.align-items-md-center{align-items:center!important}.align-items-md-baseline{align-items:baseline!important}.align-items-md-stretch{align-items:stretch!important}.align-content-md-start{align-content:flex-start!important}.align-content-md-end{align-content:flex-end!important}.align-content-md-center{align-content:center!important}.align-content-md-between{align-content:space-between!important}.align-content-md-around{align-content:space-around!important}.align-content-md-stretch{align-content:stretch!important}.align-self-md-auto{align-self:auto!important}.align-self-md-start{align-self:flex-start!important}.align-self-md-end{align-self:flex-end!important}.align-self-md-center{align-self:center!important}.align-self-md-baseline{align-self:baseline!important}.align-self-md-stretch{align-self:stretch!important}.order-md-first{order:-1!important}.order-md-0{order:0!important}.order-md-1{order:1!important}.order-md-2{order:2!important}.order-md-3{order:3!important}.order-md-4{order:4!important}.order-md-5{order:5!important}.order-md-last{order:6!important}.m-md-0{margin:0!important}.m-md-1{margin:.25rem!important}.m-md-2{margin:.5rem!important}.m-md-3{margin:1rem!important}.m-md-4{margin:1.5rem!important}.m-md-5{margin:3rem!important}.m-md-auto{margin:auto!important}.mx-md-0{margin-right:0!important;margin-left:0!important}.mx-md-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-md-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-md-3{margin-right:1rem!important;margin-left:1rem!important}.mx-md-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-md-5{margin-right:3rem!important;margin-left:3rem!important}.mx-md-auto{margin-right:auto!important;margin-left:auto!important}.my-md-0{margin-top:0!important;margin-bottom:0!important}.my-md-1{margin-top:.
25rem!important;margin-bottom:.25rem!important}.my-md-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-md-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-md-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-md-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-md-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-md-0{margin-top:0!important}.mt-md-1{margin-top:.25rem!important}.mt-md-2{margin-top:.5rem!important}.mt-md-3{margin-top:1rem!important}.mt-md-4{margin-top:1.5rem!important}.mt-md-5{margin-top:3rem!important}.mt-md-auto{margin-top:auto!important}.me-md-0{margin-right:0!important}.me-md-1{margin-right:.25rem!important}.me-md-2{margin-right:.5rem!important}.me-md-3{margin-right:1rem!important}.me-md-4{margin-right:1.5rem!important}.me-md-5{margin-right:3rem!important}.me-md-auto{margin-right:auto!important}.mb-md-0{margin-bottom:0!important}.mb-md-1{margin-bottom:.25rem!important}.mb-md-2{margin-bottom:.5rem!important}.mb-md-3{margin-bottom:1rem!important}.mb-md-4{margin-bottom:1.5rem!important}.mb-md-5{margin-bottom:3rem!important}.mb-md-auto{margin-bottom:auto!important}.ms-md-0{margin-left:0!important}.ms-md-1{margin-left:.25rem!important}.ms-md-2{margin-left:.5rem!important}.ms-md-3{margin-left:1rem!important}.ms-md-4{margin-left:1.5rem!important}.ms-md-5{margin-left:3rem!important}.ms-md-auto{margin-left:auto!important}.p-md-0{padding:0!important}.p-md-1{padding:.25rem!important}.p-md-2{padding:.5rem!important}.p-md-3{padding:1rem!important}.p-md-4{padding:1.5rem!important}.p-md-5{padding:3rem!important}.px-md-0{padding-right:0!important;padding-left:0!important}.px-md-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-md-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-md-3{padding-right:1rem!important;padding-left:1rem!important}.px-md-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-md-5{padding-right:3rem!important;padding-left:3rem!important}.py-md-0{padding-top:0!important;padding-bottom:0!important}.py-md-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-md-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-md-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-md-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-md-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-md-0{padding-top:0!important}.pt-md-1{padding-top:.25rem!important}.pt-md-2{padding-top:.5rem!important}.pt-md-3{padding-top:1rem!important}.pt-md-4{padding-top:1.5rem!important}.pt-md-5{padding-top:3rem!important}.pe-md-0{padding-right:0!important}.pe-md-1{padding-right:.25rem!important}.pe-md-2{padding-right:.5rem!important}.pe-md-3{padding-right:1rem!important}.pe-md-4{padding-right:1.5rem!important}.pe-md-5{padding-right:3rem!important}.pb-md-0{padding-bottom:0!important}.pb-md-1{padding-bottom:.25rem!important}.pb-md-2{padding-bottom:.5rem!important}.pb-md-3{padding-bottom:1rem!important}.pb-md-4{padding-bottom:1.5rem!important}.pb-md-5{padding-bottom:3rem!important}.ps-md-0{padding-left:0!important}.ps-md-1{padding-left:.25rem!important}.ps-md-2{padding-left:.5rem!important}.ps-md-3{padding-left:1rem!important}.ps-md-4{padding-left:1.5rem!important}.ps-md-5{padding-left:3rem!important}.text-md-start{text-align:left!important}.text-md-end{text-align:right!important}.text-md-center{text-align:center!important}}@media 
(min-width:992px){.float-lg-start{float:left!important}.float-lg-end{float:right!important}.float-lg-none{float:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-grid{display:grid!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:flex!important}.d-lg-inline-flex{display:inline-flex!important}.d-lg-none{display:none!important}.flex-lg-fill{flex:1 1 auto!important}.flex-lg-row{flex-direction:row!important}.flex-lg-column{flex-direction:column!important}.flex-lg-row-reverse{flex-direction:row-reverse!important}.flex-lg-column-reverse{flex-direction:column-reverse!important}.flex-lg-grow-0{flex-grow:0!important}.flex-lg-grow-1{flex-grow:1!important}.flex-lg-shrink-0{flex-shrink:0!important}.flex-lg-shrink-1{flex-shrink:1!important}.flex-lg-wrap{flex-wrap:wrap!important}.flex-lg-nowrap{flex-wrap:nowrap!important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-lg-0{gap:0!important}.gap-lg-1{gap:.25rem!important}.gap-lg-2{gap:.5rem!important}.gap-lg-3{gap:1rem!important}.gap-lg-4{gap:1.5rem!important}.gap-lg-5{gap:3rem!important}.justify-content-lg-start{justify-content:flex-start!important}.justify-content-lg-end{justify-content:flex-end!important}.justify-content-lg-center{justify-content:center!important}.justify-content-lg-between{justify-content:space-between!important}.justify-content-lg-around{justify-content:space-around!important}.justify-content-lg-evenly{justify-content:space-evenly!important}.align-items-lg-start{align-items:flex-start!important}.align-items-lg-end{align-items:flex-end!important}.align-items-lg-center{align-items:center!important}.align-items-lg-baseline{align-items:baseline!important}.align-items-lg-stretch{align-items:stretch!important}.align-content-lg-start{align-content:flex-start!important}.align-content-lg-end{align-content:flex-end!important}.align-content-lg-center{align-content:center!important}.align-content-lg-between{align-content:space-between!important}.align-content-lg-around{align-content:space-around!important}.align-content-lg-stretch{align-content:stretch!important}.align-self-lg-auto{align-self:auto!important}.align-self-lg-start{align-self:flex-start!important}.align-self-lg-end{align-self:flex-end!important}.align-self-lg-center{align-self:center!important}.align-self-lg-baseline{align-self:baseline!important}.align-self-lg-stretch{align-self:stretch!important}.order-lg-first{order:-1!important}.order-lg-0{order:0!important}.order-lg-1{order:1!important}.order-lg-2{order:2!important}.order-lg-3{order:3!important}.order-lg-4{order:4!important}.order-lg-5{order:5!important}.order-lg-last{order:6!important}.m-lg-0{margin:0!important}.m-lg-1{margin:.25rem!important}.m-lg-2{margin:.5rem!important}.m-lg-3{margin:1rem!important}.m-lg-4{margin:1.5rem!important}.m-lg-5{margin:3rem!important}.m-lg-auto{margin:auto!important}.mx-lg-0{margin-right:0!important;margin-left:0!important}.mx-lg-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-lg-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-lg-3{margin-right:1rem!important;margin-left:1rem!important}.mx-lg-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-lg-5{margin-right:3rem!important;margin-left:3rem!important}.mx-lg-auto{margin-right:auto!important;margin-left:auto!important}.my-lg-0{margin-top:0!important;margin-bottom:0!important}.my-lg-1{margin-top:.
25rem!important;margin-bottom:.25rem!important}.my-lg-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-lg-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-lg-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-lg-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-lg-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-lg-0{margin-top:0!important}.mt-lg-1{margin-top:.25rem!important}.mt-lg-2{margin-top:.5rem!important}.mt-lg-3{margin-top:1rem!important}.mt-lg-4{margin-top:1.5rem!important}.mt-lg-5{margin-top:3rem!important}.mt-lg-auto{margin-top:auto!important}.me-lg-0{margin-right:0!important}.me-lg-1{margin-right:.25rem!important}.me-lg-2{margin-right:.5rem!important}.me-lg-3{margin-right:1rem!important}.me-lg-4{margin-right:1.5rem!important}.me-lg-5{margin-right:3rem!important}.me-lg-auto{margin-right:auto!important}.mb-lg-0{margin-bottom:0!important}.mb-lg-1{margin-bottom:.25rem!important}.mb-lg-2{margin-bottom:.5rem!important}.mb-lg-3{margin-bottom:1rem!important}.mb-lg-4{margin-bottom:1.5rem!important}.mb-lg-5{margin-bottom:3rem!important}.mb-lg-auto{margin-bottom:auto!important}.ms-lg-0{margin-left:0!important}.ms-lg-1{margin-left:.25rem!important}.ms-lg-2{margin-left:.5rem!important}.ms-lg-3{margin-left:1rem!important}.ms-lg-4{margin-left:1.5rem!important}.ms-lg-5{margin-left:3rem!important}.ms-lg-auto{margin-left:auto!important}.p-lg-0{padding:0!important}.p-lg-1{padding:.25rem!important}.p-lg-2{padding:.5rem!important}.p-lg-3{padding:1rem!important}.p-lg-4{padding:1.5rem!important}.p-lg-5{padding:3rem!important}.px-lg-0{padding-right:0!important;padding-left:0!important}.px-lg-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-lg-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-lg-3{padding-right:1rem!important;padding-left:1rem!important}.px-lg-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-lg-5{padding-right:3rem!important;padding-left:3rem!important}.py-lg-0{padding-top:0!important;padding-bottom:0!important}.py-lg-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-lg-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-lg-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-lg-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-lg-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-lg-0{padding-top:0!important}.pt-lg-1{padding-top:.25rem!important}.pt-lg-2{padding-top:.5rem!important}.pt-lg-3{padding-top:1rem!important}.pt-lg-4{padding-top:1.5rem!important}.pt-lg-5{padding-top:3rem!important}.pe-lg-0{padding-right:0!important}.pe-lg-1{padding-right:.25rem!important}.pe-lg-2{padding-right:.5rem!important}.pe-lg-3{padding-right:1rem!important}.pe-lg-4{padding-right:1.5rem!important}.pe-lg-5{padding-right:3rem!important}.pb-lg-0{padding-bottom:0!important}.pb-lg-1{padding-bottom:.25rem!important}.pb-lg-2{padding-bottom:.5rem!important}.pb-lg-3{padding-bottom:1rem!important}.pb-lg-4{padding-bottom:1.5rem!important}.pb-lg-5{padding-bottom:3rem!important}.ps-lg-0{padding-left:0!important}.ps-lg-1{padding-left:.25rem!important}.ps-lg-2{padding-left:.5rem!important}.ps-lg-3{padding-left:1rem!important}.ps-lg-4{padding-left:1.5rem!important}.ps-lg-5{padding-left:3rem!important}.text-lg-start{text-align:left!important}.text-lg-end{text-align:right!important}.text-lg-center{text-align:center!important}}@media 
(min-width:1200px){.float-xl-start{float:left!important}.float-xl-end{float:right!important}.float-xl-none{float:none!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-grid{display:grid!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:flex!important}.d-xl-inline-flex{display:inline-flex!important}.d-xl-none{display:none!important}.flex-xl-fill{flex:1 1 auto!important}.flex-xl-row{flex-direction:row!important}.flex-xl-column{flex-direction:column!important}.flex-xl-row-reverse{flex-direction:row-reverse!important}.flex-xl-column-reverse{flex-direction:column-reverse!important}.flex-xl-grow-0{flex-grow:0!important}.flex-xl-grow-1{flex-grow:1!important}.flex-xl-shrink-0{flex-shrink:0!important}.flex-xl-shrink-1{flex-shrink:1!important}.flex-xl-wrap{flex-wrap:wrap!important}.flex-xl-nowrap{flex-wrap:nowrap!important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-xl-0{gap:0!important}.gap-xl-1{gap:.25rem!important}.gap-xl-2{gap:.5rem!important}.gap-xl-3{gap:1rem!important}.gap-xl-4{gap:1.5rem!important}.gap-xl-5{gap:3rem!important}.justify-content-xl-start{justify-content:flex-start!important}.justify-content-xl-end{justify-content:flex-end!important}.justify-content-xl-center{justify-content:center!important}.justify-content-xl-between{justify-content:space-between!important}.justify-content-xl-around{justify-content:space-around!important}.justify-content-xl-evenly{justify-content:space-evenly!important}.align-items-xl-start{align-items:flex-start!important}.align-items-xl-end{align-items:flex-end!important}.align-items-xl-center{align-items:center!important}.align-items-xl-baseline{align-items:baseline!important}.align-items-xl-stretch{align-items:stretch!important}.align-content-xl-start{align-content:flex-start!important}.align-content-xl-end{align-content:flex-end!important}.align-content-xl-center{align-content:center!important}.align-content-xl-between{align-content:space-between!important}.align-content-xl-around{align-content:space-around!important}.align-content-xl-stretch{align-content:stretch!important}.align-self-xl-auto{align-self:auto!important}.align-self-xl-start{align-self:flex-start!important}.align-self-xl-end{align-self:flex-end!important}.align-self-xl-center{align-self:center!important}.align-self-xl-baseline{align-self:baseline!important}.align-self-xl-stretch{align-self:stretch!important}.order-xl-first{order:-1!important}.order-xl-0{order:0!important}.order-xl-1{order:1!important}.order-xl-2{order:2!important}.order-xl-3{order:3!important}.order-xl-4{order:4!important}.order-xl-5{order:5!important}.order-xl-last{order:6!important}.m-xl-0{margin:0!important}.m-xl-1{margin:.25rem!important}.m-xl-2{margin:.5rem!important}.m-xl-3{margin:1rem!important}.m-xl-4{margin:1.5rem!important}.m-xl-5{margin:3rem!important}.m-xl-auto{margin:auto!important}.mx-xl-0{margin-right:0!important;margin-left:0!important}.mx-xl-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-xl-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-xl-3{margin-right:1rem!important;margin-left:1rem!important}.mx-xl-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-xl-5{margin-right:3rem!important;margin-left:3rem!important}.mx-xl-auto{margin-right:auto!important;margin-left:auto!important}.my-xl-0{margin-top:0!important;margin-bottom:0!important}.my-xl-1{margin-top:
.25rem!important;margin-bottom:.25rem!important}.my-xl-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-xl-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-xl-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-xl-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-xl-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-xl-0{margin-top:0!important}.mt-xl-1{margin-top:.25rem!important}.mt-xl-2{margin-top:.5rem!important}.mt-xl-3{margin-top:1rem!important}.mt-xl-4{margin-top:1.5rem!important}.mt-xl-5{margin-top:3rem!important}.mt-xl-auto{margin-top:auto!important}.me-xl-0{margin-right:0!important}.me-xl-1{margin-right:.25rem!important}.me-xl-2{margin-right:.5rem!important}.me-xl-3{margin-right:1rem!important}.me-xl-4{margin-right:1.5rem!important}.me-xl-5{margin-right:3rem!important}.me-xl-auto{margin-right:auto!important}.mb-xl-0{margin-bottom:0!important}.mb-xl-1{margin-bottom:.25rem!important}.mb-xl-2{margin-bottom:.5rem!important}.mb-xl-3{margin-bottom:1rem!important}.mb-xl-4{margin-bottom:1.5rem!important}.mb-xl-5{margin-bottom:3rem!important}.mb-xl-auto{margin-bottom:auto!important}.ms-xl-0{margin-left:0!important}.ms-xl-1{margin-left:.25rem!important}.ms-xl-2{margin-left:.5rem!important}.ms-xl-3{margin-left:1rem!important}.ms-xl-4{margin-left:1.5rem!important}.ms-xl-5{margin-left:3rem!important}.ms-xl-auto{margin-left:auto!important}.p-xl-0{padding:0!important}.p-xl-1{padding:.25rem!important}.p-xl-2{padding:.5rem!important}.p-xl-3{padding:1rem!important}.p-xl-4{padding:1.5rem!important}.p-xl-5{padding:3rem!important}.px-xl-0{padding-right:0!important;padding-left:0!important}.px-xl-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-xl-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-xl-3{padding-right:1rem!important;padding-left:1rem!important}.px-xl-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-xl-5{padding-right:3rem!important;padding-left:3rem!important}.py-xl-0{padding-top:0!important;padding-bottom:0!important}.py-xl-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-xl-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-xl-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-xl-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-xl-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-xl-0{padding-top:0!important}.pt-xl-1{padding-top:.25rem!important}.pt-xl-2{padding-top:.5rem!important}.pt-xl-3{padding-top:1rem!important}.pt-xl-4{padding-top:1.5rem!important}.pt-xl-5{padding-top:3rem!important}.pe-xl-0{padding-right:0!important}.pe-xl-1{padding-right:.25rem!important}.pe-xl-2{padding-right:.5rem!important}.pe-xl-3{padding-right:1rem!important}.pe-xl-4{padding-right:1.5rem!important}.pe-xl-5{padding-right:3rem!important}.pb-xl-0{padding-bottom:0!important}.pb-xl-1{padding-bottom:.25rem!important}.pb-xl-2{padding-bottom:.5rem!important}.pb-xl-3{padding-bottom:1rem!important}.pb-xl-4{padding-bottom:1.5rem!important}.pb-xl-5{padding-bottom:3rem!important}.ps-xl-0{padding-left:0!important}.ps-xl-1{padding-left:.25rem!important}.ps-xl-2{padding-left:.5rem!important}.ps-xl-3{padding-left:1rem!important}.ps-xl-4{padding-left:1.5rem!important}.ps-xl-5{padding-left:3rem!important}.text-xl-start{text-align:left!important}.text-xl-end{text-align:right!important}.text-xl-center{text-align:center!important}}@media 
(min-width:1400px){.float-xxl-start{float:left!important}.float-xxl-end{float:right!important}.float-xxl-none{float:none!important}.d-xxl-inline{display:inline!important}.d-xxl-inline-block{display:inline-block!important}.d-xxl-block{display:block!important}.d-xxl-grid{display:grid!important}.d-xxl-table{display:table!important}.d-xxl-table-row{display:table-row!important}.d-xxl-table-cell{display:table-cell!important}.d-xxl-flex{display:flex!important}.d-xxl-inline-flex{display:inline-flex!important}.d-xxl-none{display:none!important}.flex-xxl-fill{flex:1 1 auto!important}.flex-xxl-row{flex-direction:row!important}.flex-xxl-column{flex-direction:column!important}.flex-xxl-row-reverse{flex-direction:row-reverse!important}.flex-xxl-column-reverse{flex-direction:column-reverse!important}.flex-xxl-grow-0{flex-grow:0!important}.flex-xxl-grow-1{flex-grow:1!important}.flex-xxl-shrink-0{flex-shrink:0!important}.flex-xxl-shrink-1{flex-shrink:1!important}.flex-xxl-wrap{flex-wrap:wrap!important}.flex-xxl-nowrap{flex-wrap:nowrap!important}.flex-xxl-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-xxl-0{gap:0!important}.gap-xxl-1{gap:.25rem!important}.gap-xxl-2{gap:.5rem!important}.gap-xxl-3{gap:1rem!important}.gap-xxl-4{gap:1.5rem!important}.gap-xxl-5{gap:3rem!important}.justify-content-xxl-start{justify-content:flex-start!important}.justify-content-xxl-end{justify-content:flex-end!important}.justify-content-xxl-center{justify-content:center!important}.justify-content-xxl-between{justify-content:space-between!important}.justify-content-xxl-around{justify-content:space-around!important}.justify-content-xxl-evenly{justify-content:space-evenly!important}.align-items-xxl-start{align-items:flex-start!important}.align-items-xxl-end{align-items:flex-end!important}.align-items-xxl-center{align-items:center!important}.align-items-xxl-baseline{align-items:baseline!important}.align-items-xxl-stretch{align-items:stretch!important}.align-content-xxl-start{align-content:flex-start!important}.align-content-xxl-end{align-content:flex-end!important}.align-content-xxl-center{align-content:center!important}.align-content-xxl-between{align-content:space-between!important}.align-content-xxl-around{align-content:space-around!important}.align-content-xxl-stretch{align-content:stretch!important}.align-self-xxl-auto{align-self:auto!important}.align-self-xxl-start{align-self:flex-start!important}.align-self-xxl-end{align-self:flex-end!important}.align-self-xxl-center{align-self:center!important}.align-self-xxl-baseline{align-self:baseline!important}.align-self-xxl-stretch{align-self:stretch!important}.order-xxl-first{order:-1!important}.order-xxl-0{order:0!important}.order-xxl-1{order:1!important}.order-xxl-2{order:2!important}.order-xxl-3{order:3!important}.order-xxl-4{order:4!important}.order-xxl-5{order:5!important}.order-xxl-last{order:6!important}.m-xxl-0{margin:0!important}.m-xxl-1{margin:.25rem!important}.m-xxl-2{margin:.5rem!important}.m-xxl-3{margin:1rem!important}.m-xxl-4{margin:1.5rem!important}.m-xxl-5{margin:3rem!important}.m-xxl-auto{margin:auto!important}.mx-xxl-0{margin-right:0!important;margin-left:0!important}.mx-xxl-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-xxl-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-xxl-3{margin-right:1rem!important;margin-left:1rem!important}.mx-xxl-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-xxl-5{margin-right:3rem!important;margin-left:3rem!important}.mx-xxl-auto{margin-right:auto!important;margin-left:auto!important}.m
y-xxl-0{margin-top:0!important;margin-bottom:0!important}.my-xxl-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-xxl-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-xxl-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-xxl-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-xxl-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-xxl-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-xxl-0{margin-top:0!important}.mt-xxl-1{margin-top:.25rem!important}.mt-xxl-2{margin-top:.5rem!important}.mt-xxl-3{margin-top:1rem!important}.mt-xxl-4{margin-top:1.5rem!important}.mt-xxl-5{margin-top:3rem!important}.mt-xxl-auto{margin-top:auto!important}.me-xxl-0{margin-right:0!important}.me-xxl-1{margin-right:.25rem!important}.me-xxl-2{margin-right:.5rem!important}.me-xxl-3{margin-right:1rem!important}.me-xxl-4{margin-right:1.5rem!important}.me-xxl-5{margin-right:3rem!important}.me-xxl-auto{margin-right:auto!important}.mb-xxl-0{margin-bottom:0!important}.mb-xxl-1{margin-bottom:.25rem!important}.mb-xxl-2{margin-bottom:.5rem!important}.mb-xxl-3{margin-bottom:1rem!important}.mb-xxl-4{margin-bottom:1.5rem!important}.mb-xxl-5{margin-bottom:3rem!important}.mb-xxl-auto{margin-bottom:auto!important}.ms-xxl-0{margin-left:0!important}.ms-xxl-1{margin-left:.25rem!important}.ms-xxl-2{margin-left:.5rem!important}.ms-xxl-3{margin-left:1rem!important}.ms-xxl-4{margin-left:1.5rem!important}.ms-xxl-5{margin-left:3rem!important}.ms-xxl-auto{margin-left:auto!important}.p-xxl-0{padding:0!important}.p-xxl-1{padding:.25rem!important}.p-xxl-2{padding:.5rem!important}.p-xxl-3{padding:1rem!important}.p-xxl-4{padding:1.5rem!important}.p-xxl-5{padding:3rem!important}.px-xxl-0{padding-right:0!important;padding-left:0!important}.px-xxl-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-xxl-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-xxl-3{padding-right:1rem!important;padding-left:1rem!important}.px-xxl-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-xxl-5{padding-right:3rem!important;padding-left:3rem!important}.py-xxl-0{padding-top:0!important;padding-bottom:0!important}.py-xxl-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-xxl-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-xxl-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-xxl-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-xxl-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-xxl-0{padding-top:0!important}.pt-xxl-1{padding-top:.25rem!important}.pt-xxl-2{padding-top:.5rem!important}.pt-xxl-3{padding-top:1rem!important}.pt-xxl-4{padding-top:1.5rem!important}.pt-xxl-5{padding-top:3rem!important}.pe-xxl-0{padding-right:0!important}.pe-xxl-1{padding-right:.25rem!important}.pe-xxl-2{padding-right:.5rem!important}.pe-xxl-3{padding-right:1rem!important}.pe-xxl-4{padding-right:1.5rem!important}.pe-xxl-5{padding-right:3rem!important}.pb-xxl-0{padding-bottom:0!important}.pb-xxl-1{padding-bottom:.25rem!important}.pb-xxl-2{padding-bottom:.5rem!important}.pb-xxl-3{padding-bottom:1rem!important}.pb-xxl-4{padding-bottom:1.5rem!important}.pb-xxl-5{padding-bottom:3rem!important}.ps-xxl-0{padding-left:0!important}.ps-xxl-1{padding-left:.25rem!important}.ps-xxl-2{padding-left:.5rem!important}.ps-xxl-3{padding-left:1rem!important}.ps-xxl-4{padding-left:1.5rem!important}.ps-xxl-5{padding-left:3rem!important}.text-xxl-start{text-align:left!important}.text-xxl-end{text-align:right!important}.tex
t-xxl-center{text-align:center!important}}@media (min-width:1200px){.fs-1{font-size:2.5rem!important}.fs-2{font-size:2rem!important}.fs-3{font-size:1.75rem!important}.fs-4{font-size:1.5rem!important}}@media print{.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-grid{display:grid!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:flex!important}.d-print-inline-flex{display:inline-flex!important}.d-print-none{display:none!important}} +/*# sourceMappingURL=bootstrap.min.css.map */ diff --git a/tracker/tracker-assist/layout/css/styles.css b/tracker/tracker-assist/layout/css/styles.css new file mode 100644 index 000000000..289cddc17 --- /dev/null +++ b/tracker/tracker-assist/layout/css/styles.css @@ -0,0 +1,86 @@ +.connecting-message { + margin-top: 50%; + font-size: 20px; + color: #aaa; + text-align: center; + display: none; +} + +.status-connecting .connecting-message { + display: block; +} +.status-connecting .card { + display: none; +} + +.card{ + min-width: 324px; + width: 350px; + max-width: 800px; + /*min-height: 220px;*/ + max-height: 450px; + /*resize: both; + overflow: auto;*/ +} + +.card .card-header{ + cursor: move; +} +#agent-name, #duration{ + cursor:default; +} + +#local-stream, #remote-stream { + display:none; +} +#video-container.remote #remote-stream { + display: block; +} +#video-container.local { + min-height: 100px; +} +#video-container.local #local-stream { + display: block; +} + +#local-stream{ + width: 35%; + position: absolute; + z-index: 99; + bottom: 5px; + right: 5px; + border: thin solid rgba(255,255,255, .3); +} + + +#audio-btn .bi-mic-mute { + display:none; +} +#audio-btn:after { + content: 'Mute' +} +#audio-btn.muted .bi-mic-mute { + display: inline-block; +} +#audio-btn.muted .bi-mic { + display:none; +} +#audio-btn.muted:after { + content: 'Unmute' +} + +#video-btn .bi-camera-video-off { + display:none; +} +#video-btn.off:after { + content: 'Start Video' +} +#video-btn.off .bi-camera-video-off { + display: inline-block; +} +#video-btn.off .bi-camera-video { + display:none; +} +#video-btn:after { + content: 'Stop Video' +} \ No newline at end of file diff --git a/tracker/tracker-assist/layout/index.html b/tracker/tracker-assist/layout/index.html new file mode 100644 index 000000000..bc323b16c --- /dev/null +++ b/tracker/tracker-assist/layout/index.html @@ -0,0 +1,168 @@ + + + + + + + OpenReplay | Assist + + + + + + + + + + +
+    <!-- (markup lost in extraction) Body of the assist widget: the "or-assist"
+         section with its "Connecting..." message; a draggable card whose header
+         shows the agent name and a 00:00 call duration; the #video-container
+         holding the remote and local <video> elements, each with a
+         "Starting video..." placeholder; and the audio, video and
+         end-call buttons. -->
+ + \ No newline at end of file diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index d871a51b4..b9d4b77ef 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "3.4.3", + "version": "3.4.4", "keywords": [ "WebRTC", "assistance", diff --git a/tracker/tracker-assist/src/CallWindow.ts b/tracker/tracker-assist/src/CallWindow.ts index 995c36e76..3356449ce 100644 --- a/tracker/tracker-assist/src/CallWindow.ts +++ b/tracker/tracker-assist/src/CallWindow.ts @@ -1,16 +1,23 @@ +import type { LocalStream } from './LocalStream'; +const SS_START_TS_KEY = "__openreplay_assist_call_start_ts" export default class CallWindow { - private iframe: HTMLIFrameElement; - private vRemote: HTMLVideoElement | null = null; - private vLocal: HTMLVideoElement | null = null; - private audioBtn: HTMLAnchorElement | null = null; - private videoBtn: HTMLAnchorElement | null = null; - private userNameSpan: HTMLSpanElement | null = null; - private vPlaceholder: HTMLParagraphElement | null = null; + private iframe: HTMLIFrameElement + private vRemote: HTMLVideoElement | null = null + private vLocal: HTMLVideoElement | null = null + private audioBtn: HTMLElement | null = null + private videoBtn: HTMLElement | null = null + private endCallBtn: HTMLElement | null = null + private agentNameElem: HTMLElement | null = null + private videoContainer: HTMLElement | null = null + private vPlaceholder: HTMLElement | null = null - private tsInterval: ReturnType; - constructor(endCall: () => void) { + private tsInterval: ReturnType + + private load: Promise + + constructor() { const iframe = this.iframe = document.createElement('iframe'); Object.assign(iframe.style, { position: "fixed", @@ -31,189 +38,221 @@ export default class CallWindow { console.error("OpenReplay: CallWindow iframe document is not reachable.") return; } - fetch("https://static.openreplay.com/tracker-assist/index.html") - //fetch("file:///Users/shikhu/work/asayer-tester/dist/assist/index.html") + + + //const baseHref = "https://static.openreplay.com/tracker-assist/test" + const baseHref = "https://static.openreplay.com/tracker-assist/3.4.4" + this.load = fetch(baseHref + "/index.html") .then(r => r.text()) .then((text) => { iframe.onload = () => { - doc.body.removeChild(doc.body.children[0]); //?!!>R# const assistSection = doc.getElementById("or-assist") - assistSection && assistSection.removeAttribute("style"); - iframe.style.height = doc.body.scrollHeight + 'px'; - iframe.style.width = doc.body.scrollWidth + 'px'; + assistSection?.classList.remove("status-connecting") + //iframe.style.height = doc.body.scrollHeight + 'px'; + //iframe.style.width = doc.body.scrollWidth + 'px'; + this.adjustIframeSize() iframe.onload = null; } - text = text.replace(/href="css/g, "href=\"https://static.openreplay.com/tracker-assist/css") + // ? 
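+          // Note: the fetched layout references its stylesheets with relative
+          // hrefs; they are rewritten below to absolute URLs under baseHref so
+          // the injected iframe document can load them.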
+ text = text.replace(/href="css/g, `href="${baseHref}/css`) doc.open(); doc.write(text); doc.close(); - this.vLocal = doc.getElementById("video-local") as HTMLVideoElement; - this.vRemote = doc.getElementById("video-remote") as HTMLVideoElement; + this.vLocal = doc.getElementById("video-local") as (HTMLVideoElement | null); + this.vRemote = doc.getElementById("video-remote") as (HTMLVideoElement | null); + this.videoContainer = doc.getElementById("video-container"); - // - this.vLocal.parentElement && this.vLocal.parentElement.classList.add("d-none"); + this.audioBtn = doc.getElementById("audio-btn"); + if (this.audioBtn) { + this.audioBtn.onclick = () => this.toggleAudio(); + } + this.videoBtn = doc.getElementById("video-btn"); + if (this.videoBtn) { + this.videoBtn.onclick = () => this.toggleVideo(); + } + this.endCallBtn = doc.getElementById("end-call-btn"); - this.audioBtn = doc.getElementById("audio-btn") as HTMLAnchorElement; - this.audioBtn.onclick = () => this.toggleAudio(); - this.videoBtn = doc.getElementById("video-btn") as HTMLAnchorElement; - this.videoBtn.onclick = () => this.toggleVideo(); - - this.userNameSpan = doc.getElementById("username") as HTMLSpanElement; + this.agentNameElem = doc.getElementById("agent-name"); this.vPlaceholder = doc.querySelector("#remote-stream p") - this._trySetAssistentName(); - this._trySetStreams(); - const endCallBtn = doc.getElementById("end-call-btn") as HTMLAnchorElement; - endCallBtn.onclick = endCall; - - const tsText = doc.getElementById("time-stamp"); - const startTs = Date.now(); - if (tsText) { + const tsElem = doc.getElementById("duration"); + if (tsElem) { + const startTs = Number(sessionStorage.getItem(SS_START_TS_KEY)) || Date.now() + sessionStorage.setItem(SS_START_TS_KEY, startTs.toString()) this.tsInterval = setInterval(() => { - const ellapsed = Date.now() - startTs; - const secsFull = ~~(ellapsed / 1000); - const mins = ~~(secsFull / 60); + const ellapsed = Date.now() - startTs + const secsFull = ~~(ellapsed / 1000) + const mins = ~~(secsFull / 60) const secs = secsFull - mins * 60 - tsText.innerText = `${mins}:${secs < 10 ? 0 : ''}${secs}`; + tsElem.innerText = `${mins}:${secs < 10 ? 0 : ''}${secs}` }, 500); } // TODO: better D'n'D + // mb set cursor:move here? 
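+      // Native HTML5 drag'n'drop: a drag may start only from an element with
+      // the "drag-area" class (the card header), and on dragend the whole
+      // iframe is repositioned to the drop coordinates.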
doc.body.setAttribute("draggable", "true"); doc.body.ondragstart = (e) => { if (!e.dataTransfer || !e.target) { return; } //@ts-ignore - if (!e.target.classList || !e.target.classList.contains("card-header")) { return; } + if (!e.target.classList || !e.target.classList.contains("drag-area")) { return; } e.dataTransfer.setDragImage(doc.body, e.clientX, e.clientY); }; doc.body.ondragend = e => { Object.assign(iframe.style, { - left: `${e.clientX}px`, // TODO: fix in case e is inside the iframe + left: `${e.clientX}px`, // TODO: fix the case when ecoordinates are inside the iframe top: `${e.clientY}px`, bottom: 'auto', right: 'auto', }) } }); + + //this.toggleVideoUI(false) + //this.toggleRemoteVideoUI(false) } - // TODO: load(): Promise + private adjustIframeSize() { + const doc = this.iframe.contentDocument + if (!doc) { return } + this.iframe.style.height = doc.body.scrollHeight + 'px'; + this.iframe.style.width = doc.body.scrollWidth + 'px'; + } - private aRemote: HTMLAudioElement | null = null; - private localStream: MediaStream | null = null; - private remoteStream: MediaStream | null = null; - private setLocalVideoStream: (MediaStream) => void = () => {}; - private videoRequested: boolean = true; // TODO: green camera light - private _trySetStreams() { - if (this.vRemote && !this.vRemote.srcObject && this.remoteStream) { - this.vRemote.srcObject = this.remoteStream; - - if (this.vPlaceholder) { - this.vPlaceholder.innerText = "Video has been paused. Click anywhere to resume."; + setCallEndAction(endCall: () => void) { + this.load.then(() => { + if (this.endCallBtn) { + this.endCallBtn.onclick = endCall } - // Hack for audio (doesen't work in iframe because of some magical reasons (check if it is connected to autoplay?)) - this.aRemote = document.createElement("audio"); - this.aRemote.autoplay = true; - this.aRemote.style.display = "none" - this.aRemote.srcObject = this.remoteStream; - document.body.appendChild(this.aRemote) - } - if (this.vLocal && !this.vLocal.srcObject && this.localStream) { - this.vLocal.srcObject = this.localStream; - } + }) } + + private aRemote: HTMLAudioElement | null = null; + private checkRemoteVideoInterval: ReturnType setRemoteStream(rStream: MediaStream) { - this.remoteStream = rStream; - this._trySetStreams(); + this.load.then(() => { + if (this.vRemote && !this.vRemote.srcObject) { + this.vRemote.srcObject = rStream; + if (this.vPlaceholder) { + this.vPlaceholder.innerText = "Video has been paused. Click anywhere to resume."; + } + + // Hack for audio. Doesen't work inside the iframe because of some magical reasons (check if it is connected to autoplay?) 
+ this.aRemote = document.createElement("audio"); + this.aRemote.autoplay = true; + this.aRemote.style.display = "none" + this.aRemote.srcObject = rStream; + document.body.appendChild(this.aRemote) + } + + // Hack to determine if the remote video is enabled + if (this.checkRemoteVideoInterval) { clearInterval(this.checkRemoteVideoInterval) } // just in case + let enable = false + this.checkRemoteVideoInterval = setInterval(() => { + const settings = rStream.getVideoTracks()[0]?.getSettings() + //console.log(settings) + const isDummyVideoTrack = !!settings && (settings.width === 2 || settings.frameRate === 0) + const shouldEnable = !isDummyVideoTrack + if (enable !== shouldEnable) { + this.toggleRemoteVideoUI(enable=shouldEnable) + } + }, 1000) + }) } - setLocalStream(lStream: MediaStream, setLocalVideoStream: (MediaStream) => void) { - lStream.getVideoTracks().forEach(track => { - track.enabled = false; - }); - this.localStream = lStream; - this.setLocalVideoStream = setLocalVideoStream; - this._trySetStreams(); + + toggleRemoteVideoUI(enable: boolean) { + this.load.then(() => { + if (this.videoContainer) { + if (enable) { + this.videoContainer.classList.add("remote") + } else { + this.videoContainer.classList.remove("remote") + } + this.adjustIframeSize() + } + }) + } + + private localStream: LocalStream | null = null; + + // TODO: on construction? + setLocalStream(lStream: LocalStream) { + this.localStream = lStream } playRemote() { this.vRemote && this.vRemote.play() } - - // TODO: determined workflow - _trySetAssistentName() { - if (this.userNameSpan && this.assistentName) { - this.userNameSpan.innerText = this.assistentName; - } - } - private assistentName: string = ""; setAssistentName(name: string) { - this.assistentName = name; - this._trySetAssistentName(); + this.load.then(() => { + if (this.agentNameElem) { + this.agentNameElem.innerText = name + } + }) } - toggleAudio() { - let enabled = true; - this.localStream?.getAudioTracks().forEach(track => { - enabled = enabled && !track.enabled; - track.enabled = enabled; - }); - const cList = this.audioBtn?.classList; + + private toggleAudioUI(enabled: boolean) { if (!this.audioBtn) { return; } if (enabled) { - this.audioBtn.classList.remove("muted"); - this.audioBtn.childNodes[1].textContent = "Mute"; + this.audioBtn.classList.remove("muted") } else { - this.audioBtn.classList.add("muted"); - this.audioBtn.childNodes[1].textContent = "Unmute"; + this.audioBtn.classList.add("muted") } } - private _toggleVideoUI(enabled) { - if (!this.videoBtn || !this.vLocal || !this.vLocal.parentElement) { return; } + private toggleAudio() { + const enabled = this.localStream?.toggleAudio() || false + this.toggleAudioUI(enabled) + // if (!this.audioBtn) { return; } + // if (enabled) { + // this.audioBtn.classList.remove("muted"); + // this.audioBtn.childNodes[1].textContent = "Mute"; + // } else { + // this.audioBtn.classList.add("muted"); + // this.audioBtn.childNodes[1].textContent = "Unmute"; + // } + } + + private toggleVideoUI(enabled: boolean) { + if (!this.videoBtn || !this.videoContainer) { return; } if (enabled) { - this.vLocal.parentElement.classList.remove("d-none"); + this.videoContainer.classList.add("local") this.videoBtn.classList.remove("off"); - this.videoBtn.childNodes[1].textContent = "Stop Video"; } else { - this.vLocal.parentElement.classList.add("d-none"); + this.videoContainer.classList.remove("local") this.videoBtn.classList.add("off"); - this.videoBtn.childNodes[1].textContent = "Start Video"; } + this.adjustIframeSize() } - 
toggleVideo() { - if (!this.videoRequested) { - navigator.mediaDevices.getUserMedia({video:true, audio:false}).then(vd => { - this.videoRequested = true; - this.setLocalVideoStream(vd); - this._toggleVideoUI(true); - this.localStream?.getVideoTracks().forEach(track => { - track.enabled = true; - }) - }); - return; - } - let enabled = true; - this.localStream?.getVideoTracks().forEach(track => { - enabled = enabled && !track.enabled; - track.enabled = enabled; - }); - this._toggleVideoUI(enabled); - + private videoRequested: boolean = false + private toggleVideo() { + this.localStream?.toggleVideo() + .then(enabled => { + this.toggleVideoUI(enabled) + this.load.then(() => { + if (this.vLocal && this.localStream && !this.vLocal.srcObject) { + this.vLocal.srcObject = this.localStream.stream + } + }) + }) } remove() { - clearInterval(this.tsInterval); + this.localStream?.stop() + clearInterval(this.tsInterval) + clearInterval(this.checkRemoteVideoInterval) if (this.iframe.parentElement) { - document.body.removeChild(this.iframe); + document.body.removeChild(this.iframe) } if (this.aRemote && this.aRemote.parentElement) { - document.body.removeChild(this.aRemote); + document.body.removeChild(this.aRemote) } + sessionStorage.removeItem(SS_START_TS_KEY) } } \ No newline at end of file diff --git a/tracker/tracker-assist/src/LocalStream.ts b/tracker/tracker-assist/src/LocalStream.ts new file mode 100644 index 000000000..63f01ad58 --- /dev/null +++ b/tracker/tracker-assist/src/LocalStream.ts @@ -0,0 +1,85 @@ +declare global { + interface HTMLCanvasElement { + captureStream(frameRate?: number): MediaStream; + } +} + +function dummyTrack(): MediaStreamTrack { + const canvas = document.createElement("canvas")//, { width: 0, height: 0}) + canvas.width=canvas.height=2 // Doesn't work when 1 (?!) + const ctx = canvas.getContext('2d'); + ctx?.fillRect(0, 0, canvas.width, canvas.height); + requestAnimationFrame(function draw(){ + ctx?.fillRect(0,0, canvas.width, canvas.height) + requestAnimationFrame(draw); + }); + // Also works. Probably it should be done once connected. 
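+  // The 2x2 black canvas stream acts as a placeholder video track: the outgoing
+  // MediaStream then always carries a video track, which lets the real camera
+  // track be swapped in later through RTCRtpSender.replaceTrack() without
+  // renegotiation, and lets the remote side detect the dummy by its 2px width.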
+ //setTimeout(() => { ctx?.fillRect(0,0, canvas.width, canvas.height) }, 4000) + return canvas.captureStream(60).getTracks()[0]; +} + +export default function RequestLocalStream(): Promise { + return navigator.mediaDevices.getUserMedia({ audio:true }) + .then(aStream => { + const aTrack = aStream.getAudioTracks()[0] + if (!aTrack) { throw new Error("No audio tracks provided") } + return new _LocalStream(aTrack) + }) +} + +class _LocalStream { + private mediaRequested: boolean = false + readonly stream: MediaStream + private readonly vdTrack: MediaStreamTrack + constructor(aTrack: MediaStreamTrack) { + this.vdTrack = dummyTrack() + this.stream = new MediaStream([ aTrack, this.vdTrack ]) + } + + toggleVideo(): Promise { + if (!this.mediaRequested) { + return navigator.mediaDevices.getUserMedia({video:true}) + .then(vStream => { + const vTrack = vStream.getVideoTracks()[0] + if (!vTrack) { + throw new Error("No video track provided") + } + this.stream.addTrack(vTrack) + this.stream.removeTrack(this.vdTrack) + this.mediaRequested = true + if (this.onVideoTrackCb) { + this.onVideoTrackCb(vTrack) + } + return true + }) + .catch(e => { + // TODO: log + return false + }) + } + let enabled = true + this.stream.getVideoTracks().forEach(track => { + track.enabled = enabled = enabled && !track.enabled + }) + return Promise.resolve(enabled) + } + + toggleAudio(): boolean { + let enabled = true + this.stream.getAudioTracks().forEach(track => { + track.enabled = enabled = enabled && !track.enabled + }) + return enabled + } + + private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null + onVideoTrack(cb: (t: MediaStreamTrack) => void) { + this.onVideoTrackCb = cb + } + + stop() { + this.stream.getTracks().forEach(t => t.stop()) + } +} + +export type LocalStream = InstanceType diff --git a/tracker/tracker-assist/src/_slim.ts b/tracker/tracker-assist/src/_slim.ts index cf8d2205e..72c52dbd3 100644 --- a/tracker/tracker-assist/src/_slim.ts +++ b/tracker/tracker-assist/src/_slim.ts @@ -1,7 +1,7 @@ /** - * Hach for the issue of peerjs compilation on angular - * Mor info here: https://github.com/peers/peerjs/issues/552 + * Hack for the peerjs compilation on angular + * About this issue: https://github.com/peers/peerjs/issues/552 */ // @ts-ignore diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index c782ed55e..e09bc0ab8 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -8,7 +8,7 @@ import BufferingConnection from './BufferingConnection'; import Mouse from './Mouse'; import CallWindow from './CallWindow'; import ConfirmWindow from './ConfirmWindow'; - +import RequestLocalStream from './LocalStream'; export interface Options { confirmText: string, @@ -16,7 +16,6 @@ export interface Options { session_calling_peer_key: string, } - enum CallingState { Requesting, True, @@ -39,7 +38,7 @@ export default function(opts: Partial = {}) { } function log(...args) { - // TODO: use warn/log from assist + // TODO: use centralised warn/log from tracker (?) appOptions.__debug_log && console.log("OpenReplay Assist. 
", ...args) } function warn(...args) { @@ -89,6 +88,7 @@ export default function(opts: Partial = {}) { let callingState: CallingState = CallingState.False; + peer.on('call', function(call) { log("Call: ", call) if (!peer) { return; } @@ -96,6 +96,7 @@ export default function(opts: Partial = {}) { openDataConnections[call.peer]?.conn; if (callingState !== CallingState.False || !dataConn || !dataConn.open) { call.close(); + warn("Call closed instantly: ", callingState, dataConn, dataConn.open) return; } @@ -106,7 +107,7 @@ export default function(opts: Partial = {}) { sessionStorage.removeItem(options.session_calling_peer_key); } callingState = newState; - } + } const notifyCallEnd = () => { dataConn.open && dataConn.send("call_end"); @@ -121,74 +122,59 @@ export default function(opts: Partial = {}) { setCallingState(CallingState.Requesting); const confirm = new ConfirmWindow(options.confirmText, options.confirmStyle); confirmAnswer = confirm.mount(); - dataConn.on('data', (data) => { // if call closed by a caller before confirm + dataConn.on('data', (data) => { // if call cancelled by a caller before confirmation if (data === "call_end") { - log("Recieved call_end during confirm opened") - setCallingState(CallingState.False); + log("Recieved call_end during confirm window opened") confirm.remove(); + setCallingState(CallingState.False); } }); } confirmAnswer.then(agreed => { if (!agreed || !dataConn.open) { - call.close(); - notifyCallEnd(); - setCallingState(CallingState.False); - return; + !dataConn.open && warn("Call cancelled because data connection is closed.") + call.close() + notifyCallEnd() + setCallingState(CallingState.False) + return } - const mouse = new Mouse(); - let callUI; - - const onCallConnect = lStream => { - const onCallEnd = () => { - mouse.remove(); - callUI?.remove(); - lStream.getTracks().forEach(t => t.stop()); - setCallingState(CallingState.False); - } - const initiateCallEnd = () => { - log("initiateCallEnd") - call.close() - notifyCallEnd(); - onCallEnd(); - } - - call.answer(lStream); - setCallingState(CallingState.True) - - dataConn.on("close", onCallEnd); + const mouse = new Mouse() + let callUI = new CallWindow() + const onCallEnd = () => { + mouse.remove(); + callUI.remove(); + setCallingState(CallingState.False); + } + const initiateCallEnd = () => { + log("initiateCallEnd") + call.close() + notifyCallEnd(); + onCallEnd(); + } + RequestLocalStream().then(lStream => { + dataConn.on("close", onCallEnd); // For what case? 
//call.on('close', onClose); // Works from time to time (peerjs bug) - const intervalID = setInterval(() => { + const checkConnInterval = setInterval(() => { if (!dataConn.open) { initiateCallEnd(); - clearInterval(intervalID); + clearInterval(checkConnInterval); } if (!call.open) { onCallEnd(); - clearInterval(intervalID); + clearInterval(checkConnInterval); } }, 3000); - call.on('error', initiateCallEnd); - - callUI = new CallWindow(initiateCallEnd); - callUI.setLocalStream(lStream, (stream) => { - //let videoTrack = stream.getVideoTracks()[0]; - //lStream.addTrack(videoTrack); - - //call.peerConnection.addTrack(videoTrack); - - // call.peerConnection.getSenders() - // var sender = call.peerConnection.getSenders().find(function(s) { - // return s.track .kind == videoTrack.kind; - // }); - //sender.replaceTrack(videoTrack); + call.on('error', e => { + warn("Call error:", e) + initiateCallEnd() }); + call.on('stream', function(rStream) { callUI.setRemoteStream(rStream); - const onInteraction = () => { + const onInteraction = () => { // only if hidden? callUI.playRemote() document.removeEventListener("click", onInteraction) } @@ -200,6 +186,10 @@ export default function(opts: Partial = {}) { onCallEnd(); return; } + // if (data && typeof data.video === 'boolean') { + // log('Recieved video toggle signal: ', data.video) + // callUI.toggleRemoteVideo(data.video) + // } if (data && typeof data.name === 'string') { log('Recieved name: ', data.name) callUI.setAssistentName(data.name); @@ -208,14 +198,25 @@ export default function(opts: Partial = {}) { mouse.move(data); } }); - } - navigator.mediaDevices.getUserMedia({video:true, audio:true}) - .then(onCallConnect) - .catch(_ => { // TODO retry only if specific error - navigator.mediaDevices.getUserMedia({audio:true}) // in case there is no camera on device - .then(onCallConnect) - .catch(e => warn("Can't reach media devices. 
", e)); + lStream.onVideoTrack(vTrack => { + const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video") + if (!sender) { + warn("No video sender found") + return + } + log("sender found:", sender) + sender.replaceTrack(vTrack) + }) + + callUI.setCallEndAction(initiateCallEnd) + callUI.setLocalStream(lStream) + call.answer(lStream.stream) + setCallingState(CallingState.True) + }) + .catch(e => { + warn("Audio mediadevice request error:", e) + onCallEnd() }); }).catch(); // in case of Confirm.remove() without any confirmation }); From e3d8807cc75b275c63d6de3edad24a24e3662c36 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 18 Oct 2021 13:02:28 +0200 Subject: [PATCH 069/218] fix(backend-pg): no-rows to a project query response is an error --- backend/pkg/db/cache/messages_common.go | 49 +++++++++++++++++++++++++ backend/pkg/db/cache/messages_ios.go | 43 ---------------------- backend/pkg/db/cache/project.go | 4 +- backend/pkg/db/postgres/errors.go | 14 ++++--- backend/pkg/db/postgres/project.go | 7 ---- backend/pkg/messages/primitives.go | 2 +- backend/pkg/url/assets/url.go | 2 +- backend/services/http/handlers.go | 4 +- backend/services/http/handlers_ios.go | 4 +- 9 files changed, 65 insertions(+), 64 deletions(-) diff --git a/backend/pkg/db/cache/messages_common.go b/backend/pkg/db/cache/messages_common.go index 0b7d9a885..c05422cb2 100644 --- a/backend/pkg/db/cache/messages_common.go +++ b/backend/pkg/db/cache/messages_common.go @@ -28,3 +28,52 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { } return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash) } + + +func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error { + if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil { + return err + } + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + session.UserID = &userID.Value + return nil +} + +func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error { + if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil { + return err + } + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + session.UserAnonymousID = &userAnonymousID.Value + return nil +} + +func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { + session, err := c.GetSession(sessionID) + if err != nil { + return err + } + project, err := c.GetProject(session.ProjectID) + if err != nil { + return err + } + + keyNo := project.GetMetadataNo(metadata.Key) + + if keyNo == 0 { + // insert project metadata + } + + if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil { + return err + } + + session.SetMetadata(keyNo, metadata.Value) + return nil +} diff --git a/backend/pkg/db/cache/messages_ios.go b/backend/pkg/db/cache/messages_ios.go index 151ffe58e..f630de53d 100644 --- a/backend/pkg/db/cache/messages_ios.go +++ b/backend/pkg/db/cache/messages_ios.go @@ -95,46 +95,3 @@ func (c *PGCache) InsertIOSIssueEvent(sessionID uint64, issueEvent *IOSIssueEven return nil } -func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error { - if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil { - return err - } - session, err := c.GetSession(sessionID) - if err != nil { - return err - } - session.UserID = &userID.Value - return nil -} - -func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error { - if err := 
c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil { - return err - } - session, err := c.GetSession(sessionID) - if err != nil { - return err - } - session.UserAnonymousID = &userAnonymousID.Value - return nil -} - -func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { - session, err := c.GetSession(sessionID) - if err != nil { - return err - } - project, err := c.GetProject(session.ProjectID) - if err != nil { - return err - } - - keyNo := project.GetMetadataNo(metadata.Key) - if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil { - return err - } - - session.SetMetadata(keyNo, metadata.Value) - return nil -} - diff --git a/backend/pkg/db/cache/project.go b/backend/pkg/db/cache/project.go index dacb46633..1411e608b 100644 --- a/backend/pkg/db/cache/project.go +++ b/backend/pkg/db/cache/project.go @@ -11,7 +11,7 @@ func (c *PGCache) GetProjectByKey(projectKey string) (*Project, error) { return c.projectsByKeys[ projectKey ].Project, nil } p, err := c.Conn.GetProjectByKey(projectKey) - if p == nil { + if err != nil { return nil, err } c.projectsByKeys[ projectKey ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } @@ -27,7 +27,7 @@ func (c *PGCache) GetProject(projectID uint32) (*Project, error) { return c.projects[ projectID ].Project, nil } p, err := c.Conn.GetProject(projectID) - if p == nil { + if err != nil { return nil, err } c.projects[ projectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } diff --git a/backend/pkg/db/postgres/errors.go b/backend/pkg/db/postgres/errors.go index 9012bfe6b..a83c8f03a 100644 --- a/backend/pkg/db/postgres/errors.go +++ b/backend/pkg/db/postgres/errors.go @@ -2,15 +2,17 @@ package postgres import ( "errors" - + + "github.com/jackc/pgx/v4" "github.com/jackc/pgconn" "github.com/jackc/pgerrcode" ) func IsPkeyViolation(err error) bool { var pgErr *pgconn.PgError - if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation { - return true - } - return false -} \ No newline at end of file + return errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation +} + +func IsNoRowsErr(err error) bool { + return err == pgx.ErrNoRows +} diff --git a/backend/pkg/db/postgres/project.go b/backend/pkg/db/postgres/project.go index 461db66fb..2eea30662 100644 --- a/backend/pkg/db/postgres/project.go +++ b/backend/pkg/db/postgres/project.go @@ -1,7 +1,6 @@ package postgres import ( - "github.com/jackc/pgx/v4" . 
"openreplay/backend/pkg/db/types" ) @@ -14,9 +13,6 @@ func (conn *Conn) GetProjectByKey(projectKey string) (*Project, error) { `, projectKey, ).Scan(&p.MaxSessionDuration, &p.SampleRate, &p.ProjectID); err != nil { - if err == pgx.ErrNoRows { - err = nil - } return nil, err } return p, nil @@ -36,9 +32,6 @@ func (conn *Conn) GetProject(projectID uint32) (*Project, error) { ).Scan(&p.ProjectKey,&p.MaxSessionDuration, &p.Metadata1, &p.Metadata2, &p.Metadata3, &p.Metadata4, &p.Metadata5, &p.Metadata6, &p.Metadata7, &p.Metadata8, &p.Metadata9, &p.Metadata10); err != nil { - if err == pgx.ErrNoRows { - err = nil - } return nil, err } return p, nil diff --git a/backend/pkg/messages/primitives.go b/backend/pkg/messages/primitives.go index 0c938d2b2..70952eeab 100644 --- a/backend/pkg/messages/primitives.go +++ b/backend/pkg/messages/primitives.go @@ -49,7 +49,7 @@ func ReadUint(reader io.Reader) (uint64, error) { } if b < 0x80 { if i > 9 || i == 9 && b > 1 { - return x, errors.New("overflow") + return x, errors.New("uint overflow") } return x | uint64(b)<>50, 10) } diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 975abe31b..81cd6e9c7 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -57,8 +57,8 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { } p, err := pgconn.GetProjectByKey(*req.ProjectKey) - if p == nil { - if err == nil { + if err != nil { + if postgres.IsNoRowsErr(err) { responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) } else { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 2c874a312..32f4a271a 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -51,8 +51,8 @@ package main // return // } // p, err := pgconn.GetProject(uint32(projectID)) -// if p == nil { -// if err == nil { +// if err != nil { +// if postgres.IsNoRowsErr(err) { // responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) // } else { // responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging From 5523df8fb2a394f6e32424093b28f01016c9c5b6 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 20 Oct 2021 21:55:29 +0200 Subject: [PATCH 070/218] feat (backend-http): enable ios endpoints --- backend/services/http/handlers.go | 2 +- backend/services/http/handlers_ios.go | 295 ++++++++++++++------------ backend/services/http/main.go | 56 ++--- 3 files changed, 192 insertions(+), 161 deletions(-) diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 81cd6e9c7..02b8b0c13 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -14,8 +14,8 @@ import ( gzip "github.com/klauspost/pgzip" "openreplay/backend/pkg/db/postgres" - . "openreplay/backend/pkg/messages" "openreplay/backend/pkg/token" + . 
"openreplay/backend/pkg/messages" ) const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 32f4a271a..110cd2874 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -1,145 +1,176 @@ package main -// const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb +import ( + "encoding/json" + "net/http" + "errors" + "time" + "math/rand" + "strconv" -// func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { -// type request struct { -// // SessionID *string -// EncodedProjectID *uint64 `json:"projectID"` -// TrackerVersion string `json:"trackerVersion"` -// RevID string `json:"revID"` -// UserUUID *string `json:"userUUID"` -// //UserOS string `json"userOS"` //hardcoded 'MacOS' -// UserOSVersion string `json:"userOSVersion"` -// UserDevice string `json:"userDevice"` -// Timestamp uint64 `json:"timestamp"` -// // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac -// // “performances”:{ -// // “activeProcessorCount”:8, -// // “isLowPowerModeEnabled”:0, -// // “orientation”:0, -// // “systemUptime”:585430, -// // “batteryState”:0, -// // “thermalState”:0, -// // “batteryLevel”:0, -// // “processorCount”:8, -// // “physicalMemory”:17179869184 -// // }, -// } -// type response struct { -// Token string `json:"token"` -// ImagesHashList []string `json:"imagesHashList"` -// UserUUID string `json:"userUUID"` -// SESSION_ID uint64 `json:"SESSION_ID"` ///TEMP -// } -// startTime := time.Now() -// req := &request{} -// body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) -// //defer body.Close() -// if err := json.NewDecoder(body).Decode(req); err != nil { -// responseWithError(w, http.StatusBadRequest, err) -// return -// } + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/token" + . 
"openreplay/backend/pkg/messages" +) -// if req.EncodedProjectID == nil { -// responseWithError(w, http.StatusForbidden, errors.New("ProjectID value required")) -// return -// } -// projectID := decodeProjectID(*(req.EncodedProjectID)) -// if projectID == 0 { -// responseWithError(w, http.StatusUnprocessableEntity, errors.New("ProjectID value is invalid")) -// return -// } -// p, err := pgconn.GetProject(uint32(projectID)) -// if err != nil { -// if postgres.IsNoRowsErr(err) { -// responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) -// } else { -// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging -// } -// return -// } -// sessionID, err := flaker.Compose(req.Timestamp) -// if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) -// return -// } -// userUUID := getUUID(req.UserUUID) -// country := geoIP.ExtractISOCodeFromHTTPRequest(r) -// expirationTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) +const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb -// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size -// if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) -// return -// } +func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { + type request struct { + Token string `json:"token"` + ProjectKey *string `json:"projectKey"` + TrackerVersion string `json:"trackerVersion"` + RevID string `json:"revID"` + UserUUID *string `json:"userUUID"` + //UserOS string `json"userOS"` //hardcoded 'MacOS' + UserOSVersion string `json:"userOSVersion"` + UserDevice string `json:"userDevice"` + Timestamp uint64 `json:"timestamp"` + // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac + // “performances”:{ + // “activeProcessorCount”:8, + // “isLowPowerModeEnabled”:0, + // “orientation”:0, + // “systemUptime”:585430, + // “batteryState”:0, + // “thermalState”:0, + // “batteryLevel”:0, + // “processorCount”:8, + // “physicalMemory”:17179869184 + // }, + } + type response struct { + Token string `json:"token"` + ImagesHashList []string `json:"imagesHashList"` + UserUUID string `json:"userUUID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` + SessionID string `json:"sessionID"` + } + startTime := time.Now() + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) + //defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } -// responseWithJSON(w, &response{ -// Token: tokenizer.Compose(sessionID, uint64(expirationTime.UnixNano()/1e6)), -// ImagesHashList: imagesHashList, -// UserUUID: userUUID, -// //TEMP: -// SESSION_ID: sessionID, -// }) -// producer.Produce(topicRaw, sessionID, messages.Encode(&messages.IOSSessionStart{ -// Timestamp: req.Timestamp, -// ProjectID: projectID, -// TrackerVersion: req.TrackerVersion, -// RevID: req.RevID, -// UserUUID: userUUID, -// UserOS: "MacOS", -// UserOSVersion: req.UserOSVersion, -// UserDevice: MapIOSDevice(req.UserDevice), -// UserDeviceType: GetIOSDeviceType(req.UserDevice), // string `json:"userDeviceType"` // From UserDevice; ENUM ? 
-// UserCountry: country, -// })) -// } + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + + p, err := pgconn.GetProjectByKey(*req.ProjectKey) + if err != nil { + if postgres.IsNoRowsErr(err) { + responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) + } else { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } + return + } + userUUID := getUUID(req.UserUUID) + tokenData, err := tokenizer.Parse(req.Token) + + if err != nil { // Starting the new one + dice := byte(rand.Intn(100)) // [0, 100) + if dice >= p.SampleRate { + responseWithError(w, http.StatusForbidden, errors.New("cancel")) + return + } + + ua := uaParser.ParseFromHTTPRequest(r) + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + return + } + sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) + return + } + // TODO: if EXPIRED => send message for two sessions association + expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) + tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} + + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + + // The difference with web is mostly here: + producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&IOSSessionStart{ + Timestamp: req.Timestamp, + ProjectID: uint64(p.ProjectID), + TrackerVersion: req.TrackerVersion, + RevID: req.RevID, + UserUUID: userUUID, + UserOS: "IOS", + UserOSVersion: req.UserOSVersion, + UserDevice: MapIOSDevice(req.UserDevice), + UserDeviceType: GetIOSDeviceType(req.UserDevice), + UserCountry: country, + })) + } + + // imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size + // if err != nil { + // responseWithError(w, http.StatusInternalServerError, err) + // return + // } + + responseWithJSON(w, &response{ + // ImagesHashList: imagesHashList, + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), + BeaconSizeLimit: BEACON_SIZE_LIMIT, + }) +} -// func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) { -// sessionData, err := tokenizer.ParseFromHTTPRequest(r) -// if err != nil && err != token.EXPIRED { -// responseWithError(w, http.StatusUnauthorized, err) -// return -// } -// // Check timestamps here? -// pushMessages(w, r, sessionData.ID) -// } +func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil && err != token.EXPIRED { + responseWithError(w, http.StatusUnauthorized, err) + return + } + // Check timestamps here? 
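+	// One concrete form that check could take, as a sketch only: reject
+	// batches arriving too long after the token expired. `ExpTime` and
+	// `LATE_GRACE_MS` are assumed names here, not identifiers defined by
+	// this patch:
+	//   if time.Now().UnixMilli() > sessionData.ExpTime+LATE_GRACE_MS {
+	//   	responseWithError(w, http.StatusForbidden, errors.New("batch too old"))
+	//   	return
+	//   }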
+ pushMessages(w, r, sessionData.ID) +} -// func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { -// r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) -// // defer r.Body.Close() -// err := r.ParseMultipartForm(1e5) // 100Kb -// if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { -// responseWithError(w, http.StatusUnsupportedMediaType, err) -// // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB -// } else if err != nil { -// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging -// } +func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { // Should accept expired token? + responseWithError(w, http.StatusUnauthorized, err) + return + } -// if len(r.MultipartForm.Value["projectID"]) == 0 { -// responseWithError(w, http.StatusBadRequest, errors.New("projectID parameter required")) // status for missing/wrong parameter? -// return -// } -// // encodedProjectID, err := strconv.ParseUint(r.MultipartForm.Value["projectID"][0], 10, 64) -// // projectID := decodeProjectID(encodedProjectID) -// // if projectID == 0 || err != nil { -// // responseWithError(w, http.StatusUnprocessableEntity, errors.New("projectID value is incorrect")) -// // return -// // } -// prefix := r.MultipartForm.Value["projectID"][0] + "/" //strconv.FormatUint(uint64(projectID), 10) + "/" + r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) + // defer r.Body.Close() + err = r.ParseMultipartForm(1e5) // 100Kb + if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { + responseWithError(w, http.StatusUnsupportedMediaType, err) + // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB + } else if err != nil { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } -// for _, fileHeaderList := range r.MultipartForm.File { -// for _, fileHeader := range fileHeaderList { -// file, err := fileHeader.Open() -// if err != nil { -// continue // TODO: send server error or accumulate successful files -// } -// key := prefix + fileHeader.Filename // TODO: Malicious image put: use jwt? -// go s3.Upload(file, key, "image/png", false) -// } -// } + if len(r.MultipartForm.Value["projectKey"]) == 0 { + responseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter? 
+ return + } -// w.WriteHeader(http.StatusOK) -// } + prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/" + + for _, fileHeaderList := range r.MultipartForm.File { + for _, fileHeader := range fileHeaderList { + file, err := fileHeader.Open() + if err != nil { + continue // TODO: send server error or accumulate successful files + } + key := prefix + fileHeader.Filename + go s3.Upload(file, key, "image/png", false) + } + } + + w.WriteHeader(http.StatusOK) +} diff --git a/backend/services/http/main.go b/backend/services/http/main.go index dc2eb1720..7853dc624 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -100,34 +100,34 @@ func main() { default: w.WriteHeader(http.StatusMethodNotAllowed) } - // case "/v1/ios/start": - // switch r.Method { - // case http.MethodPost: - // startSessionHandlerIOS(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/append": - // switch r.Method { - // case http.MethodPost: - // pushMessagesHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/late": - // switch r.Method { - // case http.MethodPost: - // pushLateMessagesHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } - // case "/v1/ios/images": - // switch r.Method { - // case http.MethodPost: - // iosImagesUploadHandler(w, r) - // default: - // w.WriteHeader(http.StatusMethodNotAllowed) - // } + case "/v1/ios/start": + switch r.Method { + case http.MethodPost: + startSessionHandlerIOS(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/i": + switch r.Method { + case http.MethodPost: + pushMessagesHandler(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/late": + switch r.Method { + case http.MethodPost: + pushLateMessagesHandler(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + case "/v1/ios/images": + switch r.Method { + case http.MethodPost: + iosImagesUploadHandler(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } default: w.WriteHeader(http.StatusNotFound) } From 26a0aad2a3c19d523e7aea68be92c6213b094930 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Tue, 12 Oct 2021 19:17:17 +0530 Subject: [PATCH 071/218] fix(postgres): variable name --- scripts/helm/app/openreplay/templates/deployment.yaml | 4 ++-- scripts/helm/roles/openreplay/templates/chalice.yaml | 6 +++--- scripts/helm/roles/openreplay/templates/db.yaml | 2 +- scripts/helm/roles/openreplay/templates/http.yaml | 2 +- scripts/helm/roles/openreplay/templates/integrations.yaml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/helm/app/openreplay/templates/deployment.yaml b/scripts/helm/app/openreplay/templates/deployment.yaml index 187025b74..da9963fe7 100644 --- a/scripts/helm/app/openreplay/templates/deployment.yaml +++ b/scripts/helm/app/openreplay/templates/deployment.yaml @@ -50,9 +50,9 @@ spec: {{- if eq .Values.pvc.name "hostPath" }} volumeMounts: - mountPath: {{ .Values.pvc.mountPath }} - name: {{ .Values.pvc.name }} + name: datadir volumes: - - name: mydir + - name: datadir hostPath: # Ensure the file directory is created. 
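          # A sketch of one way to guarantee that, assuming hostPath volume
          # types are available in the target cluster: add
          #   type: DirectoryOrCreate
          # next to `path:` below (not something this chart currently sets).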
path: {{ .Values.pvc.hostMountPath }} diff --git a/scripts/helm/roles/openreplay/templates/chalice.yaml b/scripts/helm/roles/openreplay/templates/chalice.yaml index 46de7488b..8b0596f86 100644 --- a/scripts/helm/roles/openreplay/templates/chalice.yaml +++ b/scripts/helm/roles/openreplay/templates/chalice.yaml @@ -17,7 +17,7 @@ env: jwt_secret: "{{ jwt_secret_key }}" pg_host: "{{ postgres_endpoint }}" pg_port: "{{ postgres_port }}" - pg_dbname: "{{ postgres_port }}" + pg_dbname: "{{ postgres_db_name }}" pg_user: "{{ postgres_db_user }}" pg_password: "{{ postgres_db_password }}" EMAIL_HOST: "{{ email_host }}" @@ -29,8 +29,8 @@ env: EMAIL_SSL_KEY: "{{ email_ssl_key }}" EMAIL_SSL_CERT: "{{ email_ssl_cert }}" EMAIL_FROM: "{{ email_from }}" - AWS_DEFAULT_REGION: "{{ aws_default_region }}" - sessions_region: "{{ aws_default_region }}" + AWS_DEFAULT_REGION: "{{ aws_region }}" + sessions_region: "{{ aws_region }}" {% if env is defined and env.chalice is defined and env.chalice%} {{ env.chalice | to_nice_yaml | trim | indent(2) }} {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/db.yaml b/scripts/helm/roles/openreplay/templates/db.yaml index 7456794c8..699843036 100644 --- a/scripts/helm/roles/openreplay/templates/db.yaml +++ b/scripts/helm/roles/openreplay/templates/db.yaml @@ -5,7 +5,7 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" - POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" REDIS_STRING: "{{ redis_endpoint }}" KAFKA_SERVERS: "{{ kafka_endpoint }}" KAFKA_USE_SSL: "{{ kafka_ssl }}" diff --git a/scripts/helm/roles/openreplay/templates/http.yaml b/scripts/helm/roles/openreplay/templates/http.yaml index da7b0979f..1ec67bc0a 100644 --- a/scripts/helm/roles/openreplay/templates/http.yaml +++ b/scripts/helm/roles/openreplay/templates/http.yaml @@ -8,7 +8,7 @@ env: AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" LICENSE_KEY: "{{ enterprise_edition_license }}" AWS_REGION: "{{ aws_region }}" - POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" REDIS_STRING: "{{ redis_endpoint }}" KAFKA_SERVERS: "{{ kafka_endpoint }}" KAFKA_USE_SSL: "{{ kafka_ssl }}" diff --git a/scripts/helm/roles/openreplay/templates/integrations.yaml b/scripts/helm/roles/openreplay/templates/integrations.yaml index 9cc8f8b76..953b9d87f 100644 --- a/scripts/helm/roles/openreplay/templates/integrations.yaml +++ b/scripts/helm/roles/openreplay/templates/integrations.yaml @@ -5,7 +5,7 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" - POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" + POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}" # REDIS_STRING: "{{ redis_endpoint }}" KAFKA_SERVERS: "{{ kafka_endpoint }}" From 91110489fbf4f15b95e8d86c4a874bb1bdfe65d7 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 21 Oct 2021 22:03:33 +0530 Subject: [PATCH 072/218] Squashed commit of the following: chore(env): injecting postgres db name with connection string chore(install): fail if postgresql migration failed fix(variable): templating for 
chalice fix(postgres): variable name chore(vars): overriding variables for - s3 - postgres - aws credentials --- scripts/helm/roles/openreplay/tasks/install-dbs.yaml | 2 +- scripts/helm/roles/openreplay/templates/alerts.yaml | 2 +- scripts/helm/roles/openreplay/templates/assets.yaml | 1 + scripts/helm/roles/openreplay/templates/chalice.yaml | 11 ++++++++++- scripts/helm/roles/openreplay/templates/db.yaml | 2 +- scripts/helm/roles/openreplay/templates/http.yaml | 2 +- .../helm/roles/openreplay/templates/integrations.yaml | 2 +- scripts/helm/roles/openreplay/templates/storage.yaml | 2 ++ scripts/helm/vars.yaml | 3 +++ 9 files changed, 21 insertions(+), 6 deletions(-) diff --git a/scripts/helm/roles/openreplay/tasks/install-dbs.yaml b/scripts/helm/roles/openreplay/tasks/install-dbs.yaml index 443e7a14f..ef7c1ff2f 100644 --- a/scripts/helm/roles/openreplay/tasks/install-dbs.yaml +++ b/scripts/helm/roles/openreplay/tasks/install-dbs.yaml @@ -21,7 +21,7 @@ file="{{ item|basename }}" kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "rm -rf /tmp/$file" kubectl cp -n db $file postgresql-postgresql-0:/tmp/ - kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log + kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -v ON_ERROR_STOP=1 -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log args: chdir: db/init_dbs/postgresql with_fileglob: diff --git a/scripts/helm/roles/openreplay/templates/alerts.yaml b/scripts/helm/roles/openreplay/templates/alerts.yaml index b28a73a53..b2a91832b 100644 --- a/scripts/helm/roles/openreplay/templates/alerts.yaml +++ b/scripts/helm/roles/openreplay/templates/alerts.yaml @@ -5,7 +5,7 @@ image: {% endif %} env: LICENSE_KEY: "{{ enterprise_edition_license }}" - POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}" + POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}/{{ postgres_db_name }}" {% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] diff --git a/scripts/helm/roles/openreplay/templates/assets.yaml b/scripts/helm/roles/openreplay/templates/assets.yaml index 41f898260..740617166 100644 --- a/scripts/helm/roles/openreplay/templates/assets.yaml +++ b/scripts/helm/roles/openreplay/templates/assets.yaml @@ -6,6 +6,7 @@ image: env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" + S3_BUCKET_ASSETS: "{{ assets_bucket }}" LICENSE_KEY: "{{ enterprise_edition_license }}" AWS_ENDPOINT: "{{ s3_endpoint }}" AWS_REGION: "{{ aws_region }}" diff --git a/scripts/helm/roles/openreplay/templates/chalice.yaml b/scripts/helm/roles/openreplay/templates/chalice.yaml index 8b0596f86..68c1cf9ff 100644 --- a/scripts/helm/roles/openreplay/templates/chalice.yaml +++ b/scripts/helm/roles/openreplay/templates/chalice.yaml @@ -12,7 +12,6 @@ env: S3_SECRET: "{{ minio_secret_key }}" sourcemaps_bucket_key: "{{ minio_access_key }}" sourcemaps_bucket_secret: "{{ minio_secret_key }}" - S3_HOST: "https://{{ domain_name }}" SITE_URL: "https://{{ domain_name }}" jwt_secret: "{{ jwt_secret_key }}" pg_host: "{{ postgres_endpoint }}" @@ -31,6 +30,16 @@ env: EMAIL_FROM: "{{ email_from }}" AWS_DEFAULT_REGION: "{{ aws_region }}" sessions_region: "{{ 
aws_region }}"
+  sessions_bucket: "{{ recordings_bucket }}"
+  sourcemaps_bucket: "{{ sourcemaps_bucket }}"
+  js_cache_bucket: "{{ assets_bucket }}"
+  # In case of minio, the instance is running inside kubernetes,
+  # which is accessible via nginx ingress.
+{% if s3_endpoint == "http://minio.db.svc.cluster.local:9000" %}
+  S3_HOST: "https://{{ domain_name }}"
+{% else %}
+  S3_HOST: "{{ s3_endpoint }}"
+{% endif %}
 {% if env is defined and env.chalice is defined and env.chalice%}
 {{ env.chalice | to_nice_yaml | trim | indent(2) }}
 {% endif %}
diff --git a/scripts/helm/roles/openreplay/templates/db.yaml b/scripts/helm/roles/openreplay/templates/db.yaml
index 699843036..bc128593d 100644
--- a/scripts/helm/roles/openreplay/templates/db.yaml
+++ b/scripts/helm/roles/openreplay/templates/db.yaml
@@ -5,7 +5,7 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
-  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
   REDIS_STRING: "{{ redis_endpoint }}"
   KAFKA_SERVERS: "{{ kafka_endpoint }}"
   KAFKA_USE_SSL: "{{ kafka_ssl }}"
diff --git a/scripts/helm/roles/openreplay/templates/http.yaml b/scripts/helm/roles/openreplay/templates/http.yaml
index 1ec67bc0a..a6f9d86b4 100644
--- a/scripts/helm/roles/openreplay/templates/http.yaml
+++ b/scripts/helm/roles/openreplay/templates/http.yaml
@@ -8,7 +8,7 @@ env:
   AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
   LICENSE_KEY: "{{ enterprise_edition_license }}"
   AWS_REGION: "{{ aws_region }}"
-  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
   REDIS_STRING: "{{ redis_endpoint }}"
   KAFKA_SERVERS: "{{ kafka_endpoint }}"
   KAFKA_USE_SSL: "{{ kafka_ssl }}"
diff --git a/scripts/helm/roles/openreplay/templates/integrations.yaml b/scripts/helm/roles/openreplay/templates/integrations.yaml
index 953b9d87f..f7ea17428 100644
--- a/scripts/helm/roles/openreplay/templates/integrations.yaml
+++ b/scripts/helm/roles/openreplay/templates/integrations.yaml
@@ -5,7 +5,7 @@ image:
 {% endif %}
 env:
   LICENSE_KEY: "{{ enterprise_edition_license }}"
-  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
+  POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
   # REDIS_STRING: "{{ redis_endpoint }}"
   KAFKA_SERVERS: "{{ kafka_endpoint }}"
diff --git a/scripts/helm/roles/openreplay/templates/storage.yaml b/scripts/helm/roles/openreplay/templates/storage.yaml
index 6a70f3a4c..7a4e080d5 100644
--- a/scripts/helm/roles/openreplay/templates/storage.yaml
+++ b/scripts/helm/roles/openreplay/templates/storage.yaml
@@ -10,6 +10,8 @@ env:
   AWS_ENDPOINT: "{{ s3_endpoint }}"
   AWS_REGION_WEB: "{{ aws_region }}"
   AWS_REGION_IOS: "{{ aws_region }}"
+  S3_BUCKET_WEB: "{{ recordings_bucket }}"
+  S3_BUCKET_IOS: "{{ recordings_bucket }}"
   REDIS_STRING: "{{ redis_endpoint }}"
   KAFKA_SERVERS: "{{ kafka_endpoint }}"
   KAFKA_USE_SSL: "{{ kafka_ssl }}"
diff --git a/scripts/helm/vars.yaml b/scripts/helm/vars.yaml
index ca0037b27..098c1ad8f 100644
--- a/scripts/helm/vars.yaml
+++ b/scripts/helm/vars.yaml
@@ -90,6
+90,9 @@ db_resource_override: ## Sane defaults s3_endpoint: "http://minio.db.svc.cluster.local:9000" aws_region: "us-east-1" +assets_bucket: sessions-assets +recordings_bucket: mobs +sourcemaps_bucket: sourcemaps kafka_endpoint: kafka.db.svc.cluster.local:9042 kafka_ssl: false postgres_endpoint: postgresql.db.svc.cluster.local From 4821391af9475ecee658a63538401988de1bd891 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 22 Oct 2021 15:58:16 +0200 Subject: [PATCH 073/218] dev (backend-http): request log --- backend/services/http/main.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/backend/services/http/main.go b/backend/services/http/main.go index 7853dc624..cac5a2842 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -76,6 +76,9 @@ func main() { return } + log.Printf("Request: %v - %v ", r.Method, r.URL.Path) + + switch r.URL.Path { case "/": w.WriteHeader(http.StatusOK) From 168a840e0866a3a924860beb74205f9e14e726ab Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 22 Oct 2021 17:38:41 +0200 Subject: [PATCH 074/218] feat(api): accelerated session-events search when meta is present --- api/chalicelib/core/sessions.py | 163 ++++++++++++++++++-------------- 1 file changed, 93 insertions(+), 70 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 340733d30..d76b188b0 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -162,6 +162,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False "projectId": project_id, "userId": user_id} with pg_client.PostgresClient() as cur: + ss_constraints = [] extra_constraints = [ cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}), cur.mogrify("s.duration IS NOT NULL", {}) @@ -173,7 +174,96 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id})) events_query_part = "" + if "filters" in data: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + for f in data["filters"]: + if not isinstance(f.get("value"), list): + f["value"] = [f.get("value")] + if len(f["value"]) == 0 or f["value"][0] is None: + continue + filter_type = f["type"].upper() + f["value"] = __get_sql_value_multiple(f["value"]) + if filter_type == sessions_metas.meta_type.USERBROWSER: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_browser {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_os {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_device {op} %(value)s', {"value": f["value"]})) + + elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]: + op = __get_sql_operator_multiple(f["operator"]) + 
extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]})) + ss_constraints.append(cur.mogrify(f'ms.user_country {op} %(value)s', {"value": f["value"]})) + elif filter_type == "duration".upper(): + if len(f["value"]) > 0 and f["value"][0] is not None: + extra_constraints.append( + cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) + ss_constraints.append( + cur.mogrify("ms.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) + if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0: + extra_constraints.append( + cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) + ss_constraints.append( + cur.mogrify("ms.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) + elif filter_type == sessions_metas.meta_type.REFERRER: + # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" + extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + op = __get_sql_operator_multiple(f["operator"]) + extra_constraints.append( + cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]})) + elif filter_type == events.event_type.METADATA.ui_type: + op = __get_sql_operator(f["operator"]) + if f.get("key") in meta_keys.keys(): + extra_constraints.append( + cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})) + ss_constraints.append( + cur.mogrify(f"ms.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})) + elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + cur.mogrify(f"s.user_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.user_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID, + sessions_metas.meta_type.USERANONYMOUSID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + cur.mogrify(f"s.user_anonymous_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.user_anonymous_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]: + op = __get_sql_operator(f["operator"]) + extra_constraints.append( + cur.mogrify(f"s.rev_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + ss_constraints.append( + cur.mogrify(f"ms.rev_id {op} %(value)s", + {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) + ) + + # --------------------------------------------------------------------------- if len(data.get("events", [])) > 0: + ss_constraints = [s.decode('UTF-8') for s in ss_constraints] events_query_from = [] event_index = 0 @@ -279,7 +369,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False FROM sessions WHERE EXISTS(SELECT session_id FROM {event_from} - WHERE {" AND ".join(event_where)} + WHERE {" AND ".join(event_where + ss_constraints)} AND sessions.session_id=ms.session_id) IS FALSE AND project_id = %(projectId)s AND start_ts >= %(startDate)s 
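The filters hunk above is the core of this patch: every session-level predicate is now appended twice, once against the outer sessions alias (`s`) and once against a second alias (`ms`), so the same predicate can be pushed into the per-event EXISTS subqueries further down. A minimal, runnable sketch of the pattern, with a hypothetical `mirror` helper standing in for the repeated `cur.mogrify(...)` pairs the patch inlines:

    # Each filter is emitted for two aliases: "s" (outer sessions scan) and
    # "ms" (the alias used by the correlated EXISTS subqueries).
    def mirror(column, op, extra_constraints, ss_constraints):
        # Hypothetical helper; the real code builds both strings via cur.mogrify().
        extra_constraints.append(f"s.{column} {op} %(value)s")
        ss_constraints.append(f"ms.{column} {op} %(value)s")

    extra, ss = [], []
    mirror("user_browser", "IN", extra, ss)
    print(ss)  # ['ms.user_browser IN %(value)s']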
@@ -293,14 +383,14 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False event_0.session_id, event_{event_index - 1}.timestamp AS timestamp, {event_index} AS funnel_step - WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE + WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where + ss_constraints)}) IS FALSE ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ """, {**generic_args, **event_args}).decode('UTF-8')) else: events_query_from.append(cur.mogrify(f"""\ (SELECT main.session_id, MIN(timestamp) AS timestamp,{event_index} AS funnel_step FROM {event_from} - WHERE {" AND ".join(event_where)} + WHERE {" AND ".join(event_where + ss_constraints)} GROUP BY 1 ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ """, {**generic_args, **event_args}).decode('UTF-8')) @@ -316,73 +406,6 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False else: data["events"] = [] - # --------------------------------------------------------------------------- - if "filters" in data: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - for f in data["filters"]: - if not isinstance(f.get("value"), list): - f["value"] = [f.get("value")] - if len(f["value"]) == 0 or f["value"][0] is None: - continue - filter_type = f["type"].upper() - f["value"] = __get_sql_value_multiple(f["value"]) - if filter_type == sessions_metas.meta_type.USERBROWSER: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]})) - elif filter_type == "duration".upper(): - if len(f["value"]) > 0 and f["value"][0] is not None: - extra_constraints.append( - cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) - if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0: - extra_constraints.append( - cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) - elif filter_type == sessions_metas.meta_type.REFERRER: - # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]})) - elif filter_type == events.event_type.METADATA.ui_type: - op = __get_sql_operator(f["operator"]) - if f.get("key") in meta_keys.keys(): - extra_constraints.append( - cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif 
filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.user_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID, - sessions_metas.meta_type.USERANONYMOUSID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.user_anonymous_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.rev_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - # --------------------------------------------------------------------------- if data.get("startDate") is not None: From 45091edd1425e4c6431c215559888c0c04b056e4 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 22 Oct 2021 19:06:07 +0200 Subject: [PATCH 075/218] feat(backend-http): Content-Type header for json responses --- backend/services/http/response.go | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/services/http/response.go b/backend/services/http/response.go index ffd22875e..11d9b328d 100644 --- a/backend/services/http/response.go +++ b/backend/services/http/response.go @@ -11,6 +11,7 @@ func responseWithJSON(w http.ResponseWriter, res interface{}) { if err != nil { log.Println(err) } + w.Header().Set("Content-Type", "application/json") w.Write(body) } From f497978f5e1868b80effa80dd6aeaed762f553a5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 22 Oct 2021 23:49:01 +0200 Subject: [PATCH 076/218] feat(api): accelerated sessions multievent search fix --- api/chalicelib/core/sessions.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index d76b188b0..884e69c57 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -358,7 +358,8 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False else: continue - + if event_index == 0: + event_where += ss_constraints if is_not: if event_index == 0: events_query_from.append(cur.mogrify(f"""\ @@ -369,7 +370,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False FROM sessions WHERE EXISTS(SELECT session_id FROM {event_from} - WHERE {" AND ".join(event_where + ss_constraints)} + WHERE {" AND ".join(event_where)} AND sessions.session_id=ms.session_id) IS FALSE AND project_id = %(projectId)s AND start_ts >= %(startDate)s @@ -383,14 +384,14 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False event_0.session_id, event_{event_index - 1}.timestamp AS timestamp, {event_index} AS funnel_step - WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where + ss_constraints)}) IS FALSE + WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ """, {**generic_args, **event_args}).decode('UTF-8')) else: events_query_from.append(cur.mogrify(f"""\ (SELECT main.session_id, MIN(timestamp) AS timestamp,{event_index} AS funnel_step FROM {event_from} - WHERE {" AND ".join(event_where + ss_constraints)} + WHERE {" AND ".join(event_where)} GROUP BY 1 ) AS event_{event_index} 
{"ON(TRUE)" if event_index > 0 else ""}\ """, {**generic_args, **event_args}).decode('UTF-8')) From 33719cbc5ae09fe32941986bb3158c95b0d3f200 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 25 Oct 2021 14:25:22 +0200 Subject: [PATCH 077/218] feat(db): conditional DB creation --- .../db/init_dbs/postgresql/init_schema.sql | 1683 +++++++++-------- 1 file changed, 859 insertions(+), 824 deletions(-) diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index a2b0c72bd..8458199cb 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1,8 +1,8 @@ BEGIN; --- --- public.sql --- +-- Schemas and functions definitions: +CREATE SCHEMA IF NOT EXISTS events_common; +CREATE SCHEMA IF NOT EXISTS events; -CREATE EXTENSION IF NOT EXISTS pg_trgm; -CREATE EXTENSION IF NOT EXISTS pgcrypto; -- --- accounts.sql --- CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS @@ -23,799 +23,7 @@ begin end; $$ LANGUAGE plpgsql; - - -CREATE TABLE public.tenants -( - tenant_id integer NOT NULL DEFAULT 1, - user_id text NOT NULL DEFAULT generate_api_key(20), - name text NOT NULL, - api_key text NOT NULL DEFAULT generate_api_key(20), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - edition varchar(3) NOT NULL, - version_number text NOT NULL, - license text NULL, - opt_out bool NOT NULL DEFAULT FALSE, - t_projects integer NOT NULL DEFAULT 1, - t_sessions bigint NOT NULL DEFAULT 0, - t_users integer NOT NULL DEFAULT 1, - t_integrations integer NOT NULL DEFAULT 0, - CONSTRAINT onerow_uni CHECK (tenant_id = 1) -); - -CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); - -CREATE TABLE users -( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - "speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": 
true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, - api_key text UNIQUE default generate_api_key(20) not null, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT '{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE -); - -CREATE TABLE basic_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, - changed_at timestamp, - UNIQUE (user_id) -); - -CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); -CREATE TABLE oauth_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - provider oauth_provider NOT NULL, - provider_user_id text NOT NULL, - token text NOT NULL, - UNIQUE (user_id, provider) -); - --- --- projects.sql --- - -CREATE TABLE projects -( - project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), - name text NOT NULL, - active boolean NOT NULL, - sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - max_session_duration integer NOT NULL DEFAULT 7200000, - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL, - gdpr jsonb NOT NULL DEFAULT '{ - "maskEmails": true, - "sampleRate": 33, - "maskNumbers": false, - "defaultInputMode": "plain" - }'::jsonb -- ?????? 
-); - -CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS -$$ -BEGIN - PERFORM pg_notify('project', row_to_json(NEW)::text); - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update - AFTER INSERT OR UPDATE - ON projects - FOR EACH ROW -EXECUTE PROCEDURE notify_project(); - --- --- alerts.sql --- - -CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); - -CREATE TABLE alerts -( - alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - name text NOT NULL, - description text NULL DEFAULT NULL, - active boolean NOT NULL DEFAULT TRUE, - detection_method alert_detection_method NOT NULL, - query jsonb NOT NULL, - deleted_at timestamp NULL DEFAULT NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{ - "renotifyInterval": 1440 - }'::jsonb -); - - -CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS -$$ -DECLARE - clone jsonb; -BEGIN - clone = to_jsonb(NEW); - clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); - IF NEW.deleted_at NOTNULL THEN - clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); - END IF; - PERFORM pg_notify('alert', clone::text); - RETURN NEW; -END ; -$$ LANGUAGE plpgsql; - - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON alerts - FOR EACH ROW -EXECUTE PROCEDURE notify_alert(); - --- --- webhooks.sql --- - -create type webhook_type as enum ('webhook', 'slack', 'email'); - -create table webhooks -( - webhook_id integer generated by default as identity - constraint webhooks_pkey - primary key, - endpoint text not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - auth_header text, - type webhook_type not null, - index integer default 0 not null, - name varchar(100) -); - --- --- notifications.sql --- - -CREATE TABLE notifications -( - notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id integer REFERENCES users (user_id) ON DELETE CASCADE, - title text NOT NULL, - description text NOT NULL, - button_text varchar(80) NULL, - button_url text NULL, - image_url text NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{}'::jsonb -); - -CREATE INDEX notifications_user_id_index ON notifications (user_id); -CREATE INDEX notifications_created_at_index ON notifications (created_at DESC); -CREATE INDEX notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); - -CREATE TABLE user_viewed_notifications -( - user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, - notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, - constraint user_viewed_notifications_pkey primary key (user_id, notification_id) -); - --- --- funnels.sql --- - -CREATE TABLE funnels -( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text not null, - filter jsonb not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False -); - -CREATE INDEX ON public.funnels 
(user_id, is_public); - --- --- announcements.sql --- - -create type announcement_type as enum ('notification', 'alert'); - -create table announcements -( - announcement_id serial not null - constraint announcements_pk - primary key, - title text not null, - description text not null, - button_text varchar(30), - button_url text, - image_url text, - created_at timestamp default timezone('utc'::text, now()) not null, - type announcement_type default 'notification'::announcement_type not null -); - --- --- integrations.sql --- - -CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); -CREATE TABLE integrations -( - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - provider integration_provider NOT NULL, - options jsonb NOT NULL, - request_data jsonb NOT NULL DEFAULT '{}'::jsonb, - PRIMARY KEY (project_id, provider) -); - -CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS -$$ -BEGIN - IF NEW IS NULL THEN - PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); - ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN - PERFORM pg_notify('integration', row_to_json(NEW)::text); - END IF; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON integrations - FOR EACH ROW -EXECUTE PROCEDURE notify_integration(); - - -create table jira_cloud -( - user_id integer not null - constraint jira_cloud_pk - primary key - constraint jira_cloud_users_fkey - references users - on delete cascade, - username text not null, - token text not null, - url text -); - --- --- issues.sql --- - -CREATE TYPE issue_type AS ENUM ( - 'click_rage', - 'dead_click', - 'excessive_scrolling', - 'bad_request', - 'missing_resource', - 'memory', - 'cpu', - 'slow_resource', - 'slow_page_load', - 'crash', - 'ml_cpu', - 'ml_memory', - 'ml_dead_click', - 'ml_click_rage', - 'ml_mouse_thrashing', - 'ml_excessive_scrolling', - 'ml_slow_resources', - 'custom', - 'js_exception' - ); - -CREATE TABLE issues -( - issue_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - type issue_type NOT NULL, - context_string text NOT NULL, - context jsonb DEFAULT NULL -); -CREATE INDEX ON issues (issue_id, type); -CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); -CREATE INDEX issues_project_id_idx ON issues (project_id); - --- --- errors.sql --- - -CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); -CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); -CREATE TABLE errors -( - error_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - source error_source NOT NULL, - name text DEFAULT NULL, - message text NOT NULL, - payload jsonb NOT NULL, - status error_status NOT NULL DEFAULT 'unresolved', - parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, - stacktrace jsonb, --to save the stacktrace and not query S3 another time - stacktrace_parsed_at timestamp -); -CREATE INDEX ON errors (project_id, source); -CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); -CREATE INDEX errors_name_gin_idx ON public.errors 
USING GIN (name gin_trgm_ops); -CREATE INDEX errors_project_id_idx ON public.errors (project_id); -CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); -CREATE INDEX errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception'; -CREATE INDEX errors_project_id_error_id_idx ON public.errors (project_id, error_id); -CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception'; - -CREATE TABLE user_favorite_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); - -CREATE TABLE user_viewed_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); -CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); -CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - - --- --- sessions.sql --- -CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); -CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); -CREATE TYPE platform AS ENUM ('web','ios','android'); - -CREATE TABLE sessions -( - session_id bigint PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - tracker_version text NOT NULL, - start_ts bigint NOT NULL, - duration integer NULL, - rev_id text DEFAULT NULL, - platform platform NOT NULL DEFAULT 'web', - is_snippet boolean NOT NULL DEFAULT FALSE, - user_id text DEFAULT NULL, - user_anonymous_id text DEFAULT NULL, - user_uuid uuid NOT NULL, - user_agent text DEFAULT NULL, - user_os text NOT NULL, - user_os_version text DEFAULT NULL, - user_browser text DEFAULT NULL, - user_browser_version text DEFAULT NULL, - user_device text NOT NULL, - user_device_type device_type NOT NULL, - user_device_memory_size integer 
DEFAULT NULL, - user_device_heap_size bigint DEFAULT NULL, - user_country country NOT NULL, - pages_count integer NOT NULL DEFAULT 0, - events_count integer NOT NULL DEFAULT 0, - errors_count integer NOT NULL DEFAULT 0, - watchdogs_score bigint NOT NULL DEFAULT 0, - issue_score bigint NOT NULL DEFAULT 0, - issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL --- , --- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL -); -CREATE INDEX ON sessions (project_id, start_ts); -CREATE INDEX ON sessions (project_id, user_id); -CREATE INDEX ON sessions (project_id, user_anonymous_id); -CREATE INDEX ON sessions (project_id, user_device); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX ON sessions (project_id, metadata_1); -CREATE INDEX ON sessions (project_id, metadata_2); -CREATE INDEX ON sessions (project_id, metadata_3); -CREATE INDEX ON sessions (project_id, metadata_4); -CREATE INDEX ON sessions (project_id, metadata_5); -CREATE INDEX ON sessions (project_id, metadata_6); -CREATE INDEX ON sessions (project_id, metadata_7); -CREATE INDEX ON sessions (project_id, metadata_8); -CREATE INDEX ON sessions (project_id, metadata_9); -CREATE INDEX ON sessions (project_id, metadata_10); --- CREATE INDEX ON sessions (rehydration_id); -CREATE INDEX ON sessions (project_id, watchdogs_score DESC); -CREATE INDEX platform_idx ON public.sessions (platform); - -CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); -CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); -CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); -CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); -CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); -CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); -CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); -CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); -CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); -CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); -CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); -CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); -CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); -CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); -CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); -CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0; 
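-- The WHERE clauses on the indexes above and below make them partial
-- indexes: only rows satisfying the predicate are indexed, so the index
-- stays small and the planner can use it whenever a query repeats the same
-- predicate (here: finished sessions, duration > 0). A hypothetical example
-- of the same pattern, not part of this schema:
--   CREATE INDEX sessions_live_idx ON sessions (project_id) WHERE duration IS NULL;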
-CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; -CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0; -CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; -CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; -CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; - -ALTER TABLE public.sessions - ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_browser ISNULL)); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_agent_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_agent ISNULL)); - - - -CREATE TABLE user_viewed_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - -CREATE TABLE user_favorite_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - - --- --- assignments.sql --- - -create table assigned_sessions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - issue_id text NOT NULL, - provider oauth_provider NOT NULL, - created_by integer NOT NULL, - created_at timestamp default timezone('utc'::text, now()) NOT NULL, - provider_data jsonb default '{}'::jsonb NOT NULL -); -CREATE INDEX ON assigned_sessions (session_id); - --- --- events_common.sql --- - -CREATE SCHEMA events_common; - -CREATE TYPE events_common.custom_level AS ENUM ('info','error'); - -CREATE TABLE events_common.customs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - name text NOT NULL, - payload jsonb NOT NULL, - level events_common.custom_level NOT NULL DEFAULT 'info', - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.customs (name); -CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); -CREATE INDEX ON events_common.customs (timestamp); - - -CREATE TABLE events_common.issues -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, - payload jsonb DEFAULT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); - - -CREATE TABLE events_common.requests -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - url text NOT NULL, - duration integer NOT NULL, - success boolean NOT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.requests (url); -CREATE INDEX ON events_common.requests (duration); -CREATE INDEX requests_url_gin_idx ON 
events_common.requests USING GIN (url gin_trgm_ops); -CREATE INDEX ON events_common.requests (timestamp); -CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); - -- --- events.sql --- -CREATE SCHEMA events; - -CREATE TABLE events.pages -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - host text NOT NULL, - path text NOT NULL, - base_path text NOT NULL, - referrer text DEFAULT NULL, - base_referrer text DEFAULT NULL, - dom_building_time integer DEFAULT NULL, - dom_content_loaded_time integer DEFAULT NULL, - load_time integer DEFAULT NULL, - first_paint_time integer DEFAULT NULL, - first_contentful_paint_time integer DEFAULT NULL, - speed_index integer DEFAULT NULL, - visually_complete integer DEFAULT NULL, - time_to_interactive integer DEFAULT NULL, - response_time bigint DEFAULT NULL, - response_end bigint DEFAULT NULL, - ttfb integer DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.pages (session_id); -CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); -CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); -CREATE INDEX ON events.pages (timestamp); -CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); -CREATE INDEX pages_base_path_idx ON events.pages (base_path); -CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); -CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); -CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - (CASE - WHEN base_referrer LIKE 'http://%' - THEN 7 - WHEN base_referrer LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); -CREATE INDEX ON events.pages (response_time); -CREATE INDEX ON events.pages (response_end); -CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); -CREATE INDEX pages_path_idx ON events.pages (path); -CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; -CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; -CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; -CREATE INDEX pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0; -CREATE INDEX pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0; -CREATE INDEX pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0; -CREATE INDEX pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0; -CREATE INDEX pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0; -CREATE INDEX pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL; -CREATE INDEX pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL; -CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR first_paint_time > 0 OR - dom_content_loaded_time > 0 OR ttfb 
> 0 OR - time_to_interactive > 0; -CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; -CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; -CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); -CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; - - -CREATE TABLE events.clicks -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, - selector text DEFAULT '' NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.clicks (session_id); -CREATE INDEX ON events.clicks (label); -CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); -CREATE INDEX ON events.clicks (timestamp); -CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); -CREATE INDEX clicks_url_idx ON events.clicks (url); -CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); -CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); - - -CREATE TABLE events.inputs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.inputs (session_id); -CREATE INDEX ON events.inputs (label, value); -CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); -CREATE INDEX inputs_label_idx ON events.inputs (label); -CREATE INDEX ON events.inputs (timestamp); -CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); - -CREATE TABLE events.errors -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.errors (session_id); -CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id); -CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp); -CREATE INDEX errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id); -CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); - -CREATE TABLE events.graphql -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.graphql (name); -CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.graphql (timestamp); - -CREATE TABLE events.state_actions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.state_actions (name); -CREATE 
INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.state_actions (timestamp); - -CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); -CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); -CREATE TABLE events.resources -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - duration bigint NULL, - type events.resource_type NOT NULL, - url text NOT NULL, - url_host text NOT NULL, - url_hostpath text NOT NULL, - success boolean NOT NULL, - status smallint NULL, - method events.resource_method NULL, - ttfb bigint NULL, - header_size bigint NULL, - encoded_body_size integer NULL, - decoded_body_size integer NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.resources (session_id); -CREATE INDEX ON events.resources (status); -CREATE INDEX ON events.resources (type); -CREATE INDEX ON events.resources (duration) WHERE duration > 0; -CREATE INDEX ON events.resources (url_host); - -CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); -CREATE INDEX resources_url_idx ON events.resources (url); -CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); -CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); -CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL; -CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp); -CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type); -CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch'; -CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE; -CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script'); -CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img'; -CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id); - -CREATE TABLE events.performance -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - message_id bigint NOT NULL, - min_fps smallint NOT NULL, - avg_fps smallint NOT NULL, - max_fps smallint NOT NULL, - min_cpu smallint NOT NULL, - avg_cpu smallint NOT NULL, - max_cpu smallint NOT NULL, - min_total_js_heap_size bigint NOT NULL, - avg_total_js_heap_size bigint NOT NULL, - max_total_js_heap_size bigint NOT NULL, - min_used_js_heap_size bigint NOT NULL, - avg_used_js_heap_size bigint NOT NULL, - max_used_js_heap_size bigint NOT NULL, - PRIMARY KEY (session_id, message_id) -); - CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS $$ @@ -840,39 +48,866 @@ BEGIN END; $$ LANGUAGE plpgsql IMMUTABLE; +-- --- integrations.sql --- + +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM 
pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +-- --- alerts.sql --- + +CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS +$$ +DECLARE + clone jsonb; +BEGIN + clone = to_jsonb(NEW); + clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); + IF NEW.deleted_at NOTNULL THEN + clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); + END IF; + PERFORM pg_notify('alert', clone::text); + RETURN NEW; +END ; +$$ LANGUAGE plpgsql; + +-- --- projects.sql --- + +CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS +$$ +BEGIN + PERFORM pg_notify('project', row_to_json(NEW)::text); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- All tables and types: + +DO +$$ + BEGIN + IF EXISTS(SELECT + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = 'tenants') THEN + raise notice 'DB exists, skipping creation query'; + ELSE + raise notice 'Creating DB'; + + -- --- public.sql --- + + CREATE EXTENSION IF NOT EXISTS pg_trgm; + CREATE EXTENSION IF NOT EXISTS pgcrypto; +-- --- accounts.sql --- + + CREATE TABLE IF NOT EXISTS public.tenants + ( + tenant_id integer NOT NULL DEFAULT 1, + user_id text NOT NULL DEFAULT generate_api_key(20), + name text NOT NULL, + api_key text NOT NULL DEFAULT generate_api_key(20), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + edition varchar(3) NOT NULL, + version_number text NOT NULL, + license text NULL, + opt_out bool NOT NULL DEFAULT FALSE, + t_projects integer NOT NULL DEFAULT 1, + t_sessions bigint NOT NULL DEFAULT 0, + t_users integer NOT NULL DEFAULT 1, + t_integrations integer NOT NULL DEFAULT 0, + CONSTRAINT onerow_uni CHECK (tenant_id = 1) + ); + + CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); + + CREATE TABLE users + ( + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + appearance jsonb NOT NULL default '{ + "role": "dev", + "dashboard": { + "cpu": true, + "fps": false, + "avgCpu": true, + "avgFps": true, + "errors": true, + "crashes": true, + "overview": true, + "sessions": true, + "topMetrics": true, + "callsErrors": true, + "pageMetrics": true, + "performance": true, + "timeToRender": false, + "userActivity": false, + "avgFirstPaint": false, + "countSessions": true, + "errorsPerType": true, + "slowestImages": true, + "speedLocation": true, + "slowestDomains": true, + "avgPageLoadTime": true, + "avgTillFirstBit": false, + "avgTimeToRender": true, + "avgVisitedPages": false, + "avgImageLoadTime": true, + "busiestTimeOfDay": true, + "errorsPerDomains": true, + "missingResources": true, + "resourcesByParty": true, + "sessionsFeedback": false, + "slowestResources": true, + "avgUsedJsHeapSize": true, + "domainsErrors_4xx": true, + "domainsErrors_5xx": true, + "memoryConsumption": true, + "pagesDomBuildtime": false, + "pagesResponseTime": true, + "avgRequestLoadTime": true, + "avgSessionDuration": false, + "sessionsPerBrowser": false, + "applicationActivity": true, + "sessionsFrustration": 
false, + "avgPagesDomBuildtime": true, + "avgPagesResponseTime": false, + "avgTimeToInteractive": true, + "resourcesCountByType": true, + "resourcesLoadingTime": true, + "avgDomContentLoadStart": true, + "avgFirstContentfulPixel": false, + "resourceTypeVsResponseEnd": true, + "impactedSessionsByJsErrors": true, + "impactedSessionsBySlowPages": true, + "resourcesVsVisuallyComplete": true, + "pagesResponseTimeDistribution": true + }, + "sessionsLive": false, + "sessionsDevtools": true + }'::jsonb, + api_key text UNIQUE default generate_api_key(20) not null, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT '{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE + ); + + CREATE TABLE basic_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + password text DEFAULT NULL, + generated_password boolean NOT NULL DEFAULT false, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + changed_at timestamp, + UNIQUE (user_id) + ); + + CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); + CREATE TABLE oauth_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + provider oauth_provider NOT NULL, + provider_user_id text NOT NULL, + token text NOT NULL, + UNIQUE (user_id, provider) + ); + +-- --- projects.sql --- + + CREATE TABLE projects + ( + project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), + name text NOT NULL, + active boolean NOT NULL, + sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + max_session_duration integer NOT NULL DEFAULT 7200000, + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL, + gdpr jsonb NOT NULL DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "plain" + }'::jsonb -- ?????? 
+ ); + + CREATE TRIGGER on_insert_or_update + AFTER INSERT OR UPDATE + ON projects + FOR EACH ROW + EXECUTE PROCEDURE notify_project(); + +-- --- alerts.sql --- + + CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); + + CREATE TABLE alerts + ( + alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + name text NOT NULL, + description text NULL DEFAULT NULL, + active boolean NOT NULL DEFAULT TRUE, + detection_method alert_detection_method NOT NULL, + query jsonb NOT NULL, + deleted_at timestamp NULL DEFAULT NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{ + "renotifyInterval": 1440 + }'::jsonb + ); + + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON alerts + FOR EACH ROW + EXECUTE PROCEDURE notify_alert(); + +-- --- webhooks.sql --- + + create type webhook_type as enum ('webhook', 'slack', 'email'); + + create table webhooks + ( + webhook_id integer generated by default as identity + constraint webhooks_pkey + primary key, + endpoint text not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + auth_header text, + type webhook_type not null, + index integer default 0 not null, + name varchar(100) + ); + +-- --- notifications.sql --- + + CREATE TABLE notifications + ( + notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + user_id integer REFERENCES users (user_id) ON DELETE CASCADE, + title text NOT NULL, + description text NOT NULL, + button_text varchar(80) NULL, + button_url text NULL, + image_url text NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{}'::jsonb + ); + + CREATE INDEX notifications_user_id_index ON notifications (user_id); + CREATE INDEX notifications_created_at_index ON notifications (created_at DESC); + CREATE INDEX notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); + + CREATE TABLE user_viewed_notifications + ( + user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, + notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, + constraint user_viewed_notifications_pkey primary key (user_id, notification_id) + ); + +-- --- funnels.sql --- + + CREATE TABLE funnels + ( + funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + + CREATE INDEX ON public.funnels (user_id, is_public); + +-- --- announcements.sql --- + + create type announcement_type as enum ('notification', 'alert'); + + create table announcements + ( + announcement_id serial not null + constraint announcements_pk + primary key, + title text not null, + description text not null, + button_text varchar(30), + button_url text, + image_url text, + created_at timestamp default timezone('utc'::text, now()) not null, + type announcement_type default 'notification'::announcement_type not null + ); + +-- --- integrations.sql --- + + CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 
'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); + CREATE TABLE integrations + ( + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + provider integration_provider NOT NULL, + options jsonb NOT NULL, + request_data jsonb NOT NULL DEFAULT '{}'::jsonb, + PRIMARY KEY (project_id, provider) + ); + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON integrations + FOR EACH ROW + EXECUTE PROCEDURE notify_integration(); + + + create table jira_cloud + ( + user_id integer not null + constraint jira_cloud_pk + primary key + constraint jira_cloud_users_fkey + references users + on delete cascade, + username text not null, + token text not null, + url text + ); + +-- --- issues.sql --- + + CREATE TYPE issue_type AS ENUM ( + 'click_rage', + 'dead_click', + 'excessive_scrolling', + 'bad_request', + 'missing_resource', + 'memory', + 'cpu', + 'slow_resource', + 'slow_page_load', + 'crash', + 'ml_cpu', + 'ml_memory', + 'ml_dead_click', + 'ml_click_rage', + 'ml_mouse_thrashing', + 'ml_excessive_scrolling', + 'ml_slow_resources', + 'custom', + 'js_exception' + ); + + CREATE TABLE issues + ( + issue_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + type issue_type NOT NULL, + context_string text NOT NULL, + context jsonb DEFAULT NULL + ); + CREATE INDEX ON issues (issue_id, type); + CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); + CREATE INDEX issues_project_id_idx ON issues (project_id); + +-- --- errors.sql --- + + CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); + CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); + CREATE TABLE errors + ( + error_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + source error_source NOT NULL, + name text DEFAULT NULL, + message text NOT NULL, + payload jsonb NOT NULL, + status error_status NOT NULL DEFAULT 'unresolved', + parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, + stacktrace jsonb, --to save the stacktrace and not query S3 another time + stacktrace_parsed_at timestamp + ); + CREATE INDEX ON errors (project_id, source); + CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); + CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); + CREATE INDEX errors_project_id_idx ON public.errors (project_id); + CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); + CREATE INDEX errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception'; + CREATE INDEX errors_project_id_error_id_idx ON public.errors (project_id, error_id); + CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception'; + + CREATE TABLE user_favorite_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + + CREATE TABLE user_viewed_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + CREATE 
INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); + CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + + +-- --- sessions.sql --- + CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); + CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); + CREATE TYPE platform AS ENUM ('web','ios','android'); + + CREATE TABLE sessions + ( + session_id bigint PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + tracker_version text NOT NULL, + start_ts bigint NOT NULL, + duration integer NULL, + rev_id text DEFAULT NULL, + platform platform NOT NULL DEFAULT 'web', + is_snippet boolean NOT NULL DEFAULT FALSE, + user_id text DEFAULT NULL, + user_anonymous_id text DEFAULT NULL, + user_uuid uuid NOT NULL, + user_agent text DEFAULT NULL, + user_os text NOT NULL, + user_os_version text DEFAULT NULL, + user_browser text DEFAULT NULL, + user_browser_version text DEFAULT NULL, + user_device text NOT NULL, + user_device_type device_type NOT NULL, + user_device_memory_size integer DEFAULT NULL, + user_device_heap_size bigint DEFAULT NULL, + user_country country NOT NULL, + pages_count integer NOT NULL DEFAULT 0, + events_count integer NOT NULL DEFAULT 0, + errors_count integer NOT NULL DEFAULT 0, + watchdogs_score bigint NOT NULL DEFAULT 0, + issue_score bigint NOT NULL DEFAULT 0, + issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL +-- , +-- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL + ); + CREATE INDEX ON sessions (project_id, start_ts); + CREATE INDEX ON sessions (project_id, user_id); + CREATE INDEX ON sessions (project_id, user_anonymous_id); + CREATE INDEX ON 
sessions (project_id, user_device); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX ON sessions (project_id, metadata_1); + CREATE INDEX ON sessions (project_id, metadata_2); + CREATE INDEX ON sessions (project_id, metadata_3); + CREATE INDEX ON sessions (project_id, metadata_4); + CREATE INDEX ON sessions (project_id, metadata_5); + CREATE INDEX ON sessions (project_id, metadata_6); + CREATE INDEX ON sessions (project_id, metadata_7); + CREATE INDEX ON sessions (project_id, metadata_8); + CREATE INDEX ON sessions (project_id, metadata_9); + CREATE INDEX ON sessions (project_id, metadata_10); +-- CREATE INDEX ON sessions (rehydration_id); + CREATE INDEX ON sessions (project_id, watchdogs_score DESC); + CREATE INDEX platform_idx ON public.sessions (platform); + + CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); + CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); + CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); + CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); + CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); + CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); + CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); + CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); + CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); + CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); + CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); + CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); + CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); + CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); + CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); + CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0; + CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; + CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0; + CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; + CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL; + CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0; + + ALTER TABLE public.sessions + ADD CONSTRAINT web_browser_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_browser ISNULL)); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_browser_version_constraint CHECK 
( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_agent_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_agent ISNULL)); + + + CREATE TABLE user_viewed_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + CREATE TABLE user_favorite_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + +-- --- assignments.sql --- + + create table assigned_sessions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + issue_id text NOT NULL, + provider oauth_provider NOT NULL, + created_by integer NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + provider_data jsonb default '{}'::jsonb NOT NULL + ); + CREATE INDEX ON assigned_sessions (session_id); + +-- --- events_common.sql --- + + CREATE SCHEMA IF NOT EXISTS events_common; + + CREATE TYPE events_common.custom_level AS ENUM ('info','error'); + + CREATE TABLE events_common.customs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + name text NOT NULL, + payload jsonb NOT NULL, + level events_common.custom_level NOT NULL DEFAULT 'info', + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.customs (name); + CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); + CREATE INDEX ON events_common.customs (timestamp); + + + CREATE TABLE events_common.issues + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, + payload jsonb DEFAULT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) + ); + + + CREATE TABLE events_common.requests + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + url text NOT NULL, + duration integer NOT NULL, + success boolean NOT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) + ); + CREATE INDEX ON events_common.requests (url); + CREATE INDEX ON events_common.requests (duration); + CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); + CREATE INDEX ON events_common.requests (timestamp); + CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE + WHEN url LIKE 'http://%' + THEN 7 + WHEN url LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + +-- --- events.sql --- + CREATE SCHEMA IF NOT EXISTS events; + + CREATE TABLE events.pages + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + host text NOT NULL, + path text NOT NULL, + base_path text NOT NULL, + referrer text DEFAULT NULL, + base_referrer text DEFAULT NULL, + dom_building_time integer DEFAULT NULL, + dom_content_loaded_time integer DEFAULT NULL, + load_time integer DEFAULT NULL, + first_paint_time integer DEFAULT NULL, + first_contentful_paint_time 
integer DEFAULT NULL, + speed_index integer DEFAULT NULL, + visually_complete integer DEFAULT NULL, + time_to_interactive integer DEFAULT NULL, + response_time bigint DEFAULT NULL, + response_end bigint DEFAULT NULL, + ttfb integer DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.pages (session_id); + CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); + CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); + CREATE INDEX ON events.pages (timestamp); + CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); + CREATE INDEX pages_base_path_idx ON events.pages (base_path); + CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); + CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); + CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, + length(base_referrer) - (CASE + WHEN base_referrer LIKE 'http://%' + THEN 7 + WHEN base_referrer LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + CREATE INDEX ON events.pages (response_time); + CREATE INDEX ON events.pages (response_end); + CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); + CREATE INDEX pages_path_idx ON events.pages (path); + CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; + CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; + CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; + CREATE INDEX pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0; + CREATE INDEX pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0; + CREATE INDEX pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0; + CREATE INDEX pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0; + CREATE INDEX pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0; + CREATE INDEX pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL; + CREATE INDEX pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL; + CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR + first_paint_time > 0 OR + dom_content_loaded_time > 0 OR + ttfb > 0 OR + time_to_interactive > 0; + CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; + CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; + CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); + CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + + + CREATE TABLE events.clicks + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, + selector 
text DEFAULT '' NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.clicks (session_id); + CREATE INDEX ON events.clicks (label); + CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); + CREATE INDEX ON events.clicks (timestamp); + CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); + CREATE INDEX clicks_url_idx ON events.clicks (url); + CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); + CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); + + + CREATE TABLE events.inputs + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.inputs (session_id); + CREATE INDEX ON events.inputs (label, value); + CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); + CREATE INDEX inputs_label_idx ON events.inputs (label); + CREATE INDEX ON events.inputs (timestamp); + CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); + + CREATE TABLE events.errors + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.errors (session_id); + CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id); + CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp); + CREATE INDEX errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id); + CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); + + CREATE TABLE events.graphql + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.graphql (name); + CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.graphql (timestamp); + + CREATE TABLE events.state_actions + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + name text NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.state_actions (name); + CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); + CREATE INDEX ON events.state_actions (timestamp); + + CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); + CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); + CREATE TABLE events.resources + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + duration bigint NULL, + type events.resource_type NOT NULL, + url text NOT NULL, + url_host text NOT NULL, + url_hostpath text NOT NULL, + success boolean NOT NULL, + status smallint NULL, + method events.resource_method NULL, + 
ttfb bigint NULL, + header_size bigint NULL, + encoded_body_size integer NULL, + decoded_body_size integer NULL, + PRIMARY KEY (session_id, message_id) + ); + CREATE INDEX ON events.resources (session_id); + CREATE INDEX ON events.resources (status); + CREATE INDEX ON events.resources (type); + CREATE INDEX ON events.resources (duration) WHERE duration > 0; + CREATE INDEX ON events.resources (url_host); + + CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); + CREATE INDEX resources_url_idx ON events.resources (url); + CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); + CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); + CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL; + CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp); + CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type); + CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch'; + CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE; + CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script'); + CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img'; + CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id); + + CREATE TABLE events.performance + ( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + message_id bigint NOT NULL, + min_fps smallint NOT NULL, + avg_fps smallint NOT NULL, + max_fps smallint NOT NULL, + min_cpu smallint NOT NULL, + avg_cpu smallint NOT NULL, + max_cpu smallint NOT NULL, + min_total_js_heap_size bigint NOT NULL, + avg_total_js_heap_size bigint NOT NULL, + max_total_js_heap_size bigint NOT NULL, + min_used_js_heap_size bigint NOT NULL, + avg_used_js_heap_size bigint NOT NULL, + max_used_js_heap_size bigint NOT NULL, + PRIMARY KEY (session_id, message_id) + ); + -- --- autocomplete.sql --- -CREATE TABLE autocomplete -( - value text NOT NULL, - type text NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE -); + CREATE TABLE autocomplete + ( + value text NOT NULL, + type text NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE + ); -CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); -CREATE index autocomplete_project_id_idx ON autocomplete (project_id); -CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); -CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); + CREATE index autocomplete_project_id_idx ON autocomplete (project_id); + CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); + CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); -- --- jobs.sql --- -CREATE TYPE job_status AS ENUM 
('scheduled','running','cancelled','failed','completed');
-CREATE TYPE job_action AS ENUM ('delete_user_data');
-CREATE TABLE jobs
-(
-    job_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
-    description  text       NOT NULL,
-    status       job_status NOT NULL,
-    project_id   integer    NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
-    action       job_action NOT NULL,
-    reference_id text       NOT NULL,
-    created_at   timestamp default timezone('utc'::text, now()) NOT NULL,
-    updated_at   timestamp default timezone('utc'::text, now()) NULL,
-    start_at     timestamp  NOT NULL,
-    errors       text       NULL
-);
-CREATE INDEX ON jobs (status);
-CREATE INDEX ON jobs (start_at);
-CREATE INDEX jobs_project_id_idx ON jobs (project_id);
+        CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
+        CREATE TYPE job_action AS ENUM ('delete_user_data');
+        CREATE TABLE jobs
+        (
+            job_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+            description  text       NOT NULL,
+            status       job_status NOT NULL,
+            project_id   integer    NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+            action       job_action NOT NULL,
+            reference_id text       NOT NULL,
+            created_at   timestamp default timezone('utc'::text, now()) NOT NULL,
+            updated_at   timestamp default timezone('utc'::text, now()) NULL,
+            start_at     timestamp  NOT NULL,
+            errors       text       NULL
+        );
+        CREATE INDEX ON jobs (status);
+        CREATE INDEX ON jobs (start_at);
+        CREATE INDEX jobs_project_id_idx ON jobs (project_id);
-COMMIT;
+
+        raise notice 'DB created';
+    END IF;
+    END;
+
+$$
+LANGUAGE plpgsql;
+
+COMMIT;
\ No newline at end of file

From 3cc3fc27a172d472adc3ed5eaba7eba7df0ddc24 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Mon, 25 Oct 2021 14:57:41 +0200
Subject: [PATCH 078/218] feat(api): count recorded sessions endpoint
 feat(nginx): block public access to count recorded sessions endpoint

---
 api/chalicelib/blueprints/bp_core.py | 5 +++++
 api/chalicelib/core/sessions.py      | 6 ++++++
 2 files changed, 11 insertions(+)

diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py
index 18773f68c..e99d6e297 100644
--- a/api/chalicelib/blueprints/bp_core.py
+++ b/api/chalicelib/blueprints/bp_core.py
@@ -897,3 +897,8 @@ def sessions_live_search(projectId, context):
 def get_heatmaps_by_url(projectId, context):
     data = app.current_request.json_body
     return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
+
+
+@app.route('/general_stats', methods=['GET'], authorizer=None)
+def get_general_stats():
+    return {"data": {"sessions": sessions.count_all()}}
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 884e69c57..31d88ebf2 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -765,3 +765,9 @@ def delete_sessions_by_user_ids(project_id, user_ids):
         cur.execute(query=query)

     return True
+
+
+def count_all():
+    with pg_client.PostgresClient() as cur:
+        cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
+        return cur.fetchone().get("count", 0)
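The /general_stats route above is registered with authorizer=None, i.e. without the usual JWT check, which is why the next three patches deny it at the nginx edge. A minimal sketch of probing it from inside the cluster, where the deny rule does not apply (the service host and port here are hypothetical):

    import requests  # assumes the requests package is available

    # The public ingress answers 403 for this path once patch 079 lands;
    # an in-cluster call bypasses nginx and reaches the API directly.
    resp = requests.get("http://openreplay-chalice.app.svc.cluster.local:8000/general_stats")
    resp.raise_for_status()
    print(resp.json())  # expected shape: {"data": {"sessions": <recorded session count>}}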
From a76d958208a42877bda90df298f3eaefffba7d79 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Mon, 25 Oct 2021 14:57:51 +0200
Subject: [PATCH 079/218] feat(api): count recorded sessions endpoint
 feat(nginx): block public access to count recorded sessions endpoint

---
 .../nginx-ingress/nginx-ingress/templates/configmap.yaml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
index 84cc6337d..47d5e6751 100644
--- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
+++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
@@ -2,7 +2,7 @@ apiVersion: v1
 kind: ConfigMap
 metadata:
   name: nginx
-  namespace: {{ .Release.Namespace }}
+  namespace: { { .Release.Namespace } }
 data:
   location.list: |-
     location /healthz {
       return 200 'OK';
     }
@@ -85,6 +85,9 @@ data:
       proxy_intercept_errors on; # see http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_intercept_errors
       error_page 404 =200 /index.html;
     }
+    location ~*/general_stats {
+      deny all;
+    }
   compression.conf: |-
     # Compression
     gzip on;

From e6d6028a70fd29383b3f0e43402ebfd8d3202890 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Mon, 25 Oct 2021 17:27:45 +0200
Subject: [PATCH 080/218] feat(nginx): block public access to count recorded
 sessions endpoint

---
 .../helm/nginx-ingress/nginx-ingress/templates/configmap.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
index 47d5e6751..81af26bbb 100644
--- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
+++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
@@ -85,7 +85,7 @@ data:
       proxy_intercept_errors on; # see http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_intercept_errors
       error_page 404 =200 /index.html;
     }
-    location ~*/general_stats {
+    location ~* /general_stats {
       deny all;
     }
   compression.conf: |-

From 83389bfda7d4bde25bcb8e913b60a5cae97c354e Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Mon, 25 Oct 2021 17:58:38 +0200
Subject: [PATCH 081/218] feat(nginx): fixed blocking of public access to
 count recorded sessions endpoint

---
 .../nginx-ingress/nginx-ingress/templates/configmap.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
index 81af26bbb..2e6f259a1 100644
--- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
+++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml
@@ -2,9 +2,12 @@ apiVersion: v1
 kind: ConfigMap
 metadata:
   name: nginx
-  namespace: { { .Release.Namespace } }
+  namespace: {{ .Release.Namespace }}
 data:
   location.list: |-
+    location ~* /general_stats {
+      deny all;
+    }
     location /healthz {
       return 200 'OK';
     }
@@ -85,9 +88,6 @@ data:
      proxy_intercept_errors on; # see http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_intercept_errors
      error_page 404 =200 /index.html;
     }
-    location ~* /general_stats {
-      deny all;
-    }
   compression.conf: |-
     # Compression
     gzip on;
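Two things happened alongside the deny rule in this trio of nginx patches: patch 079's formatter also rewrote the Helm template as "namespace: { { .Release.Namespace } }", and with the inner spaces the braces are no longer recognized as a Go-template action, so the namespace would never be substituted; patch 081 reverts that. Patch 080 restores the conventional space between the ~* case-insensitive regex modifier and its pattern (location ~* /general_stats), and patch 081 also moves the deny block to the top of location.list, presumably so the rule is declared ahead of the catch-all proxy locations.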
From d1386460cdc67feefc286903a99ed2646d452520 Mon Sep 17 00:00:00 2001
From: ShiKhu
Date: Tue, 26 Oct 2021 23:15:10 +0200
Subject: [PATCH 082/218] feat (tracker):v3.4.5: capture iframe console;
 simple selector detector; log fixes; code fixes

---
 tracker/tracker/package-lock.json             |  7 +--
 tracker/tracker/package.json                  |  3 +-
 tracker/tracker/src/main/app/index.ts         |  8 ++-
 tracker/tracker/src/main/app/observer.ts      |  8 +--
 tracker/tracker/src/main/index.ts             |  6 +--
 tracker/tracker/src/main/modules/console.ts   | 44 ++++++++++-----
 tracker/tracker/src/main/modules/longtasks.ts |  2 +-
 tracker/tracker/src/main/modules/mouse.ts     | 53 ++++++++++---------
 8 files changed, 76 insertions(+), 55 deletions(-)

diff --git a/tracker/tracker/package-lock.json b/tracker/tracker/package-lock.json
index 8d1c160b5..e1d647441 100644
--- a/tracker/tracker/package-lock.json
+++ b/tracker/tracker/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "@openreplay/tracker",
-  "version": "3.4.1",
+  "version": "3.4.4",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -293,11 +293,6 @@
         "to-fast-properties": "^2.0.0"
       }
     },
-    "@medv/finder": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@medv/finder/-/finder-2.0.0.tgz",
-      "integrity": "sha512-gV4jOsGpiWNDGd8Dw7tod1Fc9Gc7StaOT4oZ/6srHRWtsHU+HYWzmkYsa3Qy/z0e9tY1WpJ9wWdBFGskfbzoug=="
-    },
     "@nodelib/fs.scandir": {
       "version": "2.1.3",
       "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz",
diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json
index 41934717c..808aef967 100644
--- a/tracker/tracker/package.json
+++ b/tracker/tracker/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker",
   "description": "The OpenReplay tracker main package",
-  "version": "3.4.4",
+  "version": "3.4.5",
   "keywords": [
     "logging",
     "replay"
@@ -38,7 +38,6 @@
     "typescript": "^4.3.4"
   },
   "dependencies": {
-    "@medv/finder": "^2.0.0",
     "error-stack-parser": "^2.0.6"
   },
   "engines": {
diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts
index b50bb5731..d23094dfc 100644
--- a/tracker/tracker/src/main/app/index.ts
+++ b/tracker/tracker/src/main/app/index.ts
@@ -131,7 +131,7 @@ export default class App {
       });
     }
     if(this.options.__debug_log) {
-      warn("OpenReplay errror: ", context, e)
+      warn("OpenReplay error: ", context, e)
     }
   }
@@ -153,9 +153,13 @@
     }
   }

-  addCommitCallback(cb: CommitCallback): void {
+  attachCommitCallback(cb: CommitCallback): void {
     this.commitCallbacks.push(cb)
   }
+  // @Deprecated (TODO: remove in 3.5.*)
+  addCommitCallback(cb: CommitCallback): void {
+    this.attachCommitCallback(cb)
+  }

   safe<T extends (...args: any[]) => void>(fn: T): T {
diff --git a/tracker/tracker/src/main/app/observer.ts b/tracker/tracker/src/main/app/observer.ts
index 493c7aaac..3fad12c60 100644
--- a/tracker/tracker/src/main/app/observer.ts
+++ b/tracker/tracker/src/main/app/observer.ts
@@ -411,11 +411,13 @@
   private iframeObservers: Observer[] = [];
   private handleIframe(iframe: HTMLIFrameElement): void {
+    let context: Window | null = null
     const handle = () => {
-      const context = iframe.contentWindow as Window | null
       const id = this.app.nodes.getID(iframe)
-      if (!context || id === undefined) { return }
-
+      if (id === undefined) { return }
+      if (iframe.contentWindow === context) { return }
+      context = iframe.contentWindow as Window | null;
+      if (!context) { return }
       const observer = new Observer(this.app, this.options, context)
       this.iframeObservers.push(observer)
       observer.observeIframe(id, context)
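Two details in the tracker changes above are easy to miss: addCommitCallback survives as a deprecated alias for the renamed attachCommitCallback, so existing integrations keep compiling until 3.5; and handleIframe now caches the last contentWindow it saw, creating a fresh Observer only when the iframe's window object actually changed (a reload swaps it), instead of re-observing on every handle invocation.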
diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts
index ca0dd9208..b9fdad6cc 100644
--- a/tracker/tracker/src/main/index.ts
+++ b/tracker/tracker/src/main/index.ts
@@ -23,11 +23,11 @@
 import { Options as AppOptions } from './app';
 import { Options as ConsoleOptions } from './modules/console';
 import { Options as ExceptionOptions } from './modules/exception';
 import { Options as InputOptions } from './modules/input';
-import { Options as MouseOptions } from './modules/mouse';
 import { Options as PerformanceOptions } from './modules/performance';
 import { Options as TimingOptions } from './modules/timing';
+
 export type Options = Partial<
-  AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & MouseOptions & PerformanceOptions & TimingOptions
+  AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & PerformanceOptions & TimingOptions
 > & {
   projectID?: number; // For the back compatibility only (deprecated)
   projectKey: string;
@@ -98,7 +98,7 @@
     Exception(this.app, options);
     Img(this.app);
     Input(this.app, options);
-    Mouse(this.app, options);
+    Mouse(this.app);
     Timing(this.app, options);
     Performance(this.app, options);
     Scroll(this.app);
diff --git a/tracker/tracker/src/main/modules/console.ts b/tracker/tracker/src/main/modules/console.ts
index 34be0264a..e625961a7 100644
--- a/tracker/tracker/src/main/modules/console.ts
+++ b/tracker/tracker/src/main/modules/console.ts
@@ -110,7 +110,7 @@ export default function (app: App, opts: Partial<Options>): void {
     return;
   }

-  const sendConsoleLog = app.safe((level: string, args: any[]): void =>
+  const sendConsoleLog = app.safe((level: string, args: unknown[]): void =>
     app.send(new ConsoleLog(level, printf(args))),
   );
@@ -121,18 +121,36 @@
   app.attachStartCallback(reset);
   app.ticker.attach(reset, 33, false);

-  options.consoleMethods.forEach((method) => {
-    if (consoleMethods.indexOf(method) === -1) {
-      console.error(`OpenReplay: unsupported console method "${method}"`);
-      return;
-    }
-    const fn = (console as any)[method];
-    (console as any)[method] = function (...args: any[]): void {
-      fn.apply(this, args);
-      if (n++ > options.consoleThrottling) {
+  const patchConsole = (console: Console) =>
+    options.consoleMethods!.forEach((method) => {
+      if (consoleMethods.indexOf(method) === -1) {
+        console.error(`OpenReplay: unsupported console method "${method}"`);
         return;
       }
-      sendConsoleLog(method, args);
-    };
-  });
+      const fn = (console as any)[method];
+      (console as any)[method] = function (...args: unknown[]): void {
+        fn.apply(this, args);
+        if (n++ > options.consoleThrottling) {
+          return;
+        }
+        sendConsoleLog(method, args);
+      };
+    });
+  patchConsole(window.console);
+
+  app.nodes.attachNodeCallback(node => {
+    if (node instanceof HTMLIFrameElement) {
+      let context = node.contentWindow
+      if (context) {
+        patchConsole((context as (Window & typeof globalThis)).console)
+      }
+      app.attachEventListener(node, "load", () => {
+        if (node.contentWindow !== context) {
+          context = node.contentWindow
+          patchConsole((context as (Window & typeof globalThis)).console)
+        }
+      })
+    }
+
+  })
 }
diff --git a/tracker/tracker/src/main/modules/longtasks.ts b/tracker/tracker/src/main/modules/longtasks.ts
index e74110b71..c7515c88f 100644
--- a/tracker/tracker/src/main/modules/longtasks.ts
+++ b/tracker/tracker/src/main/modules/longtasks.ts
@@ -47,5 +47,5 @@
   const observer: PerformanceObserver = new PerformanceObserver((list) =>
     list.getEntries().forEach(longTask),
   );
-  observer.observe({ entryTypes: ['longtask'], buffered: true });
+  observer.observe({ entryTypes: ['longtask'] });
 }
\ No newline at end of file
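The mouse.ts diff below drops the @medv/finder dependency in favor of a hand-rolled _getSelector that walks from the click target toward document.body, emitting tag.class1.class2 segments and stopping early at the first ancestor with an id. For a hypothetical <span class="btn primary"> inside <div id="app">, it would yield "#app > span.btn.primary". Note that className is not a plain string on SVG elements, so the walker implicitly assumes HTML targets.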
MouseMove, MouseClick } from '../../messages'; import { getInputLabel } from './input'; +function _getSelector(target: Element): string { + let el: Element | null = target + let selector: string | null = null + do { + if (el.id) { + return `#${el.id}` + (selector ? ` > ${selector}` : '') + } + selector = + el.className.split(' ') + .map(cn => cn.trim()) + .filter(cn => cn !== '') + .reduce((sel, cn) => `${sel}.${cn}`, el.tagName.toLowerCase()) + + (selector ? ` > ${selector}` : ''); + if (el === document.body) { + return selector + } + el = el.parentElement + } while (el !== document.body && el !== null) + return selector +} + +//TODO: fix (typescript doesn't allow work when the guard is inside the function) function getTarget(target: EventTarget | null): Element | null { if (target instanceof Element) { return _getTarget(target); @@ -72,26 +92,11 @@ function getTargetLabel(target: Element): string { return ''; } -interface HeatmapsOptions { - finder: FinderOptions, -} - -export interface Options { - heatmaps: boolean | HeatmapsOptions; -} - -export default function (app: App, opts: Partial): void { - const options: Options = Object.assign( - { - heatmaps: false // { - // finder: { - // threshold: 5, - // maxNumberOfTries: 600, - // }, - // }, - }, - opts, - ); +export default function (app: App): void { + // const options: Options = Object.assign( + // {}, + // opts, + // ); let mousePositionX = -1; let mousePositionY = -1; @@ -115,9 +120,7 @@ export default function (app: App, opts: Partial): void { const selectorMap: {[id:number]: string} = {}; function getSelector(id: number, target: Element): string { - if (options.heatmaps === false) { return '' } - return selectorMap[id] = selectorMap[id] || - finder(target, options.heatmaps === true ? undefined : options.heatmaps.finder); + return selectorMap[id] = selectorMap[id] || _getSelector(target); } app.attachEventListener( From 0055ed39c38a1afd9adfaeb665f49f0b5794e0e9 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 27 Oct 2021 16:27:20 +0200 Subject: [PATCH 083/218] dev(backend-http): log gzip ingest --- backend/services/http/handlers.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 02b8b0c13..6d65ed282 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -126,6 +126,8 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { var reader io.ReadCloser switch r.Header.Get("Content-Encoding") { case "gzip": + log.Println("Gzip", reader) + reader, err := gzip.NewReader(body) if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent responce @@ -135,6 +137,7 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { default: reader = body } + log.Println("Reader after switch:", reader) buf, err := ioutil.ReadAll(reader) if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging From b5eb6ebd59e1ad77fb08f902d0bc6d1b71a662fa Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 27 Oct 2021 17:28:07 +0200 Subject: [PATCH 084/218] dev(backend-http): log gzip init --- backend/services/http/handlers.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 6d65ed282..85ed32a3d 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -124,15 +124,17 @@ func pushMessages(w 
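With @medv/finder removed from the tracker's package.json earlier in this patch, mouse.ts now builds click selectors with the hand-rolled _getSelector shown above: walk up from the click target, short-circuit on the first element that has an id, and otherwise accumulate tag.class segments joined by ' > '. An illustrative call against hypothetical markup, treating _getSelector as if it were exported:

    // <body><div id="app"><ul class="list main"><li class="item">Hi</li></ul></div></body>
    const li = document.querySelector('li.item')
    if (li) {
      // walks li -> ul -> div#app and stops at the first id:
      console.log(_getSelector(li)) // '#app > ul.list.main > li.item'
    }

Unlike finder, this builder does not verify that the produced selector is unique in the document; it trades that precision for dropping the dependency, and the getSelector wrapper above memoizes the result per node id in selectorMap.
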
http.ResponseWriter, r *http.Request, sessionID uint64) { body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) //defer body.Close() var reader io.ReadCloser + var err Error switch r.Header.Get("Content-Encoding") { case "gzip": log.Println("Gzip", reader) - reader, err := gzip.NewReader(body) + reader, err = gzip.NewReader(body) if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent responce return } + log.Println("Gzip reader init", reader) defer reader.Close() default: reader = body From b9b68487a0b089fda3e8e4a1ee613020f3135d26 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 27 Oct 2021 17:45:55 +0200 Subject: [PATCH 085/218] dev(backend-http): log --- backend/services/http/handlers.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 85ed32a3d..2ac2852a2 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -124,7 +124,7 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) //defer body.Close() var reader io.ReadCloser - var err Error + var err error switch r.Header.Get("Content-Encoding") { case "gzip": log.Println("Gzip", reader) From bc249730f03fb266af2d3260d45ac80361e73ed7 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 27 Oct 2021 18:12:56 +0200 Subject: [PATCH 086/218] fix(frontend): mobx migrations display fix --- frontend/app/components/Session_/Storage/Storage.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index db6d9216b..27162b5da 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -26,7 +26,7 @@ import stl from './storage.css'; function getActionsName(type) { switch(type) { case STORAGE_TYPES.MOBX: - return "EVENTS"; + return "MUTATIONS"; case STORAGE_TYPES.VUEX: return "MUTATIONS"; default: @@ -141,7 +141,7 @@ export default class Storage extends React.PureComponent { break; case STORAGE_TYPES.MOBX: src = item.payload; - name = `@${item.type} ${src && src.name}`; + name = `@${item.type} ${src && src.type}`; break; } From 9734564da2a95967b3bd3f2c02b10c9ffd15f24b Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Oct 2021 14:31:52 +0200 Subject: [PATCH 087/218] fix(backend-http): check nil MultipartForm --- backend/services/http/handlers_ios.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 110cd2874..81ab036af 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -154,6 +154,10 @@ func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging } + if (r.MultipartForm == nil) { + responseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed")) + } + if len(r.MultipartForm.Value["projectKey"]) == 0 { responseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter? 
return From 883019433aca866997df6b3df7529f0c33d7e91f Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 28 Oct 2021 15:45:17 +0200 Subject: [PATCH 088/218] fix(backend-http):increased size for the ios image edp --- backend/services/http/handlers_ios.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 81ab036af..fd30eb5c2 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -146,7 +146,7 @@ func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) // defer r.Body.Close() - err = r.ParseMultipartForm(1e5) // 100Kb + err = r.ParseMultipartForm(1e6) // ~1Mb if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { responseWithError(w, http.StatusUnsupportedMediaType, err) // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB From e17246e0912f138494a6762b860ca73be95dcf52 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Oct 2021 18:42:34 +0200 Subject: [PATCH 089/218] feat(db): EE conditional DB creation --- .../db/init_dbs/postgresql/init_schema.sql | 1662 +++++++++-------- 1 file changed, 841 insertions(+), 821 deletions(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 7bbcf4e80..73cdffeae 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1,10 +1,7 @@ BEGIN; - --- --- public.sql --- - -CREATE EXTENSION IF NOT EXISTS pg_trgm; -CREATE EXTENSION IF NOT EXISTS pgcrypto; - +-- Schemas and functions definitions: +CREATE SCHEMA IF NOT EXISTS events_common; +CREATE SCHEMA IF NOT EXISTS events; -- --- accounts.sql --- @@ -26,795 +23,7 @@ begin end; $$ LANGUAGE plpgsql; - -CREATE TABLE tenants -( - tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - user_id text NOT NULL DEFAULT generate_api_key(20), - name text, - api_key text UNIQUE default generate_api_key(20) not null, - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - edition varchar(3) NOT NULL, - version_number text NOT NULL, - license text NULL, - opt_out bool NOT NULL DEFAULT FALSE, - t_projects integer NOT NULL DEFAULT 1, - t_sessions bigint NOT NULL DEFAULT 0, - t_users integer NOT NULL DEFAULT 1, - t_integrations integer NOT NULL DEFAULT 0 -); - -CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); -CREATE TYPE user_origin AS ENUM ('saml'); -CREATE TABLE users -( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ - "role": "dev", - "dashboard": { - "cpu": true, - "fps": false, - "avgCpu": true, - "avgFps": true, - "errors": true, - "crashes": true, - "overview": true, - "sessions": true, - "topMetrics": true, - "callsErrors": true, - "pageMetrics": true, - "performance": true, - "timeToRender": false, - "userActivity": false, - "avgFirstPaint": false, - "countSessions": true, - "errorsPerType": true, - "slowestImages": true, - 
"speedLocation": true, - "slowestDomains": true, - "avgPageLoadTime": true, - "avgTillFirstBit": false, - "avgTimeToRender": true, - "avgVisitedPages": false, - "avgImageLoadTime": true, - "busiestTimeOfDay": true, - "errorsPerDomains": true, - "missingResources": true, - "resourcesByParty": true, - "sessionsFeedback": false, - "slowestResources": true, - "avgUsedJsHeapSize": true, - "domainsErrors_4xx": true, - "domainsErrors_5xx": true, - "memoryConsumption": true, - "pagesDomBuildtime": false, - "pagesResponseTime": true, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "sessionsPerBrowser": false, - "applicationActivity": true, - "sessionsFrustration": false, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgTimeToInteractive": true, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": false, - "resourceTypeVsResponseEnd": true, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "resourcesVsVisuallyComplete": true, - "pagesResponseTimeDistribution": true - }, - "sessionsLive": false, - "sessionsDevtools": true - }'::jsonb, - api_key text UNIQUE default generate_api_key(20) not null, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT '{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE, - origin user_origin NULL DEFAULT NULL -); - - -CREATE TABLE basic_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - password text DEFAULT NULL, - generated_password boolean NOT NULL DEFAULT false, - invitation_token text NULL DEFAULT NULL, - invited_at timestamp without time zone NULL DEFAULT NULL, - change_pwd_token text NULL DEFAULT NULL, - change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, - changed_at timestamp, - UNIQUE (user_id) -); - - -CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); -CREATE TABLE oauth_authentication -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - provider oauth_provider NOT NULL, - provider_user_id text NOT NULL, - token text NOT NULL, - UNIQUE (user_id, provider) -); - - --- --- projects.sql --- - -CREATE TABLE projects -( - project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - name text NOT NULL, - active boolean NOT NULL, - sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - max_session_duration integer NOT NULL DEFAULT 7200000, - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL, - gdpr jsonb NOT NULL DEFAULT '{ - "maskEmails": true, - "sampleRate": 33, - "maskNumbers": false, - "defaultInputMode": "plain" - }'::jsonb -- ?????? 
-); - -CREATE INDEX ON public.projects (project_key); - -CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS -$$ -BEGIN - PERFORM pg_notify('project', row_to_json(NEW)::text); - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update - AFTER INSERT OR UPDATE - ON projects - FOR EACH ROW -EXECUTE PROCEDURE notify_project(); - --- --- alerts.sql --- - -CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); - -CREATE TABLE alerts -( - alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - name text NOT NULL, - description text NULL DEFAULT NULL, - active boolean NOT NULL DEFAULT TRUE, - detection_method alert_detection_method NOT NULL, - query jsonb NOT NULL, - deleted_at timestamp NULL DEFAULT NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{ - "renotifyInterval": 1440 - }'::jsonb -); - - -CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS -$$ -DECLARE - clone jsonb; - tenant_id integer; -BEGIN - clone = to_jsonb(NEW); - clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); - IF NEW.deleted_at NOTNULL THEN - clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); - END IF; - SELECT projects.tenant_id INTO tenant_id FROM public.projects WHERE projects.project_id = NEW.project_id LIMIT 1; - clone = jsonb_set(clone, '{tenant_id}', to_jsonb(tenant_id)); - PERFORM pg_notify('alert', clone::text); - RETURN NEW; -END ; -$$ LANGUAGE plpgsql; - - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON alerts - FOR EACH ROW -EXECUTE PROCEDURE notify_alert(); - - --- --- webhooks.sql --- - -create type webhook_type as enum ('webhook', 'slack', 'email'); - -create table webhooks -( - webhook_id integer generated by default as identity - constraint webhooks_pkey - primary key, - tenant_id integer not null - constraint webhooks_tenant_id_fkey - references tenants - on delete cascade, - endpoint text not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - auth_header text, - type webhook_type not null, - index integer default 0 not null, - name varchar(100) -); - --- --- notifications.sql --- - - -CREATE TABLE notifications -( - notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer REFERENCES tenants (tenant_id) ON DELETE CASCADE, - user_id integer REFERENCES users (user_id) ON DELETE CASCADE, - title text NOT NULL, - description text NOT NULL, - button_text varchar(80) NULL, - button_url text NULL, - image_url text NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{}'::jsonb, - CONSTRAINT notification_tenant_xor_user CHECK ( tenant_id NOTNULL AND user_id ISNULL OR - tenant_id ISNULL AND user_id NOTNULL ) -); -CREATE INDEX notifications_user_id_index ON public.notifications (user_id); -CREATE INDEX notifications_tenant_id_index ON public.notifications (tenant_id); -CREATE INDEX notifications_created_at_index ON public.notifications (created_at DESC); -CREATE INDEX notifications_created_at_epoch_idx ON public.notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); - -CREATE TABLE user_viewed_notifications -( - user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, - notification_id 
integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, - constraint user_viewed_notifications_pkey primary key (user_id, notification_id) -); - --- --- funnels.sql --- - -CREATE TABLE funnels -( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text not null, - filter jsonb not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False -); - -CREATE INDEX ON public.funnels (user_id, is_public); - --- --- announcements.sql --- - -create type announcement_type as enum ('notification', 'alert'); - -create table announcements -( - announcement_id serial not null - constraint announcements_pk - primary key, - title text not null, - description text not null, - button_text varchar(30), - button_url text, - image_url text, - created_at timestamp default timezone('utc'::text, now()) not null, - type announcement_type default 'notification'::announcement_type not null -); - --- --- integrations.sql --- - -CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); -CREATE TABLE integrations -( - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - provider integration_provider NOT NULL, - options jsonb NOT NULL, - request_data jsonb NOT NULL DEFAULT '{}'::jsonb, - PRIMARY KEY (project_id, provider) -); - -CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS -$$ -BEGIN - IF NEW IS NULL THEN - PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); - ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN - PERFORM pg_notify('integration', row_to_json(NEW)::text); - END IF; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON integrations - FOR EACH ROW -EXECUTE PROCEDURE notify_integration(); - - -create table jira_cloud -( - user_id integer not null - constraint jira_cloud_pk - primary key - constraint jira_cloud_users_fkey - references users - on delete cascade, - username text not null, - token text not null, - url text -); - - --- --- issues.sql --- - -CREATE TYPE issue_type AS ENUM ( - 'click_rage', - 'dead_click', - 'excessive_scrolling', - 'bad_request', - 'missing_resource', - 'memory', - 'cpu', - 'slow_resource', - 'slow_page_load', - 'crash', - 'ml_cpu', - 'ml_memory', - 'ml_dead_click', - 'ml_click_rage', - 'ml_mouse_thrashing', - 'ml_excessive_scrolling', - 'ml_slow_resources', - 'custom', - 'js_exception' - ); - -CREATE TABLE issues -( - issue_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - type issue_type NOT NULL, - context_string text NOT NULL, - context jsonb DEFAULT NULL -); -CREATE INDEX ON issues (issue_id, type); -CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); - --- --- errors.sql --- - -CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); -CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); -CREATE TABLE errors -( - error_id text NOT NULL PRIMARY KEY, - project_id integer NOT 
NULL REFERENCES projects (project_id) ON DELETE CASCADE, - source error_source NOT NULL, - name text DEFAULT NULL, - message text NOT NULL, - payload jsonb NOT NULL, - status error_status NOT NULL DEFAULT 'unresolved', - parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, - stacktrace jsonb, --to save the stacktrace and not query S3 another time - stacktrace_parsed_at timestamp -); -CREATE INDEX errors_error_id_idx ON errors (error_id); -CREATE INDEX ON errors (project_id, source); -CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); -CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); -CREATE INDEX errors_project_id_idx ON public.errors (project_id); -CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); - -CREATE TABLE user_favorite_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); - -CREATE TABLE user_viewed_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); -CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); -CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - - --- --- sessions.sql --- -CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); -CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); -CREATE TYPE platform AS ENUM ('web','ios','android'); - -CREATE TABLE sessions -( - session_id bigint PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - tracker_version text NOT NULL, - start_ts bigint NOT NULL, - duration integer NULL, - rev_id text DEFAULT NULL, - platform platform NOT NULL DEFAULT 'web', - is_snippet boolean NOT NULL DEFAULT FALSE, - user_id text DEFAULT NULL, - user_anonymous_id text 
DEFAULT NULL, - user_uuid uuid NOT NULL, - user_agent text DEFAULT NULL, - user_os text NOT NULL, - user_os_version text DEFAULT NULL, - user_browser text DEFAULT NULL, - user_browser_version text DEFAULT NULL, - user_device text NOT NULL, - user_device_type device_type NOT NULL, - user_device_memory_size integer DEFAULT NULL, - user_device_heap_size bigint DEFAULT NULL, - user_country country NOT NULL, - pages_count integer NOT NULL DEFAULT 0, - events_count integer NOT NULL DEFAULT 0, - errors_count integer NOT NULL DEFAULT 0, - watchdogs_score bigint NOT NULL DEFAULT 0, - issue_score bigint NOT NULL DEFAULT 0, - issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL --- , --- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL -); -CREATE INDEX ON sessions (project_id, start_ts); -CREATE INDEX ON sessions (project_id, user_id); -CREATE INDEX ON sessions (project_id, user_anonymous_id); -CREATE INDEX ON sessions (project_id, user_device); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX ON sessions (project_id, metadata_1); -CREATE INDEX ON sessions (project_id, metadata_2); -CREATE INDEX ON sessions (project_id, metadata_3); -CREATE INDEX ON sessions (project_id, metadata_4); -CREATE INDEX ON sessions (project_id, metadata_5); -CREATE INDEX ON sessions (project_id, metadata_6); -CREATE INDEX ON sessions (project_id, metadata_7); -CREATE INDEX ON sessions (project_id, metadata_8); -CREATE INDEX ON sessions (project_id, metadata_9); -CREATE INDEX ON sessions (project_id, metadata_10); --- CREATE INDEX ON sessions (rehydration_id); -CREATE INDEX ON sessions (project_id, watchdogs_score DESC); -CREATE INDEX platform_idx ON public.sessions (platform); - -CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); -CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); -CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); -CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); -CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); -CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); -CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); -CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); -CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); -CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); -CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); -CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); -CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); -CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); -CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN 
(user_anonymous_id gin_trgm_ops); -CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_country); -CREATE INDEX ON sessions (project_id, user_browser); -CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; - - -ALTER TABLE public.sessions - ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_browser ISNULL)); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); - -ALTER TABLE public.sessions - ADD CONSTRAINT web_user_agent_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR - (sessions.platform != 'web' AND sessions.user_agent ISNULL)); - - - -CREATE TABLE user_viewed_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - -CREATE TABLE user_favorite_sessions -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, session_id) -); - - --- --- assignments.sql --- - -create table assigned_sessions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - issue_id text NOT NULL, - provider oauth_provider NOT NULL, - created_by integer NOT NULL, - created_at timestamp default timezone('utc'::text, now()) NOT NULL, - provider_data jsonb default '{}'::jsonb NOT NULL -); -CREATE INDEX ON assigned_sessions (session_id); - --- --- events_common.sql --- - -CREATE SCHEMA events_common; - -CREATE TYPE events_common.custom_level AS ENUM ('info','error'); - -CREATE TABLE events_common.customs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - name text NOT NULL, - payload jsonb NOT NULL, - level events_common.custom_level NOT NULL DEFAULT 'info', - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.customs (name); -CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); -CREATE INDEX ON events_common.customs (timestamp); - - -CREATE TABLE events_common.issues -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, - payload jsonb DEFAULT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); - - -CREATE TABLE events_common.requests -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - url text NOT NULL, - duration integer NOT NULL, - success boolean NOT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.requests (url); -CREATE INDEX ON events_common.requests (duration); -CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); -CREATE INDEX ON events_common.requests (timestamp); -CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - 
WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); - -- --- events.sql --- -CREATE SCHEMA events; - -CREATE TABLE events.pages -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - host text NOT NULL, - path text NOT NULL, - base_path text NOT NULL, - referrer text DEFAULT NULL, - base_referrer text DEFAULT NULL, - dom_building_time integer DEFAULT NULL, - dom_content_loaded_time integer DEFAULT NULL, - load_time integer DEFAULT NULL, - first_paint_time integer DEFAULT NULL, - first_contentful_paint_time integer DEFAULT NULL, - speed_index integer DEFAULT NULL, - visually_complete integer DEFAULT NULL, - time_to_interactive integer DEFAULT NULL, - response_time bigint DEFAULT NULL, - response_end bigint DEFAULT NULL, - ttfb integer DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.pages (session_id); -CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); -CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); -CREATE INDEX ON events.pages (timestamp); -CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); -CREATE INDEX pages_base_path_idx ON events.pages (base_path); -CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); -CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer); -CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - (CASE - WHEN base_referrer LIKE 'http://%' - THEN 7 - WHEN base_referrer LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); -CREATE INDEX ON events.pages (response_time); -CREATE INDEX ON events.pages (response_end); -CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); -CREATE INDEX pages_path_idx ON events.pages (path); -CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; -CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; -CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; -CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); -CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); - -CREATE TABLE events.clicks -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, - selector text DEFAULT '' NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.clicks (session_id); -CREATE INDEX ON events.clicks (label); -CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops); -CREATE INDEX ON events.clicks (timestamp); -CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp); -CREATE INDEX clicks_url_idx ON events.clicks (url); -CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops); -CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector); -CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp); - - -CREATE TABLE events.inputs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON 
DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.inputs (session_id); -CREATE INDEX ON events.inputs (label, value); -CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops); -CREATE INDEX inputs_label_idx ON events.inputs (label); -CREATE INDEX ON events.inputs (timestamp); -CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp); - -CREATE TABLE events.errors -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.errors (session_id); -CREATE INDEX ON events.errors (timestamp); -CREATE INDEX errors_error_id_idx ON events.errors (error_id); - - -CREATE TABLE events.graphql -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.graphql (name); -CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.graphql (timestamp); - -CREATE TABLE events.state_actions -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - name text NOT NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.state_actions (name); -CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); -CREATE INDEX ON events.state_actions (timestamp); - -CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); -CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); -CREATE TABLE events.resources -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - duration bigint NULL, - type events.resource_type NOT NULL, - url text NOT NULL, - url_host text NOT NULL, - url_hostpath text NOT NULL, - success boolean NOT NULL, - status smallint NULL, - method events.resource_method NULL, - ttfb bigint NULL, - header_size bigint NULL, - encoded_body_size integer NULL, - decoded_body_size integer NULL, - PRIMARY KEY (session_id, message_id) -); -CREATE INDEX ON events.resources (session_id); -CREATE INDEX ON events.resources (timestamp); -CREATE INDEX ON events.resources (success); -CREATE INDEX ON events.resources (status); -CREATE INDEX ON events.resources (type); -CREATE INDEX ON events.resources (duration) WHERE duration > 0; -CREATE INDEX ON events.resources (url_host); - -CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); -CREATE INDEX resources_url_idx ON events.resources (url); -CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); -CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); - - - -CREATE TABLE events.performance -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - message_id bigint NOT NULL, - min_fps smallint NOT NULL, - avg_fps smallint NOT NULL, - max_fps 
smallint NOT NULL, - min_cpu smallint NOT NULL, - avg_cpu smallint NOT NULL, - max_cpu smallint NOT NULL, - min_total_js_heap_size bigint NOT NULL, - avg_total_js_heap_size bigint NOT NULL, - max_total_js_heap_size bigint NOT NULL, - min_used_js_heap_size bigint NOT NULL, - avg_used_js_heap_size bigint NOT NULL, - max_used_js_heap_size bigint NOT NULL, - PRIMARY KEY (session_id, message_id) -); - CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS $$ @@ -839,38 +48,849 @@ BEGIN END; $$ LANGUAGE plpgsql IMMUTABLE; +-- --- integrations.sql --- + +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +-- --- alerts.sql --- + +CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS +$$ +DECLARE + clone jsonb; +BEGIN + clone = to_jsonb(NEW); + clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); + IF NEW.deleted_at NOTNULL THEN + clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); + END IF; + PERFORM pg_notify('alert', clone::text); + RETURN NEW; +END ; +$$ LANGUAGE plpgsql; + +-- --- projects.sql --- + +CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS +$$ +BEGIN + PERFORM pg_notify('project', row_to_json(NEW)::text); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- All tables and types: + +DO +$$ + BEGIN + IF EXISTS(SELECT + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = 'tenants') THEN + raise notice 'DB exists, skipping creation query'; + ELSE + raise notice 'Creating DB'; + +-- --- public.sql --- + + CREATE EXTENSION IF NOT EXISTS pg_trgm; + CREATE EXTENSION IF NOT EXISTS pgcrypto; + + +-- --- accounts.sql --- + + CREATE TABLE tenants + ( + tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + user_id text NOT NULL DEFAULT generate_api_key(20), + name text, + api_key text UNIQUE default generate_api_key(20) not null, + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + edition varchar(3) NOT NULL, + version_number text NOT NULL, + license text NULL, + opt_out bool NOT NULL DEFAULT FALSE, + t_projects integer NOT NULL DEFAULT 1, + t_sessions bigint NOT NULL DEFAULT 0, + t_users integer NOT NULL DEFAULT 1, + t_integrations integer NOT NULL DEFAULT 0 + ); + + CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); + CREATE TYPE user_origin AS ENUM ('saml'); + CREATE TABLE users + ( + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + appearance jsonb NOT NULL default '{ + "role": "dev", + "dashboard": { + "cpu": true, + "fps": false, + "avgCpu": true, + "avgFps": true, + "errors": true, + "crashes": true, + "overview": true, + "sessions": true, + "topMetrics": true, + "callsErrors": true, + "pageMetrics": true, + "performance": 
true, + "timeToRender": false, + "userActivity": false, + "avgFirstPaint": false, + "countSessions": true, + "errorsPerType": true, + "slowestImages": true, + "speedLocation": true, + "slowestDomains": true, + "avgPageLoadTime": true, + "avgTillFirstBit": false, + "avgTimeToRender": true, + "avgVisitedPages": false, + "avgImageLoadTime": true, + "busiestTimeOfDay": true, + "errorsPerDomains": true, + "missingResources": true, + "resourcesByParty": true, + "sessionsFeedback": false, + "slowestResources": true, + "avgUsedJsHeapSize": true, + "domainsErrors_4xx": true, + "domainsErrors_5xx": true, + "memoryConsumption": true, + "pagesDomBuildtime": false, + "pagesResponseTime": true, + "avgRequestLoadTime": true, + "avgSessionDuration": false, + "sessionsPerBrowser": false, + "applicationActivity": true, + "sessionsFrustration": false, + "avgPagesDomBuildtime": true, + "avgPagesResponseTime": false, + "avgTimeToInteractive": true, + "resourcesCountByType": true, + "resourcesLoadingTime": true, + "avgDomContentLoadStart": true, + "avgFirstContentfulPixel": false, + "resourceTypeVsResponseEnd": true, + "impactedSessionsByJsErrors": true, + "impactedSessionsBySlowPages": true, + "resourcesVsVisuallyComplete": true, + "pagesResponseTimeDistribution": true + }, + "sessionsLive": false, + "sessionsDevtools": true + }'::jsonb, + api_key text UNIQUE default generate_api_key(20) not null, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT '{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE, + origin user_origin NULL DEFAULT NULL + ); + + + CREATE TABLE basic_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + password text DEFAULT NULL, + generated_password boolean NOT NULL DEFAULT false, + invitation_token text NULL DEFAULT NULL, + invited_at timestamp without time zone NULL DEFAULT NULL, + change_pwd_token text NULL DEFAULT NULL, + change_pwd_expire_at timestamp without time zone NULL DEFAULT NULL, + changed_at timestamp, + UNIQUE (user_id) + ); + + + CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); + CREATE TABLE oauth_authentication + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + provider oauth_provider NOT NULL, + provider_user_id text NOT NULL, + token text NOT NULL, + UNIQUE (user_id, provider) + ); + + +-- --- projects.sql --- + + CREATE TABLE projects + ( + project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + active boolean NOT NULL, + sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + max_session_duration integer NOT NULL DEFAULT 7200000, + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL, + gdpr jsonb NOT NULL DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "plain" + }'::jsonb -- ?????? 
+ ); + + CREATE INDEX ON public.projects (project_key); + +-- --- alerts.sql --- + + CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); + + CREATE TABLE alerts + ( + alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + name text NOT NULL, + description text NULL DEFAULT NULL, + active boolean NOT NULL DEFAULT TRUE, + detection_method alert_detection_method NOT NULL, + query jsonb NOT NULL, + deleted_at timestamp NULL DEFAULT NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{ + "renotifyInterval": 1440 + }'::jsonb + ); + + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON alerts + FOR EACH ROW + EXECUTE PROCEDURE notify_alert(); + + +-- --- webhooks.sql --- + + create type webhook_type as enum ('webhook', 'slack', 'email'); + + create table webhooks + ( + webhook_id integer generated by default as identity + constraint webhooks_pkey + primary key, + tenant_id integer not null + constraint webhooks_tenant_id_fkey + references tenants + on delete cascade, + endpoint text not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + auth_header text, + type webhook_type not null, + index integer default 0 not null, + name varchar(100) + ); + +-- --- notifications.sql --- + + + CREATE TABLE notifications + ( + notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer REFERENCES tenants (tenant_id) ON DELETE CASCADE, + user_id integer REFERENCES users (user_id) ON DELETE CASCADE, + title text NOT NULL, + description text NOT NULL, + button_text varchar(80) NULL, + button_url text NULL, + image_url text NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{}'::jsonb, + CONSTRAINT notification_tenant_xor_user CHECK ( tenant_id NOTNULL AND user_id ISNULL OR + tenant_id ISNULL AND user_id NOTNULL ) + ); + CREATE INDEX notifications_user_id_index ON public.notifications (user_id); + CREATE INDEX notifications_tenant_id_index ON public.notifications (tenant_id); + CREATE INDEX notifications_created_at_index ON public.notifications (created_at DESC); + CREATE INDEX notifications_created_at_epoch_idx ON public.notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC); + + CREATE TABLE user_viewed_notifications + ( + user_id integer NOT NULL REFERENCES users (user_id) on delete cascade, + notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade, + constraint user_viewed_notifications_pkey primary key (user_id, notification_id) + ); + +-- --- funnels.sql --- + + CREATE TABLE funnels + ( + funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + + CREATE INDEX ON public.funnels (user_id, is_public); + +-- --- announcements.sql --- + + create type announcement_type as enum ('notification', 'alert'); + + create table announcements + ( + announcement_id serial not null + constraint announcements_pk + primary key, + title text not null, + description text not null, + 
button_text varchar(30), + button_url text, + image_url text, + created_at timestamp default timezone('utc'::text, now()) not null, + type announcement_type default 'notification'::announcement_type not null + ); + +-- --- integrations.sql --- + + CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); + CREATE TABLE integrations + ( + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + provider integration_provider NOT NULL, + options jsonb NOT NULL, + request_data jsonb NOT NULL DEFAULT '{}'::jsonb, + PRIMARY KEY (project_id, provider) + ); + + CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON integrations + FOR EACH ROW + EXECUTE PROCEDURE notify_integration(); + + + create table jira_cloud + ( + user_id integer not null + constraint jira_cloud_pk + primary key + constraint jira_cloud_users_fkey + references users + on delete cascade, + username text not null, + token text not null, + url text + ); + + +-- --- issues.sql --- + + CREATE TYPE issue_type AS ENUM ( + 'click_rage', + 'dead_click', + 'excessive_scrolling', + 'bad_request', + 'missing_resource', + 'memory', + 'cpu', + 'slow_resource', + 'slow_page_load', + 'crash', + 'ml_cpu', + 'ml_memory', + 'ml_dead_click', + 'ml_click_rage', + 'ml_mouse_thrashing', + 'ml_excessive_scrolling', + 'ml_slow_resources', + 'custom', + 'js_exception' + ); + + CREATE TABLE issues + ( + issue_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + type issue_type NOT NULL, + context_string text NOT NULL, + context jsonb DEFAULT NULL + ); + CREATE INDEX ON issues (issue_id, type); + CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); + +-- --- errors.sql --- + + CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); + CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); + CREATE TABLE errors + ( + error_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + source error_source NOT NULL, + name text DEFAULT NULL, + message text NOT NULL, + payload jsonb NOT NULL, + status error_status NOT NULL DEFAULT 'unresolved', + parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, + stacktrace jsonb, --to save the stacktrace and not query S3 another time + stacktrace_parsed_at timestamp + ); + CREATE INDEX errors_error_id_idx ON errors (error_id); + CREATE INDEX ON errors (project_id, source); + CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); + CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); + CREATE INDEX errors_project_id_idx ON public.errors (project_id); + CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); + + CREATE TABLE user_favorite_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + + CREATE TABLE user_viewed_errors + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) + ); + CREATE INDEX 
user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); + CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + + +-- --- sessions.sql --- + CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); + CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); + CREATE TYPE platform AS ENUM ('web','ios','android'); + + CREATE TABLE sessions + ( + session_id bigint PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + tracker_version text NOT NULL, + start_ts bigint NOT NULL, + duration integer NULL, + rev_id text DEFAULT NULL, + platform platform NOT NULL DEFAULT 'web', + is_snippet boolean NOT NULL DEFAULT FALSE, + user_id text DEFAULT NULL, + user_anonymous_id text DEFAULT NULL, + user_uuid uuid NOT NULL, + user_agent text DEFAULT NULL, + user_os text NOT NULL, + user_os_version text DEFAULT NULL, + user_browser text DEFAULT NULL, + user_browser_version text DEFAULT NULL, + user_device text NOT NULL, + user_device_type device_type NOT NULL, + user_device_memory_size integer DEFAULT NULL, + user_device_heap_size bigint DEFAULT NULL, + user_country country NOT NULL, + pages_count integer NOT NULL DEFAULT 0, + events_count integer NOT NULL DEFAULT 0, + errors_count integer NOT NULL DEFAULT 0, + watchdogs_score bigint NOT NULL DEFAULT 0, + issue_score bigint NOT NULL DEFAULT 0, + issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[], + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL +-- , +-- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL + ); + CREATE INDEX ON sessions (project_id, start_ts); + CREATE INDEX ON sessions (project_id, user_id); + CREATE INDEX ON sessions (project_id, user_anonymous_id); + CREATE INDEX ON sessions 
(project_id, user_device); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX ON sessions (project_id, metadata_1); + CREATE INDEX ON sessions (project_id, metadata_2); + CREATE INDEX ON sessions (project_id, metadata_3); + CREATE INDEX ON sessions (project_id, metadata_4); + CREATE INDEX ON sessions (project_id, metadata_5); + CREATE INDEX ON sessions (project_id, metadata_6); + CREATE INDEX ON sessions (project_id, metadata_7); + CREATE INDEX ON sessions (project_id, metadata_8); + CREATE INDEX ON sessions (project_id, metadata_9); + CREATE INDEX ON sessions (project_id, metadata_10); +-- CREATE INDEX ON sessions (rehydration_id); + CREATE INDEX ON sessions (project_id, watchdogs_score DESC); + CREATE INDEX platform_idx ON public.sessions (platform); + + CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops); + CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops); + CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops); + CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops); + CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops); + CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops); + CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops); + CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops); + CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops); + CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops); + CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops); + CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops); + CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops); + CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops); + CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops); + CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_country); + CREATE INDEX ON sessions (project_id, user_browser); + CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL; + + + ALTER TABLE public.sessions + ADD CONSTRAINT web_browser_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_browser ISNULL)); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL); + + ALTER TABLE public.sessions + ADD CONSTRAINT web_user_agent_constraint CHECK ( + (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR + (sessions.platform != 'web' AND sessions.user_agent ISNULL)); + + + CREATE TABLE user_viewed_sessions + ( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, session_id) + ); + + CREATE TABLE user_favorite_sessions + ( + user_id 
+
+ CREATE TABLE user_viewed_sessions
+ (
+     user_id    integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
+     session_id bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     PRIMARY KEY (user_id, session_id)
+ );
+
+ CREATE TABLE user_favorite_sessions
+ (
+     user_id    integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
+     session_id bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     PRIMARY KEY (user_id, session_id)
+ );
+
+
+-- --- assignments.sql ---
+
+ create table assigned_sessions
+ (
+     session_id    bigint         NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     issue_id      text           NOT NULL,
+     provider      oauth_provider NOT NULL,
+     created_by    integer        NOT NULL,
+     created_at    timestamp default timezone('utc'::text, now()) NOT NULL,
+     provider_data jsonb     default '{}'::jsonb NOT NULL
+ );
+ CREATE INDEX ON assigned_sessions (session_id);
+
+-- --- events_common.sql ---
+
+ CREATE SCHEMA IF NOT EXISTS events_common;
+
+ CREATE TYPE events_common.custom_level AS ENUM ('info','error');
+
+ CREATE TABLE events_common.customs
+ (
+     session_id bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     timestamp  bigint  NOT NULL,
+     seq_index  integer NOT NULL,
+     name       text    NOT NULL,
+     payload    jsonb   NOT NULL,
+     level      events_common.custom_level NOT NULL DEFAULT 'info',
+     PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+ CREATE INDEX ON events_common.customs (name);
+ CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops);
+ CREATE INDEX ON events_common.customs (timestamp);
+
+
+ CREATE TABLE events_common.issues
+ (
+     session_id bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     timestamp  bigint  NOT NULL,
+     seq_index  integer NOT NULL,
+     issue_id   text    NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE,
+     payload    jsonb   DEFAULT NULL,
+     PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+
+
+ CREATE TABLE events_common.requests
+ (
+     session_id bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     timestamp  bigint  NOT NULL,
+     seq_index  integer NOT NULL,
+     url        text    NOT NULL,
+     duration   integer NOT NULL,
+     success    boolean NOT NULL,
+     PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+ CREATE INDEX ON events_common.requests (url);
+ CREATE INDEX ON events_common.requests (duration);
+ CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops);
+ CREATE INDEX ON events_common.requests (timestamp);
+ CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE
+     WHEN url LIKE 'http://%' THEN 7
+     WHEN url LIKE 'https://%' THEN 8
+     ELSE 0 END)) gin_trgm_ops);
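requests_url_gin_idx2 indexes the URL with its scheme stripped, so a search typed without the http(s):// prefix can still be index-assisted. An expression index is only used when the query repeats the same expression; a sketch with a sample pattern:

    SELECT session_id, url, duration
    FROM events_common.requests
    WHERE RIGHT(url, length(url) - (CASE
              WHEN url LIKE 'http://%' THEN 7
              WHEN url LIKE 'https://%' THEN 8
              ELSE 0 END)) ILIKE '%api.example.com/v1%';  -- sample pattern, matches the index expression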
+
+-- --- events.sql ---
+ CREATE SCHEMA IF NOT EXISTS events;
+
+ CREATE TABLE events.pages
+ (
+     session_id                  bigint  NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id                  bigint  NOT NULL,
+     timestamp                   bigint  NOT NULL,
+     host                        text    NOT NULL,
+     path                        text    NOT NULL,
+     base_path                   text    NOT NULL,
+     referrer                    text    DEFAULT NULL,
+     base_referrer               text    DEFAULT NULL,
+     dom_building_time           integer DEFAULT NULL,
+     dom_content_loaded_time     integer DEFAULT NULL,
+     load_time                   integer DEFAULT NULL,
+     first_paint_time            integer DEFAULT NULL,
+     first_contentful_paint_time integer DEFAULT NULL,
+     speed_index                 integer DEFAULT NULL,
+     visually_complete           integer DEFAULT NULL,
+     time_to_interactive         integer DEFAULT NULL,
+     response_time               bigint  DEFAULT NULL,
+     response_end                bigint  DEFAULT NULL,
+     ttfb                        integer DEFAULT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.pages (session_id);
+ CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops);
+ CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops);
+ CREATE INDEX ON events.pages (timestamp);
+ CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops);
+ CREATE INDEX pages_base_path_idx ON events.pages (base_path);
+ CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1));
+ CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer);
+ CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - (CASE
+     WHEN base_referrer LIKE 'http://%' THEN 7
+     WHEN base_referrer LIKE 'https://%' THEN 8
+     ELSE 0 END)) gin_trgm_ops);
+ CREATE INDEX ON events.pages (response_time);
+ CREATE INDEX ON events.pages (response_end);
+ CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
+ CREATE INDEX pages_path_idx ON events.pages (path);
+ CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0;
+ CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0;
+ CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0;
+ CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
+ CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
+
+ CREATE TABLE events.clicks
+ (
+     session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id bigint NOT NULL,
+     timestamp  bigint NOT NULL,
+     label      text   DEFAULT NULL,
+     url        text   DEFAULT '' NOT NULL,
+     selector   text   DEFAULT '' NOT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.clicks (session_id);
+ CREATE INDEX ON events.clicks (label);
+ CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops);
+ CREATE INDEX ON events.clicks (timestamp);
+ CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp);
+ CREATE INDEX clicks_url_idx ON events.clicks (url);
+ CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops);
+ CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector);
+ CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
+
+
+ CREATE TABLE events.inputs
+ (
+     session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id bigint NOT NULL,
+     timestamp  bigint NOT NULL,
+     label      text   DEFAULT NULL,
+     value      text   DEFAULT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.inputs (session_id);
+ CREATE INDEX ON events.inputs (label, value);
+ CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops);
+ CREATE INDEX inputs_label_idx ON events.inputs (label);
+ CREATE INDEX ON events.inputs (timestamp);
+ CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp);
+
+ CREATE TABLE events.errors
+ (
+     session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id bigint NOT NULL,
+     timestamp  bigint NOT NULL,
+     error_id   text   NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.errors (session_id);
+ CREATE INDEX ON events.errors (timestamp);
+ CREATE INDEX errors_error_id_idx ON events.errors (error_id);
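pages_load_time_idx and its siblings above are partial indexes: rows where the timing is NULL or zero are excluded, which keeps the index small. The planner uses them when the query's predicate implies the index predicate; a sketch with an arbitrary sample threshold:

    SELECT session_id, path, load_time
    FROM events.pages
    WHERE load_time > 5000   -- sample threshold (ms); implies load_time > 0, so the partial index qualifies
    ORDER BY load_time DESC
    LIMIT 20;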
+
+
+ CREATE TABLE events.graphql
+ (
+     session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id bigint NOT NULL,
+     timestamp  bigint NOT NULL,
+     name       text   NOT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.graphql (name);
+ CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops);
+ CREATE INDEX ON events.graphql (timestamp);
+
+ CREATE TABLE events.state_actions
+ (
+     session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id bigint NOT NULL,
+     timestamp  bigint NOT NULL,
+     name       text   NOT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.state_actions (name);
+ CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
+ CREATE INDEX ON events.state_actions (timestamp);
+
+ CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
+ CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
+ CREATE TABLE events.resources
+ (
+     session_id        bigint   NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     message_id        bigint   NOT NULL,
+     timestamp         bigint   NOT NULL,
+     duration          bigint   NULL,
+     type              events.resource_type NOT NULL,
+     url               text     NOT NULL,
+     url_host          text     NOT NULL,
+     url_hostpath      text     NOT NULL,
+     success           boolean  NOT NULL,
+     status            smallint NULL,
+     method            events.resource_method NULL,
+     ttfb              bigint   NULL,
+     header_size       bigint   NULL,
+     encoded_body_size integer  NULL,
+     decoded_body_size integer  NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
+ CREATE INDEX ON events.resources (session_id);
+ CREATE INDEX ON events.resources (timestamp);
+ CREATE INDEX ON events.resources (success);
+ CREATE INDEX ON events.resources (status);
+ CREATE INDEX ON events.resources (type);
+ CREATE INDEX ON events.resources (duration) WHERE duration > 0;
+ CREATE INDEX ON events.resources (url_host);
+
+ CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
+ CREATE INDEX resources_url_idx ON events.resources (url);
+ CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
+ CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath);
+
+
+ CREATE TABLE events.performance
+ (
+     session_id             bigint   NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+     timestamp              bigint   NOT NULL,
+     message_id             bigint   NOT NULL,
+     min_fps                smallint NOT NULL,
+     avg_fps                smallint NOT NULL,
+     max_fps                smallint NOT NULL,
+     min_cpu                smallint NOT NULL,
+     avg_cpu                smallint NOT NULL,
+     max_cpu                smallint NOT NULL,
+     min_total_js_heap_size bigint   NOT NULL,
+     avg_total_js_heap_size bigint   NOT NULL,
+     max_total_js_heap_size bigint   NOT NULL,
+     min_used_js_heap_size  bigint   NOT NULL,
+     avg_used_js_heap_size  bigint   NOT NULL,
+     max_used_js_heap_size  bigint   NOT NULL,
+     PRIMARY KEY (session_id, message_id)
+ );
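The partial index on events.resources (duration) WHERE duration > 0 supports "slowest assets" queries directly. A reduced sketch (threshold and limit are sample values):

    SELECT url_hostpath, type, duration
    FROM events.resources
    WHERE duration > 1000    -- sample threshold (ms); implies duration > 0
    ORDER BY duration DESC
    LIMIT 10;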
 -- --- autocomplete.sql ---
-CREATE TABLE autocomplete
-(
-    value      text    NOT NULL,
-    type       text    NOT NULL,
-    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE
-);
-CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
-CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
-CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
-CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
+ CREATE TABLE autocomplete
+ (
+     value      text    NOT NULL,
+     type       text    NOT NULL,
+     project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE
+ );
+ CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
+ CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
+ CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
+ CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
 -- --- jobs.sql ---
-CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
-CREATE TYPE job_action AS ENUM ('delete_user_data');
-CREATE TABLE jobs
-(
-    job_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
-    description  text       NOT NULL,
-    status       job_status NOT NULL,
-    project_id   integer    NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
-    action       job_action NOT NULL,
-    reference_id text       NOT NULL,
-    created_at   timestamp default timezone('utc'::text, now()) NOT NULL,
-    updated_at   timestamp default timezone('utc'::text, now()) NULL,
-    start_at     timestamp  NOT NULL,
-    errors       text       NULL
-);
-CREATE INDEX ON jobs (status);
-CREATE INDEX ON jobs (start_at);
+ CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
+ CREATE TYPE job_action AS ENUM ('delete_user_data');
+ CREATE TABLE jobs
+ (
+     job_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+     description  text       NOT NULL,
+     status       job_status NOT NULL,
+     project_id   integer    NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+     action       job_action NOT NULL,
+     reference_id text       NOT NULL,
+     created_at   timestamp default timezone('utc'::text, now()) NOT NULL,
+     updated_at   timestamp default timezone('utc'::text, now()) NULL,
+     start_at     timestamp  NOT NULL,
+     errors       text       NULL
+ );
+ CREATE INDEX ON jobs (status);
+ CREATE INDEX ON jobs (start_at);
+ raise notice 'DB created';
+ END IF;
+ END;
+
+$$
+LANGUAGE plpgsql;

 COMMIT;
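The trailing END IF / END / $$ LANGUAGE plpgsql lines show that this revision wraps the whole schema in a guarded plpgsql block, so re-running the script against an already-initialized database is harmless. A minimal sketch of the pattern — the existence check here is hypothetical, not the exact condition the file uses:

    DO $$
    BEGIN
        IF NOT EXISTS(SELECT 1 FROM pg_tables WHERE tablename = 'sessions') THEN  -- hypothetical guard
            -- all CREATE TYPE / CREATE TABLE / CREATE INDEX statements go here
            raise notice 'DB created';
        END IF;
    END;
    $$
    LANGUAGE plpgsql;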
From 6c4b1356a13748e162473270b195f5d73dffc082 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 28 Oct 2021 19:27:56 +0200
Subject: [PATCH 090/218] feat(api): v1.4.0 patch

---
 api/app.py                                    |    3 +-
 api/chalicelib/blueprints/subs/bp_insights.py |  163 ---
 api/chalicelib/core/insights.py               |  932 ---------------
 ee/api/app.py                                 |    3 +-
 .../chalicelib/blueprints/subs/bp_insights.py |  163 ---
 ee/api/chalicelib/core/insights.py            | 1047 -----------------
 .../{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql}      |    0
 .../{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql}      |    0
 8 files changed, 2 insertions(+), 2309 deletions(-)
 delete mode 100644 api/chalicelib/blueprints/subs/bp_insights.py
 delete mode 100644 api/chalicelib/core/insights.py
 delete mode 100644 ee/api/chalicelib/blueprints/subs/bp_insights.py
 delete mode 100644 ee/api/chalicelib/core/insights.py
 rename ee/scripts/helm/db/init_dbs/postgresql/{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql} (100%)
 rename scripts/helm/db/init_dbs/postgresql/{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql} (100%)

diff --git a/api/app.py b/api/app.py
index 92224f99e..e67810de5 100644
--- a/api/app.py
+++ b/api/app.py
@@ -7,7 +7,7 @@ from chalicelib.blueprints import bp_authorizers
 from chalicelib.blueprints import bp_core, bp_core_crons
 from chalicelib.blueprints.app import v1_api
 from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
-from chalicelib.blueprints.subs import bp_dashboard,bp_insights
+from chalicelib.blueprints.subs import bp_dashboard
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
 from chalicelib.utils.helper import environ
@@ -106,5 +106,4 @@ app.register_blueprint(bp_core_crons.app)
 app.register_blueprint(bp_core_dynamic.app)
 app.register_blueprint(bp_core_dynamic_crons.app)
 app.register_blueprint(bp_dashboard.app)
-app.register_blueprint(bp_insights.app)
 app.register_blueprint(v1_api.app)
diff --git a/api/chalicelib/blueprints/subs/bp_insights.py b/api/chalicelib/blueprints/subs/bp_insights.py
deleted file mode 100644
index 6546bfd12..000000000
--- a/api/chalicelib/blueprints/subs/bp_insights.py
+++ /dev/null
@@ -1,163 +0,0 @@
-from chalice import Blueprint
-from chalicelib.utils import helper
-from chalicelib import _overrides
-
-from chalicelib.core import dashboard, insights
-from chalicelib.core import metadata
-
-app = Blueprint(__name__)
-_overrides.chalice_app(app)
-
-
-@app.route('/{projectId}/insights/journey', methods=['GET', 'POST'])
-def get_insights_journey(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.journey(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/users_acquisition', methods=['GET', 'POST'])
-def get_users_acquisition(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.users_acquisition(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST'])
-def get_users_retention(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.users_retention(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST'])
-def get_feature_rentention(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_retention(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST'])
-def get_feature_acquisition(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_acquisition(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST'])
-def get_feature_popularity_frequency(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_intensity', methods=['GET', 'POST'])
-def get_feature_intensity(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_intensity(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_adoption', methods=['GET', 'POST'])
-def get_feature_adoption(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_adoption_top_users', methods=['GET', 'POST'])
-def get_feature_adoption(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_adoption_top_users(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/feature_adoption_daily_usage', methods=['GET', 'POST'])
-def get_feature_adoption_daily_usage(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.feature_adoption_daily_usage(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/users_active', methods=['GET', 'POST'])
-def get_users_active(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.users_active(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/users_power', methods=['GET', 'POST'])
-def get_users_power(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.users_power(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/users_slipping', methods=['GET', 'POST'])
-def get_users_slipping(projectId, context):
-    data = app.current_request.json_body
-    if data is None:
-        data = {}
-    params = app.current_request.query_params
-    args = dashboard.dashboard_args(params)
-
-    return {"data": insights.users_slipping(project_id=projectId, **{**data, **args})}
-
-
-@app.route('/{projectId}/insights/search', methods=['GET'])
-def get_insights_autocomplete(projectId, context):
-    params = app.current_request.query_params
-    if params is None or params.get('q') is None or len(params.get('q')) == 0:
-        return {"data": []}
-    # params['q'] = '^' + params['q']
-
-    return {'data': insights.search(params.get('q', ''), project_id=projectId,
-                                    platform=params.get('platform', None), feature_type=params.get("key"))}
diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py
deleted file mode 100644
index af4a48c09..000000000
--- a/api/chalicelib/core/insights.py
+++ /dev/null
@@ -1,932 +0,0 @@
-from chalicelib.core import sessions_metas
-from chalicelib.utils import helper, dev
-from chalicelib.utils import pg_client
-from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
-import math
-from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
-
-
-def __transform_journey(rows):
-    nodes = []
-    links = []
-    for r in rows:
-        source = r["source_event"][r["source_event"].index("_") + 1:]
-        target = r["target_event"][r["target_event"].index("_") + 1:]
-        if source not in nodes:
-            nodes.append(source)
-        if target not in nodes:
-            nodes.append(target)
-        links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]})
-    return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)}
-
-
"message_id"}, - "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, - # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only - "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} -} - - -@dev.timed -def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - event_start = None - event_table = JOURNEY_TYPES["PAGES"]["table"] - event_column = JOURNEY_TYPES["PAGES"]["column"] - event_table_id = JOURNEY_TYPES["PAGES"]["table_id"] - extra_values = {} - for f in filters: - if f["type"] == "START_POINT": - event_start = f["value"] - elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT source_event, - target_event, - count(*) AS value - - FROM (SELECT event_number || '_' || value as target_event, - LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event - FROM (SELECT value, - session_rank, - message_id, - ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number - - {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark" - if event_start else ""} - - FROM (SELECT session_id, - message_id, - timestamp, - value, - SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank - FROM (SELECT *, - CASE - WHEN source_timestamp IS NULL THEN 1 - ELSE 0 END AS new_session - FROM (SELECT session_id, - {event_table_id} AS message_id, - timestamp, - {event_column} AS value, - LAG(timestamp) - OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp - FROM {event_table} INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_subset)} - ) AS related_events) AS ranked_events) AS processed - {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} - ) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE source_event IS NOT NULL - and target_event IS NOT NULL - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, - **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - - return __transform_journey(rows) - - -def __compute_weekly_percentage(rows): - if rows is None or len(rows) == 0: - return rows - t = -1 - for r in rows: - if r["week"] == 0: - t = r["usersCount"] - r["percentage"] = r["usersCount"] / t - return rows - - -def __complete_retention(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - 
-def __compute_weekly_percentage(rows):
-    if rows is None or len(rows) == 0:
-        return rows
-    t = -1
-    for r in rows:
-        if r["week"] == 0:
-            t = r["usersCount"]
-        r["percentage"] = r["usersCount"] / t
-    return rows
-
-
-def __complete_retention(rows, start_date, end_date=None):
-    if rows is None:
-        return []
-    max_week = 10
-    for i in range(max_week):
-        if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
-            break
-        neutral = {
-            "firstConnexionWeek": start_date,
-            "week": i,
-            "usersCount": 0,
-            "connectedUsers": [],
-            "percentage": 0
-        }
-        if i < len(rows) \
-                and i != rows[i]["week"]:
-            rows.insert(i, neutral)
-        elif i >= len(rows):
-            rows.append(neutral)
-    return rows
-
-
-def __complete_acquisition(rows, start_date, end_date=None):
-    if rows is None:
-        return []
-    max_week = 10
-    week = 0
-    delta_date = 0
-    while max_week > 0:
-        start_date += TimeUTC.MS_WEEK
-        if end_date is not None and start_date >= end_date:
-            break
-        delta = 0
-        if delta_date + week >= len(rows) \
-                or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date:
-            for i in range(max_week):
-                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
-                    break
-
-                neutral = {
-                    "firstConnexionWeek": start_date,
-                    "week": i,
-                    "usersCount": 0,
-                    "connectedUsers": [],
-                    "percentage": 0
-                }
-                rows.insert(delta_date + week + i, neutral)
-                delta = i
-        else:
-            for i in range(max_week):
-                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
-                    break
-
-                neutral = {
-                    "firstConnexionWeek": start_date,
-                    "week": i,
-                    "usersCount": 0,
-                    "connectedUsers": [],
-                    "percentage": 0
-                }
-                if delta_date + week + i < len(rows) \
-                        and i != rows[delta_date + week + i]["week"]:
-                    rows.insert(delta_date + week + i, neutral)
-                elif delta_date + week + i >= len(rows):
-                    rows.append(neutral)
-                delta = i
-        week += delta
-        max_week -= 1
-        delta_date += 1
-    return rows
-
-
-@dev.timed
-def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
-                    **args):
-    startTimestamp = TimeUTC.trunc_week(startTimestamp)
-    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)")
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week,
-                              COUNT(DISTINCT connexions_list.user_id)     AS users_count,
-                              ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
-                       FROM (SELECT DISTINCT user_id
-                             FROM sessions
-                             WHERE {" AND ".join(pg_sub_query)}
-                               AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1
-                               AND NOT EXISTS((SELECT 1
-                                               FROM sessions AS bsess
-                                               WHERE bsess.start_ts < %(startTimestamp)s
-                                                 AND project_id = %(project_id)s
-                                                 AND bsess.user_id = sessions.user_id
-                                               LIMIT 1))
-                            ) AS users_list
-                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
-                                                          user_id
-                                                   FROM sessions
-                                                   WHERE users_list.user_id = sessions.user_id
-                                                     AND %(startTimestamp)s <=sessions.start_ts
-                                                     AND sessions.project_id = %(project_id)s
-                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
-                                                   GROUP BY connexion_week, user_id
-                                                  ) AS connexions_list ON (TRUE)
-                       GROUP BY week
-                       ORDER BY week;"""
-
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
-    return {
-        "startTimestamp": startTimestamp,
-        "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
-    }
-
-
-@dev.timed
-def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                      filters=[],
-                      **args):
-    startTimestamp = TimeUTC.trunc_week(startTimestamp)
-    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
-                              FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
-                              COUNT(DISTINCT connexions_list.user_id)     AS users_count,
-                              ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
-                       FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week
-                             FROM sessions
-                             WHERE {" AND ".join(pg_sub_query)}
-                               AND NOT EXISTS((SELECT 1
-                                               FROM sessions AS bsess
-                                               WHERE bsess.start_ts<%(startTimestamp)s
-                                                 AND project_id = %(project_id)s
-                                                 AND bsess.user_id = sessions.user_id
-                                               LIMIT 1))
-                             GROUP BY user_id) AS users_list
-                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
-                                                          user_id
-                                                   FROM sessions
-                                                   WHERE users_list.user_id = sessions.user_id
-                                                     AND first_connexion_week <=
-                                                         DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
-                                                     AND sessions.project_id = %(project_id)s
-                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
-                                                   GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
-                       GROUP BY first_connexion_week, week
-                       ORDER BY first_connexion_week, week;"""
-
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
-    return {
-        "startTimestamp": startTimestamp,
-        "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
-    }
-
-
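Both cohort queries share one shape: pick each user's first week, then LEFT JOIN LATERAL their later weekly visits and bucket by week offset. A reduced sketch of that core, without the project/time filters (epoch-ms timestamps as in the schema):

    SELECT FLOOR(DATE_PART('day', later.week_start - cohort.first_week) / 7)::integer AS week,
           COUNT(DISTINCT cohort.user_id)                                             AS users_count
    FROM (SELECT user_id,
                 MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_week
          FROM public.sessions
          WHERE user_id IS NOT NULL
          GROUP BY user_id) AS cohort
             LEFT JOIN LATERAL (SELECT DISTINCT DATE_TRUNC('week', to_timestamp(s.start_ts / 1000)) AS week_start
                                FROM public.sessions AS s
                                WHERE s.user_id = cohort.user_id) AS later ON (TRUE)
    GROUP BY week
    ORDER BY week;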
-@dev.timed
-def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                      filters=[],
-                      **args):
-    startTimestamp = TimeUTC.trunc_week(startTimestamp)
-    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-    event_type = "PAGES"
-    event_value = "/"
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-    pg_sub_query.append(f"feature.{event_column} = %(value)s")
-
-    with pg_client.PostgresClient() as cur:
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query[:-1])}
-                             AND length({event_column}) > 2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week,
-                              COUNT(DISTINCT connexions_list.user_id)     AS users_count,
-                              ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
-                       FROM (SELECT DISTINCT user_id
-                             FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
-                             WHERE {" AND ".join(pg_sub_query)}
-                               AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1
-                               AND NOT EXISTS((SELECT 1
-                                               FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id)
-                                               WHERE bsess.start_ts<%(startTimestamp)s
-                                                 AND project_id = %(project_id)s
-                                                 AND bsess.user_id = sessions.user_id
-                                                 AND bfeature.timestamp<%(startTimestamp)s
-                                                 AND bfeature.{event_column}=%(value)s
-                                               LIMIT 1))
-                             GROUP BY user_id) AS users_list
-                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
-                                                          user_id
-                                                   FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
-                                                   WHERE users_list.user_id = sessions.user_id
-                                                     AND %(startTimestamp)s <= sessions.start_ts
-                                                     AND sessions.project_id = %(project_id)s
-                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
-                                                     AND feature.timestamp >= %(startTimestamp)s
-                                                     AND feature.timestamp < %(endTimestamp)s
-                                                     AND feature.{event_column} = %(value)s
-                                                   GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
-                       GROUP BY week
-                       ORDER BY week;"""
-
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
-    return {
-        "startTimestamp": startTimestamp,
-        "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
-        "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
-    }
-
-
-@dev.timed
-def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                        filters=[],
-                        **args):
-    startTimestamp = TimeUTC.trunc_week(startTimestamp)
-    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-    event_type = "PAGES"
-    event_value = "/"
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-
-    pg_sub_query.append(f"feature.{event_column} = %(value)s")
-
-    with pg_client.PostgresClient() as cur:
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query[:-1])}
-                             AND length({event_column}) > 2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
-                              FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
-                              COUNT(DISTINCT connexions_list.user_id)     AS users_count,
-                              ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
-                       FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week
-                             FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week
-                                  FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
-                                  WHERE {" AND ".join(pg_sub_query)}
-                                    AND NOT EXISTS((SELECT 1
-                                                    FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id)
-                                                    WHERE bsess.start_ts<%(startTimestamp)s
-                                                      AND project_id = %(project_id)s
-                                                      AND bsess.user_id = sessions.user_id
-                                                      AND bfeature.timestamp<%(startTimestamp)s
-                                                      AND bfeature.{event_column}=%(value)s
-                                                    LIMIT 1))
-                                  GROUP BY user_id) AS raw_users_list) AS users_list
-                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
-                                                          user_id
-                                                   FROM sessions INNER JOIN {event_table} AS feature USING(session_id)
-                                                   WHERE users_list.user_id = sessions.user_id
-                                                     AND first_connexion_week <=
-                                                         DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
-                                                     AND sessions.project_id = %(project_id)s
-                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
-                                                     AND feature.timestamp >= %(startTimestamp)s
-                                                     AND feature.timestamp < %(endTimestamp)s
-                                                     AND feature.{event_column} = %(value)s
-                                                   GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
-                       GROUP BY first_connexion_week, week
-                       ORDER BY first_connexion_week, week;"""
-
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-        rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
-    return {
-        "startTimestamp": startTimestamp,
-        "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
-        "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
-    }
-
-
-@dev.timed
-def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                                 filters=[],
-                                 **args):
-    startTimestamp = TimeUTC.trunc_week(startTimestamp)
-    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    event_table = JOURNEY_TYPES["CLICK"]["table"]
-    event_column = JOURNEY_TYPES["CLICK"]["column"]
-    extra_values = {}
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_table = JOURNEY_TYPES[f["value"]]["table"]
-            event_column = JOURNEY_TYPES[f["value"]]["column"]
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
-                       FROM sessions
-                       WHERE {" AND ".join(pg_sub_query)}
-                         AND user_id IS NOT NULL;"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        all_user_count = cur.fetchone()["count"]
-        if all_user_count == 0:
-            return []
-        pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-        pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-        pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count
-                       FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                         AND user_id IS NOT NULL
-                       GROUP BY value
-                       ORDER BY count DESC
-                       LIMIT 7;"""
-        # TODO: solve full scan
-        print(cur.mogrify(pg_query, params))
-        print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        popularity = cur.fetchall()
-        pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count
-                       FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                       GROUP BY value;"""
-        # TODO: solve full scan
-        print(cur.mogrify(pg_query, params))
-        print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        frequencies = cur.fetchall()
-        total_usage = sum([f["count"] for f in frequencies])
-        frequencies = {f["value"]: f["count"] for f in frequencies}
-        for p in popularity:
-            p["popularity"] = p.pop("count") / all_user_count
-            p["frequency"] = frequencies[p["value"]] / total_usage
-
-    return popularity
-
-
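feature_popularity_frequency runs two aggregates over the same join and combines them in Python: popularity is the share of distinct users who touched the feature, frequency the share of total usages. A reduced one-query sketch for clicks (time filters omitted); dividing users by the total user count and usages by the summed usages reproduces the two ratios:

    SELECT label                     AS value,
           COUNT(DISTINCT s.user_id) AS users,
           COUNT(*)                  AS usages
    FROM events.clicks AS feature
             INNER JOIN public.sessions AS s USING (session_id)
    WHERE s.user_id IS NOT NULL
      AND length(label) > 2
    GROUP BY label
    ORDER BY users DESC
    LIMIT 7;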
-@dev.timed
-def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                     filters=[],
-                     **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    event_type = "CLICK"
-    event_value = '/'
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
-                       FROM sessions
-                       WHERE {" AND ".join(pg_sub_query)}
-                         AND user_id IS NOT NULL;"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        all_user_count = cur.fetchone()["count"]
-        if all_user_count == 0:
-            return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type},
-                                                            {"type": "EVENT_VALUE", "value": event_value}], }
-        pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-        pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query[:-1])}
-                             AND length({event_column}) > 2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_sub_query.append(f"feature.{event_column} = %(value)s")
-        pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
-                       FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                         AND user_id IS NOT NULL;"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        adoption = cur.fetchone()["count"] / all_user_count
-    return {"target": all_user_count, "adoption": adoption,
-            "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
-
-
-@dev.timed
-def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                               filters=[], **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    event_type = "CLICK"
-    event_value = '/'
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-    with pg_client.PostgresClient() as cur:
-        pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-        pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query[:-1])}
-                             AND length({event_column}) > 2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_sub_query.append(f"feature.{event_column} = %(value)s")
-        pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count
-                       FROM {event_table} AS feature
-                                INNER JOIN sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                       GROUP BY 1
-                       ORDER BY 2 DESC
-                       LIMIT 10;"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-    return {"users": helper.list_to_camel_case(rows),
-            "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
-
-
-@dev.timed
-def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                                 filters=[], **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
-                                           chart=True, data=args)
-    event_type = "CLICK"
-    event_value = '/'
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-    with pg_client.PostgresClient() as cur:
-        pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s")
-        pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s")
-        pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-        pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query)}
-                             AND length({event_column})>2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_sub_query_chart.append(f"feature.{event_column} = %(value)s")
-        pg_query = f"""SELECT generated_timestamp AS timestamp,
-                              COALESCE(COUNT(session_id), 0) AS count
-                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                                LEFT JOIN LATERAL ( SELECT DISTINCT session_id
-                                                    FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                                                    WHERE {" AND ".join(pg_sub_query_chart)}
-                                                  ) AS users ON (TRUE)
-                       GROUP BY generated_timestamp
-                       ORDER BY generated_timestamp;"""
-        params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        print(cur.mogrify(pg_query, params))
-        print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-    return {"chart": helper.list_to_camel_case(rows),
-            "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
-
-
-@dev.timed
-def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                      filters=[],
-                      **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-    event_table = JOURNEY_TYPES["CLICK"]["table"]
-    event_column = JOURNEY_TYPES["CLICK"]["column"]
-    extra_values = {}
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_table = JOURNEY_TYPES[f["value"]]["table"]
-            event_column = JOURNEY_TYPES[f["value"]]["column"]
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    pg_sub_query.append(f"length({event_column})>2")
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg
-                       FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                       WHERE {" AND ".join(pg_sub_query)}
-                       GROUP BY value
-                       ORDER BY avg DESC
-                       LIMIT 7;"""
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # TODO: solve full scan issue
-        print(cur.mogrify(pg_query, params))
-        print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-
-    return rows
-
-
-@dev.timed
-def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                 filters=[],
-                 **args):
-    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
-                                           chart=True, data=args)
-
-    pg_sub_query_chart.append("user_id IS NOT NULL")
-    period = "DAY"
-    extra_values = {}
-    for f in filters:
-        if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]:
-            period = f["value"]
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart
-                       FROM (SELECT generated_timestamp AS timestamp,
-                                    COALESCE(COUNT(users), 0) AS count
-                             FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
-                                      LEFT JOIN LATERAL ( SELECT DISTINCT user_id
-                                                          FROM public.sessions
-                                                          WHERE {" AND ".join(pg_sub_query_chart)}
-                                                        ) AS users ON (TRUE)
-                             GROUP BY generated_timestamp
-                             ORDER BY generated_timestamp) AS chart;"""
-        params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK,
-                  "project_id": project_id,
-                  "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week(
-                      startTimestamp),
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args),
-                  **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        row_users = cur.fetchone()
-
-    return row_users
-
-
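users_active and feature_adoption_daily_usage share a zero-filling pattern: generate_series produces every bucket timestamp, and a LEFT JOIN LATERAL counts the matching rows, so empty days appear as 0 instead of vanishing from the chart. A reduced sketch for daily active users — the epoch-ms bounds and project id are sample values:

    SELECT generated_timestamp       AS timestamp,
           COALESCE(COUNT(users), 0) AS count
    FROM generate_series(1633046400000, 1633651200000, 86400000) AS generated_timestamp  -- sample window, 1-day step
             LEFT JOIN LATERAL (SELECT DISTINCT user_id
                                FROM public.sessions
                                WHERE project_id = 1  -- sample value
                                  AND start_ts >= generated_timestamp
                                  AND start_ts < generated_timestamp + 86400000
                                  AND user_id IS NOT NULL) AS users ON (TRUE)
    GROUP BY generated_timestamp
    ORDER BY generated_timestamp;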
-@dev.timed
-def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                filters=[], **args):
-    pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
-    pg_sub_query.append("user_id IS NOT NULL")
-
-    with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition
-                       FROM (SELECT number_of_days, COUNT(user_id) AS count
-                             FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days
-                                   FROM sessions
-                                   WHERE {" AND ".join(pg_sub_query)}
-                                   GROUP BY 1) AS users_connexions
-                             GROUP BY number_of_days
-                             ORDER BY number_of_days) AS day_users_partition;"""
-        params = {"project_id": project_id,
-                  "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)}
-        # print(cur.mogrify(pg_query, params))
-        # print("---------------------")
-        cur.execute(cur.mogrify(pg_query, params))
-        row_users = cur.fetchone()
-
-    return helper.dict_to_camel_case(row_users)
-
-
-@dev.timed
-def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
-                   filters=[], **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
-    event_type = "PAGES"
-    event_value = "/"
-    extra_values = {}
-    default = True
-    for f in filters:
-        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
-            event_type = f["value"]
-        elif f["type"] == "EVENT_VALUE":
-            event_value = f["value"]
-            default = False
-        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f["value"]
-    event_table = JOURNEY_TYPES[event_type]["table"]
-    event_column = JOURNEY_TYPES[event_type]["column"]
-    pg_sub_query.append(f"feature.{event_column} = %(value)s")
-
-    with pg_client.PostgresClient() as cur:
-        if default:
-            # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                           FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                           WHERE {" AND ".join(pg_sub_query[:-1])}
-                             AND length({event_column}) > 2
-                           GROUP BY value
-                           ORDER BY count DESC
-                           LIMIT 1;"""
-            params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
-        extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen
-                       FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
-                             FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                             WHERE {" AND ".join(pg_sub_query)}
-                             GROUP BY user_id) AS user_last_usage
-                                INNER JOIN sessions USING (user_id)
-                       WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000
-                       GROUP BY user_id, last_time,interactions_count;"""
-
-        params = {"project_id": project_id, "startTimestamp": startTimestamp,
-                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
-    return {
-        "startTimestamp": startTimestamp,
-        "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
-        "list": helper.list_to_camel_case(rows)
-    }
-
-
-@dev.timed
-def search(text, feature_type, project_id, platform=None):
-    if not feature_type:
-        resource_type = "ALL"
-        data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform)
-        return data
-
-    pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True,
-                                     data={} if platform is None else {"platform": platform})
-
-    params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH,
-              "endTimestamp": TimeUTC.now(),
-              "project_id": project_id,
-              "value": helper.string_to_sql_like(text.lower()),
-              "platform_0": platform}
-    if feature_type == "ALL":
-        with pg_client.PostgresClient() as cur:
-            sub_queries = []
-            for e in JOURNEY_TYPES:
-                sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type"
-                                        FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id)
-                                        WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s
-                                        LIMIT 10)""")
-            pg_query = "UNION ALL".join(sub_queries)
-            # print(cur.mogrify(pg_query, params))
-            cur.execute(cur.mogrify(pg_query, params))
-            rows = cur.fetchall()
-    elif JOURNEY_TYPES.get(feature_type) is not None:
-        with pg_client.PostgresClient() as cur:
-            pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type"
-                           FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id)
-                           WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s
-                           LIMIT 10;"""
-            # print(cur.mogrify(pg_query, params))
-            cur.execute(cur.mogrify(pg_query, params))
-            rows = cur.fetchall()
-    else:
-        return []
-    return [helper.dict_to_camel_case(row) for row in rows]
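search() builds one small DISTINCT ... ILIKE subquery per feature type and glues them together with UNION ALL, so a single round-trip returns up to 10 candidates per type. Expanded for two of the types, the generated SQL looks roughly like the following (the pattern value is a sample):

    (SELECT DISTINCT base_path AS value, 'PAGES' AS "type"
     FROM events.pages INNER JOIN public.sessions USING (session_id)
     WHERE base_path ILIKE '%checkout%'  -- sample pattern
     LIMIT 10)
    UNION ALL
    (SELECT DISTINCT label AS value, 'CLICK' AS "type"
     FROM events.clicks INNER JOIN public.sessions USING (session_id)
     WHERE label ILIKE '%checkout%'
     LIMIT 10);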
-_overrides.chalice_app(app) - - -@app.route('/{projectId}/insights/journey', methods=['GET', 'POST']) -def get_insights_journey(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.journey(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_acquisition', methods=['GET', 'POST']) -def get_users_acquisition(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.users_acquisition(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST']) -def get_users_retention(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.users_retention(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_retention', methods=['GET', 'POST']) -def get_feature_rentention(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_retention(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_acquisition', methods=['GET', 'POST']) -def get_feature_acquisition(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_acquisition(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_popularity_frequency', methods=['GET', 'POST']) -def get_feature_popularity_frequency(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_popularity_frequency(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_intensity', methods=['GET', 'POST']) -def get_feature_intensity(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_intensity(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_adoption', methods=['GET', 'POST']) -def get_feature_adoption(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_adoption(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_adoption_top_users', methods=['GET', 'POST']) -def get_feature_adoption(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_adoption_top_users(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/feature_adoption_daily_usage', methods=['GET', 'POST']) 
-def get_feature_adoption_daily_usage(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.feature_adoption_daily_usage(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_active', methods=['GET', 'POST']) -def get_users_active(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.users_active(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_power', methods=['GET', 'POST']) -def get_users_power(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.users_power(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/users_slipping', methods=['GET', 'POST']) -def get_users_slipping(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": insights.users_slipping(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/insights/search', methods=['GET']) -def get_insights_autocomplete(projectId, context): - params = app.current_request.query_params - if params is None or params.get('q') is None or len(params.get('q')) == 0: - return {"data": []} - # params['q'] = '^' + params['q'] - - return {'data': insights.search(params.get('q', ''), project_id=projectId, - platform=params.get('platform', None), feature_type=params.get("key"))} diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py deleted file mode 100644 index 2a8febe5d..000000000 --- a/ee/api/chalicelib/core/insights.py +++ /dev/null @@ -1,1047 +0,0 @@ -from chalicelib.core import sessions_metas -from chalicelib.utils import helper, dev -from chalicelib.utils import ch_client -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps -from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint - - -def __transform_journey(rows): - nodes = [] - links = [] - for r in rows: - source = r["source_event"][r["source_event"].index("_") + 1:] - target = r["target_event"][r["target_event"].index("_") + 1:] - if source not in nodes: - nodes.append(source) - if target not in nodes: - nodes.append(target) - links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) - return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} - - -JOURNEY_DEPTH = 5 -JOURNEY_TYPES = { - "PAGES": {"table": "pages", "column": "url_path"}, - "CLICK": {"table": "clicks", "column": "label"}, - # "VIEW": {"table": "events_ios.views", "column": "name"}, TODO: enable this for SAAS only - "EVENT": {"table": "customs", "column": "name"} -} - - -@dev.timed -def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): - event_start = None - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - meta_condition = [] - for f in filters: - if f["type"] == "START_POINT": - 
event_start = f["value"] - elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append(f"sessions_metadata.project_id = %(project_id)s") - meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") - meta_condition.append(f"sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000)") - extra_values["user_id"] = f["value"] - ch_sub_query = __get_basic_constraints(table_name=event_table, data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT source_event, - target_event, - count(*) AS value - FROM (SELECT toString(event_number) || '_' || value AS target_event, - lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event - FROM (SELECT session_rank, - datetime, - value, - row_number AS event_number - FROM (SELECT session_rank, - groupArray(datetime) AS arr_datetime, - groupArray(value) AS arr_value, - arrayEnumerate(arr_datetime) AS row_number - {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY datetime ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN datetime ELSE NULL END as mark" if event_start else ""} - FROM (SELECT session_id, - datetime, - value, - SUM(new_session) OVER (ORDER BY session_id, datetime) AS session_rank - FROM (SELECT *, - if(equals(source_timestamp, '1970-01-01'), 1, 0) AS new_session - FROM (SELECT session_id, - datetime, - {event_column} AS value, - lagInFrame(datetime) OVER (PARTITION BY session_id ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_timestamp - FROM {event_table} {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - ORDER BY session_id, datetime) AS related_events) AS ranked_events - ORDER BY session_rank, datetime - ) AS processed - {") AS marked) AS maxed WHERE datetime >= max) AS filtered" if event_start else ""} - GROUP BY session_rank - ORDER BY session_rank) - ARRAY JOIN - arr_datetime AS datetime, - arr_value AS value, - row_number - ORDER BY session_rank ASC, - row_number ASC) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE not empty(source_event) - AND not empty(target_event) - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, - **__get_constraint_values(args), **extra_values} - - rows = ch.execute(query=ch_query, params=params) - # print(ch_query % params) - return __transform_journey(rows) - - -def __compute_weekly_percentage(rows): - if rows is None or len(rows) == 0: - return rows - t = -1 - for r in rows: - if r["week"] == 0: - t = r["usersCount"] - r["percentage"] = r["usersCount"] / t - return rows - - -def __complete_retention(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= 
end_date: - break - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if i < len(rows) \ - and i != rows[i]["week"]: - rows.insert(i, neutral) - elif i >= len(rows): - rows.append(neutral) - return rows - - -def __complete_acquisition(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - week = 0 - delta_date = 0 - while max_week > 0: - start_date += TimeUTC.MS_WEEK - if end_date is not None and start_date >= end_date: - break - delta = 0 - if delta_date + week >= len(rows) \ - or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - rows.insert(delta_date + week + i, neutral) - delta = i - else: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if delta_date + week + i < len(rows) \ - and i != rows[delta_date + week + i]["week"]: - rows.insert(delta_date + week + i, neutral) - elif delta_date + week + i >= len(rows): - rows.append(neutral) - delta = i - week += delta - max_week -= 1 - delta_date += 1 - return rows - - -@dev.timed -def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - COUNT(all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - ) AS all_connexions USING (user_id) - GROUP BY connexion_week - ORDER BY connexion_week;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def users_acquisition(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, - week, - users_count, - connected_users - FROM ( - SELECT first_connexion_week, - toInt8((connexion_week - first_connexion_week) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(20)(all_connexions.user_id) AS connected_users - FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - GROUP BY user_id) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - WHERE first_connexion_week <= connexion_week - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week - ) AS full_data;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = 
JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query += meta_condition - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - AND bsess.{event_column}=%(value)s - LIMIT 1)) - ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - GROUP BY connexion_week - ORDER BY connexion_week;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_acquisition(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - ch_sub_query += meta_condition - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, - week, - users_count, - connected_users - FROM ( - SELECT first_connexion_week, - toInt8((connexion_week - first_connexion_week) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM 
sessions_metadata AS bmsess - INNER JOIN {event_table} AS bsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - AND bsess.{event_column} = %(value)s - LIMIT 1)) - GROUP BY user_id) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - WHERE first_connexion_week <= connexion_week - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week - ) AS full_data;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - - with ch_client.ClickHouseClient() as ch: - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query += meta_condition - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(meta_condition)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - all_user_count = ch.execute(ch_query, params) - if 
len(all_user_count) == 0 or all_user_count[0]["count"] == 0: - return [] - all_user_count = all_user_count[0]["count"] - ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND length({event_column})>2 - GROUP BY value - ORDER BY count DESC - LIMIT 7;""" - - # print(ch_query % params) - # print("---------------------") - popularity = ch.execute(ch_query, params) - params["values"] = [p["value"] for p in popularity] - if len(params["values"]) == 0: - return [] - ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND {event_column} IN %(values)s - GROUP BY value;""" - - # print(ch_query % params) - # print("---------------------") - frequencies = ch.execute(ch_query, params) - total_usage = sum([f["count"] for f in frequencies]) - frequencies = {f["value"]: f["count"] for f in frequencies} - for p in popularity: - p["popularity"] = p.pop("count") / all_user_count - p["frequency"] = frequencies[p["value"]] / total_usage - - return popularity - - -@dev.timed -def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - # else: - # print(f"no {event_table} most used value") - # return {"target": 0, "adoption": 0, - # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} - - extra_values["value"] = event_value - - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - 
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - ch_sub_query += meta_condition - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(meta_condition)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - all_user_count = ch.execute(ch_query, params) - if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: - return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": event_value}], } - all_user_count = all_user_count[0]["count"] - - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - adoption = ch.execute(ch_query, params) - adoption = adoption[0]["count"] / all_user_count - return {"target": all_user_count, "adoption": adoption, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - row = ch.execute(ch_query, params) - if len(row) > 0: - 
event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return {"users": [], - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} - - extra_values["value"] = event_value - if len(meta_condition) == 0: - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id - ORDER BY count DESC - LIMIT 10;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - return {"users": helper.list_to_camel_case(rows), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = 
f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count - FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY day - ORDER BY day) AS raw_results;""" - params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, - density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, - neutral={"count": 0}), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY avg DESC - LIMIT 7;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - - return rows - - -PERIOD_TO_FUNCTION = { - "DAY": "toStartOfDay", - "WEEK": "toStartOfWeek" -} - - -@dev.timed -def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - meta_condition = __get_meta_constraint(args) - period = "DAY" - extra_values = {} - for f in filters: - if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: - period = f["value"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - period_function = PERIOD_TO_FUNCTION[period] - ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT SUM(count) / 
intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg - FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY period) AS daily_users;""" - params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, - "project_id": project_id, - "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( - startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), - **extra_values} - # print(ch_query % params) - # print("---------------------") - avg = ch.execute(ch_query, params) - if len(avg) == 0 or avg[0]["avg"] == 0: - return {"avg": 0, "chart": []} - avg = avg[0]["avg"] - # TODO: optimize this when DB structure changes, optimization from 3s to 1s - ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count - FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY period - ORDER BY period) AS raw_results;""" - # print(ch_query % params) - # print("---------------------") - rows = ch.execute(ch_query, params) - return {"avg": avg, "chart": rows} - - -@dev.timed -def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg - FROM(SELECT COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days) AS results;""" - params = {"project_id": project_id, - "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - # print("---------------------") - avg = ch.execute(ch_query, params) - if len(avg) == 0 or avg[0]["avg"] == 0: - return {"avg": 0, "partition": []} - avg = avg[0]["avg"] - ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days;""" - - # print(ch_query % params) - # print("---------------------") - rows = ch.execute(ch_query, params) - - return {"avg": avg, "partition": helper.list_to_camel_case(rows)} - - -@dev.timed -def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - 
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "list": [] - } - extra_values["value"] = event_value - if len(meta_condition) == 0: - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT user_id, - toUnixTimestamp(last_time)*1000 AS last_time, - interactions_count, - toUnixTimestamp(first_seen) * 1000 AS first_seen, - toUnixTimestamp(last_seen) * 1000 AS last_seen - FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen - FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) - WHERE now() - last_time > 7 - GROUP BY user_id, last_time, interactions_count - ORDER BY interactions_count DESC, last_time DESC - LIMIT 50) AS raw_results;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "list": helper.list_to_camel_case(rows) - } - - -@dev.timed -def search(text, feature_type, project_id, platform=None): - if not feature_type: - resource_type = "ALL" - data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) - return data - args = {} if platform is None else {"platform": platform} - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - params = 
{"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, - "endTimestamp": TimeUTC.now(), - "project_id": project_id, - "value": text.lower(), - "platform_0": platform} - if feature_type == "ALL": - with ch_client.ClickHouseClient() as ch: - sub_queries = [] - for e in JOURNEY_TYPES: - sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" - FROM {JOURNEY_TYPES[e]["table"]} AS feature - WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 - LIMIT 10)""") - ch_query = "UNION ALL".join(sub_queries) - print(ch_query % params) - rows = ch.execute(ch_query, params) - elif JOURNEY_TYPES.get(feature_type) is not None: - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" - FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature - WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 - LIMIT 10;""" - print(ch_query % params) - rows = ch.execute(ch_query, params) - else: - return [] - return [helper.dict_to_camel_case(row) for row in rows] diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql similarity index 100% rename from ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql rename to ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql diff --git a/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql similarity index 100% rename from scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql rename to scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql From 108d0bf744c2e6bc8fa8420ce01dcd64d7b00378 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 28 Oct 2021 19:28:57 +0200 Subject: [PATCH 091/218] feat(api): v1.4.0 patch EE --- .../db/init_dbs/clickhouse/{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename ee/scripts/helm/db/init_dbs/clickhouse/{1.3.6/1.3.6.sql => 1.4.0/1.4.0.sql} (100%) diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.4.0/1.4.0.sql similarity index 100% rename from ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql rename to ee/scripts/helm/db/init_dbs/clickhouse/1.4.0/1.4.0.sql From 3c2110d5672de61b14f8e69aac4d0ff8b5eba903 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 29 Oct 2021 11:08:44 +0200 Subject: [PATCH 092/218] feat(api): sign URLs for mobile replay --- api/chalicelib/blueprints/bp_core.py | 11 ++- api/chalicelib/core/mobile.py | 11 +++ api/chalicelib/utils/s3urls.py | 120 +++++++++++++++++++++++++++ 3 files changed, 139 insertions(+), 3 deletions(-) create mode 100644 api/chalicelib/core/mobile.py create mode 100644 api/chalicelib/utils/s3urls.py diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py index e99d6e297..a73a05d07 100644 --- a/api/chalicelib/blueprints/bp_core.py +++ b/api/chalicelib/blueprints/bp_core.py @@ -1,5 +1,3 @@ -from chalicelib.utils.helper import environ - from chalice import Blueprint from chalice import Response @@ -11,9 +9,10 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig log_tool_stackdriver, reset_password, sessions_favorite_viewed, \ log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \ log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, 
integration_jira_cloud, integration_github, \
-    assist, heatmaps
+    assist, heatmaps, mobile
 from chalicelib.core.collaboration_slack import Slack
 from chalicelib.utils import email_helper
+from chalicelib.utils.helper import environ
 
 app = Blueprint(__name__)
 _overrides.chalice_app(app)
@@ -902,3 +901,9 @@ def get_general_stats():
     return {"data": {"sessions:": sessions.count_all()}}
+
+
+@app.route('/mobile/urls', methods=['POST'])
+def mobile_signe(context):
+    data = app.current_request.json_body
+    return {"data": mobile.sign_urls(data["URL"])}
diff --git a/api/chalicelib/core/mobile.py b/api/chalicelib/core/mobile.py
new file mode 100644
index 000000000..f37e4e276
--- /dev/null
+++ b/api/chalicelib/core/mobile.py
@@ -0,0 +1,11 @@
+from chalicelib.utils import s3, s3urls
+
+
+def sign_urls(urls):
+    result = []
+    for u in urls:
+        e = s3urls.parse_url(u)
+        result.append(s3.get_presigned_url_for_sharing(bucket=e["bucket"],
+                                                       key=e["key"],
+                                                       expires_in=10 * 60))
+    return result
diff --git a/api/chalicelib/utils/s3urls.py b/api/chalicelib/utils/s3urls.py
new file mode 100644
index 000000000..bc0b39bea
--- /dev/null
+++ b/api/chalicelib/utils/s3urls.py
@@ -0,0 +1,120 @@
+import re
+from urllib.parse import urlparse
+
+
+def style(url):
+    """ Determine 'style' of a given S3 url
+
+    >>> style("s3://my-bucket/my-key/")
+    's3'
+
+    >>> style("s3://user@my-bucket/my-key/")
+    's3-credential'
+
+    >>> style("https://my-bucket.s3.amazonaws.com/my-key/")
+    'bucket-in-netloc'
+
+    >>> style("https://s3.amazonaws.com/my-bucket/my-key/")
+    'bucket-in-path'
+    """
+    o = urlparse(url)
+    if o.scheme == 's3':
+        if '@' in o.netloc:
+            return 's3-credential'
+        else:
+            return 's3'
+
+    if re.search(r'^s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc):
+        return 'bucket-in-path'
+
+    if re.search(r'\.s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc):
+        return 'bucket-in-netloc'
+
+    raise ValueError(f'Unknown url style: {url}')
+
+
+def build_url(url_type, bucket, key=None, region=None, credential_name=None):
+    """ Construct an S3 URL
+
+    Args:
+        url_type: one of 's3', 's3-credential', 'bucket-in-path', 'bucket-in-netloc'
+        bucket: S3 bucket name
+        key: Key within bucket (optional)
+        region: S3 region name (optional)
+        credential_name: user/credential name to use in S3 scheme url (optional)
+
+    Returns:
+        (string) S3 URL
+    """
+    if url_type == 's3':
+        credential = f'{credential_name}@' if credential_name else ""
+        return f's3://{credential}{bucket}/{key or ""}'
+
+    if url_type == 'bucket-in-path':
+        return f'https://s3{"-" if region else ""}{region or ""}.amazonaws.com/{bucket}/{key}'
+
+    if url_type == 'bucket-in-netloc':
+        return f'https://{bucket}.s3.amazonaws.com/{key}'
+
+    raise ValueError(f'Invalid url_type: {url_type}')
+
+
+def parse_s3_credential_url(url):
+    """ Parse S3 scheme url containing a user/credential name
+
+    >>> parse_s3_credential_url("s3://user@my-bucket/my-key")
+    {'bucket': 'my-bucket', 'key': 'my-key', 'credential_name': 'user'}
+    """
+    o = urlparse(url)
+    cred_name, bucket = o.netloc.split('@')
+    key = o.path if o.path[0] != '/' else o.path[1:]
+    return {'bucket': bucket, 'key': key, 'credential_name': cred_name}
+
+
+def parse_s3_url(url):
+    """ Parse S3 scheme url
+
+    >>> parse_s3_url("s3://my-bucket/my-key")
+    {'bucket': 'my-bucket', 'key': 'my-key'}
+    """
+    o = urlparse(url)
+    bucket = o.netloc
+    key = o.path if o.path[0] != '/' else o.path[1:]
+    return {'bucket': bucket, 'key': key}
+
+
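+# Illustrative sketch (example values only): how the builders and parsers
+# above are meant to round-trip. A quick self-check, not an exhaustive test.
+def _example_round_trip():
+    url = build_url('s3', bucket='my-bucket', key='replays/1/2.png')
+    assert url == 's3://my-bucket/replays/1/2.png'
+    assert style(url) == 's3'
+    assert parse_s3_url(url) == {'bucket': 'my-bucket', 'key': 'replays/1/2.png'}
+
+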
+def parse_bucket_in_path_url(url):
+    """ Parse url with bucket name in path
+
+    >>> parse_bucket_in_path_url("https://s3-eu-west-1.amazonaws.com/my-bucket/my-key/")
+    {'bucket': 'my-bucket', 'key': 'my-key/'}
+    """
+    path = urlparse(url).path
+    bucket = path.split('/')[1]
+    key = '/'.join(path.split('/')[2:])
+    return {'bucket': bucket, 'key': key}
+
+
+def parse_bucket_in_netloc_url(url):
+    """ Parse url with bucket name in host/netloc
+
+    >>> parse_bucket_in_netloc_url("https://my-bucket.s3.amazonaws.com/my-key/")
+    {'bucket': 'my-bucket', 'key': 'my-key/'}
+    """
+    o = urlparse(url)
+    bucket = o.netloc.split('.')[0]
+    key = o.path if o.path[0] != '/' else o.path[1:]
+    return {'bucket': bucket, 'key': key}
+
+
+def parse_url(url):
+    url_style = style(url)
+
+    if url_style == 's3-credential':
+        return parse_s3_credential_url(url)
+    if url_style == 's3':
+        return parse_s3_url(url)
+    if url_style == 'bucket-in-path':
+        return parse_bucket_in_path_url(url)
+    if url_style == 'bucket-in-netloc':
+        return parse_bucket_in_netloc_url(url)
From 26d2131baeb12fb01748d5a5980ae9d265339186 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 9 Nov 2021 12:12:48 +0100
Subject: [PATCH 093/218] feat(utilities): unset downloaded file
 feat(utilities): unset generated consumer

---
 utilities/servers/sourcemaps-handler.js | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/utilities/servers/sourcemaps-handler.js b/utilities/servers/sourcemaps-handler.js
index fca74758b..91917104d 100644
--- a/utilities/servers/sourcemaps-handler.js
+++ b/utilities/servers/sourcemaps-handler.js
@@ -46,7 +46,7 @@ module.exports.sourcemapReader = async event => {
                 console.log(err);
                 return reject(err);
             }
-            const sourcemap = data.Body.toString();
+            let sourcemap = data.Body.toString();
 
             return new sourceMap.SourceMapConsumer(sourcemap)
                 .then(consumer => {
@@ -91,10 +91,14 @@ module.exports.sourcemapReader = async event => {
                         // console.log(result);
                         results.push(result);
                     }
-
+                    consumer = undefined;
                     // Use this code if you don't use the http event with the LAMBDA-PROXY integration
                     return resolve(results);
-                });
+                })
+                .finally(() => {
+                    sourcemap = undefined;
+                })
         });
     });
 };
\ No newline at end of file
From cd44f2735d845ab1066dca43440072a317d44605 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 9 Nov 2021 17:54:29 +0100
Subject: [PATCH 094/218] feat(api): changed mobile sign endpoint

---
 api/chalicelib/blueprints/bp_core.py |  6 +++---
 api/chalicelib/core/mobile.py        | 14 ++++++++------
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py
index a73a05d07..303bca306 100644
--- a/api/chalicelib/blueprints/bp_core.py
+++ b/api/chalicelib/blueprints/bp_core.py
@@ -903,7 +903,7 @@ def get_general_stats():
     return {"data": {"sessions:": sessions.count_all()}}
 
 
-@app.route('/mobile/urls', methods=['POST'])
-def mobile_signe(context):
+@app.route('/{projectId}/mobile/{sessionId}/urls', methods=['POST'])
+def mobile_signe(projectId, sessionId, context):
     data = app.current_request.json_body
-    return {"data": mobile.sign_urls(data["URL"])}
+    return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data["keys"])}
diff --git a/api/chalicelib/core/mobile.py b/api/chalicelib/core/mobile.py
index f37e4e276..c13403584 100644
--- a/api/chalicelib/core/mobile.py
+++ b/api/chalicelib/core/mobile.py
@@ -1,11 +1,13 @@
-from chalicelib.utils import s3, s3urls
+from chalicelib.core import projects
+from chalicelib.utils import s3
+from chalicelib.utils.helper import environ
 
 
-def sign_urls(urls):
+def sign_keys(project_id, session_id, keys):
     result = []
-    for u in urls:
-        e = s3urls.parse_url(u)
-        result.append(s3.get_presigned_url_for_sharing(bucket=e["bucket"],
-                                                       key=e["key"],
+    project_key = projects.get_project_key(project_id)
+    for k in keys:
+        result.append(s3.get_presigned_url_for_sharing(bucket=environ["iosBucket"],
+                                                       key=f"{project_key}/{session_id}/{k}",
                                                        expires_in=10 * 60))
     return result
From 0c8f5e400a1ce0c3967cf1ea354a4f24d39e6c50 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 9 Nov 2021 17:56:54 +0100
Subject: [PATCH 095/218] feat(api): mobile bucket

---
 api/.chalice/config.json    | 1 +
 ee/api/.chalice/config.json | 5 +++--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/api/.chalice/config.json b/api/.chalice/config.json
index b6f821e3a..c9d74fea2 100644
--- a/api/.chalice/config.json
+++ b/api/.chalice/config.json
@@ -54,6 +54,7 @@
       "S3_SECRET": "",
       "invitation_link": "/api/users/invitation?token=%s",
       "change_password_link": "/reset-password?invitation=%s&&pass=%s",
+      "iosBucket": "openreplay-ios-images",
       "version_number": "1.2.0"
     },
     "lambda_timeout": 150,
diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json
index 7705f0fbd..d4e130688 100644
--- a/ee/api/.chalice/config.json
+++ b/ee/api/.chalice/config.json
@@ -55,7 +55,6 @@
       "S3_HOST": "",
       "S3_KEY": "",
       "S3_SECRET": "",
-      "version_number": "1.0.0",
       "LICENSE_KEY": "",
       "SAML2_MD_URL": "",
       "idp_entityId": "",
@@ -63,7 +62,9 @@
       "idp_x509cert": "",
       "idp_sls_url": "",
       "invitation_link": "/api/users/invitation?token=%s",
-      "change_password_link": "/reset-password?invitation=%s&&pass=%s"
+      "change_password_link": "/reset-password?invitation=%s&&pass=%s",
+      "iosBucket": "openreplay-ios-images",
+      "version_number": "1.4.0"
     },
     "lambda_timeout": 150,
     "lambda_memory_size": 400,
From d2b589bd017d9ced7ecfc6939316176b7b22dce5 Mon Sep 17 00:00:00 2001
From: ShiKhu
Date: Tue, 9 Nov 2021 18:19:29 +0100
Subject: [PATCH 096/218] feat(backend-http): IOSBatchMeta + init s3 for image

---
 backend/pkg/messages/batch.go        |  8 ++++++++
 backend/pkg/messages/messages.go     | 16 ++++++++++++++++
 backend/pkg/messages/read_message.go |  7 +++++++
 backend/services/http/main.go        |  2 +-
 4 files changed, 32 insertions(+), 1 deletion(-)

diff --git a/backend/pkg/messages/batch.go b/backend/pkg/messages/batch.go
index a70d96c98..fa40db7b2 100644
--- a/backend/pkg/messages/batch.go
+++ b/backend/pkg/messages/batch.go
@@ -30,6 +30,14 @@ func ReadBatch(b []byte, callback func(Message)) error {
 			timestamp = m.Timestamp
 			isBatchMeta = true
 			// continue readLoop
+		case *IOSBatchMeta:
+			if index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
+				return errors.New("Batch Meta found at the end of the batch")
+			}
+			index = m.FirstIndex
+			timestamp = int64(m.Timestamp)
+			isBatchMeta = true
+			// continue readLoop
 		case *Timestamp:
 			timestamp = int64(m.Timestamp) // TODO(?): replace timestamp type to int64 everywhere (including encoding part in tracker)
 			// No skipping here for making it easy to encode back the same sequence of message
diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go
index 2f1865884..0cf0874eb 100644
--- a/backend/pkg/messages/messages.go
+++ b/backend/pkg/messages/messages.go
@@ -1192,6 +1192,22 @@ p = WriteUint(msg.ID, buf, p)
 	return buf[:p]
 }
 
+type IOSBatchMeta struct {
+	*meta
+	Timestamp uint64
+Length uint64
+FirstIndex uint64
+}
+func (msg *IOSBatchMeta) Encode() []byte{
+	buf := make([]byte, 31 )
+	buf[0] = 107
+	p := 1
+	p = WriteUint(msg.Timestamp, buf, p)
+p = WriteUint(msg.Length, buf, p)
+p = WriteUint(msg.FirstIndex, buf, p)
+	return buf[:p]
+}
+
 type IOSSessionStart struct {
 	*meta
 	Timestamp uint64
diff --git a/backend/pkg/messages/read_message.go b/backend/pkg/messages/read_message.go
index 5803e0051..89624b2a5 100644
--- a/backend/pkg/messages/read_message.go
+++ b/backend/pkg/messages/read_message.go
@@ -532,6 +532,13 @@ if msg.Selector, err = ReadString(reader); err != nil { return nil, err }
 if msg.ID, err = ReadUint(reader); err != nil { return nil, err }
 		return msg, nil
 
+	case 107:
+		msg := &IOSBatchMeta{ meta: &meta{ TypeID: 107} }
+		if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err }
+if msg.Length, err = ReadUint(reader); err != nil { return nil, err }
+if msg.FirstIndex, err = ReadUint(reader); err != nil { return nil, err }
+		return msg, nil
+
 	case 90:
 		msg := &IOSSessionStart{ meta: &meta{ TypeID: 90} }
 		if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err }
diff --git a/backend/services/http/main.go b/backend/services/http/main.go
index cac5a2842..19d518999 100644
--- a/backend/services/http/main.go
+++ b/backend/services/http/main.go
@@ -52,7 +52,7 @@ func main() {
 	rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN"))
 	pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20)
 	defer pgconn.Close()
-	//s3 = storage.NewS3(env.String("S3_BUCKET_IMAGES_IOS"), env.String("AWS_REGION"))
+	s3 = storage.NewS3(env.String("S3_BUCKET_IOS_IMAGES"), env.String("AWS_REGION"))
 	tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET"))
 	uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE"))
 	geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE"))
From 9d9ba10af81c8cf6423fda5555f07d62752b07b9 Mon Sep 17 00:00:00 2001
From: ShiKhu
Date: Tue, 9 Nov 2021 18:40:16 +0100
Subject: [PATCH 097/218] dev(backend): log ios s3

---
 backend/services/http/handlers_ios.go | 1 +
 backend/services/http/main.go         | 1 +
 2 files changed, 2 insertions(+)

diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go
index fd30eb5c2..9cfa233a7 100644
--- a/backend/services/http/handlers_ios.go
+++ b/backend/services/http/handlers_ios.go
@@ -172,6 +172,7 @@ func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) {
 			continue // TODO: send server error or accumulate successful files
 		}
 		key := prefix + fileHeader.Filename
+		log.Printf("Uploading ios screen: %v", key)
 		go s3.Upload(file, key, "image/png", false)
 	}
 }
diff --git a/backend/services/http/main.go b/backend/services/http/main.go
index 19d518999..ddd1e66c4 100644
--- a/backend/services/http/main.go
+++ b/backend/services/http/main.go
@@ -53,6 +53,7 @@ func main() {
 	pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20)
 	defer pgconn.Close()
 	s3 = storage.NewS3(env.String("S3_BUCKET_IOS_IMAGES"), env.String("AWS_REGION"))
+	log.Printf("Sr storage: %v, %v ",env.String("S3_BUCKET_IOS_IMAGES"), env.String("AWS_REGION"))
 	tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET"))
 	uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE"))
 	geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE"))
From 197082d049022ac1ca73b97061e89b14615c94a3 Mon Sep 17 00:00:00 2001
From: ShiKhu
Date: Tue, 9 Nov 2021 19:05:44 +0100
Subject: [PATCH 098/218] dev(backend)

---
 backend/services/http/handlers_ios.go | 1 +
 1 file changed, 1 insertion(+)

diff --git a/backend/services/http/handlers_ios.go 
b/backend/services/http/handlers_ios.go index 9cfa233a7..81679e0ef 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -7,6 +7,7 @@ import ( "time" "math/rand" "strconv" + "log" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/token" From 2bae8a357d56c45375529939861c1d1ded109e30 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Tue, 9 Nov 2021 19:39:49 +0100 Subject: [PATCH 099/218] dev(backend): log --- backend/services/http/handlers_ios.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 81679e0ef..3d2bea213 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -168,13 +168,17 @@ func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { for _, fileHeaderList := range r.MultipartForm.File { for _, fileHeader := range fileHeaderList { - file, err := fileHeader.Open() + file, err := fileHeader.Open() //TODO: mime type from header if err != nil { continue // TODO: send server error or accumulate successful files } key := prefix + fileHeader.Filename log.Printf("Uploading ios screen: %v", key) - go s3.Upload(file, key, "image/png", false) + go func() { + if err := s3.Upload(file, key, "image/jpeg", false); err != nil { + log.Printf("Upload ios screen error. %v", err) + } + }() } } From aa6b88ea76423b78a5ca7a3bcb3478f4974222f8 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 13:25:06 +0100 Subject: [PATCH 100/218] fix(backend-http): correct storage init --- backend/services/http/main.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/services/http/main.go b/backend/services/http/main.go index ddd1e66c4..ccd755ed2 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -52,8 +52,7 @@ func main() { rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN")) pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20) defer pgconn.Close() - s3 = storage.NewS3(env.String("S3_BUCKET_IOS_IMAGES"), env.String("AWS_REGION")) - log.Printf("Sr storage: %v, %v ",env.String("S3_BUCKET_IOS_IMAGES"), env.String("AWS_REGION")) + s3 = storage.NewS3(env.String("AWS_REGION"), env.String("S3_BUCKET_IOS_IMAGES")) tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET")) uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE")) geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE")) From bdcc8989f76a440459e8d5c77f034cec2e49af44 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 15:45:38 +0100 Subject: [PATCH 101/218] feat (github workflow): backend service rebuild on pkg dependency change --- .github/workflows/workers.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 37d87bfb6..3b1d21491 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -47,7 +47,15 @@ jobs: # # Getting the images to build # - git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt + + { + git diff --name-only | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 + + git diff --name-only | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do + grep -rl "pkg/$pkg_name" backend/services | cut -d '/' -f3 + done + } | uniq > backend/images_to_build.txt + [[ $(cat backend/images_to_build.txt) != 
"" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry From 3d142bcf67eec802ca10ae4ba509a9beb30c5e6d Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 15:49:07 +0100 Subject: [PATCH 102/218] fix (github-wf-backend): diff on last commit --- .github/workflows/workers.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 3b1d21491..49fd0948b 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -49,9 +49,9 @@ jobs: # { - git diff --name-only | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 + git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do + git diff --name-only HEAD HEAD~1 | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services | cut -d '/' -f3 done } | uniq > backend/images_to_build.txt From f3fad20e6ec6ea98e2f64498cae97f3872f84f46 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 15:52:07 +0100 Subject: [PATCH 103/218] feat(backend): IOSScreen message --- backend/pkg/messages/filters.go | 2 +- backend/pkg/messages/messages.go | 14 +++++++++++--- backend/pkg/messages/read_message.go | 6 +++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index 80525a2cd..c30419819 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -7,5 +7,5 @@ func IsReplayerType(id uint64) bool { } func IsIOSType(id uint64) bool { - return 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id + return 107 == id || 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id } diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 0cf0874eb..3d8bae7f6 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -1321,14 +1321,22 @@ p = WriteString(msg.Value, buf, p) type IOSScreenChanges struct { *meta Timestamp uint64 -SkipData []byte +Length uint64 +X uint64 +Y uint64 +Width uint64 +Height uint64 } func (msg *IOSScreenChanges) Encode() []byte{ - buf := make([]byte, 21 + len(msg.SkipData)) + buf := make([]byte, 61 ) buf[0] = 96 p := 1 p = WriteUint(msg.Timestamp, buf, p) -p = WriteData(msg.SkipData, buf, p) +p = WriteUint(msg.Length, buf, p) +p = WriteUint(msg.X, buf, p) +p = WriteUint(msg.Y, buf, p) +p = WriteUint(msg.Width, buf, p) +p = WriteUint(msg.Height, buf, p) return buf[:p] } diff --git a/backend/pkg/messages/read_message.go b/backend/pkg/messages/read_message.go index 89624b2a5..d0148bbc6 100644 --- a/backend/pkg/messages/read_message.go +++ b/backend/pkg/messages/read_message.go @@ -591,7 +591,11 @@ if msg.Value, err = ReadString(reader); err != nil { return nil, err } case 96: msg := &IOSScreenChanges{ meta: &meta{ TypeID: 96} } if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err } -if msg.SkipData, err = ReadData(reader); err != nil { return nil, err } +if msg.Length, err = ReadUint(reader); err != nil 
{ return nil, err } +if msg.X, err = ReadUint(reader); err != nil { return nil, err } +if msg.Y, err = ReadUint(reader); err != nil { return nil, err } +if msg.Width, err = ReadUint(reader); err != nil { return nil, err } +if msg.Height, err = ReadUint(reader); err != nil { return nil, err } return msg, nil case 97: From 11c86f555d749965604af8bf4335f12fe8132ce9 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 17:04:45 +0100 Subject: [PATCH 104/218] feat(backend): topic redirections --- backend/Dockerfile | 4 +- backend/Dockerfile.bundle | 4 +- backend/services/assets/main.go | 4 +- backend/services/db/main.go | 2 +- backend/services/ender/main.go | 3 +- backend/services/http/assets.go | 2 +- backend/services/http/handlers.go | 255 +------------------------- backend/services/http/handlers_ios.go | 24 ++- backend/services/http/handlers_web.go | 249 +++++++++++++++++++++++++ backend/services/http/main.go | 23 ++- backend/services/integrations/main.go | 2 +- backend/services/sink/main.go | 13 +- 12 files changed, 299 insertions(+), 286 deletions(-) create mode 100644 backend/services/http/handlers_web.go diff --git a/backend/Dockerfile b/backend/Dockerfile index 8353b8f63..6ca305ca1 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -28,11 +28,11 @@ ENV TZ=UTC \ BEACON_SIZE_LIMIT=7000000 \ KAFKA_USE_SSL=true \ REDIS_STREAMS_MAX_LEN=3000 \ - TOPIC_RAW=raw \ + TOPIC_RAW_WEB=raw \ + TOPIC_RAW_IOS=raw-ios \ TOPIC_CACHE=cache \ TOPIC_ANALYTICS=analytics \ TOPIC_TRIGGER=trigger \ - TOPIC_EVENTS=events \ GROUP_SINK=sink \ GROUP_STORAGE=storage \ GROUP_DB=db \ diff --git a/backend/Dockerfile.bundle b/backend/Dockerfile.bundle index 904bb45f4..efbcb2684 100644 --- a/backend/Dockerfile.bundle +++ b/backend/Dockerfile.bundle @@ -29,11 +29,11 @@ ENV TZ=UTC \ BEACON_SIZE_LIMIT=1000000 \ KAFKA_USE_SSL=true \ REDIS_STREAMS_MAX_LEN=3000 \ - TOPIC_RAW=raw \ + TOPIC_RAW_WEB=raw \ + TOPIC_RAW_IOS=raw-ios \ TOPIC_CACHE=cache \ TOPIC_ANALYTICS=analytics \ TOPIC_TRIGGER=trigger \ - TOPIC_EVENTS=events \ GROUP_SINK=sink \ GROUP_STORAGE=storage \ GROUP_DB=db \ diff --git a/backend/services/assets/main.go b/backend/services/assets/main.go index 05c779bbb..34f4558b9 100644 --- a/backend/services/assets/main.go +++ b/backend/services/assets/main.go @@ -20,7 +20,7 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) GROUP_CACHE := env.String("GROUP_CACHE") - TOPIC_TRIGGER := env.String("TOPIC_TRIGGER") + TOPIC_CACHE := env.String("TOPIC_CACHE") cacher := cacher.NewCacher( env.String("AWS_REGION"), @@ -31,7 +31,7 @@ func main() { consumer := queue.NewMessageConsumer( GROUP_CACHE, - []string{ TOPIC_TRIGGER }, + []string{ TOPIC_CACHE }, func(sessionID uint64, message messages.Message, e *types.Meta) { switch msg := message.(type) { case *messages.AssetCache: diff --git a/backend/services/db/main.go b/backend/services/db/main.go index a2cef41b3..c0f48aaff 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -32,7 +32,7 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_DB"), []string{ - //env.String("TOPIC_RAW"), + env.String("TOPIC_IOS_RAW"), env.String("TOPIC_TRIGGER"), }, func(sessionID uint64, msg messages.Message, _ *types.Meta) { diff --git a/backend/services/ender/main.go b/backend/services/ender/main.go index e99c6866c..0fbd1010c 100644 --- a/backend/services/ender/main.go +++ b/backend/services/ender/main.go @@ -30,7 +30,8 @@ func main() { consumer := queue.NewMessageConsumer( GROUP_EVENTS, []string{ - env.String("TOPIC_RAW"), + 
env.String("TOPIC_RAW_WEB"), + env.String("TOPIC_RAW_IOS"), }, func(sessionID uint64, msg messages.Message, meta *types.Meta) { lastTs = meta.Timestamp diff --git a/backend/services/http/assets.go b/backend/services/http/assets.go index 69fb7f53d..cc055087a 100644 --- a/backend/services/http/assets.go +++ b/backend/services/http/assets.go @@ -7,7 +7,7 @@ import ( func sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) { if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable { - producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(&messages.AssetCache{ + producer.Produce(TOPIC_CACHE, sessionID, messages.Encode(&messages.AssetCache{ URL: fullURL, })) } diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index 2ac2852a2..e45e84e64 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -1,126 +1,17 @@ package main import ( - "encoding/json" - "errors" "io" "io/ioutil" "log" - "math/rand" "net/http" - "strconv" - "time" gzip "github.com/klauspost/pgzip" - - "openreplay/backend/pkg/db/postgres" - "openreplay/backend/pkg/token" - . "openreplay/backend/pkg/messages" ) const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb -func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { - type request struct { - Token string `json:"token"` - UserUUID *string `json:"userUUID"` - RevID string `json:"revID"` - Timestamp uint64 `json:"timestamp"` - TrackerVersion string `json:"trackerVersion"` - IsSnippet bool `json:"isSnippet"` - DeviceMemory uint64 `json:"deviceMemory"` - JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"` - ProjectKey *string `json:"projectKey"` - Reset bool `json:"reset"` - } - type response struct { - Timestamp int64 `json:"timestamp"` - Delay int64 `json:"delay"` - Token string `json:"token"` - UserUUID string `json:"userUUID"` - SessionID string `json:"sessionID"` - BeaconSizeLimit int64 `json:"beaconSizeLimit"` - } - - startTime := time.Now() - req := &request{} - body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error? 
- //defer body.Close() - if err := json.NewDecoder(body).Decode(req); err != nil { - responseWithError(w, http.StatusBadRequest, err) - return - } - - if req.ProjectKey == nil { - responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) - return - } - - p, err := pgconn.GetProjectByKey(*req.ProjectKey) - if err != nil { - if postgres.IsNoRowsErr(err) { - responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active")) - } else { - responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging - } - return - } - - userUUID := getUUID(req.UserUUID) - tokenData, err := tokenizer.Parse(req.Token) - if err != nil || req.Reset { // Starting the new one - dice := byte(rand.Intn(100)) // [0, 100) - if dice >= p.SampleRate { - responseWithError(w, http.StatusForbidden, errors.New("cancel")) - return - } - - ua := uaParser.ParseFromHTTPRequest(r) - if ua == nil { - responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) - return - } - sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) - if err != nil { - responseWithError(w, http.StatusInternalServerError, err) - return - } - // TODO: if EXPIRED => send message for two sessions association - expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) - tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} - - country := geoIP.ExtractISOCodeFromHTTPRequest(r) - producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&SessionStart{ - Timestamp: req.Timestamp, - ProjectID: uint64(p.ProjectID), - TrackerVersion: req.TrackerVersion, - RevID: req.RevID, - UserUUID: userUUID, - UserAgent: r.Header.Get("User-Agent"), - UserOS: ua.OS, - UserOSVersion: ua.OSVersion, - UserBrowser: ua.Browser, - UserBrowserVersion: ua.BrowserVersion, - UserDevice: ua.Device, - UserDeviceType: ua.DeviceType, - UserCountry: country, - UserDeviceMemorySize: req.DeviceMemory, - UserDeviceHeapSize: req.JsHeapSizeLimit, - })) - } - - //delayDuration := time.Now().Sub(startTime) - responseWithJSON(w, &response{ - //Timestamp: startTime.UnixNano() / 1e6, - //Delay: delayDuration.Nanoseconds() / 1e6, - Token: tokenizer.Compose(*tokenData), - UserUUID: userUUID, - SessionID: strconv.FormatUint(tokenData.ID, 10), - BeaconSizeLimit: BEACON_SIZE_LIMIT, - }) -} - -func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { +func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) { body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) //defer body.Close() var reader io.ReadCloser @@ -145,148 +36,6 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging return } - producer.Produce(TOPIC_RAW, sessionID, buf) // What if not able to send? 
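// Editor's aside on the "not able to send?" TODO above (illustrative sketch,
// not the project's actual queue API): Produce is fire-and-forget, so the
// client still receives 200 OK even if the broker write later fails. One
// hedged alternative is a synchronous produce with delivery confirmation:
//
//	if err := producer.ProduceSync(topicName, sessionID, buf); err != nil {
//		responseWithError(w, http.StatusServiceUnavailable, err)
//		return
//	}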
- w.WriteHeader(http.StatusOK) -} - -func pushMessagesHandler(w http.ResponseWriter, r *http.Request) { - sessionData, err := tokenizer.ParseFromHTTPRequest(r) - if err != nil { - responseWithError(w, http.StatusUnauthorized, err) - return - } - pushMessages(w, r, sessionData.ID) -} - -func pushMessagesSeparatelyHandler(w http.ResponseWriter, r *http.Request) { - sessionData, err := tokenizer.ParseFromHTTPRequest(r) - if err != nil { - responseWithError(w, http.StatusUnauthorized, err) - return - } - body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) - //defer body.Close() - buf, err := ioutil.ReadAll(body) - if err != nil { - responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging - return - } - //log.Printf("Sending batch...") - //startTime := time.Now() - - // analyticsMessages := make([]Message, 0, 200) - - rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message { - switch m := msg.(type) { - case *SetNodeAttributeURLBased: - if m.Name == "src" || m.Name == "href" { - msg = &SetNodeAttribute{ - ID: m.ID, - Name: m.Name, - Value: handleURL(sessionData.ID, m.BaseURL, m.Value), - } - } else if m.Name == "style" { - msg = &SetNodeAttribute{ - ID: m.ID, - Name: m.Name, - Value: handleCSS(sessionData.ID, m.BaseURL, m.Value), - } - } - case *SetCSSDataURLBased: - msg = &SetCSSData{ - ID: m.ID, - Data: handleCSS(sessionData.ID, m.BaseURL, m.Data), - } - case *CSSInsertRuleURLBased: - msg = &CSSInsertRule{ - ID: m.ID, - Index: m.Index, - Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule), - } - } - - // switch msg.(type) { - // case *BatchMeta, // TODO: watchout! Meta().Index'es are changed here (though it is still unique for the topic-session pair) - // *SetPageLocation, - // *PageLoadTiming, - // *PageRenderTiming, - // *PerformanceTrack, - // *SetInputTarget, - // *SetInputValue, - // *MouseClick, - // *RawErrorEvent, - // *JSException, - // *ResourceTiming, - // *RawCustomEvent, - // *CustomIssue, - // *Fetch, - // *StateAction, - // *GraphQL, - // *CreateElementNode, - // *CreateTextNode, - // *RemoveNode, - // *CreateDocument, - // *RemoveNodeAttribute, - // *MoveNode, - // *SetCSSData, - // *CSSInsertRule, - // *CSSDeleteRule: - // analyticsMessages = append(analyticsMessages, msg) - //} - - return msg - }) - if err != nil { - responseWithError(w, http.StatusForbidden, err) - return - } - producer.Produce(TOPIC_RAW, sessionData.ID, rewritenBuf) - //producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages)) - //duration := time.Now().Sub(startTime) - //log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf))) - w.WriteHeader(http.StatusOK) -} - -func notStartedHandler(w http.ResponseWriter, r *http.Request) { - type request struct { - ProjectKey *string `json:"projectKey"` - TrackerVersion string `json:"trackerVersion"` - DoNotTrack bool `json:"DoNotTrack"` - // RevID string `json:"revID"` - } - req := &request{} - body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) - defer body.Close() - if err := json.NewDecoder(body).Decode(req); err != nil { - responseWithError(w, http.StatusBadRequest, err) - return - } - if req.ProjectKey == nil { - responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) - return - } - ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway - if ua == nil { - responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) - return - } - country := 
geoIP.ExtractISOCodeFromHTTPRequest(r) - err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{ - ProjectKey: *req.ProjectKey, - TrackerVersion: req.TrackerVersion, - DoNotTrack: req.DoNotTrack, - Platform: "web", - UserAgent: r.Header.Get("User-Agent"), - UserOS: ua.OS, - UserOSVersion: ua.OSVersion, - UserBrowser: ua.Browser, - UserBrowserVersion: ua.BrowserVersion, - UserDevice: ua.Device, - UserDeviceType: ua.DeviceType, - UserCountry: country, - }) - if err != nil { - log.Printf("Unable to insert Unstarted Session: %v\n", err) - } + producer.Produce(topicName, sessionID, buf) // What if not able to send? w.WriteHeader(http.StatusOK) } diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 3d2bea213..3bc70e9d3 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -97,7 +97,7 @@ func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { country := geoIP.ExtractISOCodeFromHTTPRequest(r) // The difference with web is mostly here: - producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&IOSSessionStart{ + producer.Produce(TOPIC_RAW_IOS, tokenData.ID, Encode(&IOSSessionStart{ Timestamp: req.Timestamp, ProjectID: uint64(p.ProjectID), TrackerVersion: req.TrackerVersion, @@ -127,18 +127,29 @@ func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { } -func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) { +func pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { + responseWithError(w, http.StatusUnauthorized, err) + return + } + pushMessages(w, r, sessionData.ID, TOPIC_RAW_IOS) +} + + + +func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { sessionData, err := tokenizer.ParseFromHTTPRequest(r) if err != nil && err != token.EXPIRED { responseWithError(w, http.StatusUnauthorized, err) return } // Check timestamps here? - pushMessages(w, r, sessionData.ID) + pushMessages(w, r, sessionData.ID,TOPIC_RAW_IOS) } -func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { +func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { sessionData, err := tokenizer.ParseFromHTTPRequest(r) if err != nil { // Should accept expired token? responseWithError(w, http.StatusUnauthorized, err) @@ -168,13 +179,12 @@ func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) { for _, fileHeaderList := range r.MultipartForm.File { for _, fileHeader := range fileHeaderList { - file, err := fileHeader.Open() //TODO: mime type from header + file, err := fileHeader.Open() if err != nil { continue // TODO: send server error or accumulate successful files } key := prefix + fileHeader.Filename - log.Printf("Uploading ios screen: %v", key) - go func() { + go func() { //TODO: mime type from header if err := s3.Upload(file, key, "image/jpeg", false); err != nil { log.Printf("Upload ios screen error. %v", err) } diff --git a/backend/services/http/handlers_web.go b/backend/services/http/handlers_web.go new file mode 100644 index 000000000..5e144f1cc --- /dev/null +++ b/backend/services/http/handlers_web.go @@ -0,0 +1,249 @@ +package main + +import ( + "encoding/json" + "errors" + "io/ioutil" + "log" + "math/rand" + "net/http" + "strconv" + "time" + + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/token" + . 
"openreplay/backend/pkg/messages" +) + +func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { + type request struct { + Token string `json:"token"` + UserUUID *string `json:"userUUID"` + RevID string `json:"revID"` + Timestamp uint64 `json:"timestamp"` + TrackerVersion string `json:"trackerVersion"` + IsSnippet bool `json:"isSnippet"` + DeviceMemory uint64 `json:"deviceMemory"` + JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"` + ProjectKey *string `json:"projectKey"` + Reset bool `json:"reset"` + } + type response struct { + Timestamp int64 `json:"timestamp"` + Delay int64 `json:"delay"` + Token string `json:"token"` + UserUUID string `json:"userUUID"` + SessionID string `json:"sessionID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` + } + + startTime := time.Now() + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error? + //defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } + + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + + p, err := pgconn.GetProjectByKey(*req.ProjectKey) + if err != nil { + if postgres.IsNoRowsErr(err) { + responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or capture limit has been reached")) + } else { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + } + return + } + + userUUID := getUUID(req.UserUUID) + tokenData, err := tokenizer.Parse(req.Token) + if err != nil || req.Reset { // Starting the new one + dice := byte(rand.Intn(100)) // [0, 100) + if dice >= p.SampleRate { + responseWithError(w, http.StatusForbidden, errors.New("cancel")) + return + } + + ua := uaParser.ParseFromHTTPRequest(r) + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + return + } + sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) + return + } + // TODO: if EXPIRED => send message for two sessions association + expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) + tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} + + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + producer.Produce(TOPIC_RAW_WEB, tokenData.ID, Encode(&SessionStart{ + Timestamp: req.Timestamp, + ProjectID: uint64(p.ProjectID), + TrackerVersion: req.TrackerVersion, + RevID: req.RevID, + UserUUID: userUUID, + UserAgent: r.Header.Get("User-Agent"), + UserOS: ua.OS, + UserOSVersion: ua.OSVersion, + UserBrowser: ua.Browser, + UserBrowserVersion: ua.BrowserVersion, + UserDevice: ua.Device, + UserDeviceType: ua.DeviceType, + UserCountry: country, + UserDeviceMemorySize: req.DeviceMemory, + UserDeviceHeapSize: req.JsHeapSizeLimit, + })) + } + + //delayDuration := time.Now().Sub(startTime) + responseWithJSON(w, &response{ + //Timestamp: startTime.UnixNano() / 1e6, + //Delay: delayDuration.Nanoseconds() / 1e6, + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), + BeaconSizeLimit: BEACON_SIZE_LIMIT, + }) +} + +func pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) { + sessionData, err := tokenizer.ParseFromHTTPRequest(r) + if err != nil { + responseWithError(w, http.StatusUnauthorized, err) + return + } + body := 
http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) + //defer body.Close() + buf, err := ioutil.ReadAll(body) + if err != nil { + responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + return + } + //log.Printf("Sending batch...") + //startTime := time.Now() + + // analyticsMessages := make([]Message, 0, 200) + + rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message { + switch m := msg.(type) { + case *SetNodeAttributeURLBased: + if m.Name == "src" || m.Name == "href" { + msg = &SetNodeAttribute{ + ID: m.ID, + Name: m.Name, + Value: handleURL(sessionData.ID, m.BaseURL, m.Value), + } + } else if m.Name == "style" { + msg = &SetNodeAttribute{ + ID: m.ID, + Name: m.Name, + Value: handleCSS(sessionData.ID, m.BaseURL, m.Value), + } + } + case *SetCSSDataURLBased: + msg = &SetCSSData{ + ID: m.ID, + Data: handleCSS(sessionData.ID, m.BaseURL, m.Data), + } + case *CSSInsertRuleURLBased: + msg = &CSSInsertRule{ + ID: m.ID, + Index: m.Index, + Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule), + } + } + + // switch msg.(type) { + // case *BatchMeta, // TODO: watchout! Meta().Index'es are changed here (though it is still unique for the topic-session pair) + // *SetPageLocation, + // *PageLoadTiming, + // *PageRenderTiming, + // *PerformanceTrack, + // *SetInputTarget, + // *SetInputValue, + // *MouseClick, + // *RawErrorEvent, + // *JSException, + // *ResourceTiming, + // *RawCustomEvent, + // *CustomIssue, + // *Fetch, + // *StateAction, + // *GraphQL, + // *CreateElementNode, + // *CreateTextNode, + // *RemoveNode, + // *CreateDocument, + // *RemoveNodeAttribute, + // *MoveNode, + // *SetCSSData, + // *CSSInsertRule, + // *CSSDeleteRule: + // analyticsMessages = append(analyticsMessages, msg) + //} + + return msg + }) + if err != nil { + responseWithError(w, http.StatusForbidden, err) + return + } + producer.Produce(TOPIC_RAW_WEB, sessionData.ID, rewritenBuf) + //producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages)) + //duration := time.Now().Sub(startTime) + //log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf))) + w.WriteHeader(http.StatusOK) +} + +func notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { + type request struct { + ProjectKey *string `json:"projectKey"` + TrackerVersion string `json:"trackerVersion"` + DoNotTrack bool `json:"DoNotTrack"` + // RevID string `json:"revID"` + } + req := &request{} + body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) + defer body.Close() + if err := json.NewDecoder(body).Decode(req); err != nil { + responseWithError(w, http.StatusBadRequest, err) + return + } + if req.ProjectKey == nil { + responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + return + } + ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway + if ua == nil { + responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + return + } + country := geoIP.ExtractISOCodeFromHTTPRequest(r) + err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{ + ProjectKey: *req.ProjectKey, + TrackerVersion: req.TrackerVersion, + DoNotTrack: req.DoNotTrack, + Platform: "web", + UserAgent: r.Header.Get("User-Agent"), + UserOS: ua.OS, + UserOSVersion: ua.OSVersion, + UserBrowser: ua.Browser, + UserBrowserVersion: ua.BrowserVersion, + UserDevice: ua.Device, + UserDeviceType: ua.DeviceType, + UserCountry: country, + }) + if err != nil { + log.Printf("Unable to insert Unstarted 
Session: %v\n", err) + } + w.WriteHeader(http.StatusOK) +} \ No newline at end of file diff --git a/backend/services/http/main.go b/backend/services/http/main.go index ccd755ed2..9d82139d5 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -34,10 +34,11 @@ var geoIP *geoip.GeoIP var tokenizer *token.Tokenizer var s3 *storage.S3 -var TOPIC_RAW string +var TOPIC_RAW_WEB string +var TOPIC_RAW_IOS string +var TOPIC_CACHE string var TOPIC_TRIGGER string -var TOPIC_ANALYTICS string -// var kafkaTopicEvents string +//var TOPIC_ANALYTICS string var CACHE_ASSESTS bool var BEACON_SIZE_LIMIT int64 @@ -46,9 +47,11 @@ func main() { producer = queue.NewProducer() defer producer.Close(15000) - TOPIC_RAW = env.String("TOPIC_RAW") + TOPIC_RAW_WEB = env.String("TOPIC_RAW_WEB") + TOPIC_RAW_IOS = env.String("TOPIC_RAW_IOS") + TOPIC_CACHE = env.String("TOPIC_CACHE") TOPIC_TRIGGER = env.String("TOPIC_TRIGGER") - TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS") + //TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS") rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN")) pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20) defer pgconn.Close() @@ -85,7 +88,7 @@ func main() { case "/v1/web/not-started": switch r.Method { case http.MethodPost: - notStartedHandler(w, r) + notStartedHandlerWeb(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } @@ -99,7 +102,7 @@ func main() { case "/v1/web/i": switch r.Method { case http.MethodPost: - pushMessagesSeparatelyHandler(w, r) + pushMessagesHandlerWeb(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } @@ -113,21 +116,21 @@ func main() { case "/v1/ios/i": switch r.Method { case http.MethodPost: - pushMessagesHandler(w, r) + pushMessagesHandlerIOS(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } case "/v1/ios/late": switch r.Method { case http.MethodPost: - pushLateMessagesHandler(w, r) + pushLateMessagesHandlerIOS(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } case "/v1/ios/images": switch r.Method { case http.MethodPost: - iosImagesUploadHandler(w, r) + imagesUploadHandlerIOS(w, r) default: w.WriteHeader(http.StatusMethodNotAllowed) } diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go index e1ea58ebd..e535dd6cc 100644 --- a/backend/services/integrations/main.go +++ b/backend/services/integrations/main.go @@ -19,7 +19,7 @@ import ( func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - TOPIC_RAW := env.String("TOPIC_RAW") + TOPIC_RAW := env.String("TOPIC_RAW_WEB") POSTGRES_STRING := env.String("POSTGRES_STRING") pg := postgres.NewConn(POSTGRES_STRING) diff --git a/backend/services/sink/main.go b/backend/services/sink/main.go index 4a6ac189d..8d3e5ab02 100644 --- a/backend/services/sink/main.go +++ b/backend/services/sink/main.go @@ -10,9 +10,9 @@ import ( "syscall" "openreplay/backend/pkg/env" - "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" + . 
"openreplay/backend/pkg/messages" ) @@ -27,16 +27,17 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_SINK"), []string{ - env.String("TOPIC_RAW"), + env.String("TOPIC_RAW_WEB"), + env.String("TOPIC_RAW_IOS") }, - func(sessionID uint64, message messages.Message, _ *types.Meta) { - //typeID, err := messages.GetMessageTypeID(value) + func(sessionID uint64, message Message, _ *types.Meta) { + //typeID, err := GetMessageTypeID(value) // if err != nil { // log.Printf("Message type decoding error: %v", err) // return // } typeID := message.Meta().TypeID - if !messages.IsReplayerType(typeID) { + if !IsReplayerType(typeID) { return } @@ -44,7 +45,7 @@ func main() { value := message.Encode() var data []byte - if messages.IsIOSType(typeID) { + if IsIOSType(typeID) { data = value } else { data = make([]byte, len(value)+8) From 08db513ef42c07b9e6923f50c964800914c23d81 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 17:27:43 +0100 Subject: [PATCH 105/218] fix(backend): typo fix+image upload limit --- backend/services/http/handlers_ios.go | 2 +- backend/services/integrations/main.go | 4 ++-- backend/services/sink/main.go | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go index 3bc70e9d3..f047c02cf 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers_ios.go @@ -14,7 +14,7 @@ import ( . "openreplay/backend/pkg/messages" ) -const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb +const FILES_SIZE_LIMIT int64 = 1e7 // 10Mb func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { type request struct { diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go index e535dd6cc..f664fe862 100644 --- a/backend/services/integrations/main.go +++ b/backend/services/integrations/main.go @@ -19,7 +19,7 @@ import ( func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - TOPIC_RAW := env.String("TOPIC_RAW_WEB") + TOPIC_RAW_WEB := env.String("TOPIC_RAW_WEB") POSTGRES_STRING := env.String("POSTGRES_STRING") pg := postgres.NewConn(POSTGRES_STRING) @@ -80,7 +80,7 @@ func main() { sessionID = sessData.ID } // TODO: send to ready-events topic. Otherwise it have to go through the events worker. 
- producer.Produce(TOPIC_RAW, sessionID, messages.Encode(event.RawErrorEvent)) + producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent)) case err := <-manager.Errors: log.Printf("Integration error: %v\n", err) case i := <-manager.RequestDataUpdates: diff --git a/backend/services/sink/main.go b/backend/services/sink/main.go index 8d3e5ab02..b1bdf8d25 100644 --- a/backend/services/sink/main.go +++ b/backend/services/sink/main.go @@ -28,7 +28,7 @@ func main() { env.String("GROUP_SINK"), []string{ env.String("TOPIC_RAW_WEB"), - env.String("TOPIC_RAW_IOS") + env.String("TOPIC_RAW_IOS"), }, func(sessionID uint64, message Message, _ *types.Meta) { //typeID, err := GetMessageTypeID(value) From 9c6d895ceebac3aa4371fcc489f4a6b6247d3a68 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 17:30:43 +0100 Subject: [PATCH 106/218] dev(backend): service force update --- backend/services/assets/main.go | 1 - backend/services/db/main.go | 1 - backend/services/ender/main.go | 1 - 3 files changed, 3 deletions(-) diff --git a/backend/services/assets/main.go b/backend/services/assets/main.go index 34f4558b9..450dfc83c 100644 --- a/backend/services/assets/main.go +++ b/backend/services/assets/main.go @@ -15,7 +15,6 @@ import ( "openreplay/backend/services/assets/cacher" ) - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) diff --git a/backend/services/db/main.go b/backend/services/db/main.go index c0f48aaff..49bb91938 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -17,7 +17,6 @@ import ( "openreplay/backend/services/db/heuristics" ) - var pg *cache.PGCache func main() { diff --git a/backend/services/ender/main.go b/backend/services/ender/main.go index 0fbd1010c..9c62d14b0 100644 --- a/backend/services/ender/main.go +++ b/backend/services/ender/main.go @@ -16,7 +16,6 @@ import ( "openreplay/backend/services/ender/builder" ) - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) From 4551b7448ed04d2ca3e296d2802f611a22b1a725 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 18:17:40 +0100 Subject: [PATCH 107/218] fix(backend-http): topic name --- backend/services/db/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/services/db/main.go b/backend/services/db/main.go index 49bb91938..d16512054 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -31,7 +31,7 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_DB"), []string{ - env.String("TOPIC_IOS_RAW"), + env.String("TOPIC_RAW_IOS"), env.String("TOPIC_TRIGGER"), }, func(sessionID uint64, msg messages.Message, _ *types.Meta) { From 67e1c6559188e4c57e68d7d9dc9f8097027ad8fc Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 18:47:53 +0100 Subject: [PATCH 108/218] fix(backend-db): IOS device type --- backend/pkg/db/cache/messages_ios.go | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/pkg/db/cache/messages_ios.go b/backend/pkg/db/cache/messages_ios.go index f630de53d..0cced5472 100644 --- a/backend/pkg/db/cache/messages_ios.go +++ b/backend/pkg/db/cache/messages_ios.go @@ -22,6 +22,7 @@ func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) er UserOSVersion: s.UserOSVersion, UserDevice: s.UserDevice, UserCountry: s.UserCountry, + UserDeviceType: s.UserDeviceType, } if err := c.Conn.InsertSessionStart(sessionID, c.sessions[ sessionID ]); err != nil { c.sessions[ sessionID ] = nil From 924e300e64bdf5f2e05a02414e1d72acd4d2d4f9 Mon Sep 17 00:00:00 
2001 From: ShiKhu Date: Wed, 10 Nov 2021 19:23:28 +0100 Subject: [PATCH 109/218] feat(backend-db): detailed log --- backend/services/db/main.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/services/db/main.go b/backend/services/db/main.go index d16512054..a14aa7648 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -37,7 +37,7 @@ func main() { func(sessionID uint64, msg messages.Message, _ *types.Meta) { if err := insertMessage(sessionID, msg); err != nil { if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, Message %v", err, msg) + log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err,sessionID, msg) } return } @@ -45,13 +45,13 @@ func main() { session, err := pg.GetSession(sessionID) if err != nil { // Might happen due to the assets-related message TODO: log only if session is necessary for this kind of message - log.Printf("Error on session retrieving from cache: %v, Message %v, sessionID %v", err, msg, sessionID) + log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg) return; } err = insertStats(session, msg) if err != nil { - log.Printf("Stats Insertion Error %v; Session:%v, Message: %v", err, session, msg) + log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg) } heurFinder.HandleMessage(session, msg) @@ -59,14 +59,14 @@ func main() { // TODO: DRY code (carefully with the return statement logic) if err := insertMessage(sessionID, msg); err != nil { if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, Message %v", err, msg) + log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg) } return } err = insertStats(session, msg) if err != nil { - log.Printf("Stats Insertion Error %v", err) + log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg) } }) }, From 055ca2918c34260accb4a35f102976cc4970c12e Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 10 Nov 2021 20:53:25 +0100 Subject: [PATCH 110/218] fix(backend-ender): getTimestamp hack for ios messages --- backend/pkg/messages/get_timestamp.go | 65 +++++++++++++++++++++++ backend/services/ender/builder/builder.go | 6 +-- 2 files changed, 68 insertions(+), 3 deletions(-) create mode 100644 backend/pkg/messages/get_timestamp.go diff --git a/backend/pkg/messages/get_timestamp.go b/backend/pkg/messages/get_timestamp.go new file mode 100644 index 000000000..c8e42f756 --- /dev/null +++ b/backend/pkg/messages/get_timestamp.go @@ -0,0 +1,65 @@ +// Auto-generated, do not edit +package messages + + +func GetTimestamp(message Message) uint64 { + switch msg := message.(type) { + + case *IOSBatchMeta: + return msg.Timestamp + + case *IOSSessionStart: + return msg.Timestamp + + case *IOSSessionEnd: + return msg.Timestamp + + case *IOSMetadata: + return msg.Timestamp + + case *IOSCustomEvent: + return msg.Timestamp + + case *IOSUserID: + return msg.Timestamp + + case *IOSUserAnonymousID: + return msg.Timestamp + + case *IOSScreenChanges: + return msg.Timestamp + + case *IOSCrash: + return msg.Timestamp + + case *IOSScreenEnter: + return msg.Timestamp + + case *IOSScreenLeave: + return msg.Timestamp + + case *IOSClickEvent: + return msg.Timestamp + + case *IOSInputEvent: + return msg.Timestamp + + case *IOSPerformanceEvent: + return msg.Timestamp + + case *IOSLog: + return msg.Timestamp + + case *IOSInternalError: + return msg.Timestamp + + case *IOSNetworkCall: + return msg.Timestamp + 
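// Editor's note (annotation, not part of the generated file): every IOS*
// message embeds its own uint64 Timestamp, while web messages only carry time
// at the batch level via Meta(); this switch bridges the two, falling back to
// uint64(message.Meta().Timestamp) below, so callers such as the ender's
// builder can order mixed web/iOS streams with a single call:
//
//	timestamp := GetTimestamp(message)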
+ case *IOSIssueEvent: + return msg.Timestamp + + } + return uint64(message.Meta().Timestamp) +} + diff --git a/backend/services/ender/builder/builder.go b/backend/services/ender/builder/builder.go index f72737894..f17116501 100644 --- a/backend/services/ender/builder/builder.go +++ b/backend/services/ender/builder/builder.go @@ -108,11 +108,11 @@ func (b *builder) buildInputEvent() { } func (b *builder) handleMessage(message Message, messageID uint64) { - timestamp := uint64(message.Meta().Timestamp) - if b.timestamp <= timestamp { // unnecessary. TODO: test and remove + timestamp := GetTimestamp(message) + if b.timestamp <= timestamp { // unnecessary? TODO: test and remove b.timestamp = timestamp } - // Before the first timestamp. + // Might happen before the first timestamp. switch msg := message.(type) { case *SessionStart, *Metadata, From 64868f7396408ed44f3e82c4b811363aa1bc1f02 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 12 Nov 2021 10:06:58 +0100 Subject: [PATCH 111/218] feat(api): removed ios socket --- api/chalicelib/core/sessions.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 31d88ebf2..aa1ab3d58 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -79,10 +79,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) - data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id, - device=data["userDevice"], - os_version=data["userOsVersion"], - mob_url=data["mobsUrl"]) else: data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, group_clickrage=True) From 151c26d8cd33ab5417807fbe6660f93c6a0d80e1 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 10:22:06 +0100 Subject: [PATCH 112/218] feat(frontend): ios messages decoding --- .../app/player/MessageDistributor/messages.ts | 150 +++++++++++++++++- 1 file changed, 146 insertions(+), 4 deletions(-) diff --git a/frontend/app/player/MessageDistributor/messages.ts b/frontend/app/player/MessageDistributor/messages.ts index 6d9f3244c..26af00575 100644 --- a/frontend/app/player/MessageDistributor/messages.ts +++ b/frontend/app/player/MessageDistributor/messages.ts @@ -5,6 +5,7 @@ import PrimitiveReader from './PrimitiveReader'; export const ID_TP_MAP = { 0: "timestamp", + 2: "session_disconnect", 4: "set_page_location", 5: "set_viewport_size", 6: "set_viewport_scroll", @@ -38,6 +39,12 @@ export const ID_TP_MAP = { 59: "long_task", 69: "mouse_click", 70: "create_i_frame_document", + 90: "ios_session_start", + 93: "ios_custom_event", + 100: "ios_click_event", + 102: "ios_performance_event", + 103: "ios_log", + 105: "ios_network_call", } as const; @@ -46,6 +53,11 @@ export interface Timestamp { timestamp: number, } +export interface SessionDisconnect { + tp: "session_disconnect", + timestamp: number, +} + export interface SetPageLocation { tp: "set_page_location", url: string, @@ -271,12 +283,71 @@ export interface CreateIFrameDocument { id: number, } +export interface IosSessionStart { + tp: "ios_session_start", + timestamp: number, + projectID: number, + trackerVersion: string, + revID: string, + userUUID: string, + userOS: string, + userOSVersion: string, + userDevice: string, + userDeviceType: string, + userCountry: string, +} -export type Message = 
Timestamp | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument; +export interface IosCustomEvent { + tp: "ios_custom_event", + timestamp: number, + length: number, + name: string, + payload: string, +} + +export interface IosClickEvent { + tp: "ios_click_event", + timestamp: number, + length: number, + label: string, + x: number, + y: number, +} + +export interface IosPerformanceEvent { + tp: "ios_performance_event", + timestamp: number, + length: number, + name: string, + value: number, +} + +export interface IosLog { + tp: "ios_log", + timestamp: number, + length: number, + severity: string, + content: string, +} + +export interface IosNetworkCall { + tp: "ios_network_call", + timestamp: number, + length: number, + duration: number, + headers: string, + body: string, + url: string, + success: boolean, + method: string, + status: number, +} + + +export type Message = Timestamp | SessionDisconnect | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument | IosSessionStart | IosCustomEvent | IosClickEvent | IosPerformanceEvent | IosLog | IosNetworkCall; export default function (r: PrimitiveReader): Message | null { - const ui= r.readUint() - switch (ui) { + switch (r.readUint()) { case 0: return { @@ -284,6 +355,12 @@ export default function (r: PrimitiveReader): Message | null { timestamp: r.readUint(), }; + case 2: + return { + tp: ID_TP_MAP[2], + timestamp: r.readUint(), + }; + case 4: return { tp: ID_TP_MAP[4], @@ -542,8 +619,73 @@ export default function (r: PrimitiveReader): Message | null { id: r.readUint(), }; + case 90: + return { + tp: ID_TP_MAP[90], + timestamp: r.readUint(), + projectID: r.readUint(), + trackerVersion: r.readString(), + revID: r.readString(), + userUUID: r.readString(), + userOS: r.readString(), + userOSVersion: r.readString(), + userDevice: r.readString(), + userDeviceType: r.readString(), + userCountry: r.readString(), + }; + + case 93: + return { + tp: ID_TP_MAP[93], + timestamp: r.readUint(), + length: r.readUint(), + name: r.readString(), + payload: r.readString(), + }; + + case 100: + return { + tp: ID_TP_MAP[100], + timestamp: r.readUint(), + length: r.readUint(), + label: r.readString(), + x: r.readUint(), + y: r.readUint(), + }; + + case 102: + return { + tp: ID_TP_MAP[102], + timestamp: r.readUint(), + length: r.readUint(), + name: r.readString(), + value: r.readUint(), + }; + + case 103: + return { + tp: ID_TP_MAP[103], + timestamp: r.readUint(), + length: r.readUint(), + severity: r.readString(), + content: r.readString(), + }; + + case 105: + return { + tp: ID_TP_MAP[105], + timestamp: r.readUint(), + length: r.readUint(), + duration: r.readUint(), + headers: r.readString(), + body: r.readString(), + url: 
r.readString(), + success: r.readBoolean(), + method: r.readString(), + status: r.readUint(), + }; + default: - console.log("wtf is this", ui) r.readUint(); // IOS skip timestamp r.skip(r.readUint()); return null; From 394ca8fbbc08049217abd3b9d5fec4edaa6ef79d Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 10:37:52 +0100 Subject: [PATCH 113/218] feat(frontend): allow ios_screen_change in replayer --- backend/pkg/messages/filters.go | 2 +- .../app/player/MessageDistributor/messages.ts | 24 ++++++++++++++++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index c30419819..f43f40142 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -3,7 +3,7 @@ package messages func IsReplayerType(id uint64) bool { - return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 100 == id || 102 == id || 103 == id || 105 == id + return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id } func IsIOSType(id uint64) bool { diff --git a/frontend/app/player/MessageDistributor/messages.ts b/frontend/app/player/MessageDistributor/messages.ts index 26af00575..42c0e7ff4 100644 --- a/frontend/app/player/MessageDistributor/messages.ts +++ b/frontend/app/player/MessageDistributor/messages.ts @@ -41,6 +41,7 @@ export const ID_TP_MAP = { 70: "create_i_frame_document", 90: "ios_session_start", 93: "ios_custom_event", + 96: "ios_screen_changes", 100: "ios_click_event", 102: "ios_performance_event", 103: "ios_log", @@ -305,6 +306,16 @@ export interface IosCustomEvent { payload: string, } +export interface IosScreenChanges { + tp: "ios_screen_changes", + timestamp: number, + length: number, + x: number, + y: number, + width: number, + height: number, +} + export interface IosClickEvent { tp: "ios_click_event", timestamp: number, @@ -344,7 +355,7 @@ export interface IosNetworkCall { } -export type Message = Timestamp | SessionDisconnect | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument | IosSessionStart | IosCustomEvent | IosClickEvent | IosPerformanceEvent | IosLog | IosNetworkCall; +export type Message = Timestamp | SessionDisconnect | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | 
RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument | IosSessionStart | IosCustomEvent | IosScreenChanges | IosClickEvent | IosPerformanceEvent | IosLog | IosNetworkCall; export default function (r: PrimitiveReader): Message | null { switch (r.readUint()) { @@ -643,6 +654,17 @@ export default function (r: PrimitiveReader): Message | null { payload: r.readString(), }; + case 96: + return { + tp: ID_TP_MAP[96], + timestamp: r.readUint(), + length: r.readUint(), + x: r.readUint(), + y: r.readUint(), + width: r.readUint(), + height: r.readUint(), + }; + case 100: return { tp: ID_TP_MAP[100], From a5af6eb0dd983d6bef1548615521dff673b21eac Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 10:49:45 +0100 Subject: [PATCH 114/218] feat(frontend): error on wrong message type --- frontend/app/player/MessageDistributor/messages.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/player/MessageDistributor/messages.ts b/frontend/app/player/MessageDistributor/messages.ts index 42c0e7ff4..bb391b9f4 100644 --- a/frontend/app/player/MessageDistributor/messages.ts +++ b/frontend/app/player/MessageDistributor/messages.ts @@ -358,7 +358,8 @@ export interface IosNetworkCall { export type Message = Timestamp | SessionDisconnect | SetPageLocation | SetViewportSize | SetViewportScroll | CreateDocument | CreateElementNode | CreateTextNode | MoveNode | RemoveNode | SetNodeAttribute | RemoveNodeAttribute | SetNodeData | SetCssData | SetNodeScroll | SetInputValue | SetInputChecked | MouseMove | ConsoleLog | CssInsertRule | CssDeleteRule | Fetch | Profiler | OTable | Redux | Vuex | MobX | NgRx | GraphQl | PerformanceTrack | ConnectionInformation | SetPageVisibility | LongTask | MouseClick | CreateIFrameDocument | IosSessionStart | IosCustomEvent | IosScreenChanges | IosClickEvent | IosPerformanceEvent | IosLog | IosNetworkCall; export default function (r: PrimitiveReader): Message | null { - switch (r.readUint()) { + const tp = r.readUint() + switch (tp) { case 0: return { @@ -708,8 +709,7 @@ export default function (r: PrimitiveReader): Message | null { }; default: - r.readUint(); // IOS skip timestamp - r.skip(r.readUint()); + throw new Error(`Unrecognizable message type: ${ tp }`) return null; } } From 0b812683d715c48d59a8514b82973ee2bdeb3e14 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 11:17:11 +0100 Subject: [PATCH 115/218] fix(frontend): IOS Parser --- .../MessageDistributor/PrimitiveReader.ts | 4 +++ frontend/app/player/ios/Parser.ts | 29 +++++++++++++++++ frontend/app/player/ios/parser.js | 31 ------------------- 3 files changed, 33 insertions(+), 31 deletions(-) create mode 100644 frontend/app/player/ios/Parser.ts delete mode 100644 frontend/app/player/ios/parser.js diff --git a/frontend/app/player/MessageDistributor/PrimitiveReader.ts b/frontend/app/player/MessageDistributor/PrimitiveReader.ts index 6ee5ade4e..7dc8eb023 100644 --- a/frontend/app/player/MessageDistributor/PrimitiveReader.ts +++ b/frontend/app/player/MessageDistributor/PrimitiveReader.ts @@ -1,6 +1,10 @@ export default class PrimitiveReader { protected p = 0 constructor(protected readonly buf: Uint8Array) {} + + hasNext() { + return this.buf.length < this.buf.p + } readUint() { 
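     // A note on what this method is assumed to implement: mob files encode
     // unsigned ints as little-endian base-128 varints, where each byte
     // carries 7 payload bits and the high bit marks continuation. For
     // example, the bytes [0xAC, 0x02] decode to (0xAC & 0x7F) + 0x02 * 128
     // = 44 + 256 = 300, which is what the r/s accumulator loop below
     // computes.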
var r = 0, s = 1, b; diff --git a/frontend/app/player/ios/Parser.ts b/frontend/app/player/ios/Parser.ts new file mode 100644 index 000000000..7791398f6 --- /dev/null +++ b/frontend/app/player/ios/Parser.ts @@ -0,0 +1,29 @@ +import readMessage from '../MessageDistributor/messages'; +import PrimitiveReader from '../MessageDistributor/PrimitiveReader'; + + +export default class Parser { + private reader: PrimitiveReader + private error = null + constructor(byteArray) { + this.reader = new PrimitiveReader(byteArray) + } + + parseEach(cb) { + while (this.hasNext()) { + const msg = this.parseNext(); + if (msg !== null) { + cb(msg); + } + } + } + + hasNext() { + return !this.error && this.reader.hasNext(); + } + + parseNext() { + return readMessage(this.reader); + } + +} \ No newline at end of file diff --git a/frontend/app/player/ios/parser.js b/frontend/app/player/ios/parser.js deleted file mode 100644 index 83ddaec50..000000000 --- a/frontend/app/player/ios/parser.js +++ /dev/null @@ -1,31 +0,0 @@ -import readMessage from '../MessageDistributor/messages'; - - -export default class Parser { - _p = 0 - _data - _error = null - constructor(byteArray) { - this._data = byteArray; - } - - parseEach(cb) { - while (this.hasNext()) { - const msg = this.parseNext(); - if (msg !== null) { - cb(msg); - } - } - } - - hasNext() { - return !this._error && this._data.length > this._p; - } - - parseNext() { - let msg; - [ msg, this._p ] = readMessage(this._data, this._p); - return msg - } - -} \ No newline at end of file From d6248c1e92ec0bad9805925a00cb90747211aa6b Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 11:18:24 +0100 Subject: [PATCH 116/218] fix(frontend): IOS Parser(2) --- frontend/app/player/ios/Parser.ts | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/frontend/app/player/ios/Parser.ts b/frontend/app/player/ios/Parser.ts index 7791398f6..f202e9306 100644 --- a/frontend/app/player/ios/Parser.ts +++ b/frontend/app/player/ios/Parser.ts @@ -4,14 +4,14 @@ import PrimitiveReader from '../MessageDistributor/PrimitiveReader'; export default class Parser { private reader: PrimitiveReader - private error = null + private error: boolean = false constructor(byteArray) { this.reader = new PrimitiveReader(byteArray) } parseEach(cb) { while (this.hasNext()) { - const msg = this.parseNext(); + const msg = this.next(); if (msg !== null) { cb(msg); } @@ -22,8 +22,14 @@ export default class Parser { return !this.error && this.reader.hasNext(); } - parseNext() { - return readMessage(this.reader); + next() { + try { + return readMessage(this.reader) + } catch(e) { + console.warn(e) + this.error = true + return null + } } } \ No newline at end of file From e602b646c2c8e589af57b3cf0eef1d796d3a09d2 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 15 Nov 2021 13:02:31 +0100 Subject: [PATCH 117/218] fix(frontend): IOS Parser(3) --- frontend/app/player/MessageDistributor/PrimitiveReader.ts | 2 +- frontend/app/player/ios/ImagePlayer.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/player/MessageDistributor/PrimitiveReader.ts b/frontend/app/player/MessageDistributor/PrimitiveReader.ts index 7dc8eb023..7842212f9 100644 --- a/frontend/app/player/MessageDistributor/PrimitiveReader.ts +++ b/frontend/app/player/MessageDistributor/PrimitiveReader.ts @@ -3,7 +3,7 @@ export default class PrimitiveReader { constructor(protected readonly buf: Uint8Array) {} hasNext() { - return this.buf.length < this.buf.p + return this.buf.length < this.p } readUint() { 
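
Patches 115 through 117 replace the old index-threading parser with this reader-backed one. A minimal consumption sketch follows; the import path, fetch URL and helper name are illustrative rather than taken from the repo, with Parser and the message shapes as defined above:

import Parser from 'App/player/ios/Parser';

// Hypothetical helper: fetch a recorded iOS mob file and replay its messages.
async function replayMob(url: string): Promise<void> {
  const res = await fetch(url);
  const bytes = new Uint8Array(await res.arrayBuffer());
  const parser = new Parser(bytes);
  // parseEach drains the buffer, stopping early if readMessage throws on an
  // unknown type id (Parser.next() catches the error and latches its flag).
  parser.parseEach(msg => {
    if (msg.tp === 'ios_log') {
      console.log(`[${msg.severity}]`, msg.content);
    }
  });
}

Note that hasNext() as patched here still compares the wrong way around (length < position), so the loop above would exit immediately; patch 120 below flips it to this.p < this.buf.length.
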
diff --git a/frontend/app/player/ios/ImagePlayer.js b/frontend/app/player/ios/ImagePlayer.js
index 824ee397a..4fd22776a 100644
--- a/frontend/app/player/ios/ImagePlayer.js
+++ b/frontend/app/player/ios/ImagePlayer.js
@@ -21,7 +21,7 @@ import {
   createListState,
   createScreenListState,
 } from './lists';
-import Parser from './parser';
+import Parser from './Parser';
 import PerformanceList from './PerformanceList';
 
 const HIGHEST_SPEED = 3;
From dfe3c1e9f4c45d3b2af6805be345b7214e2606cd Mon Sep 17 00:00:00 2001
From: ShiKhu 
Date: Tue, 16 Nov 2021 20:46:52 +0100
Subject: [PATCH 118/218] feat(tracker-assist): accept remote click event

---
 tracker/tracker-assist/package.json |  2 +-
 tracker/tracker-assist/src/index.ts | 26 ++++++++++++++++++--------
 2 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json
index b9d4b77ef..145a981c9 100644
--- a/tracker/tracker-assist/package.json
+++ b/tracker/tracker-assist/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker-assist",
   "description": "Tracker plugin for screen assistance through the WebRTC",
-  "version": "3.4.4",
+  "version": "3.4.5",
   "keywords": [
     "WebRTC",
     "assistance",
diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts
index e09bc0ab8..0bbe115df 100644
--- a/tracker/tracker-assist/src/index.ts
+++ b/tracker/tracker-assist/src/index.ts
@@ -22,6 +22,12 @@ enum CallingState {
   False,
 };
 
+// type IncomeMessages =
+//   "call_end" |
+//   { type: "agent_name", name: string } |
+//   { type: "click", x: number, y: number } |
+//   { x: number, y: number }
+
 export default function(opts: Partial<Options> = {}) {
   const options: Options = Object.assign(
     {
@@ -181,20 +187,24 @@ export default function(opts: Partial<Options> = {}) {
         document.addEventListener("click", onInteraction)
       });
       dataConn.on('data', (data: any) => {
+        if (!data) { return }
         if (data === "call_end") {
-          log('Recieved call_end during call')
+          //console.log('receiving callend on call')
          onCallEnd();
           return;
         }
-        // if (data && typeof data.video === 'boolean') {
-        //   log('Recieved video toggle signal: ', data.video)
-        //   callUI.toggleRemoteVideo(data.video)
-        // }
-        if (data && typeof data.name === 'string') {
-          log('Recieved name: ', data.name)
+        if (typeof data.name === 'string') {
+          //console.log("name",data)
           callUI.setAssistentName(data.name);
         }
-        if (data && typeof data.x === 'number' && typeof data.y === 'number') {
+        if (data.type === "click" && typeof data.x === 'number' && typeof data.y === 'number') {
+          const el = document.elementFromPoint(data.x, data.y)
+          if (el instanceof HTMLElement) {
+            el.click()
+          }
+          return
+        }
+        if (typeof data.x === 'number' && typeof data.y === 'number') {
          mouse.move(data);
         }
       });
From 64843b87f94cf5ea7d8b674fe52a65b13523c66a Mon Sep 17 00:00:00 2001
From: ShiKhu 
Date: Tue, 16 Nov 2021 20:51:46 +0100
Subject: [PATCH 119/218] feat(frontend): remote control btn on assist call

---
 .../AssistActions/AassistActions.css          |  9 +----
 .../AssistActions/AssistActions.tsx           | 36 ++++++++++++++-----
 .../managers/AssistManager.ts                 | 27 ++++++++++++--
 frontend/app/svg/icons/remote-control.svg     |  1 +
 4 files changed, 55 insertions(+), 18 deletions(-)
 create mode 100644 frontend/app/svg/icons/remote-control.svg

diff --git a/frontend/app/components/Assist/components/AssistActions/AassistActions.css b/frontend/app/components/Assist/components/AssistActions/AassistActions.css
index 85f5867c6..8a5758d90 100644
--- a/frontend/app/components/Assist/components/AssistActions/AassistActions.css
+++ 
b/frontend/app/components/Assist/components/AssistActions/AassistActions.css @@ -1,11 +1,4 @@ -.inCall { - & svg { - fill: $red - } - color: $red; -} - .disabled { opacity: 0.5; pointer-events: none; -} \ No newline at end of file +} diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index 493ed3cfe..7d4d5526a 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -30,16 +30,17 @@ interface Props { userId: String, toggleChatWindow: (state) => void, calling: CallingState, - peerConnectionStatus: ConnectionStatus + peerConnectionStatus: ConnectionStatus, + remoteControlEnabled: boolean, } -function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus }: Props) { +function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled }: Props) { const [ incomeStream, setIncomeStream ] = useState(null); const [ localStream, setLocalStream ] = useState(null); - const [ endCall, setEndCall ] = useState<()=>void>(()=>{}); + const [ callObject, setCallObject ] = useState<{ end: ()=>void, toggleRemoteControl: ()=>void } | null >(null); useEffect(() => { - return endCall + return callObject?.end() }, []) useEffect(() => { @@ -52,7 +53,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus function call() { RequestLocalStream().then(lStream => { setLocalStream(lStream); - setEndCall(() => callPeer( + setCallObject(callPeer( lStream, setIncomeStream, lStream.stop.bind(lStream), @@ -76,7 +77,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus {[stl.disabled]: peerConnectionStatus !== ConnectionStatus.Connected} ) } - onClick={ inCall ? endCall : call} + onClick={ inCall ? callObject?.end : call} role="button" > - { inCall ? 'End Call' : 'Call' } + { inCall ? 'End Call' : 'Call' } } content={ `Call ${userId}` } @@ -92,8 +93,26 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus inverted position="top right" /> + { calling === CallingState.True && +
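+          // Presumably this branch renders the Remote Control toggle next to
+          // the call button: active styling keyed off `remoteControlEnabled`,
+          // with onClick wired to `callObject.toggleRemoteControl`, the second
+          // handle that AssistManager.call() now returns alongside `end`.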
+ + { 'Remote Control' } +
+ }
- { inCall && } + { inCall && callObject && }
) @@ -103,5 +122,6 @@ const con = connect(null, { toggleChatWindow }) export default con(connectPlayer(state => ({ calling: state.calling, + remoteControlEnabled: state.remoteControl, peerConnectionStatus: state.peerConnectionStatus, }))(AssistActions)) diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index 9e508de25..992cdc987 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -47,11 +47,13 @@ export function getStatusText(status: ConnectionStatus): string { export interface State { calling: CallingState, peerConnectionStatus: ConnectionStatus, + remoteControl: boolean, } export const INITIAL_STATE: State = { calling: CallingState.False, peerConnectionStatus: ConnectionStatus.Connecting, + remoteControl: false, } const MAX_RECONNECTION_COUNT = 4; @@ -348,6 +350,24 @@ export default class AssistManager { conn.send({ x: Math.round(data.x), y: Math.round(data.y) }); } + private onMouseClick = (e: MouseEvent): void => { + const conn = this.dataConnection; + if (!conn) { return; } + const data = this.md.getInternalCoordinates(e); + // const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager + conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) }); + } + + private toggleRemoteControl = () => { + if (getState().remoteControl) { + this.md.overlay.removeEventListener("click", this.onMouseClick); + update({ remoteControl: false }) + } else { + this.md.overlay.addEventListener("click", this.onMouseClick); + update({ remoteControl: true }) + } + } + private localCallData: { localStream: LocalStream, @@ -357,7 +377,7 @@ export default class AssistManager { onError?: ()=> void } | null = null - call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): null | Function { + call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): { end: Function, toggleRemoteControl: Function } { this.localCallData = { localStream, onStream, @@ -371,7 +391,10 @@ export default class AssistManager { onError, } this._call() - return this.initiateCallEnd; + return { + end: this.initiateCallEnd, + toggleRemoteControl: this.toggleRemoteControl, + } } private _call() { diff --git a/frontend/app/svg/icons/remote-control.svg b/frontend/app/svg/icons/remote-control.svg new file mode 100644 index 000000000..64087850c --- /dev/null +++ b/frontend/app/svg/icons/remote-control.svg @@ -0,0 +1 @@ + \ No newline at end of file From 8f6aee5db8dffde2b8d239b64cf040988178a1e0 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Tue, 16 Nov 2021 20:52:34 +0100 Subject: [PATCH 120/218] fix(frontend-player): PrimitiveReader fix --- frontend/app/player/MessageDistributor/PrimitiveReader.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/player/MessageDistributor/PrimitiveReader.ts b/frontend/app/player/MessageDistributor/PrimitiveReader.ts index 7842212f9..b49955074 100644 --- a/frontend/app/player/MessageDistributor/PrimitiveReader.ts +++ b/frontend/app/player/MessageDistributor/PrimitiveReader.ts @@ -3,7 +3,7 @@ export default class PrimitiveReader { constructor(protected readonly buf: Uint8Array) {} hasNext() { - return this.buf.length < this.p + return this.p < this.buf.length } readUint() { From 
ca1c161ff500645e6f6415f834332a6e19840b0c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 17 Nov 2021 17:32:47 +0100 Subject: [PATCH 121/218] feat(nginx): added mobile bucket --- .../helm/nginx-ingress/nginx-ingress/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml index 2e6f259a1..422171a73 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml @@ -11,7 +11,7 @@ data: location /healthz { return 200 'OK'; } - location ~ ^/(mobs|sessions-assets|frontend|static|sourcemaps)/ { + location ~ ^/(mobs|sessions-assets|frontend|static|sourcemaps|ios-images)/ { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; From 08946b3c62725c2216d10b6263cc8937cc7bac15 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 18 Nov 2021 13:54:37 +0100 Subject: [PATCH 122/218] feat(tracker-assist):3.4.6:webpack5 hack --- tracker/tracker-assist/package.json | 2 +- tracker/tracker-assist/src/index.ts | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index 145a981c9..040cec4b8 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "3.4.5", + "version": "3.4.6", "keywords": [ "WebRTC", "assistance", diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index 0bbe115df..7d08755b3 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -22,6 +22,9 @@ enum CallingState { False, }; +//@ts-ignore webpack5 hack (?!) 
+Peer = Peer.default || Peer; + // type IncomeMessages = // "call_end" | // { type: "agent_name", name: string } | From f2994d9fafc8d2befc9302d08bdf64a762fc8a06 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 18 Nov 2021 18:37:54 +0530 Subject: [PATCH 123/218] change(assist) - added an option config --- tracker/tracker-assist/package-lock.json | 15 ++++----------- tracker/tracker-assist/src/index.ts | 12 +++++++++--- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/tracker/tracker-assist/package-lock.json b/tracker/tracker-assist/package-lock.json index 8781de1bf..e1e86f4bf 100644 --- a/tracker/tracker-assist/package-lock.json +++ b/tracker/tracker-assist/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-assist", - "version": "3.4.3", + "version": "3.4.6", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -30,12 +30,6 @@ "js-tokens": "^4.0.0" } }, - "@medv/finder": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@medv/finder/-/finder-2.1.0.tgz", - "integrity": "sha512-Egrg5XO4kLol24b1Kv50HDfi5hW0yQ6aWSsO0Hea1eJ4rogKElIN0M86FdVnGF4XIGYyA7QWx0MgbOzVPA0qkA==", - "dev": true - }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -63,12 +57,11 @@ } }, "@openreplay/tracker": { - "version": "3.4.4", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.4.tgz", - "integrity": "sha512-IcuxwwTt1RtLZw9QlQVAVNqoybv0ZkD2ZDk2FeHEQ/+BItsMhG61/4/lB2yXKLTLr6ydeKTzwYvxfr1vwxn2dw==", + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.7.tgz", + "integrity": "sha512-E9ZwjPwo9WbThV9nAQbK8EKLwJcLBgQG51ND3LB+p21xaz0WcMETIaJDFFmHhhwvkCQ1Vi43gK3cjoOoHF4XFg==", "dev": true, "requires": { - "@medv/finder": "^2.0.0", "error-stack-parser": "^2.0.6" } }, diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index 7d08755b3..d36faeb95 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -14,6 +14,7 @@ export interface Options { confirmText: string, confirmStyle: Object, // Styles object session_calling_peer_key: string, + config: Object } enum CallingState { @@ -37,6 +38,7 @@ export default function(opts: Partial = {}) { confirmText: "You have a call. Do you want to answer?", confirmStyle: {}, session_calling_peer_key: "__openreplay_calling_peer", + config: null }, opts, ); @@ -70,12 +72,16 @@ export default function(opts: Partial = {}) { app.attachStartCallback(function() { if (assistDemandedRestart) { return; } const peerID = `${app.getProjectKey()}-${app.getSessionID()}` - peer = new Peer(peerID, { - // @ts-ignore + const _opt = { + // @ts-ignore host: app.getHost(), path: '/assist', port: location.protocol === 'http:' && appOptions.__DISABLE_SECURE_MODE ? 
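+        // Port 80 is used only when secure mode is explicitly disabled;
+        // otherwise the PeerJS signalling server is reached over 443. A few
+        // lines down, `options.config`, when provided, is attached to `_opt`
+        // as the PeerJS `config` field, i.e. the RTCConfiguration handed to
+        // RTCPeerConnection, so deployments can inject their own
+        // iceServers/TURN credentials.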
80 : 443,
-      });
+      }
+      if (options.config) {
+        _opt['config'] = options.config
+      }
+      peer = new Peer(peerID, _opt);
       log('Peer created: ', peer)
       peer.on('error', e => warn("Peer error: ", e.type, e))
       peer.on('connection', function(conn) {
From b43a67bb66a9dbc3e4cb670e35f6f465f58f37bd Mon Sep 17 00:00:00 2001
From: Shekar Siri 
Date: Thu, 18 Nov 2021 18:52:10 +0530
Subject: [PATCH 124/218] change(assist) - version change to 3.4.7

---
 tracker/tracker-assist/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json
index 040cec4b8..6f7bb536b 100644
--- a/tracker/tracker-assist/package.json
+++ b/tracker/tracker-assist/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker-assist",
   "description": "Tracker plugin for screen assistance through the WebRTC",
-  "version": "3.4.6",
+  "version": "3.4.7",
   "keywords": [
     "WebRTC",
     "assistance",
From dcd3fd2cd51594a26212f8af23376a40d6b43e6c Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem 
Date: Fri, 19 Nov 2021 12:34:34 +0100
Subject: [PATCH 125/218] feat(api): changed signed URL TTL

---
 api/chalicelib/core/mobile.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/chalicelib/core/mobile.py b/api/chalicelib/core/mobile.py
index c13403584..12a2d268d 100644
--- a/api/chalicelib/core/mobile.py
+++ b/api/chalicelib/core/mobile.py
@@ -9,5 +9,5 @@ def sign_keys(project_id, session_id, keys):
     for k in keys:
         result.append(s3.get_presigned_url_for_sharing(bucket=environ["iosBucket"],
                                                        key=f"{project_key}/{session_id}/{k}",
-                                                       expires_in=10 * 60))
+                                                       expires_in=60 * 60))
     return result
From f8b79af2eb9c8f27ad729a29e41f5ecd5fe781c6 Mon Sep 17 00:00:00 2001
From: ShiKhu 
Date: Mon, 22 Nov 2021 18:29:51 +0100
Subject: [PATCH 126/218] dev(backend-http): ios-image logs

---
 backend/services/http/handlers_ios.go | 3 +++
 backend/services/http/main.go         | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers_ios.go
index f047c02cf..6c3f945bd 100644
--- a/backend/services/http/handlers_ios.go
+++ b/backend/services/http/handlers_ios.go
@@ -150,6 +150,8 @@ func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) {
 
 
 func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) {
+	log.Printf("received image request")
+
 	sessionData, err := tokenizer.ParseFromHTTPRequest(r)
 	if err != nil { // Should accept expired token?
 		responseWithError(w, http.StatusUnauthorized, err)
@@ -184,6 +186,7 @@ func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) {
 			continue // TODO: send server error or accumulate successful files
 		}
 		key := prefix + fileHeader.Filename
+		log.Printf("Uploading image... %v", key)
 		go func() { //TODO: mime type from header
 			if err := s3.Upload(file, key, "image/jpeg", false); err != nil {
 				log.Printf("Upload ios screen error. 
%v", err) diff --git a/backend/services/http/main.go b/backend/services/http/main.go index 9d82139d5..eaede2d4b 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -24,7 +24,7 @@ import ( "openreplay/backend/services/http/uaparser" ) - + var rewriter *assets.Rewriter var producer types.Producer var pgconn *cache.PGCache From f475a795074943f77aec8d7183e30d01b0daf50d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 22 Nov 2021 18:35:47 +0100 Subject: [PATCH 127/218] feat(api): iceServers configuration --- api/chalicelib/blueprints/bp_core_dynamic.py | 5 ++++- api/chalicelib/core/assist.py | 9 +++++++-- api/chalicelib/core/users.py | 4 +++- ee/api/chalicelib/blueprints/bp_core_dynamic.py | 7 ++++++- ee/api/chalicelib/core/users.py | 2 ++ 5 files changed, 22 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py index b695ab54a..5807d7bed 100644 --- a/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/api/chalicelib/blueprints/bp_core_dynamic.py @@ -16,6 +16,7 @@ from chalicelib.core import notifications from chalicelib.core import boarding from chalicelib.core import webhook from chalicelib.core import license +from chalicelib.core import assist from chalicelib.core.collaboration_slack import Slack app = Blueprint(__name__) @@ -47,6 +48,7 @@ def login(): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True, version=True) c["smtp"] = helper.has_smtp() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -68,7 +70,8 @@ def get_account(context): "metadata": metadata.get_remaining_metadata_with_count(context['tenantId']) }, **license.get_status(context["tenantId"]), - "smtp": helper.has_smtp() + "smtp": helper.has_smtp(), + "iceServers": assist.get_ice_servers() } } diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index dbfe8269b..7f9e8465f 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,6 +1,6 @@ -from chalicelib.utils import pg_client, helper -from chalicelib.core import projects, sessions, sessions_metas import requests +from chalicelib.core import projects, sessions, sessions_metas +from chalicelib.utils import pg_client, helper from chalicelib.utils.helper import environ SESSION_PROJECTION_COLS = """s.project_id, @@ -72,3 +72,8 @@ def is_live(project_id, session_id, project_key=None): return False connected_peers = connected_peers.json().get("data", []) return str(session_id) in connected_peers + + +def get_ice_servers(): + return environ.get("iceServers") if environ.get("iceServers") is not None \ + and len(environ["iceServers"]) > 0 else None diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index c001ea5e2..4a980eefc 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -8,7 +8,7 @@ from chalicelib.utils import dev from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.helper import environ -from chalicelib.core import tenants +from chalicelib.core import tenants, assist import secrets @@ -440,6 +440,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -467,6 +468,7 @@ def 
set_password_invitation(user_id, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index de2ae3bfb..8ce7d9bc2 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -16,6 +16,7 @@ from chalicelib.core import notifications from chalicelib.core import boarding from chalicelib.core import webhook from chalicelib.core import license +from chalicelib.core import assist from chalicelib.core.collaboration_slack import Slack app = Blueprint(__name__) @@ -48,6 +49,9 @@ def login(): c.pop("createdAt") c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True, version=True) + c["smtp"] = helper.has_smtp() + c["iceServers"] = assist.get_ice_servers() + return { 'jwt': r.pop('jwt'), 'data': { @@ -70,7 +74,8 @@ def get_account(context): }, **license.get_status(context["tenantId"]), "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0, - "saml2": SAML2_helper.is_saml2_available() + "saml2": SAML2_helper.is_saml2_available(), + "iceServers": assist.get_ice_servers() } } diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index ec96b68ac..050738659 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -450,6 +450,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -477,6 +478,7 @@ def set_password_invitation(tenant_id, user_id, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() + c["iceServers"]= assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { From c0322b85f6cb2bf36a457982942044c61bfa3c23 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 22 Nov 2021 21:41:10 +0100 Subject: [PATCH 128/218] fix(backend-http): thread-safe project cacher --- backend/pkg/db/cache/pg_cache.go | 7 ++++--- backend/pkg/db/cache/project.go | 17 +++++++++++------ 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/backend/pkg/db/cache/pg_cache.go b/backend/pkg/db/cache/pg_cache.go index 0ca429eb5..9a62354f1 100644 --- a/backend/pkg/db/cache/pg_cache.go +++ b/backend/pkg/db/cache/pg_cache.go @@ -2,6 +2,7 @@ package cache import ( "time" + "sync" "openreplay/backend/pkg/db/postgres" . 
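	// "sync" above backs the sync.Map that replaces the plain projectsByKeys
	// map: GetProjectByKey runs on concurrent HTTP handlers, and unsynchronized
	// map writes are a data race in Go. (Note the by-ID `projects` map is left
	// as an ordinary map by this patch.)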
"openreplay/backend/pkg/db/types" @@ -20,8 +21,8 @@ type ProjectMeta struct { type PGCache struct { *postgres.Conn sessions map[uint64]*Session - projects map[uint32]*ProjectMeta - projectsByKeys map[string]*ProjectMeta + projects map[uint32]*ProjectMeta + projectsByKeys sync.Map // map[string]*ProjectMeta projectExpirationTimeout time.Duration } @@ -31,7 +32,7 @@ func NewPGCache(pgConn *postgres.Conn, projectExpirationTimeoutMs int64) *PGCach Conn: pgConn, sessions: make(map[uint64]*Session), projects: make(map[uint32]*ProjectMeta), - projectsByKeys: make(map[string]*ProjectMeta), + //projectsByKeys: make(map[string]*ProjectMeta), projectExpirationTimeout: time.Duration(1000 * projectExpirationTimeoutMs), } } diff --git a/backend/pkg/db/cache/project.go b/backend/pkg/db/cache/project.go index 1411e608b..6a7739bdd 100644 --- a/backend/pkg/db/cache/project.go +++ b/backend/pkg/db/cache/project.go @@ -6,16 +6,21 @@ import ( ) func (c *PGCache) GetProjectByKey(projectKey string) (*Project, error) { - if c.projectsByKeys[ projectKey ] != nil && - time.Now().Before(c.projectsByKeys[ projectKey ].expirationTime) { - return c.projectsByKeys[ projectKey ].Project, nil + pmInterface, found := c.projectsByKeys.Load(projectKey) + if found { + if pm, ok := pmInterface.(*ProjectMeta); ok { + if time.Now().Before(pm.expirationTime) { + return pm.Project, nil + } + } } + p, err := c.Conn.GetProjectByKey(projectKey) if err != nil { return nil, err } - c.projectsByKeys[ projectKey ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projects[ p.ProjectID ] = c.projectsByKeys[ projectKey ] + c.projects[ p.ProjectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } + c.projectsByKeys.Store(projectKey, c.projects[ p.ProjectID ]) return p, nil } @@ -31,7 +36,7 @@ func (c *PGCache) GetProject(projectID uint32) (*Project, error) { return nil, err } c.projects[ projectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projectsByKeys[ p.ProjectKey ] = c.projects[ projectID ] + c.projectsByKeys.Store(p.ProjectKey, c.projects[ projectID ]) return p, nil } From 9f12433707af906abecf23a5ed6b4bc5123c52bc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 24 Nov 2021 17:44:57 +0100 Subject: [PATCH 129/218] feat(api): EE roles --- ee/api/.gitignore | 2 + ee/api/chalicelib/blueprints/bp_ee.py | 42 + ee/api/chalicelib/core/roles.py | 99 +++ ee/api/chalicelib/core/sessions.py | 723 ------------------ .../db/init_dbs/postgresql/1.4.0/1.4.0.sql | 55 +- .../db/init_dbs/postgresql/init_schema.sql | 14 + 6 files changed, 211 insertions(+), 724 deletions(-) create mode 100644 ee/api/chalicelib/core/roles.py delete mode 100644 ee/api/chalicelib/core/sessions.py diff --git a/ee/api/.gitignore b/ee/api/.gitignore index d5e957053..06eb982a9 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -204,6 +204,8 @@ Pipfile /chalicelib/core/log_tool_sentry.py /chalicelib/core/log_tool_stackdriver.py /chalicelib/core/log_tool_sumologic.py +/chalicelib/core/mobile.py +/chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_favorite_viewed.py /chalicelib/core/sessions_metas.py diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index a0fa0aa8c..76272ec2c 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -1,9 +1,51 @@ from chalice import Blueprint from chalicelib import _overrides +from chalicelib.core import roles from chalicelib.core import unlock 
app = Blueprint(__name__) _overrides.chalice_app(app) unlock.check() + + +@app.route('/client/roles', methods=['GET']) +def get_roles(context): + return { + 'data': roles.get_roles(tenant_id=context["tenantId"]) + } + + +@app.route('/client/roles', methods=['POST', 'PUT']) +def add_role(context): + data = app.current_request.json_body + data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], **data) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.route('/client/roles/{roleId}', methods=['POST', 'PUT']) +def edit_role(roleId, context): + data = app.current_request.json_body + data = roles.update(tenant_id=context['tenantId'], user_id=context['userId'], role_id=roleId, changes=data) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.route('/client/roles/{roleId}', methods=['DELETE']) +def delete_role(roleId, context): + data = roles.delete(tenant_id=context['tenantId'], user_id=context["userId"], role_id=roleId) + if "errors" in data: + return data + return { + 'data': data + } diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py new file mode 100644 index 000000000..d5c4fb7da --- /dev/null +++ b/ee/api/chalicelib/core/roles.py @@ -0,0 +1,99 @@ +from chalicelib.core import users +from chalicelib.utils import pg_client, helper +from chalicelib.utils.TimeUTC import TimeUTC + + +def update(tenant_id, user_id, role_id, changes): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + + if len(changes.keys()) == 0: + return None + ALLOW_EDIT = ["name", "description", "permissions"] + sub_query = [] + for key in changes.keys(): + if key in ALLOW_EDIT: + sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.roles + SET {" ,".join(sub_query)} + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND deleted_at ISNULL + AND protected = FALSE;""", + {"tenant_id": tenant_id, "role_id": role_id, **changes}) + ) + return get_roles(tenant_id=tenant_id) + + +def create(tenant_id, user_id, name, description, permissions): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions) + VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[]);""", + {"tenant_id": tenant_id, "name": name, "description": description, "permissions": permissions}) + ) + return get_roles(tenant_id=tenant_id) + + +def get_roles(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""SELECT * + FROM public.roles + where tenant_id =%(tenant_id)s + AND deleted_at IS NULL + ORDER BY role_id;""", + {"tenant_id": tenant_id}) + ) + rows = cur.fetchall() + for r in rows: + r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) + return helper.list_to_camel_case(rows) + + +def delete(tenant_id, user_id, role_id): + admin = users.get(user_id=user_id, tenant_id=tenant_id) + + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""SELECT 1 + FROM public.roles + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND protected = TRUE + 
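+                          -- Roles flagged protected are refused here and can
+                          -- never be deleted; the UPDATE further down is a
+                          -- soft delete (it only sets deleted_at).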
LIMIT 1;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + if cur.fetchone() is not None: + return {"errors": ["this role is protected"]} + cur.execute( + cur.mogrify("""SELECT 1 + FROM public.users + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + LIMIT 1;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + if cur.fetchone() is not None: + return {"errors": ["this role is already attached to other user(s)"]} + cur.execute( + cur.mogrify("""UPDATE public.roles + SET deleted_at = timezone('utc'::text, now()) + WHERE role_id = %(role_id)s + AND tenant_id = %(tenant_id)s + AND protected = FALSE;""", + {"tenant_id": tenant_id, "role_id": role_id}) + ) + return get_roles(tenant_id=tenant_id) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py deleted file mode 100644 index 53d1d9383..000000000 --- a/ee/api/chalicelib/core/sessions.py +++ /dev/null @@ -1,723 +0,0 @@ -from chalicelib.utils import pg_client, helper -from chalicelib.utils import dev -from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs, issues - -from chalicelib.core import projects, errors - -from chalicelib.core import resources - -SESSION_PROJECTION_COLS = """s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_agent, - s.user_os, - s.user_browser, - s.user_device, - s.user_device_type, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count, - s.user_anonymous_id, - s.platform, - s.issue_score, - to_jsonb(s.issue_types) AS issue_types, - favorite_sessions.session_id NOTNULL AS favorite, - COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - """ - - -def __group_metadata(session, project_metadata): - meta = [] - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta.append({project_metadata[m]: session[m]}) - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if 
e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) - data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id, - device=data["userDevice"], - os_version=data["userOsVersion"], - mob_url=data["mobsUrl"]) - else: - data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if - e['source'] == "js_exception"][ - :500] # limit the number of errors to reduce the response-body size - data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['resources'] = resources.get_by_session_id(session_id=session_id) - - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id) - - return data - return None - - -def sessions_args(args, params): - if params is not None: - for key in ['userOs', 'userBrowser', 'userCountry', 'path', 'path_in_order', 'after', 'minDuration', - 'maxDuration', 'sortSessions', 'eventsCount', 'consoleLogCount', 'startDate', 'endDate', - 'consoleLog', 'location']: - args[key] = params.get(key) - - -new_line = "\n" - - -def __get_sql_operator(op): - op = op.lower() - return "=" if op == "is" or op == "on" else "!=" if op == "isnot" else "ILIKE" if op == "contains" else "NOT ILIKE" if op == "notcontains" else "=" - - -def __is_negation_operator(op): - return op in ("!=", "NOT ILIKE") - - -def __reverse_sql_operator(op): - return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE" - - -def __get_sql_operator_multiple(op): - op = op.lower() - return " IN " if op == "is" else " NOT IN " - - -def __get_sql_operator_boolean(op): - op = op.lower() - return True if op == "true" else False - - -def __get_sql_value_multiple(values): - if isinstance(values, tuple): - return values - return tuple([v for v in values]) - - -@dev.timed -def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False, error_status="ALL", - count_only=False, issue=None): - sessions = [] - generic_args = {"startDate": data['startDate'], "endDate": data['endDate'], - "projectId": project_id, - "userId": user_id} - with pg_client.PostgresClient() as cur: - extra_constraints = [ - cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}), - cur.mogrify("s.duration IS NOT NULL", {}) - ] - extra_from = "" - fav_only_join = "" - if favorite_only and not errors_only: - fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id" - extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id})) - events_query_part = "" - strict = True - - if len(data.get("events", [])) > 0: - events_query_from = [] - event_index = 0 - - for event in data["events"]: - # TODO: remove this when message_id is removed - seq_id = False - event_type = event["type"].upper() - if event.get("operator") is 
None: - event["operator"] = "is" - op = __get_sql_operator(event["operator"]) - is_not = False - if __is_negation_operator(op) and event_index > 0: - is_not = True - op = __reverse_sql_operator(op) - event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)" - event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", - "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", - "ms.start_ts <= %(endDate)s"] - event_args = {"value": helper.string_to_sql_like_with_op(event['value'], op)} - if event_type not in list(events.SUPPORTED_TYPES.keys()) \ - or event.get("value") in [None, "", "*"] \ - and (event_type != events.event_type.ERROR.ui_type \ - or event_type != events.event_type.ERROR_IOS.ui_type): - continue - if event_type == events.event_type.CLICK.ui_type: - event_from = event_from % f"{events.event_type.CLICK.table} AS main " - event_where.append(f"main.{events.event_type.CLICK.column} {op} %(value)s") - - elif event_type == events.event_type.INPUT.ui_type: - event_from = event_from % f"{events.event_type.INPUT.table} AS main " - event_where.append(f"main.{events.event_type.INPUT.column} {op} %(value)s") - if len(event.get("custom", "")) > 0: - event_where.append("main.value ILIKE %(custom)s") - event_args["custom"] = helper.string_to_sql_like_with_op(event['custom'], "ILIKE") - elif event_type == events.event_type.LOCATION.ui_type: - event_from = event_from % f"{events.event_type.LOCATION.table} AS main " - event_where.append(f"main.{events.event_type.LOCATION.column} {op} %(value)s") - elif event_type == events.event_type.CUSTOM.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.CUSTOM.table} AS main " - event_where.append(f"main.{events.event_type.CUSTOM.column} {op} %(value)s") - elif event_type == events.event_type.REQUEST.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.REQUEST.table} AS main " - event_where.append(f"main.{events.event_type.REQUEST.column} {op} %(value)s") - elif event_type == events.event_type.GRAPHQL.ui_type: - event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " - event_where.append(f"main.{events.event_type.GRAPHQL.column} {op} %(value)s") - elif event_type == events.event_type.STATEACTION.ui_type: - event_from = event_from % f"{events.event_type.STATEACTION.table} AS main " - event_where.append(f"main.{events.event_type.STATEACTION.column} {op} %(value)s") - elif event_type == events.event_type.ERROR.ui_type: - if event.get("source") in [None, "*", ""]: - event["source"] = "js_exception" - event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" - if event.get("value") not in [None, "*", ""]: - event_where.append(f"(main1.message {op} %(value)s OR main1.name {op} %(value)s)") - if event.get("source") not in [None, "*", ""]: - event_where.append(f"main1.source = %(source)s") - event_args["source"] = event["source"] - elif event.get("source") not in [None, "*", ""]: - event_where.append(f"main1.source = %(source)s") - event_args["source"] = event["source"] - - # ----- IOS - elif event_type == events.event_type.CLICK_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main " - event_where.append(f"main.{events.event_type.CLICK_IOS.column} {op} %(value)s") - - elif event_type == events.event_type.INPUT_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main " - 
event_where.append(f"main.{events.event_type.INPUT_IOS.column} {op} %(value)s") - - if len(event.get("custom", "")) > 0: - event_where.append("main.value ILIKE %(custom)s") - event_args["custom"] = helper.string_to_sql_like_with_op(event['custom'], "ILIKE") - elif event_type == events.event_type.VIEW_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main " - event_where.append(f"main.{events.event_type.VIEW_IOS.column} {op} %(value)s") - elif event_type == events.event_type.CUSTOM_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main " - event_where.append(f"main.{events.event_type.CUSTOM_IOS.column} {op} %(value)s") - elif event_type == events.event_type.REQUEST_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main " - event_where.append(f"main.{events.event_type.REQUEST_IOS.column} {op} %(value)s") - elif event_type == events.event_type.ERROR_IOS.ui_type: - seq_id = True - event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" - if event.get("value") not in [None, "*", ""]: - event_where.append(f"(main1.reason {op} %(value)s OR main1.name {op} %(value)s)") - - else: - continue - - event_index += 1 - if is_not: - event_from += f""" LEFT JOIN (SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) AS left_not USING (session_id)""" - event_where[-1] = "left_not.session_id ISNULL" - events_query_from.append(cur.mogrify(f"""\ - (SELECT - main.session_id, {'seq_index' if seq_id else 'message_id %%%% 2147483647 AS seq_index'}, timestamp, {event_index} AS funnel_step - FROM {event_from} - WHERE {" AND ".join(event_where)} - )\ - """, {**generic_args, **event_args}).decode('UTF-8')) - - if len(events_query_from) > 0: - events_query_part = f"""\ - SELECT - session_id, MIN(timestamp) AS first_event_ts, MAX(timestamp) AS last_event_ts - FROM - ({(" UNION ALL ").join(events_query_from)}) AS f_query - GROUP BY 1 - {"" if event_index < 2 else f"HAVING events.funnel(array_agg(funnel_step ORDER BY timestamp,seq_index ASC), {event_index})" if strict - else f"HAVING array_length(array_agg(DISTINCT funnel_step), 1) = {len(data['events'])}"} - {fav_only_join} - """ - else: - data["events"] = [] - - # --------------------------------------------------------------------------- - if "filters" in data: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - for f in data["filters"]: - if not isinstance(f.get("value"), list): - f["value"] = [f.get("value")] - if len(f["value"]) == 0 or f["value"][0] is None: - continue - filter_type = f["type"].upper() - f["value"] = __get_sql_value_multiple(f["value"]) - if filter_type == sessions_metas.meta_type.USERBROWSER: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]})) - - elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]})) - - elif filter_type in 
[sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]: - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]})) - elif filter_type == "duration".upper(): - if len(f["value"]) > 0 and f["value"][0] is not None: - extra_constraints.append( - cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]})) - if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0: - extra_constraints.append( - cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]})) - elif filter_type == sessions_metas.meta_type.REFERRER: - # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" - extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - op = __get_sql_operator_multiple(f["operator"]) - extra_constraints.append( - cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]})) - elif filter_type == events.event_type.METADATA.ui_type: - op = __get_sql_operator(f["operator"]) - if f.get("key") in meta_keys.keys(): - extra_constraints.append( - cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.user_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID, - sessions_metas.meta_type.USERANONYMOUSID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.user_anonymous_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]: - op = __get_sql_operator(f["operator"]) - extra_constraints.append( - cur.mogrify(f"s.rev_id {op} %(value)s", - {"value": helper.string_to_sql_like_with_op(f["value"][0], op)}) - ) - - # --------------------------------------------------------------------------- - - if data.get("startDate") is not None: - extra_constraints.append(cur.mogrify("s.start_ts >= %(startDate)s", {"startDate": data['startDate']})) - else: - data['startDate'] = None - if data.get("endDate") is not None: - extra_constraints.append(cur.mogrify("s.start_ts <= %(endDate)s", {"endDate": data['endDate']})) - else: - data['endDate'] = None - - if data.get('platform') is not None: - if data['platform'] == 'mobile': - extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')") - elif data['platform'] == 'desktop': - extra_constraints.append( - b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - - order = "DESC" - if data.get("order") is not None: - order = data["order"] - sort = 'session_id' - if data.get("sort") is not None and data["sort"] != "session_id": - sort += " " + order + "," + helper.key_to_snake_case(data["sort"]) - else: - sort = 'session_id' - - if errors_only: - extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" - extra_constraints.append(b"ser.source = 'js_exception'") - if error_status != "ALL": - extra_constraints.append(cur.mogrify("ser.status = %(status)s", 
{"status": error_status.lower()})) - if favorite_only: - extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - extra_constraints.append(cur.mogrify("ufe.user_id = %(user_id)s", {"user_id": user_id})) - - extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints] - if not favorite_only and not errors_only: - extra_from += """LEFT JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - extra_join = "" - if issue is not None: - extra_join = cur.mogrify(""" - INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id) - WHERE issues.session_id=f.session_id - AND p_issues.type=%(type)s - AND p_issues.context_string=%(contextString)s - AND timestamp >= f.first_event_ts - AND timestamp <= f.last_event_ts) AS issues ON(TRUE) - """, {"contextString": issue["contextString"], "type": issue["type"]}).decode('UTF-8') - - query_part = f"""\ - FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"} - {extra_join} - {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""} - {extra_from} - WHERE - - {" AND ".join(extra_constraints)}""" - - if errors_only: - main_query = cur.mogrify(f"""\ - SELECT DISTINCT er.error_id, ser.status, ser.parent_error_id, ser.payload, - COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE er.error_id = ve.error_id - AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - {query_part};""", - generic_args) - - elif count_only: - main_query = cur.mogrify(f"""\ - SELECT COUNT(DISTINCT s.session_id) AS count_sessions, COUNT(DISTINCT s.user_uuid) AS count_users - {query_part};""", - generic_args) - else: - main_query = cur.mogrify(f"""\ - SELECT * FROM - (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {order};""", - generic_args) - - # print("--------------------") - # print(main_query) - - cur.execute(main_query) - - if count_only: - return helper.dict_to_camel_case(cur.fetchone()) - sessions = [] - total = cur.rowcount - row = cur.fetchone() - limit = 200 - while row is not None and len(sessions) < limit: - if row.get("favorite"): - limit += 1 - sessions.append(row) - row = cur.fetchone() - - if errors_only: - return sessions - if data.get("sort") is not None and data["sort"] != "session_id": - sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data["sort"])], - reverse=data.get("order", "DESC").upper() == "DESC") - return { - 'total': total, - 'sessions': helper.list_to_camel_case(sessions) - } - - -def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): - if project_id is None: - all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - else: - all_projects = [ - projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, - include_gdpr=False)] - - all_projects = {int(p["projectId"]): p["name"] for p in all_projects} - project_ids = list(all_projects.keys()) - - available_keys = metadata.get_keys_by_projects(project_ids) - for i in available_keys: - available_keys[i]["user_id"] = sessions_metas.meta_type.USERID - 
available_keys[i]["user_anonymous_id"] = sessions_metas.meta_type.USERANONYMOUSID - results = {} - for i in project_ids: - if m_key not in available_keys[i].values(): - available_keys.pop(i) - results[i] = {"total": 0, "sessions": [], "missingMetadata": True} - project_ids = list(available_keys.keys()) - if len(project_ids) > 0: - with pg_client.PostgresClient() as cur: - sub_queries = [] - for i in project_ids: - col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] - sub_queries.append(cur.mogrify( - f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", - {"id": i, "value": m_value}).decode('UTF-8')) - query = f"""SELECT {", ".join(sub_queries)};""" - cur.execute(query=query) - - rows = cur.fetchone() - - sub_queries = [] - for i in rows.keys(): - results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]} - if rows[i] > 0: - col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)] - sub_queries.append( - cur.mogrify( - f"""( - SELECT * - FROM ( - SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s LEFT JOIN (SELECT session_id - FROM public.user_favorite_sessions - WHERE user_favorite_sessions.user_id = %(userId)s - ) AS favorite_sessions USING (session_id) - WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s - ) AS full_sessions - ORDER BY favorite DESC, issue_score DESC - LIMIT 10 - )""", - {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) - if len(sub_queries) > 0: - cur.execute("\nUNION\n".join(sub_queries)) - rows = cur.fetchall() - for i in rows: - results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) - return results - - -def search_by_issue(user_id, issue, project_id, start_date, end_date): - constraints = ["s.project_id = %(projectId)s", - "p_issues.context_string = %(issueContextString)s", - "p_issues.type = %(issueType)s"] - if start_date is not None: - constraints.append("start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("start_ts <= %(endDate)s") - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s - INNER JOIN events_common.issues USING (session_id) - INNER JOIN public.issues AS p_issues USING (issue_id) - LEFT JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id) - WHERE {" AND ".join(constraints)} - ORDER BY s.session_id DESC;""", - { - "issueContextString": issue["contextString"], - "issueType": issue["type"], "userId": user_id, - "projectId": project_id, - "startDate": start_date, - "endDate": end_date - })) - - rows = cur.fetchall() - return helper.list_to_camel_case(rows) - - -def get_favorite_sessions(project_id, user_id, include_viewed=False): - with pg_client.PostgresClient() as cur: - query_part = cur.mogrify(f"""\ - FROM public.sessions AS s - LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id - WHERE fs.user_id = %(userId)s""", - {"projectId": project_id, "userId": user_id} - ) - - extra_query = b"" - if include_viewed: - extra_query = cur.mogrify(""",\ - COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = 
fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""", - {"projectId": project_id, "userId": user_id}) - - cur.execute(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_agent, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count, - TRUE AS favorite - {extra_query.decode('UTF-8')} - {query_part.decode('UTF-8')} - ORDER BY s.session_id - LIMIT 50;""") - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - -def get_user_sessions(project_id, user_id, start_date, end_date): - with pg_client.PostgresClient() as cur: - constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] - if start_date is not None: - constraints.append("s.start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("s.start_ts <= %(endDate)s") - - query_part = f"""\ - FROM public.sessions AS s - WHERE {" AND ".join(constraints)}""" - - cur.execute(cur.mogrify(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_agent, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count - {query_part} - ORDER BY s.session_id - LIMIT 50;""", { - "projectId": project_id, - "userId": user_id, - "startDate": start_date, - "endDate": end_date - })) - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - -def get_session_user(project_id, user_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT - user_id, - count(*) as session_count, - max(start_ts) as last_seen, - min(start_ts) as first_seen - FROM - "public".sessions - WHERE - project_id = %(project_id)s - AND user_id = %(user_id)s - AND duration is not null - GROUP BY user_id; - """, - {"project_id": project_id, "user_id": user_id} - ) - cur.execute(query=query) - data = cur.fetchone() - return helper.dict_to_camel_case(data) - - -def get_session_ids_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT session_id FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} - ) - ids = cur.execute(query=query) - return ids - - -def delete_sessions_by_session_ids(session_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - session_id IN %(session_ids)s;""", - {"session_ids": tuple(session_ids)} - ) - cur.execute(query=query) - - return True - - -def delete_sessions_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} - ) - cur.execute(query=query) - - return True diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql index 2139861e3..ed2ce7672 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql @@ -5,7 +5,60 @@ CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessio CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; CREATE INDEX 
users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; -CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues(issue_id,timestamp); +CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues (issue_id, timestamp); CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp); CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); + +CREATE TABLE roles +( + role_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + description text DEFAULT NULL, + permissions text[] NOT NULL DEFAULT '{}', + protected bool NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL +); + +INSERT INTO roles(tenant_id, name, description, permissions, protected) +SELECT * +FROM (SELECT tenant_id FROM tenants) AS tenants, + (VALUES ('Owner', 'The company''s owner', '{}'::text[], TRUE), + ('Admin', 'Admin member', '{}'::text[], TRUE), + ('Member', 'A member', '{}'::text[], TRUE) + ) AS default_roles(name, description, permissions, protected); + + +ALTER TABLE users + ADD COLUMN role_id integer REFERENCES roles (role_id) ON DELETE SET NULL; + +UPDATE users +SET role_id = r.role_id +FROM (SELECT tenant_id, role_id + FROM tenants + INNER JOIN roles USING (tenant_id) + WHERE roles.name = 'Owner') AS r(tenant_id, role_id) +WHERE users.tenant_id = r.tenant_id + AND users.role = 'owner'; + +UPDATE users +SET role_id = r.role_id +FROM (SELECT tenant_id, role_id + FROM tenants + INNER JOIN roles USING (tenant_id) + WHERE roles.name = 'Admin') AS r(tenant_id, role_id) +WHERE users.tenant_id = r.tenant_id + AND users.role = 'admin'; + +UPDATE users +SET role_id = r.role_id +FROM (SELECT tenant_id, role_id + FROM tenants + INNER JOIN roles USING (tenant_id) + WHERE roles.name = 'Member') AS r(tenant_id, role_id) +WHERE users.tenant_id = r.tenant_id + AND users.role = 'member'; + + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 73cdffeae..12fb7a5b5 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -887,6 +887,20 @@ $$ CREATE INDEX ON jobs (status); CREATE INDEX ON jobs (start_at); + + CREATE TABLE roles + ( + role_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + name text NOT NULL, + description text DEFAULT NULL, + permissions text[] NOT NULL DEFAULT '{}', + protected bool NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL + ); + + raise notice 'DB created'; END IF; END; From 8a24e0a7267a2bbe48b887a11ba123e146c386b0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 24 Nov 2021 18:45:01 +0100 Subject: [PATCH 130/218] feat(api): EE include role_id --- ee/api/chalicelib/core/signup.py | 13 +++++++-- ee/api/chalicelib/core/users.py | 49 +++++++++++++++++++------------- 2 files changed, 39 insertions(+), 23 deletions(-) diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 652867c25..1e8fbb154 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -68,10 +68,17 @@ def create_step1(data): VALUES (%(companyName)s, 
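
The 1.4.0 migration above seeds the three protected roles for every existing tenant in a single INSERT ... SELECT over a cross join between tenants and a VALUES list. The same shape in miniature, as a sketch assuming a reachable database (hypothetical DSN; the target columns are spelled out here, where the migration itself uses SELECT *):

    import psycopg2

    SEED_ROLES = """
    INSERT INTO roles(tenant_id, name, description, permissions, protected)
    SELECT tenants.tenant_id, r.name, r.description, r.permissions, r.protected
    FROM tenants,
         (VALUES ('Owner', 'The company''s owner', '{}'::text[], TRUE),
                 ('Admin', 'Admin member', '{}'::text[], TRUE),
                 ('Member', 'A member', '{}'::text[], TRUE)
         ) AS r(name, description, permissions, protected);
    """

    with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
        cur.execute(SEED_ROLES)

The cross join yields one row per (tenant, role) pair, so the statement covers any number of tenants without a loop.
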
%(versionNumber)s, 'ee') RETURNING tenant_id, api_key ), + r AS ( + INSERT INTO public.roles(tenant_id, name, description, permissions) + VALUES ((SELECT tenant_id FROM t), 'Owner', 'The company''s owner', '{}'::text[]), + ((SELECT tenant_id FROM t), 'Admin', 'Admin member', '{}'::text[]), + ((SELECT tenant_id FROM t), 'Member', 'A member', '{}'::text[]) + RETURNING * + ), u AS ( - INSERT INTO public.users (tenant_id, email, role, name, data) - VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s) - RETURNING user_id,email,role,name + INSERT INTO public.users (tenant_id, email, role, name, data, role_id) + VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s, (SELECT role_id FROM r WHERE name ='Owner')) + RETURNING user_id,email,role,name,role_id ), au AS ( INSERT INTO public.basic_authentication (user_id, password, generated_password) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 050738659..d8cc2a136 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -1,26 +1,27 @@ import json +import secrets +from chalicelib.core import assist from chalicelib.core import authorizers, metadata, projects from chalicelib.core import tenants +from chalicelib.utils import dev from chalicelib.utils import helper from chalicelib.utils import pg_client -from chalicelib.utils import dev from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.helper import environ -import secrets def __generate_invitation_token(): return secrets.token_urlsafe(64) -def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False): +def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False, role_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ WITH u AS ( - INSERT INTO public.users (tenant_id, email, role, name, data) - VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s) - RETURNING user_id,email,role,name,appearance + INSERT INTO public.users (tenant_id, email, role, name, data, role_id) + VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(role_id)s) + RETURNING user_id,email,role,name,appearance, role_id ), au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) @@ -35,19 +36,20 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, - au.invitation_token + au.invitation_token, + u.role_id FROM u,au;""", {"tenantId": tenant_id, "email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), - "invitation_token": invitation_token}) + "invitation_token": invitation_token, "role_id": role_id}) cur.execute( query ) return helper.dict_to_camel_case(cur.fetchone()) -def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False): +def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False, role_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ UPDATE public.users @@ -56,7 +58,8 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own deleted_at= NULL, 
created_at = timezone('utc'::text, now()), tenant_id= %(tenant_id)s, - api_key= generate_api_key(20) + api_key= generate_api_key(20), + role_id= %(role_id)s WHERE user_id=%(user_id)s RETURNING user_id AS id, email, @@ -65,9 +68,11 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own TRUE AS change_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""", + (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, + role_id;""", {"tenant_id": tenant_id, "user_id": user_id, "email": email, - "role": "owner" if owner else "admin" if admin else "member", "name": name}) + "role": "owner" if owner else "admin" if admin else "member", "name": name, + "role_id": role_id}) cur.execute( query ) @@ -157,7 +162,8 @@ def update(tenant_id, user_id, changes): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + users.appearance, + users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) if len(sub_query_bauth) > 0: @@ -198,14 +204,15 @@ def create_member(tenant_id, user_id, data): return {"errors": ["invalid user name"]} if name is None: name = data["email"] + role_id = data.get("roleId") invitation_token = __generate_invitation_token() user = get_deleted_user_by_email(email=data["email"]) if user is not None: new_member = restore_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name, user_id=user["userId"]) + admin=data.get("admin", False), name=name, user_id=user["userId"], role_id=role_id) else: new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name) + admin=data.get("admin", False), name=name, role_id=role_id) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) helper.async_post(environ['email_basic'] % 'member_invitation', { @@ -280,7 +287,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance"] + ALLOW_EDIT = ["name", "email", "admin", "appearance","roleId"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -450,7 +457,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]= assist.get_ice_servers() + c["iceServers"] = assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -478,7 +485,7 @@ def set_password_invitation(tenant_id, user_id, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]= assist.get_ice_servers() + c["iceServers"] = assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -597,7 +604,8 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): (CASE WHEN users.role = 
'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, users.appearance, - users.origin + users.origin, + users.role_id FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) @@ -637,7 +645,8 @@ def authenticate_sso(email, internal_id, exp=None): (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, users.appearance, - origin + origin, + role_id FROM public.users AS users WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""", {"email": email, "internal_id": internal_id}) From 5fc9ce5bdd65e362f5231ead6ac02d295c899055 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 24 Nov 2021 19:12:31 +0100 Subject: [PATCH 131/218] feat(db): added role_id to users --- ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 12fb7a5b5..947f1282e 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -204,7 +204,8 @@ $$ jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT '{}'::jsonb, weekly_report boolean NOT NULL DEFAULT TRUE, - origin user_origin NULL DEFAULT NULL + origin user_origin NULL DEFAULT NULL, + role_id integer REFERENCES roles (role_id) ON DELETE SET NULL ); From 8ee8c2e9526bf77bd77658404975b8d08b34c0e0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 24 Nov 2021 19:16:04 +0100 Subject: [PATCH 132/218] feat(api): EE include role_id in the list of members --- ee/api/chalicelib/core/users.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index d8cc2a136..63946282e 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -383,7 +383,8 @@ def get_members(tenant_id): DATE_PART('day',timezone('utc'::text, now()) \ - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, basic_authentication.password IS NOT NULL AS joined, - invitation_token + invitation_token, + role_id FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.tenant_id = %(tenantId)s AND users.deleted_at IS NULL ORDER BY name, id""", From d5223e95c93b889361ea49fef1c808bb9593c815 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 24 Nov 2021 19:25:09 +0100 Subject: [PATCH 133/218] feat(api): EE protected roles --- ee/api/chalicelib/core/signup.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 1e8fbb154..9aa99f994 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -69,10 +69,10 @@ def create_step1(data): RETURNING tenant_id, api_key ), r AS ( - INSERT INTO public.roles(tenant_id, name, description, permissions) - VALUES ((SELECT tenant_id FROM t), 'Owner', 'The company''s owner', '{}'::text[]), - ((SELECT tenant_id FROM t), 'Admin', 'Admin member', '{}'::text[]), - ((SELECT tenant_id FROM t), 'Member', 'A member', '{}'::text[]) + INSERT INTO public.roles(tenant_id, name, 
description, permissions, protected) + VALUES ((SELECT tenant_id FROM t), 'Owner', 'The company''s owner', '{}'::text[], TRUE), + ((SELECT tenant_id FROM t), 'Admin', 'Admin member', '{}'::text[], TRUE), + ((SELECT tenant_id FROM t), 'Member', 'A member', '{}'::text[], TRUE) RETURNING * ), u AS ( From 18e7437411d144c51b05d0ce55e7ad2d50237c13 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 25 Nov 2021 16:52:11 +0100 Subject: [PATCH 134/218] feat(api): included insights --- ee/api/chalicelib/core/insights.py | 931 +++++++++++++++++++++++++++++ 1 file changed, 931 insertions(+) create mode 100644 ee/api/chalicelib/core/insights.py diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py new file mode 100644 index 000000000..28dbf9dfd --- /dev/null +++ b/ee/api/chalicelib/core/insights.py @@ -0,0 +1,931 @@ +from chalicelib.core import sessions_metas +from chalicelib.core.dashboard import __get_basic_constraints, __get_constraint_values +from chalicelib.utils import helper, dev +from chalicelib.utils import pg_client +from chalicelib.utils.TimeUTC import TimeUTC + + +def __transform_journey(rows): + nodes = [] + links = [] + for r in rows: + source = r["source_event"][r["source_event"].index("_") + 1:] + target = r["target_event"][r["target_event"].index("_") + 1:] + if source not in nodes: + nodes.append(source) + if target not in nodes: + nodes.append(target) + links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) + return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} + + +JOURNEY_DEPTH = 5 +JOURNEY_TYPES = { + "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"}, + "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, + # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only + "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} +} + + +@dev.timed +def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): + pg_sub_query_subset = __get_basic_constraints(project_id=project_id, data=args, duration=True, + main_table="sessions", + time_constraint=True) + event_start = None + event_table = JOURNEY_TYPES["PAGES"]["table"] + event_column = JOURNEY_TYPES["PAGES"]["column"] + event_table_id = JOURNEY_TYPES["PAGES"]["table_id"] + extra_values = {} + for f in filters: + if f["type"] == "START_POINT": + event_start = f["value"] + elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT source_event, + target_event, + count(*) AS value + + FROM (SELECT event_number || '_' || value as target_event, + LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event + FROM (SELECT value, + session_rank, + message_id, + ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number + + {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE 
NULL END as mark" + if event_start else ""} + + FROM (SELECT session_id, + message_id, + timestamp, + value, + SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank + FROM (SELECT *, + CASE + WHEN source_timestamp IS NULL THEN 1 + ELSE 0 END AS new_session + FROM (SELECT session_id, + {event_table_id} AS message_id, + timestamp, + {event_column} AS value, + LAG(timestamp) + OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp + FROM {event_table} INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_subset)} + ) AS related_events) AS ranked_events) AS processed + {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} + ) AS sorted_events + WHERE event_number <= %(JOURNEY_DEPTH)s) AS final + WHERE source_event IS NOT NULL + and target_event IS NOT NULL + GROUP BY source_event, target_event + ORDER BY value DESC + LIMIT 20;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, + **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return __transform_journey(rows) + + +def __compute_weekly_percentage(rows): + if rows is None or len(rows) == 0: + return rows + t = -1 + for r in rows: + if r["week"] == 0: + t = r["usersCount"] + r["percentage"] = r["usersCount"] / t + return rows + + +def __complete_retention(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if i < len(rows) \ + and i != rows[i]["week"]: + rows.insert(i, neutral) + elif i >= len(rows): + rows.append(neutral) + return rows + + +def __complete_acquisition(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + week = 0 + delta_date = 0 + while max_week > 0: + start_date += TimeUTC.MS_WEEK + if end_date is not None and start_date >= end_date: + break + delta = 0 + if delta_date + week >= len(rows) \ + or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + rows.insert(delta_date + week + i, neutral) + delta = i + else: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if delta_date + week + i < len(rows) \ + and i != rows[delta_date + week + i]["week"]: + rows.insert(delta_date + week + i, neutral) + elif delta_date + week + i >= len(rows): + rows.append(neutral) + delta = i + week += delta + max_week -= 1 + delta_date += 1 + return rows + + +@dev.timed +def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, 
duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts < %(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + ) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <=sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id + ) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": 
startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <= sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND 
feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week + FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND 
bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS raw_users_list) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING(session_id) + WHERE users_list.user_id = sessions.user_id + AND first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return [] + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL + GROUP BY value + ORDER BY count DESC + LIMIT 7;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + popularity = cur.fetchall() + pg_query = 
f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + frequencies = cur.fetchall() + total_usage = sum([f["count"] for f in frequencies]) + frequencies = {f["value"]: f["count"] for f in frequencies} + for p in popularity: + p["popularity"] = p.pop("count") / all_user_count + p["frequency"] = frequencies[p["value"]] / total_usage + + return popularity + + +@dev.timed +def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + adoption = cur.fetchone()["count"] / 
all_user_count + return {"target": all_user_count, "adoption": adoption, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1 + ORDER BY 2 DESC + LIMIT 10;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query_chart = __get_basic_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + 
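
Every endpoint in this module repeats the same three-way filter loop: an EVENT_TYPE filter selects a row of JOURNEY_TYPES, an EVENT_VALUE filter overrides the default target value (and clears the "use the most popular value" default), and a user-id filter appends a sessions.user_id constraint. A hypothetical helper that captures the idiom (parse_insight_filters is not part of the patch; it only makes the repeated loop explicit):

    JOURNEY_TYPES = {
        "PAGES": {"table": "events.pages", "column": "base_path"},
        "CLICK": {"table": "events.clicks", "column": "label"},
        "EVENT": {"table": "events_common.customs", "column": "name"},
    }

    def parse_insight_filters(filters, default_type="CLICK", default_value="/"):
        """Unpack the EVENT_TYPE / EVENT_VALUE / user-id filters shared by the
        insights endpoints; return the extra constraints and bind values."""
        event_type, event_value, default = default_type, default_value, True
        extra_constraints, extra_values = [], {}
        for f in filters:
            if f["type"] == "EVENT_TYPE" and f["value"] in JOURNEY_TYPES:
                event_type = f["value"]
            elif f["type"] == "EVENT_VALUE":
                event_value, default = f["value"], False
            elif f["type"] in ("USERID", "USERID_IOS"):  # sessions_metas.meta_type values
                extra_constraints.append("sessions.user_id = %(user_id)s")
                extra_values["user_id"] = f["value"]
        return event_type, event_value, default, extra_constraints, extra_values

    print(parse_insight_filters([{"type": "EVENT_VALUE", "value": "/checkout"}]))
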
extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND length({event_column})>2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(session_id), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT session_id + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"chart": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + pg_sub_query.append(f"length({event_column})>2") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value + ORDER BY avg DESC + LIMIT 7;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # TODO: 
solve full scan issue + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return rows + + +@dev.timed +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query_chart = __get_basic_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + pg_sub_query_chart.append("user_id IS NOT NULL") + period = "DAY" + extra_values = {} + for f in filters: + if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: + period = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(users), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT user_id + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp) AS chart;""" + params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, + "project_id": project_id, + "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( + startTimestamp), + "endTimestamp": endTimestamp, **__get_constraint_values(args), + **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return row_users + + +@dev.timed +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) + pg_sub_query.append("user_id IS NOT NULL") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition + FROM (SELECT number_of_days, COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days) AS day_users_partition;""" + params = {"project_id": project_id, + "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return helper.dict_to_camel_case(row_users) + + +@dev.timed +def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + 
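
users_power above buckets users by how many distinct days they connected, then takes AVG(count) over those buckets, i.e. the average number of users per bucket rather than the average number of active days. The aggregation is easy to sanity-check in plain Python with made-up (user_id, start_ts) rows:

    from collections import Counter
    from datetime import datetime, timezone

    # Made-up stand-ins for (user_id, start_ts) rows of public.sessions.
    rows = [
        ("u1", 1_637_000_000_000), ("u1", 1_637_086_400_000),  # active on two days
        ("u2", 1_637_000_000_000), ("u2", 1_637_003_600_000),  # same day, twice
    ]

    def day_of(ts_ms):
        return datetime.fromtimestamp(ts_ms / 1000, tz=timezone.utc).date()

    active_days = {}
    for user_id, ts in rows:
        active_days.setdefault(user_id, set()).add(day_of(ts))

    # number_of_days -> COUNT(user_id), mirroring the inner GROUP BY
    partition = Counter(len(days) for days in active_days.values())
    print(dict(partition))                           # {2: 1, 1: 1}
    print(sum(partition.values()) / len(partition))  # AVG(count) over buckets -> 1.0
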
elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen + FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY user_id) AS user_last_usage + INNER JOIN sessions USING (user_id) + WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000 + GROUP BY user_id, last_time,interactions_count;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "list": helper.list_to_camel_case(rows) + } + + +@dev.timed +def search(text, feature_type, project_id, platform=None): + if not feature_type: + resource_type = "ALL" + data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) + return data + + pg_sub_query = __get_basic_constraints(project_id=project_id, time_constraint=True, duration=True, + data={} if platform is None else {"platform": platform}) + + params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, + "endTimestamp": TimeUTC.now(), + "project_id": project_id, + "value": helper.string_to_sql_like(text.lower()), + "platform_0": platform} + if feature_type == "ALL": + with pg_client.PostgresClient() as cur: + sub_queries = [] + for e in JOURNEY_TYPES: + sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" + FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s + LIMIT 10)""") + pg_query = "UNION ALL".join(sub_queries) + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + elif JOURNEY_TYPES.get(feature_type) is not None: + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + FROM 
{JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s + LIMIT 10;""" + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + else: + return [] + return [helper.dict_to_camel_case(row) for row in rows] From 2189298e04ecda63c1549691a88e3025c19546d8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 25 Nov 2021 16:53:13 +0100 Subject: [PATCH 135/218] feat(api): removed insights 2 --- ee/api/chalicelib/core/insights.py | 931 ----------------------------- 1 file changed, 931 deletions(-) delete mode 100644 ee/api/chalicelib/core/insights.py diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py deleted file mode 100644 index 28dbf9dfd..000000000 --- a/ee/api/chalicelib/core/insights.py +++ /dev/null @@ -1,931 +0,0 @@ -from chalicelib.core import sessions_metas -from chalicelib.core.dashboard import __get_basic_constraints, __get_constraint_values -from chalicelib.utils import helper, dev -from chalicelib.utils import pg_client -from chalicelib.utils.TimeUTC import TimeUTC - - -def __transform_journey(rows): - nodes = [] - links = [] - for r in rows: - source = r["source_event"][r["source_event"].index("_") + 1:] - target = r["target_event"][r["target_event"].index("_") + 1:] - if source not in nodes: - nodes.append(source) - if target not in nodes: - nodes.append(target) - links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) - return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} - - -JOURNEY_DEPTH = 5 -JOURNEY_TYPES = { - "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"}, - "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, - # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only - "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} -} - - -@dev.timed -def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): - pg_sub_query_subset = __get_basic_constraints(project_id=project_id, data=args, duration=True, - main_table="sessions", - time_constraint=True) - event_start = None - event_table = JOURNEY_TYPES["PAGES"]["table"] - event_column = JOURNEY_TYPES["PAGES"]["column"] - event_table_id = JOURNEY_TYPES["PAGES"]["table_id"] - extra_values = {} - for f in filters: - if f["type"] == "START_POINT": - event_start = f["value"] - elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT source_event, - target_event, - count(*) AS value - - FROM (SELECT event_number || '_' || value as target_event, - LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event - FROM (SELECT value, - session_rank, - message_id, - ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number - - {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY 
timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark" - if event_start else ""} - - FROM (SELECT session_id, - message_id, - timestamp, - value, - SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank - FROM (SELECT *, - CASE - WHEN source_timestamp IS NULL THEN 1 - ELSE 0 END AS new_session - FROM (SELECT session_id, - {event_table_id} AS message_id, - timestamp, - {event_column} AS value, - LAG(timestamp) - OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp - FROM {event_table} INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_subset)} - ) AS related_events) AS ranked_events) AS processed - {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} - ) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE source_event IS NOT NULL - and target_event IS NOT NULL - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, - **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - - return __transform_journey(rows) - - -def __compute_weekly_percentage(rows): - if rows is None or len(rows) == 0: - return rows - t = -1 - for r in rows: - if r["week"] == 0: - t = r["usersCount"] - r["percentage"] = r["usersCount"] / t - return rows - - -def __complete_retention(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if i < len(rows) \ - and i != rows[i]["week"]: - rows.insert(i, neutral) - elif i >= len(rows): - rows.append(neutral) - return rows - - -def __complete_acquisition(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - week = 0 - delta_date = 0 - while max_week > 0: - start_date += TimeUTC.MS_WEEK - if end_date is not None and start_date >= end_date: - break - delta = 0 - if delta_date + week >= len(rows) \ - or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - rows.insert(delta_date + week + i, neutral) - delta = i - else: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if delta_date + week + i < len(rows) \ - and i != rows[delta_date + week + i]["week"]: - rows.insert(delta_date + week + i, neutral) - elif delta_date + week + i >= len(rows): - rows.append(neutral) - delta = i - week += delta - max_week -= 1 - delta_date += 1 - return rows - - -@dev.timed -def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * 
TimeUTC.MS_WEEK - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess - WHERE bsess.start_ts < %(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - LIMIT 1)) - ) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions - WHERE users_list.user_id = sessions.user_id - AND %(startTimestamp)s <=sessions.start_ts - AND sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - GROUP BY connexion_week, user_id - ) AS connexions_list ON (TRUE) - GROUP BY week - ORDER BY week;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, - FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess - WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - LIMIT 1)) - GROUP BY user_id) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions - WHERE users_list.user_id = sessions.user_id - AND first_connexion_week <= - DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY first_connexion_week, week - ORDER BY 
first_connexion_week, week;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - pg_sub_query.append(f"feature.{event_column} = %(value)s") - - with pg_client.PostgresClient() as cur: - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) - WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - AND bfeature.timestamp<%(startTimestamp)s - AND bfeature.{event_column}=%(value)s - LIMIT 1)) - GROUP BY user_id) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE users_list.user_id = sessions.user_id - AND %(startTimestamp)s <= sessions.start_ts - AND 
sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - AND feature.timestamp >= %(startTimestamp)s - AND feature.timestamp < %(endTimestamp)s - AND feature.{event_column} = %(value)s - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY week - ORDER BY week;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - pg_sub_query.append(f"feature.{event_column} = %(value)s") - - with pg_client.PostgresClient() as cur: - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, - FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, - COUNT(DISTINCT connexions_list.user_id) AS users_count, - ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users - FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week - FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week - FROM sessions INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND NOT EXISTS((SELECT 1 - FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) 
- WHERE bsess.start_ts<%(startTimestamp)s - AND project_id = %(project_id)s - AND bsess.user_id = sessions.user_id - AND bfeature.timestamp<%(startTimestamp)s - AND bfeature.{event_column}=%(value)s - LIMIT 1)) - GROUP BY user_id) AS raw_users_list) AS users_list - LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, - user_id - FROM sessions INNER JOIN {event_table} AS feature USING(session_id) - WHERE users_list.user_id = sessions.user_id - AND first_connexion_week <= - DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) - AND sessions.project_id = %(project_id)s - AND sessions.start_ts < (%(endTimestamp)s - 1) - AND feature.timestamp >= %(startTimestamp)s - AND feature.timestamp < %(endTimestamp)s - AND feature.{event_column} = %(value)s - GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -@dev.timed -def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - all_user_count = cur.fetchone()["count"] - if all_user_count == 0: - return [] - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL - GROUP BY value - ORDER BY count DESC - LIMIT 7;""" - # TODO: solve full scan - print(cur.mogrify(pg_query, params)) - print("---------------------") - 
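[annotation] The removed feature_popularity_frequency above boils down to two ratios per feature value: popularity (distinct users of the value over all identified users) and frequency (the value's session count over total usage). A minimal runnable sketch of that post-processing, with hypothetical sample rows standing in for real query output:

    # Hypothetical counts standing in for the query results used below.
    all_user_count = 200                                   # COUNT(DISTINCT user_id) over the period
    popularity = [{"value": "Sign Up", "count": 40}]       # distinct users per value (top-7 query)
    frequencies = [{"value": "Sign Up", "count": 90},      # sessions per value (frequency query)
                   {"value": "Checkout", "count": 10}]
    total_usage = sum(f["count"] for f in frequencies)     # 100
    freq_by_value = {f["value"]: f["count"] for f in frequencies}
    for p in popularity:
        p["popularity"] = p.pop("count") / all_user_count          # 40 / 200 = 0.2
        p["frequency"] = freq_by_value[p["value"]] / total_usage   # 90 / 100 = 0.9
    # -> [{'value': 'Sign Up', 'popularity': 0.2, 'frequency': 0.9}]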
cur.execute(cur.mogrify(pg_query, params)) - popularity = cur.fetchall() - pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY value;""" - # TODO: solve full scan - print(cur.mogrify(pg_query, params)) - print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - frequencies = cur.fetchall() - total_usage = sum([f["count"] for f in frequencies]) - frequencies = {f["value"]: f["count"] for f in frequencies} - for p in popularity: - p["popularity"] = p.pop("count") / all_user_count - p["frequency"] = frequencies[p["value"]] / total_usage - - return popularity - - -@dev.timed -def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - all_user_count = cur.fetchone()["count"] - if all_user_count == 0: - return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": event_value}], } - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND user_id IS NOT NULL;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") 
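[annotation] feature_adoption, removed here, shares the same shape as the other metrics: when the caller supplies no EVENT_VALUE filter, the most-used value is resolved first and pinned as %(value)s, then the main query counts the identified users who used it. A sketch of the final ratio under assumed counts — the helper name and numbers are illustrative, not part of the original module:

    def adoption_from_counts(all_user_count: int, feature_user_count: int) -> float:
        # Share of identified users who used the selected feature value.
        if all_user_count == 0:
            return 0.0
        return feature_user_count / all_user_count

    print(adoption_from_counts(200, 50))  # 0.25, reported as {"adoption": 0.25, "target": 200, ...}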
- cur.execute(cur.mogrify(pg_query, params)) - adoption = cur.fetchone()["count"] / all_user_count - return {"target": all_user_count, "adoption": adoption, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature - INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY 1 - ORDER BY 2 DESC - LIMIT 10;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - return {"users": helper.list_to_camel_case(rows), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query_chart = __get_basic_constraints(project_id=project_id, time_constraint=True, - chart=True, data=args) - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - 
pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - with pg_client.PostgresClient() as cur: - pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - AND length({event_column})>2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") - pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(COUNT(session_id), 0) AS count - FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT DISTINCT session_id - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS users ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""" - params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(cur.mogrify(pg_query, params)) - print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - return {"chart": helper.list_to_camel_case(rows), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -@dev.timed -def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - pg_sub_query.append(f"length({event_column})>2") - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY value - ORDER BY avg DESC - LIMIT 7;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": 
endTimestamp, **__get_constraint_values(args), **extra_values} - # TODO: solve full scan issue - print(cur.mogrify(pg_query, params)) - print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - - return rows - - -@dev.timed -def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], - **args): - pg_sub_query_chart = __get_basic_constraints(project_id=project_id, time_constraint=True, - chart=True, data=args) - - pg_sub_query_chart.append("user_id IS NOT NULL") - period = "DAY" - extra_values = {} - for f in filters: - if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: - period = f["value"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart - FROM (SELECT generated_timestamp AS timestamp, - COALESCE(COUNT(users), 0) AS count - FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT DISTINCT user_id - FROM public.sessions - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS users ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp) AS chart;""" - params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, - "project_id": project_id, - "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( - startTimestamp), - "endTimestamp": endTimestamp, **__get_constraint_values(args), - **extra_values} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - row_users = cur.fetchone() - - return row_users - - -@dev.timed -def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) - pg_sub_query.append("user_id IS NOT NULL") - - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition - FROM (SELECT number_of_days, COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days - FROM sessions - WHERE {" AND ".join(pg_sub_query)} - GROUP BY 1) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days) AS day_users_partition;""" - params = {"project_id": project_id, - "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(cur.mogrify(pg_query, params)) - # print("---------------------") - cur.execute(cur.mogrify(pg_query, params)) - row_users = cur.fetchone() - - return helper.dict_to_camel_case(row_users) - - -@dev.timed -def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - pg_sub_query = __get_basic_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", - time_constraint=True) - pg_sub_query.append("user_id IS NOT NULL") - pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") - pg_sub_query.append("feature.timestamp < %(endTimestamp)s") - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == 
"EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: - pg_sub_query.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - pg_sub_query.append(f"feature.{event_column} = %(value)s") - - with pg_client.PostgresClient() as cur: - if default: - # get most used value - pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query[:-1])} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - cur.execute(cur.mogrify(pg_query, params)) - row = cur.fetchone() - if row is not None: - event_value = row["value"] - extra_values["value"] = event_value - if len(event_value) > 2: - pg_sub_query.append(f"length({event_column})>2") - pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen - FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count - FROM {event_table} AS feature INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)} - GROUP BY user_id) AS user_last_usage - INNER JOIN sessions USING (user_id) - WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000 - GROUP BY user_id, last_time,interactions_count;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "list": helper.list_to_camel_case(rows) - } - - -@dev.timed -def search(text, feature_type, project_id, platform=None): - if not feature_type: - resource_type = "ALL" - data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) - return data - - pg_sub_query = __get_basic_constraints(project_id=project_id, time_constraint=True, duration=True, - data={} if platform is None else {"platform": platform}) - - params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, - "endTimestamp": TimeUTC.now(), - "project_id": project_id, - "value": helper.string_to_sql_like(text.lower()), - "platform_0": platform} - if feature_type == "ALL": - with pg_client.PostgresClient() as cur: - sub_queries = [] - for e in JOURNEY_TYPES: - sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" - FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) - WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s - LIMIT 10)""") - pg_query = "UNION ALL".join(sub_queries) - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - elif JOURNEY_TYPES.get(feature_type) is not None: - with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT DISTINCT 
{JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" - FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) - WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s - LIMIT 10;""" - # print(cur.mogrify(pg_query, params)) - cur.execute(cur.mogrify(pg_query, params)) - rows = cur.fetchall() - else: - return [] - return [helper.dict_to_camel_case(row) for row in rows] From 3992fd2a23120f7378d2f3feb4514314cbc3bdeb Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 25 Nov 2021 22:52:44 +0100 Subject: [PATCH 136/218] fix (backend): separate project-by-key cache from project-by-id one --- backend/pkg/db/cache/project.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/backend/pkg/db/cache/project.go b/backend/pkg/db/cache/project.go index 6a7739bdd..70349a407 100644 --- a/backend/pkg/db/cache/project.go +++ b/backend/pkg/db/cache/project.go @@ -1,6 +1,7 @@ package cache import ( + "log" "time" . "openreplay/backend/pkg/db/types" ) @@ -19,8 +20,8 @@ func (c *PGCache) GetProjectByKey(projectKey string) (*Project, error) { if err != nil { return nil, err } - c.projects[ p.ProjectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projectsByKeys.Store(projectKey, c.projects[ p.ProjectID ]) + //c.projects[ p.ProjectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } + c.projectsByKeys.Store(projectKey, p) return p, nil } @@ -36,7 +37,7 @@ func (c *PGCache) GetProject(projectID uint32) (*Project, error) { return nil, err } c.projects[ projectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) } - c.projectsByKeys.Store(p.ProjectKey, c.projects[ projectID ]) + //c.projectsByKeys.Store(p.ProjectKey, c.projects[ projectID ]) return p, nil } From c1d760904c03cddad02852348e8a4cb2f66aa190 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 21 Oct 2021 22:07:17 +0530 Subject: [PATCH 137/218] chore(template): update vars_template Signed-off-by: Rajesh Rajendran --- scripts/helm/vars_template.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/helm/vars_template.yaml b/scripts/helm/vars_template.yaml index 766ed6a02..130968e89 100644 --- a/scripts/helm/vars_template.yaml +++ b/scripts/helm/vars_template.yaml @@ -90,6 +90,9 @@ db_resource_override: ## Sane defaults s3_endpoint: "{{ s3_endpoint }}" aws_region: "{{ aws_region }}" +assets_bucket: "{{ assets_bucket }}" +recordings_bucket: "{{ recordings_bucket }}" +sourcemaps_bucket: "{{ sourcemaps_bucket }}" kafka_endpoint: "{{ kafka_endpoint }}" kafka_ssl: "{{ kafka_ssl }}" postgres_endpoint: "{{ postgres_endpoint }}" From c59d0da5f140049cbcd0a4e449dd31700e6f113a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 17:26:40 +0100 Subject: [PATCH 138/218] feat(api): fixed invitation feat(api): fixed missing role_id --- ee/api/chalicelib/blueprints/bp_core_dynamic.py | 2 +- ee/api/chalicelib/core/users.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 8ce7d9bc2..bc2a9c5ef 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -386,7 +386,7 @@ def change_password_by_invitation(): data = app.current_request.json_body if data is None or len(data.get("invitation", "")) < 64 or len(data.get("pass", "")) < 8: return {"errors": ["please provide a valid invitation & 
pass"]} - user = users.get_by_invitation_token(token=data["token"], pass_token=data["pass"]) + user = users.get_by_invitation_token(token=data["invitation"], pass_token=data["pass"]) if user is None: return {"errors": ["invitation not found"]} if user["expiredChange"]: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 63946282e..3b8f2dbb9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -183,7 +183,8 @@ def update(tenant_id, user_id, changes): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - users.appearance;""", + users.appearance, + users.role_id;""", {"tenant_id": tenant_id, "user_id": user_id, **changes}) ) @@ -257,7 +258,8 @@ def get(user_id, tenant_id): (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, appearance, api_key, - origin + origin, + role_id FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.user_id = %(userId)s @@ -556,7 +558,7 @@ def get_by_invitation_token(token, pass_token=None): FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE invitation_token = %(token)s {"AND change_pwd_token = %(pass_token)s" if pass_token else ""} LIMIT 1;""", - {"token": token, "pass_token": token}) + {"token": token, "pass_token": pass_token}) ) r = cur.fetchone() return helper.dict_to_camel_case(r) From a055fa139bd89fed6342e64525fd3a523659fdf9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 17:50:45 +0100 Subject: [PATCH 139/218] feat(api): include role's details in the list of members --- ee/api/chalicelib/core/users.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 3b8f2dbb9..09c5dcd3d 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -386,8 +386,12 @@ def get_members(tenant_id): - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, basic_authentication.password IS NOT NULL AS joined, invitation_token, - role_id - FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + role_id, + roles.name AS role_name, + roles.permissions + FROM public.users + LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) WHERE users.tenant_id = %(tenantId)s AND users.deleted_at IS NULL ORDER BY name, id""", {"tenantId": tenant_id}) From 12588a97668bad1f3b575a651367bdaff33b5a20 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 18:07:52 +0100 Subject: [PATCH 140/218] feat(api): include permissions in /login and /account --- ee/api/chalicelib/core/users.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 09c5dcd3d..a113c22a4 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -259,8 +259,11 @@ def get(user_id, tenant_id): appearance, api_key, origin, - role_id - FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + role_id, + roles.name AS role_name, + roles.permissions + FROM public.users LEFT JOIN public.basic_authentication ON 
users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) WHERE users.user_id = %(userId)s AND tenant_id = %(tenantId)s @@ -387,8 +390,7 @@ def get_members(tenant_id): basic_authentication.password IS NOT NULL AS joined, invitation_token, role_id, - roles.name AS role_name, - roles.permissions + roles.name AS role_name FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) @@ -612,8 +614,11 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, users.appearance, users.origin, - users.role_id + users.role_id, + roles.name AS role_name, + roles.permissions FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id) + LEFT JOIN public.roles USING (role_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1) From 21a9d28bb814cfd56326d14d320b093b4194000b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 18:13:22 +0100 Subject: [PATCH 141/218] feat(api): fixed /accounts --- ee/api/chalicelib/core/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index a113c22a4..ffbb56520 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -251,7 +251,7 @@ def get(user_id, tenant_id): users.user_id AS id, email, role, - name, + users.name, basic_authentication.generated_password, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, From b5e75c6f2e1bda65f799530de3acbb8a649e8d2d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 18:20:58 +0100 Subject: [PATCH 142/218] feat(api): fixed /accounts ambiguous tenant_id --- ee/api/chalicelib/core/users.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index ffbb56520..0f3254b71 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -266,8 +266,10 @@ def get(user_id, tenant_id): LEFT JOIN public.roles USING (role_id) WHERE users.user_id = %(userId)s - AND tenant_id = %(tenantId)s - AND deleted_at IS NULL + AND users.tenant_id = %(tenantId)s + AND roles.tenant_id = %(tenantId)s + AND users.deleted_at IS NULL + AND (roles.role_id IS NULL or roles.deleted_at IS NULL) LIMIT 1;""", {"userId": user_id, "tenantId": tenant_id}) ) From 90d58a99721960853609be4822a586768ef4f695 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 18:52:56 +0100 Subject: [PATCH 143/218] feat(api): fixed null role --- ee/api/chalicelib/core/users.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 0f3254b71..5ba613875 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -267,9 +267,8 @@ def get(user_id, tenant_id): WHERE users.user_id = %(userId)s AND users.tenant_id = %(tenantId)s - AND roles.tenant_id = %(tenantId)s AND users.deleted_at IS NULL - AND (roles.role_id IS NULL or roles.deleted_at IS NULL) + AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND 
roles.tenant_id = %(tenantId)s) LIMIT 1;""", {"userId": user_id, "tenantId": tenant_id}) ) @@ -624,6 +623,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1) + AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenantId)s) LIMIT 1;""", {"email": email, "password": password}) From 40e894cf80f8e0068cdcd203196ac40e0a6aeff3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 19:06:01 +0100 Subject: [PATCH 144/218] feat(api): fixed create role payload check --- ee/api/chalicelib/blueprints/bp_ee.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index 76272ec2c..b32bd6f52 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -20,7 +20,8 @@ def get_roles(context): @app.route('/client/roles', methods=['POST', 'PUT']) def add_role(context): data = app.current_request.json_body - data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], **data) + data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], name=data["name"], + description=data.get("description"), permissions=data["permissions"]) if "errors" in data: return data From dd52556f141b8f52c9bb9ced559aa687bb741fed Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 26 Nov 2021 19:30:19 +0100 Subject: [PATCH 145/218] feat(api): changed create&update role --- ee/api/chalicelib/core/roles.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py index d5c4fb7da..0a32d9d5e 100644 --- a/ee/api/chalicelib/core/roles.py +++ b/ee/api/chalicelib/core/roles.py @@ -24,10 +24,13 @@ def update(tenant_id, user_id, role_id, changes): WHERE role_id = %(role_id)s AND tenant_id = %(tenant_id)s AND deleted_at ISNULL - AND protected = FALSE;""", + AND protected = FALSE + RETURNING *;""", {"tenant_id": tenant_id, "role_id": role_id, **changes}) ) - return get_roles(tenant_id=tenant_id) + row = cur.fetchone() + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) def create(tenant_id, user_id, name, description, permissions): @@ -39,10 +42,13 @@ def create(tenant_id, user_id, name, description, permissions): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions) - VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[]);""", + VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[]) + RETURNING *;""", {"tenant_id": tenant_id, "name": name, "description": description, "permissions": permissions}) ) - return get_roles(tenant_id=tenant_id) + row=cur.fetchone() + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) def get_roles(tenant_id): From 941c6c06fd800c516f460c1696a5a1840bf2ce26 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Sat, 27 Nov 2021 03:27:33 +0530 Subject: [PATCH 146/218] Roles UI (#223) * feat(ui) - roles and permissions * feat(ui) - roles and permissions assist check * feat(ui) - roles and permissions dev tools * 
feat(ui) - roles and permissions logs * feat(ui) - roles and permissions logs * feat(ui) - roles and permissions cleanup --- .../AssistActions/AssistActions.tsx | 17 +- .../LiveSessionList/LiveSessionList.tsx | 16 +- frontend/app/components/Client/Client.js | 2 + .../Client/ManageUsers/ManageUsers.js | 166 ++++++++++-------- .../Client/PreferencesMenu/PreferencesMenu.js | 16 +- .../app/components/Client/Roles/Roles.tsx | 109 ++++++++++++ .../components/Permissions/Permissions.tsx | 15 ++ .../Roles/components/Permissions/index.ts | 1 + .../Roles/components/RoleForm/RoleForm.tsx | 100 +++++++++++ .../Client/Roles/components/RoleForm/index.ts | 1 + .../Roles/components/RoleForm/roleForm.css | 21 +++ .../Roles/components/RoleItem/RoleItem.tsx | 33 ++++ .../Client/Roles/components/RoleItem/index.ts | 1 + .../Roles/components/RoleItem/roleItem.css | 37 ++++ frontend/app/components/Client/Roles/index.ts | 1 + .../app/components/Client/Roles/roles.css | 13 ++ .../app/components/Dashboard/Dashboard.js | 2 + frontend/app/components/Errors/Errors.js | 2 + .../Funnels/FunnelHeader/FunnelDropdown.js | 1 - .../Session_/Player/Controls/Controls.js | 17 +- .../app/components/hocs/withPermissions.js | 15 ++ .../ui/NoPermission/NoPermission.tsx | 15 ++ .../app/components/ui/NoPermission/index.ts | 1 + .../ui/NoPermission/noPermission.css | 59 +++++++ frontend/app/components/ui/index.js | 1 + frontend/app/duck/index.js | 2 + frontend/app/duck/roles.js | 32 ++++ frontend/app/routes.js | 1 + frontend/app/svg/icons/shield-lock.svg | 4 + frontend/app/types/account/account.js | 1 + frontend/app/types/client/client.js | 3 +- frontend/app/types/member.js | 3 +- frontend/app/types/role.js | 30 ++++ frontend/tsconfig.json | 3 + 34 files changed, 648 insertions(+), 93 deletions(-) create mode 100644 frontend/app/components/Client/Roles/Roles.tsx create mode 100644 frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx create mode 100644 frontend/app/components/Client/Roles/components/Permissions/index.ts create mode 100644 frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx create mode 100644 frontend/app/components/Client/Roles/components/RoleForm/index.ts create mode 100644 frontend/app/components/Client/Roles/components/RoleForm/roleForm.css create mode 100644 frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx create mode 100644 frontend/app/components/Client/Roles/components/RoleItem/index.ts create mode 100644 frontend/app/components/Client/Roles/components/RoleItem/roleItem.css create mode 100644 frontend/app/components/Client/Roles/index.ts create mode 100644 frontend/app/components/Client/Roles/roles.css create mode 100644 frontend/app/components/hocs/withPermissions.js create mode 100644 frontend/app/components/ui/NoPermission/NoPermission.tsx create mode 100644 frontend/app/components/ui/NoPermission/index.ts create mode 100644 frontend/app/components/ui/NoPermission/noPermission.css create mode 100644 frontend/app/duck/roles.js create mode 100644 frontend/app/svg/icons/shield-lock.svg create mode 100644 frontend/app/types/role.js diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index 7d4d5526a..eae41c44e 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -32,9 +32,11 @@ interface Props { calling: CallingState, 
peerConnectionStatus: ConnectionStatus, remoteControlEnabled: boolean, + hasPermission: boolean, + isEnterprise: boolean, } -function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled }: Props) { +function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled, hasPermission, isEnterprise }: Props) { const [ incomeStream, setIncomeStream ] = useState(null); const [ localStream, setLocalStream ] = useState(null); const [ callObject, setCallObject ] = useState<{ end: ()=>void, toggleRemoteControl: ()=>void } | null >(null); @@ -64,6 +66,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus } const inCall = calling !== CallingState.False; + const cannotCall = (peerConnectionStatus !== ConnectionStatus.Connected) || (isEnterprise && !hasPermission) return (
@@ -73,8 +76,8 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus className={ cn( 'cursor-pointer p-2 mr-2 flex items-center', - {[stl.inCall] : inCall }, - {[stl.disabled]: peerConnectionStatus !== ConnectionStatus.Connected} + // {[stl.inCall] : inCall }, + {[stl.disabled]: cannotCall} ) } onClick={ inCall ? callObject?.end : call} @@ -118,7 +121,13 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus ) } -const con = connect(null, { toggleChatWindow }) +const con = connect(state => { + const permissions = state.getIn([ 'user', 'account', 'permissions' ]) || [] + return { + hasPermission: permissions.includes('ASSIST_CALL'), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', + } +}, { toggleChatWindow }) export default con(connectPlayer(state => ({ calling: state.calling, diff --git a/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx index 0fc99b3ac..68176afbf 100644 --- a/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/BugFinder/LiveSessionList/LiveSessionList.tsx @@ -4,6 +4,7 @@ import { connect } from 'react-redux'; import { NoContent, Loader } from 'UI'; import { List, Map } from 'immutable'; import SessionItem from 'Shared/SessionItem'; +import withPermissions from 'HOCs/withPermissions' const AUTOREFRESH_INTERVAL = 1 * 60 * 1000 @@ -60,8 +61,13 @@ function LiveSessionList(props: Props) { ) } -export default connect(state => ({ - list: state.getIn(['sessions', 'liveSessions']), - loading: state.getIn([ 'sessions', 'loading' ]), - filters: state.getIn([ 'filters', 'appliedFilter' ]), -}), { fetchList })(LiveSessionList) +export default withPermissions(['ASSIST_LIVE'])(connect( + (state) => ({ + list: state.getIn(['sessions', 'liveSessions']), + loading: state.getIn([ 'sessions', 'loading' ]), + filters: state.getIn([ 'filters', 'appliedFilter' ]), + }), + { + fetchList + } +)(LiveSessionList)); diff --git a/frontend/app/components/Client/Client.js b/frontend/app/components/Client/Client.js index f8a3ee0c8..6cae36710 100644 --- a/frontend/app/components/Client/Client.js +++ b/frontend/app/components/Client/Client.js @@ -15,6 +15,7 @@ import styles from './client.css'; import cn from 'classnames'; import PreferencesMenu from './PreferencesMenu'; import Notifications from './Notifications'; +import Roles from './Roles'; @connect((state) => ({ appearance: state.getIn([ 'user', 'account', 'appearance' ]), @@ -42,6 +43,7 @@ export default class Client extends React.PureComponent { + ) diff --git a/frontend/app/components/Client/ManageUsers/ManageUsers.js b/frontend/app/components/Client/ManageUsers/ManageUsers.js index 0d49d5319..4dffc94c8 100644 --- a/frontend/app/components/Client/ManageUsers/ManageUsers.js +++ b/frontend/app/components/Client/ManageUsers/ManageUsers.js @@ -1,7 +1,9 @@ import { connect } from 'react-redux'; import cn from 'classnames'; import withPageTitle from 'HOCs/withPageTitle'; -import { IconButton, SlideModal, Input, Button, Loader, NoContent, Popup, CopyButton } from 'UI'; +import { + IconButton, SlideModal, Input, Button, Loader, + NoContent, Popup, CopyButton, Dropdown } from 'UI'; import { init, save, edit, remove as deleteMember, fetchList, generateInviteLink } from 'Duck/member'; import styles from './manageUsers.css'; import UserItem from './UserItem'; @@ -19,6 +21,7 @@ const LIMIT_WARNING = 'You have reached users limit.'; errors: 
state.getIn([ 'members', 'saveRequest', 'errors' ]), loading: state.getIn([ 'members', 'loading' ]), saving: state.getIn([ 'members', 'saveRequest', 'loading' ]), + roles: state.getIn(['roles', 'list']) }), { init, save, @@ -31,6 +34,7 @@ const LIMIT_WARNING = 'You have reached users limit.'; class ManageUsers extends React.PureComponent { state = { showModal: false, remaining: this.props.account.limits.teamMember.remaining, invited: false } + // writeOption = (e, { name, value }) => this.props.edit({ [ name ]: value }); onChange = (e, { name, value }) => this.props.edit({ [ name ]: value }); onChangeCheckbox = ({ target: { checked, name } }) => this.props.edit({ [ name ]: checked }); setFocus = () => this.focusElement.focus(); @@ -76,81 +80,97 @@ class ManageUsers extends React.PureComponent { }); } - formContent = (member, account) => ( -
-
-
- - { this.focusElement = ref; } } - name="name" - value={ member.name } - onChange={ this.onChange } - className={ styles.input } - id="name-field" - /> -
+ formContent = (member, account, roles) => { + const options = roles.map(r => ({ text: r.name, value: r.roleId })).toJS(); -
- - -
- { !account.smtp && -
- SMTP is not configured. Please follow (see here how to set it up). You can still add new users, but you’d have to manually copy then send them the invitation link. -
- } -
-
-
-
- - +
+ + +
+ { !account.smtp && +
+              SMTP is not configured. Please follow (see here how to set it up). You can still add new users, but you’d have to manually copy and then send them the invitation link.
+ } +
+ +
{ 'Can manage Projects and team members.' }
+
+ +
+ + +
+ + +
+
+ + +
+ { !member.joined && member.invitationLink && + + }
- { !member.joined && member.invitationLink && - - }
-
-  )
+    )
+  }
 
   init = (v) => {
     this.props.init(v);
@@ -160,7 +180,7 @@ class ManageUsers extends React.PureComponent {
 
   render() {
     const {
-      members, member, loading, account, hideHeader = false,
+      members, member, loading, account, hideHeader = false, roles
     } = this.props;
     const { showModal, remaining, invited } = this.state;
     const isAdmin = account.admin || account.superAdmin;
@@ -173,7 +193,7 @@ class ManageUsers extends React.PureComponent {
           title="Invite People"
           size="small"
           isDisplayed={ showModal }
-          content={ this.formContent(member, account) }
+          content={ this.formContent(member, account, roles) }
           onClose={ this.closeModal }
         />
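For context outside the diff: `roles` is an Immutable.js List delivered by the new roles duck, while the Dropdown in `formContent` wants plain `{ text, value }` objects. A minimal sketch of the conversion the patch performs with `.toJS()` (illustrative only — `RoleLike` is an assumed shape, not a type from this codebase):

    // Illustrative sketch, not part of the patch.
    import { List } from 'immutable';

    interface RoleLike {
      roleId: number;
      name: string;
    }

    function roleOptions(roles: List<RoleLike>): Array<{ text: string; value: number }> {
      // .map() yields another Immutable.List; .toArray() (or .toJS(), as the
      // patch does) unwraps it into a plain array for the Dropdown's options prop.
      return roles.map(r => ({ text: r.name, value: r.roleId })).toArray();
    }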
diff --git a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js index 4a1ff3f1d..f139afbe9 100644 --- a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js +++ b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js @@ -6,7 +6,7 @@ import stl from './preferencesMenu.css'; import { CLIENT_TABS, client as clientRoute } from 'App/routes'; import { withRouter } from 'react-router-dom'; -function PreferencesMenu({ activeTab, appearance, history }) { +function PreferencesMenu({ activeTab, appearance, history, isEnterprise }) { const setTab = (tab) => { history.push(clientRoute(tab)); @@ -76,7 +76,18 @@ function PreferencesMenu({ activeTab, appearance, history }) { iconName="users" onClick={() => setTab(CLIENT_TABS.MANAGE_USERS) } /> -
+
+ + { isEnterprise && ( +
+ setTab(CLIENT_TABS.MANAGE_ROLES) } + /> +
+ )}
({ appearance: state.getIn([ 'user', 'account', 'appearance' ]), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', }))(withRouter(PreferencesMenu)); diff --git a/frontend/app/components/Client/Roles/Roles.tsx b/frontend/app/components/Client/Roles/Roles.tsx new file mode 100644 index 000000000..9198a7532 --- /dev/null +++ b/frontend/app/components/Client/Roles/Roles.tsx @@ -0,0 +1,109 @@ +import React, { useState, useEffect } from 'react' +import cn from 'classnames' +import { Loader, IconButton, Popup, NoContent, SlideModal } from 'UI' +import { connect } from 'react-redux' +import stl from './roles.css' +import RoleForm from './components/RoleForm' +import { init, edit, fetchList, remove as deleteRole } from 'Duck/roles'; +import RoleItem from './components/RoleItem' +import { confirm } from 'UI/Confirmation'; + +interface Props { + loading: boolean + init: (role?: any) => void, + edit: (role: any) => void, + instance: any, + roles: any[], + deleteRole: (id: any) => void, + fetchList: () => Promise, +} + +function Roles(props: Props) { + const { loading, instance, roles, init, edit, deleteRole } = props + const [showModal, setShowmModal] = useState(false) + + useEffect(() => { + props.fetchList() + }, []) + + const closeModal = () => { + setShowmModal(false) + setTimeout(() => { + init() + }, 100) + } + + const editHandler = role => { + init(role) + setShowmModal(true) + } + + const deleteHandler = async (role) => { + if (await confirm({ + header: 'Roles', + confirmation: `Are you sure you want to remove this role?` + })) { + deleteRole(role.roleId) + } + } + + return ( + + + } + onClose={ closeModal } + /> +
+
+
+

Manage Roles and Permissions

+ + setShowmModal(true) } + /> +
+ } + size="tiny" + inverted + position="top left" + /> +
+
+ + +
+ {roles.map(role => ( + + ))} +
+
+
+ + + ) +} + +export default connect(state => ({ + instance: state.getIn(['roles', 'instance']) || null, + roles: state.getIn(['roles', 'list']), + loading: state.getIn(['roles', 'fetchRequest', 'loading']), +}), { init, edit, fetchList, deleteRole })(Roles) \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx b/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx new file mode 100644 index 000000000..0dd56dfd9 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/Permissions/Permissions.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import Role from 'Types/role' + +interface Props { + role: Role +} +function Permissions(props: Props) { + return ( +
+ +
+ ); +} + +export default Permissions; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/Permissions/index.ts b/frontend/app/components/Client/Roles/components/Permissions/index.ts new file mode 100644 index 000000000..659544a53 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/Permissions/index.ts @@ -0,0 +1 @@ +export { default } from './Permissions'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx new file mode 100644 index 000000000..d12b60269 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx @@ -0,0 +1,100 @@ +import React, { useRef, useEffect } from 'react' +import { connect } from 'react-redux' +import stl from './roleForm.css' +import { save, edit } from 'Duck/roles' +import { Input, Button, Checkbox } from 'UI' + +interface Permission { + name: string, + value: string +} + +interface Props { + role: any, + edit: (role: any) => void, + save: (role: any) => Promise, + closeModal: () => void, + saving: boolean, + permissions: Array[] +} + +const RoleForm = ({ role, closeModal, edit, save, saving, permissions }: Props) => { + let focusElement = useRef(null) + const _save = () => { + save(role).then(() => { + closeModal() + }) + } + + const write = ({ target: { value, name } }) => edit({ [ name ]: value }) + + const onChangeOption = (e) => { + const { permissions } = role + const index = permissions.indexOf(e) + const _perms = permissions.contains(e) ? permissions.remove(index) : permissions.push(e) + edit({ permissions: _perms }) + } + + useEffect(() => { + focusElement && focusElement.current && focusElement.current.focus() + }, []) + + return ( +
+
+
+ + +
+ +
+ { permissions.map((permission: any, index) => ( +
+ onChangeOption(permission.value) } + label={permission.name} + /> +
+ ))} +
+
+ +
+
+ + +
+
+
+ ); +} + +export default connect(state => ({ + role: state.getIn(['roles', 'instance']), + permissions: state.getIn(['roles', 'permissions']), +}), { edit, save })(RoleForm); \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/index.ts b/frontend/app/components/Client/Roles/components/RoleForm/index.ts new file mode 100644 index 000000000..3bb62ee58 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/index.ts @@ -0,0 +1 @@ +export { default } from './RoleForm'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css b/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css new file mode 100644 index 000000000..a0c5934c8 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleForm/roleForm.css @@ -0,0 +1,21 @@ +.form { + padding: 0 20px; + + & .formGroup { + margin-bottom: 15px; + } + & label { + display: block; + margin-bottom: 5px; + font-weight: 500; + } + + & .input { + width: 100%; + } + + & input[type=checkbox] { + margin-right: 10px; + height: 13px; + } +} \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx new file mode 100644 index 000000000..a242ea6f2 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx @@ -0,0 +1,33 @@ +import React from 'react'; +import { Icon } from 'UI' +import stl from './roleItem.css' +import cn from 'classnames' + +interface Props { + role: any, + deleteHandler?: (role: any) => void, + editHandler?: (role: any) => void, +} +function RoleItem({ role, deleteHandler, editHandler }: Props) { + return ( +
+ + { role.name } + +
+ { !!deleteHandler && +
deleteHandler(role) } id="trash"> + +
+ } + { !!editHandler && +
editHandler(role) }> + +
+ } +
+
+ ); +} + +export default RoleItem; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/index.ts b/frontend/app/components/Client/Roles/components/RoleItem/index.ts new file mode 100644 index 000000000..645d37fd1 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/index.ts @@ -0,0 +1 @@ +export { default } from './RoleItem' \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css new file mode 100644 index 000000000..50a56afb4 --- /dev/null +++ b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css @@ -0,0 +1,37 @@ +.wrapper { + display: flex; + align-items: center; + width: 100%; + border-bottom: solid thin #e6e6e6; + padding: 10px 0px; +} + +.actions { + margin-left: auto; + /* opacity: 0; */ + transition: all 0.4s; + display: flex; + align-items: center; + & .button { + padding: 5px; + cursor: pointer; + margin-left: 10px; + display: flex; + align-items: center; + justify-content: center; + &:hover { + & svg { + fill: $teal-dark; + } + } + &.disabled { + pointer-events: none; + opacity: 0.5; + } + } + + & .disabled { + pointer-events: none; + opacity: 0.5; + } +} \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/index.ts b/frontend/app/components/Client/Roles/index.ts new file mode 100644 index 000000000..9e6fe3912 --- /dev/null +++ b/frontend/app/components/Client/Roles/index.ts @@ -0,0 +1 @@ +export { default } from './Roles'; \ No newline at end of file diff --git a/frontend/app/components/Client/Roles/roles.css b/frontend/app/components/Client/Roles/roles.css new file mode 100644 index 000000000..819111686 --- /dev/null +++ b/frontend/app/components/Client/Roles/roles.css @@ -0,0 +1,13 @@ +.wrapper { + padding: 0; +} +.tabHeader { + display: flex; + align-items: center; + margin-bottom: 25px; + + & .tabTitle { + margin: 0 15px 0 0; + font-weight: 400 !important; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Dashboard.js b/frontend/app/components/Dashboard/Dashboard.js index ffbc87b41..2e5cba630 100644 --- a/frontend/app/components/Dashboard/Dashboard.js +++ b/frontend/app/components/Dashboard/Dashboard.js @@ -1,6 +1,7 @@ import { connect } from 'react-redux'; import cn from 'classnames'; import withPageTitle from 'HOCs/withPageTitle'; +import withPermissions from 'HOCs/withPermissions' import { setPeriod, setPlatform, fetchMetadataOptions } from 'Duck/dashboard'; import { NoContent } from 'UI'; import { WIDGET_KEYS } from 'Types/dashboard'; @@ -103,6 +104,7 @@ function isInViewport(el) { ); } +@withPermissions(['METRICS'], 'page-margin container-90') @connect(state => ({ period: state.getIn([ 'dashboard', 'period' ]), comparing: state.getIn([ 'dashboard', 'comparing' ]), diff --git a/frontend/app/components/Errors/Errors.js b/frontend/app/components/Errors/Errors.js index b4c5fce92..f9e7b5c9b 100644 --- a/frontend/app/components/Errors/Errors.js +++ b/frontend/app/components/Errors/Errors.js @@ -1,5 +1,6 @@ import { connect } from 'react-redux'; import withSiteIdRouter from 'HOCs/withSiteIdRouter'; +import withPermissions from 'HOCs/withPermissions' import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo"; import { getRE } from 'App/utils'; import { fetchBookmarks } from "Duck/errors"; @@ -33,6 +34,7 @@ function getStatusLabel(status) { } } +@withPermissions(['ERRORS'], 'page-margin container-90') 
@withSiteIdRouter @connect(state => ({ list: state.getIn([ "errors", "list" ]), diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js index 88715c174..87f7983b7 100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js @@ -9,7 +9,6 @@ function FunnelDropdown(props) { const writeOption = (e, { name, value }) => { const { siteId, history } = props; - console.log(value) history.push(withSiteId(funnelRoute(parseInt(value)), siteId)); } diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index 266cf1cd1..e3e3bcc7a 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -97,12 +97,17 @@ function getStorageName(type) { showExceptions: state.exceptionsList.length > 0, showLongtasks: state.longtasksList.length > 0, })) -@connect((state, props) => ({ - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), - bottomBlock: state.getIn([ 'components', 'player', 'bottomBlock' ]), - showStorage: props.showStorage || !state.getIn(['components', 'player', 'hiddenHints', 'storage']), - showStack: props.showStack || !state.getIn(['components', 'player', 'hiddenHints', 'stack']), -}), { +@connect((state, props) => { + const permissions = state.getIn([ 'user', 'account', 'permissions' ]) || []; + const isEnterprise = state.getIn([ 'user', 'client', 'edition' ]) === 'ee'; + return { + disabled: props.disabled || (isEnterprise && !permissions.includes('DEV_TOOLS')), + fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), + bottomBlock: state.getIn([ 'components', 'player', 'bottomBlock' ]), + showStorage: props.showStorage || !state.getIn(['components', 'player', 'hiddenHints', 'storage']), + showStack: props.showStack || !state.getIn(['components', 'player', 'hiddenHints', 'stack']), + } +}, { fullscreenOn, fullscreenOff, toggleBottomBlock, diff --git a/frontend/app/components/hocs/withPermissions.js b/frontend/app/components/hocs/withPermissions.js new file mode 100644 index 000000000..c7a48609c --- /dev/null +++ b/frontend/app/components/hocs/withPermissions.js @@ -0,0 +1,15 @@ +import { connect } from 'react-redux'; +import { NoPermission } from 'UI'; + +export default (requiredPermissions, className) => BaseComponent => +@connect((state, props) => ({ + permissions: state.getIn([ 'user', 'account', 'permissions' ]), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', +})) +class extends React.PureComponent { + render() { + const hasPermission = this.props.permissions.some(permission => requiredPermissions.includes(permission)); + + return !this.props.isEnterprise || hasPermission ? :
+ } +} \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/NoPermission.tsx b/frontend/app/components/ui/NoPermission/NoPermission.tsx new file mode 100644 index 000000000..eaf43d4aa --- /dev/null +++ b/frontend/app/components/ui/NoPermission/NoPermission.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import stl from './noPermission.css' +import { Icon } from 'UI'; + +function NoPermission(props) { + return ( +
+ +
Not allowed
+ You don’t have the necessary permissions to access this feature. Please check with your admin. +
+ ); +} + +export default NoPermission; \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/index.ts b/frontend/app/components/ui/NoPermission/index.ts new file mode 100644 index 000000000..c826daf1d --- /dev/null +++ b/frontend/app/components/ui/NoPermission/index.ts @@ -0,0 +1 @@ +export { default } from './NoPermission'; \ No newline at end of file diff --git a/frontend/app/components/ui/NoPermission/noPermission.css b/frontend/app/components/ui/NoPermission/noPermission.css new file mode 100644 index 000000000..f4296757c --- /dev/null +++ b/frontend/app/components/ui/NoPermission/noPermission.css @@ -0,0 +1,59 @@ +.wrapper { + margin: auto; + width: 100%; + text-align: center; + min-height: 100px; + display: flex; + align-items: center; + flex-direction: column; + justify-content: center; + color: $gray-medium; + font-weight: 300; + transition: all 0.2s; + padding-top: 40px; + + &.small { + & .title { + font-size: 20px !important; + } + + & .subtext { + font-size: 16px; + } + } +} + +.title { + font-size: 32px; + margin-bottom: 15px; +} + +.subtext { + font-size: 16px; + margin-bottom: 20px; +} + + +.icon { + display: block; + margin: auto; + background-image: svg-load(no-results.svg, fill=#CCC); + background-repeat: no-repeat; + background-size: contain; + background-position: center center; + width: 166px; + height: 166px; + margin-bottom: 20px; +} + +.emptyIcon { + display: block; + margin: auto; + background-image: svg-load(empty-state.svg, fill=#CCC); + background-repeat: no-repeat; + background-size: contain; + background-position: center center; + width: 166px; + height: 166px; + margin-bottom: 20px; +} diff --git a/frontend/app/components/ui/index.js b/frontend/app/components/ui/index.js index fe9609f16..669be843a 100644 --- a/frontend/app/components/ui/index.js +++ b/frontend/app/components/ui/index.js @@ -52,5 +52,6 @@ export { default as QuestionMarkHint } from './QuestionMarkHint'; export { default as TimelinePointer } from './TimelinePointer'; export { default as CopyButton } from './CopyButton'; export { default as HighlightCode } from './HighlightCode'; +export { default as NoPermission } from './NoPermission'; export { Input, Modal, Form, Message, Card } from 'semantic-ui-react'; diff --git a/frontend/app/duck/index.js b/frontend/app/duck/index.js index 53771ca04..c8d7a7c65 100644 --- a/frontend/app/duck/index.js +++ b/frontend/app/duck/index.js @@ -33,6 +33,7 @@ import announcements from './announcements'; import errors from './errors'; import funnels from './funnels'; import config from './config'; +import roles from './roles'; export default combineReducers({ jwt, @@ -66,6 +67,7 @@ export default combineReducers({ errors, funnels, config, + roles, ...integrations, ...sources, }); diff --git a/frontend/app/duck/roles.js b/frontend/app/duck/roles.js new file mode 100644 index 000000000..abc3ce0f4 --- /dev/null +++ b/frontend/app/duck/roles.js @@ -0,0 +1,32 @@ +import { List, Map } from 'immutable'; +import Role from 'Types/role'; +import crudDuckGenerator from './tools/crudDuck'; +import { reduceDucks } from 'Duck/tools'; + +const crudDuck = crudDuckGenerator('client/role', Role, { idKey: 'roleId' }); +export const { fetchList, init, edit, remove, } = crudDuck.actions; + +const initialState = Map({ + list: List(), + permissions: List([ + { name: 'Session Replay', value: 'SESSION_REPLAY' }, + { name: 'Develoepr Tools', value: 'DEV_TOOLS' }, + { name: 'Errors', value: 'ERRORS' }, + { name: 'Metrics', value: 'METRICS' }, + { name: 'Assist 
Live', value: 'ASSIST_LIVE' }, + { name: 'Assist Call', value: 'ASSIST_CALL' }, + ]) +}); + +const reducer = (state = initialState, action = {}) => { + return state; +}; + +export function save(instance) { + return { + types: crudDuck.actionTypes.SAVE.toArray(), + call: client => instance.roleId ? client.post(`/client/roles/${ instance.roleId }`, instance.toData()) : client.put(`/client/roles`, instance.toData()), + }; +} + +export default reduceDucks(crudDuck, { initialState, reducer }).reducer; diff --git a/frontend/app/routes.js b/frontend/app/routes.js index df032fe32..2ca5fd672 100644 --- a/frontend/app/routes.js +++ b/frontend/app/routes.js @@ -60,6 +60,7 @@ export const CLIENT_TABS = { INTEGRATIONS: 'integrations', PROFILE: 'account', MANAGE_USERS: 'manage-users', + MANAGE_ROLES: 'manage-roles', SITES: 'projects', CUSTOM_FIELDS: 'metadata', WEBHOOKS: 'webhooks', diff --git a/frontend/app/svg/icons/shield-lock.svg b/frontend/app/svg/icons/shield-lock.svg new file mode 100644 index 000000000..1a1a49084 --- /dev/null +++ b/frontend/app/svg/icons/shield-lock.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/types/account/account.js b/frontend/app/types/account/account.js index 2493a692d..28822b194 100644 --- a/frontend/app/types/account/account.js +++ b/frontend/app/types/account/account.js @@ -13,6 +13,7 @@ export default Member.extend({ smtp: false, license: '', expirationDate: undefined, + permissions: [], }, { fromJS: ({ current = {}, ...account})=> ({ ...account, diff --git a/frontend/app/types/client/client.js b/frontend/app/types/client/client.js index c1a33114f..542ff1a97 100644 --- a/frontend/app/types/client/client.js +++ b/frontend/app/types/client/client.js @@ -10,7 +10,8 @@ export default Record({ tenantId: undefined, name: undefined, sites: List(), - optOut: true + optOut: true, + edition: '', }, { fromJS: ({ projects, diff --git a/frontend/app/types/member.js b/frontend/app/types/member.js index f712c7347..4c064e90d 100644 --- a/frontend/app/types/member.js +++ b/frontend/app/types/member.js @@ -11,7 +11,8 @@ export default Record({ superAdmin: false, joined: false, expiredInvitation: false, - invitationLink: '' + roleId: undefined, + invitationLink: '', }, { idKey: 'id', methods: { diff --git a/frontend/app/types/role.js b/frontend/app/types/role.js new file mode 100644 index 000000000..52a74d400 --- /dev/null +++ b/frontend/app/types/role.js @@ -0,0 +1,30 @@ +import Record from 'Types/Record'; +import { validateName } from 'App/validate'; +import { List } from 'immutable'; + +export default Record({ + roleId: undefined, + name: '', + permissions: List(), + protected: false, + description: '' +}, { + idKey: 'roleId', + methods: { + validate() { + return validateName(this.name, { diacritics: true }); + }, + toData() { + const js = this.toJS(); + delete js.key; + delete js.protected; + return js; + }, + }, + fromJS({ permissions, ...rest }) { + return { + ...rest, + permissions: List(permissions) + } + }, +}); diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index b5d3a9688..16cba8159 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -19,8 +19,11 @@ "Types": ["./app/types" ], "Types/*": ["./app/types/*"], // Sublime hack "UI": ["./app/components/ui"], + "UI/*": ["./app/components/ui/*"], "Duck": ["./app/duck"], "Duck/*": ["./app/duck/*"], + "HOCs": ["./app/components/hocs"], + "HOCs/*": ["./app/components/hocs/*"], "Shared": ["./app/components/shared"], "Shared/*": ["./app/components/shared/*"], "Player": 
["./app/player"], From 3c509d80955aa860bac8ec7e2c3b56bb4beb0cdc Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Sat, 27 Nov 2021 00:18:34 +0100 Subject: [PATCH 147/218] Removed log import --- backend/pkg/db/cache/project.go | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/pkg/db/cache/project.go b/backend/pkg/db/cache/project.go index 70349a407..daf498d32 100644 --- a/backend/pkg/db/cache/project.go +++ b/backend/pkg/db/cache/project.go @@ -1,7 +1,6 @@ package cache import ( - "log" "time" . "openreplay/backend/pkg/db/types" ) From d567cd1a205af0a04e408263f8a0083112313a44 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 11:56:40 +0530 Subject: [PATCH 148/218] fix(ui) - ee check --- .../Client/ManageUsers/ManageUsers.js | 37 ++++++++++--------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/frontend/app/components/Client/ManageUsers/ManageUsers.js b/frontend/app/components/Client/ManageUsers/ManageUsers.js index 4dffc94c8..04902757e 100644 --- a/frontend/app/components/Client/ManageUsers/ManageUsers.js +++ b/frontend/app/components/Client/ManageUsers/ManageUsers.js @@ -21,7 +21,8 @@ const LIMIT_WARNING = 'You have reached users limit.'; errors: state.getIn([ 'members', 'saveRequest', 'errors' ]), loading: state.getIn([ 'members', 'loading' ]), saving: state.getIn([ 'members', 'saveRequest', 'loading' ]), - roles: state.getIn(['roles', 'list']) + roles: state.getIn(['roles', 'list']).map(r => ({ text: r.name, value: r.roleId })).toJS(), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', }), { init, save, @@ -80,8 +81,8 @@ class ManageUsers extends React.PureComponent { }); } - formContent = (member, account, roles) => { - const options = roles.map(r => ({ text: r.name, value: r.roleId })).toJS(); + formContent = () => { + const { member, account, isEnterprise, roles } = this.props; return (
@@ -127,18 +128,20 @@ class ManageUsers extends React.PureComponent {
{ 'Can manage Projects and team members.' }
- -
- - -
+ + { isEnterprise && ( +
+ + +
+ )}
@@ -180,7 +183,7 @@ class ManageUsers extends React.PureComponent {
 
   render() {
     const {
-      members, member, loading, account, hideHeader = false, roles
+      members, loading, account, hideHeader = false
     } = this.props;
     const { showModal, remaining, invited } = this.state;
     const isAdmin = account.admin || account.superAdmin;
@@ -193,7 +196,7 @@ class ManageUsers extends React.PureComponent {
           title="Invite People"
           size="small"
           isDisplayed={ showModal }
-          content={ this.formContent(member, account, roles) }
+          content={ this.formContent() }
           onClose={ this.closeModal }
         />
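An aside on the pattern this "ee check" patch relies on: the enterprise test is the same `getIn` one-liner that AssistActions, PreferencesMenu and the player Controls inline in their own `connect()` calls. A hedged sketch of a shared selector (hypothetical refactor; these patches deliberately inline the expression instead):

    // Hypothetical shared selector — not introduced by any of these patches.
    // `state` is the Immutable.js root state used across the frontend ducks.
    const isEnterprise = (state: any): boolean =>
      state.getIn(['user', 'client', 'edition']) === 'ee';

    // Example wiring: connect(state => ({ isEnterprise: isEnterprise(state) }))(ManageUsers)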
From f6d454ed7578f9dfc6333b38713002c845cefa7a Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 12:20:47 +0530 Subject: [PATCH 149/218] fix(http) - variable name --- scripts/helm/app/http.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helm/app/http.yaml b/scripts/helm/app/http.yaml index f594df201..19fd39a59 100644 --- a/scripts/helm/app/http.yaml +++ b/scripts/helm/app/http.yaml @@ -24,7 +24,7 @@ resources: env: ASSETS_ORIGIN: /sessions-assets # TODO: full path (with the minio prefix) TOKEN_SECRET: secret_token_string # TODO: generate on buld - S3_BUCKET_IMAGES_IOS: sessions-mobile-assets + S3_BUCKET_IOS_IMAGES: sessions-mobile-assets AWS_ACCESS_KEY_ID: "minios3AccessKeyS3cr3t" AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" AWS_REGION: us-east-1 From fade17dec16e33e799f8c1990a5521de8004b530 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 12:26:36 +0530 Subject: [PATCH 150/218] fix(ui) - metadata delete in onboarding --- .../Onboarding/components/MetadataList/MetadataList.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js b/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js index 014fac088..fddccaf51 100644 --- a/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js +++ b/frontend/app/components/Onboarding/components/MetadataList/MetadataList.js @@ -28,7 +28,7 @@ const MetadataList = (props) => { header: 'Metadata', confirmation: `Are you sure you want to remove?` })) { - this.props.remove(site.id, field.index); + props.remove(site.id, field.index); } } From d41c5736373aca87f2f9e3850cc4864ecf2770e7 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 09:18:40 +0100 Subject: [PATCH 151/218] feat(tracker): 3.4.7: click masking fix & safe iframe calls & allow resources 0 duration --- tracker/tracker/package-lock.json | 203 +++++++++++++------- tracker/tracker/package.json | 4 +- tracker/tracker/src/main/app/index.ts | 9 +- tracker/tracker/src/main/app/observer.ts | 51 +++-- tracker/tracker/src/main/modules/console.ts | 5 +- tracker/tracker/src/main/modules/mouse.ts | 42 ++-- tracker/tracker/src/main/modules/timing.ts | 2 +- 7 files changed, 194 insertions(+), 122 deletions(-) diff --git a/tracker/tracker/package-lock.json b/tracker/tracker/package-lock.json index e1d647441..287203b30 100644 --- a/tracker/tracker/package-lock.json +++ b/tracker/tracker/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker", - "version": "3.4.4", + "version": "3.4.7", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -475,9 +475,9 @@ } }, "acorn": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", - "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true }, "acorn-jsx": { @@ -499,18 +499,26 @@ } }, "ansi-escapes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", - "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": 
"sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "requires": { - "type-fest": "^0.8.1" + "type-fest": "^0.21.3" + }, + "dependencies": { + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + } } }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true }, "ansi-styles": { @@ -610,9 +618,9 @@ } }, "cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", "dev": true }, "co": { @@ -1084,9 +1092,9 @@ } }, "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" @@ -1179,24 +1187,84 @@ "dev": true }, "inquirer": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", - "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", + "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", "dev": true, "requires": { "ansi-escapes": "^4.2.1", - "chalk": "^2.4.2", + "chalk": "^4.1.0", "cli-cursor": "^3.1.0", - "cli-width": "^2.0.0", + "cli-width": "^3.0.0", "external-editor": "^3.0.3", "figures": "^3.0.0", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "mute-stream": "0.0.8", - "run-async": "^2.2.0", - "rxjs": "^6.5.3", + "run-async": "^2.4.0", + "rxjs": "^6.6.0", "string-width": "^4.1.0", - "strip-ansi": "^5.1.0", + "strip-ansi": "^6.0.0", "through": "^2.3.6" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "is-extglob": { @@ -1226,12 +1294,6 @@ "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=", "dev": true }, - "is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", - "dev": true - }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -1338,9 +1400,9 @@ } }, "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "dev": true }, "merge-stream": { @@ -1371,18 +1433,18 @@ } }, "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true }, "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "dev": true, "requires": { - "minimist": "0.0.8" + "minimist": "^1.2.5" } }, "ms": { @@ -1419,9 +1481,9 @@ } }, "onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", 
"dev": true, "requires": { "mimic-fn": "^2.1.0" @@ -1469,9 +1531,9 @@ "dev": true }, "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, "picomatch": { @@ -1612,13 +1674,10 @@ } }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", - "dev": true, - "requires": { - "is-promise": "^2.1.0" - } + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true }, "run-parallel": { "version": "1.1.9", @@ -1627,9 +1686,9 @@ "dev": true }, "rxjs": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", - "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", + "version": "6.6.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", + "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", "dev": true, "requires": { "tslib": "^1.9.0" @@ -1678,9 +1737,9 @@ "dev": true }, "signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", "dev": true }, "slash": { @@ -1744,23 +1803,23 @@ "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==" }, "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "strip-ansi": "^6.0.1" }, "dependencies": { "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } } } @@ -1917,9 +1976,9 @@ "dev": true }, "typescript": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.4.tgz", - "integrity": 
"sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "uri-js": { diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 808aef967..5f13bde51 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.5", + "version": "3.4.7", "keywords": [ "logging", "replay" @@ -35,7 +35,7 @@ "rollup": "^2.17.0", "rollup-plugin-terser": "^6.1.0", "semver": "^6.3.0", - "typescript": "^4.3.4" + "typescript": "^4.6.0-dev.20211126" }, "dependencies": { "error-stack-parser": "^2.0.6" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index d23094dfc..3147cca14 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -44,7 +44,7 @@ export default class App { readonly ticker: Ticker; readonly projectKey: string; private readonly messages: Array = []; - private readonly observer: Observer; + /*private*/ readonly observer: Observer; // temp, for fast security fix. TODO: separate security/obscure module with nodeCallback that incapsulates `textMasked` functionality from Observer private readonly startCallbacks: Array = []; private readonly stopCallbacks: Array = []; private readonly commitCallbacks: Array = []; @@ -323,13 +323,14 @@ export default class App { return onStartInfo; }) .catch(e => { - this.stop(); + sessionStorage.removeItem(this.options.session_token_key) + this.stop() warn("OpenReplay was unable to start. ", e) this._debug("session_start", e); - throw e; + throw e }) } - return Promise.reject("Player is active"); + return Promise.reject("Player is already active"); } start(reset: boolean = false): Promise { diff --git a/tracker/tracker/src/main/app/observer.ts b/tracker/tracker/src/main/app/observer.ts index 3fad12c60..189c56912 100644 --- a/tracker/tracker/src/main/app/observer.ts +++ b/tracker/tracker/src/main/app/observer.ts @@ -234,30 +234,43 @@ export default class Observer { this.app.send(new SetNodeAttribute(id, name, value)); } + /* TODO: abstract sanitation */ + getInnerTextSecure(el: HTMLElement): string { + const id = this.app.nodes.getID(el) + if (!id) { return '' } + return this.checkObscure(id, el.innerText) + + } + + private checkObscure(id: number, data: string): string { + if (this.textMasked.has(id)) { + return data.replace( + /[^\f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]/g, + '█', + ); + } + if (this.options.obscureTextNumbers) { + data = data.replace(/\d/g, '0'); + } + if (this.options.obscureTextEmails) { + data = data.replace( + /([^\s]+)@([^\s]+)\.([^\s]+)/g, + (...f: Array) => + stars(f[1]) + '@' + stars(f[2]) + '.' 
+ stars(f[3]), + ); + } + return data + } + private sendNodeData(id: number, parentElement: Element, data: string): void { if (this.isInstance(parentElement, HTMLStyleElement) || this.isInstance(parentElement, SVGStyleElement)) { this.app.send(new SetCSSDataURLBased(id, data, this.app.getBaseHref())); return; } - if (this.textMasked.has(id)) { - data = data.replace( - /[^\f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]/g, - '█', - ); - } else { - if (this.options.obscureTextNumbers) { - data = data.replace(/\d/g, '0'); - } - if (this.options.obscureTextEmails) { - data = data.replace( - /([^\s]+)@([^\s]+)\.([^\s]+)/g, - (...f: Array) => - stars(f[1]) + '@' + stars(f[2]) + '.' + stars(f[3]), - ); - } - } + data = this.checkObscure(id, data) this.app.send(new SetNodeData(id, data)); } + /* end TODO: abstract sanitation */ private bindNode(node: Node): void { const r = this.app.nodes.registerNode(node); @@ -412,7 +425,7 @@ export default class Observer { private iframeObservers: Observer[] = []; private handleIframe(iframe: HTMLIFrameElement): void { let context: Window | null = null - const handle = () => { + const handle = this.app.safe(() => { const id = this.app.nodes.getID(iframe) if (id === undefined) { return } if (iframe.contentWindow === context) { return } @@ -421,7 +434,7 @@ export default class Observer { const observer = new Observer(this.app, this.options, context) this.iframeObservers.push(observer) observer.observeIframe(id, context) - } + }) this.app.attachEventListener(iframe, "load", handle) handle() } diff --git a/tracker/tracker/src/main/modules/console.ts b/tracker/tracker/src/main/modules/console.ts index e625961a7..0db199319 100644 --- a/tracker/tracker/src/main/modules/console.ts +++ b/tracker/tracker/src/main/modules/console.ts @@ -138,7 +138,7 @@ export default function (app: App, opts: Partial): void { }); patchConsole(window.console); - app.nodes.attachNodeCallback(node => { + app.nodes.attachNodeCallback(app.safe(node => { if (node instanceof HTMLIFrameElement) { let context = node.contentWindow if (context) { @@ -151,6 +151,5 @@ export default function (app: App, opts: Partial): void { } }) } - - }) + })) } diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index 8a808f4bf..a2b406806 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -71,33 +71,33 @@ function _getTarget(target: Element): Element | null { return target === document.documentElement ? 
null : target; } -function getTargetLabel(target: Element): string { - const dl = getLabelAttribute(target); - if (dl !== null) { - return dl; - } - const tag = target.tagName.toUpperCase(); - if (tag === 'INPUT') { - return getInputLabel(target as HTMLInputElement) - } - if (tag === 'BUTTON' || - tag === 'A' || - tag === 'LI' || - (target as HTMLElement).onclick != null || - target.getAttribute('role') === 'button' - ) { - const label: string = (target as HTMLElement).innerText || ''; - return normSpaces(label).slice(0, 100); - } - return ''; -} - export default function (app: App): void { // const options: Options = Object.assign( // {}, // opts, // ); + function getTargetLabel(target: Element): string { + const dl = getLabelAttribute(target); + if (dl !== null) { + return dl; + } + const tag = target.tagName.toUpperCase(); + if (tag === 'INPUT') { + return getInputLabel(target as HTMLInputElement) + } + if (tag === 'BUTTON' || + tag === 'A' || + tag === 'LI' || + (target as HTMLElement).onclick != null || + target.getAttribute('role') === 'button' + ) { + const label: string = app.observer.getInnerTextSecure(target as HTMLElement); + return normSpaces(label).slice(0, 100); + } + return ''; + } + let mousePositionX = -1; let mousePositionY = -1; let mousePositionChanged = false; diff --git a/tracker/tracker/src/main/modules/timing.ts b/tracker/tracker/src/main/modules/timing.ts index e6e6df8ea..2466b7872 100644 --- a/tracker/tracker/src/main/modules/timing.ts +++ b/tracker/tracker/src/main/modules/timing.ts @@ -122,7 +122,7 @@ export default function (app: App, opts: Partial): void { let resources: ResourcesTimeMap | null = {} function resourceTiming(entry: PerformanceResourceTiming): void { - if (entry.duration <= 0 || !isURL(entry.name) || app.isServiceURL(entry.name)) return; + if (entry.duration < 0 || !isURL(entry.name) || app.isServiceURL(entry.name)) return; if (resources !== null) { resources[entry.name] = entry.startTime + entry.duration; } From 4c5c0ecc93dcc85cf2352abaa62dfb4a38c02bf0 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 14:12:18 +0530 Subject: [PATCH 152/218] version number 1.4.0 --- frontend/env.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/env.js b/frontend/env.js index 20b435e21..52f562031 100644 --- a/frontend/env.js +++ b/frontend/env.js @@ -13,7 +13,7 @@ const oss = { ORIGIN: () => 'window.location.origin', API_EDP: () => 'window.location.origin + "/api"', ASSETS_HOST: () => 'window.location.origin + "/assets"', - VERSION: '1.3.0', + VERSION: '1.4.0', SOURCEMAP: true, MINIO_ENDPOINT: process.env.MINIO_ENDPOINT, MINIO_PORT: process.env.MINIO_PORT, From a5eb22ade2b75d51e91fdda960977ee86c546f8a Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 14:48:51 +0530 Subject: [PATCH 153/218] change(ui) - axios docs --- .../Client/Integrations/AxiosDoc/AxiosDoc.js | 60 +++++++++++++++++++ .../Client/Integrations/AxiosDoc/index.js | 1 + .../Client/Integrations/Integrations.js | 14 ++++- 3 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js create mode 100644 frontend/app/components/Client/Integrations/AxiosDoc/index.js diff --git a/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js b/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js new file mode 100644 index 000000000..6d8233826 --- /dev/null +++ b/frontend/app/components/Client/Integrations/AxiosDoc/AxiosDoc.js @@ -0,0 +1,60 @@ +import Highlight from 
'react-highlight' +import ToggleContent from 'Shared/ToggleContent' +import DocLink from 'Shared/DocLink/DocLink'; + +const AxiosDoc = (props) => { + const { projectKey } = props; + return ( +
+
This plugin allows you to capture axios requests and inspect them later on while replaying session recordings. This is very useful for understanding and fixing issues.
+ +
Installation
+ + {`npm i @openreplay/tracker-axios`} + + +
Usage
+

Initialize the @openreplay/tracker package as usual, then load the axios plugin. Note that the OpenReplay axios plugin requires axios@^0.21.2 as a peer dependency.
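Because axios is a peer dependency, the host application must install a compatible axios itself; the plugin does not bundle one. Purely for illustration (an assumed fragment, not taken from this patch), the plugin's package.json declares it along these lines:

    {
      "peerDependencies": {
        "axios": "^0.21.2"
      }
    }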

+
+ +
Usage
{`import OpenReplay from '@openreplay/tracker';
+import trackerAxios from '@openreplay/tracker-axios';
+const tracker = new OpenReplay({
+  projectKey: '${projectKey}'
+});
+tracker.use(trackerAxios(options)); // check list of available options below
+tracker.start();`}
+      
+    }
+    second={
+      
+        {`import OpenReplay from '@openreplay/tracker/cjs';
+import trackerAxios from '@openreplay/tracker-axios/cjs';
+const tracker = new OpenReplay({
+  projectKey: '${projectKey}'
+});
+tracker.use(trackerAxios(options)); // check list of available options below
+//...
+function MyApp() {
+  useEffect(() => { // use componentDidMount in case of React Class Component
+    tracker.start();
+  }, [])
+//...
+}`}
+      
+    }
+  />
+
+  
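Both snippets pass an `options` object whose shape this diff never shows — the "check list of available options below" comment points at prose outside the patch. For orientation only, a sketch with assumed option names (verify them against the @openreplay/tracker-axios README before relying on any of this):

    // Assumed options — illustrative, not confirmed by this patch.
    const options = {
      failuresOnly: false,           // assumed flag: capture every request, not only failed ones
      sessionTokenHeader: undefined, // assumed: header used to tag requests with the session token
    };
    tracker.use(trackerAxios(options));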
+ ) +}; + +AxiosDoc.displayName = "AxiosDoc"; + +export default AxiosDoc; diff --git a/frontend/app/components/Client/Integrations/AxiosDoc/index.js b/frontend/app/components/Client/Integrations/AxiosDoc/index.js new file mode 100644 index 000000000..a5a8a1873 --- /dev/null +++ b/frontend/app/components/Client/Integrations/AxiosDoc/index.js @@ -0,0 +1 @@ +export { default } from './AxiosDoc' \ No newline at end of file diff --git a/frontend/app/components/Client/Integrations/Integrations.js b/frontend/app/components/Client/Integrations/Integrations.js index f34050055..fa69d1861 100644 --- a/frontend/app/components/Client/Integrations/Integrations.js +++ b/frontend/app/components/Client/Integrations/Integrations.js @@ -29,6 +29,7 @@ import FetchDoc from './FetchDoc'; import MobxDoc from './MobxDoc'; import ProfilerDoc from './ProfilerDoc'; import AssistDoc from './AssistDoc'; +import AxiosDoc from './AxiosDoc/AxiosDoc'; const NONE = -1; const SENTRY = 0; @@ -51,6 +52,7 @@ const FETCH = 16; const MOBX = 17; const PROFILER = 18; const ASSIST = 19; +const AXIOS = 20; const TITLE = { [ SENTRY ]: 'Sentry', @@ -73,6 +75,7 @@ const TITLE = { [ MOBX ] : 'MobX', [ PROFILER ] : 'Profiler', [ ASSIST ] : 'Assist', + [ AXIOS ] : 'Axios', } const DOCS = [REDUX, VUE, GRAPHQL, NGRX, FETCH, MOBX, PROFILER, ASSIST] @@ -191,6 +194,8 @@ export default class Integrations extends React.PureComponent { return case ASSIST: return + case AXIOS: + return default: return null; } @@ -313,7 +318,6 @@ export default class Integrations extends React.PureComponent { onClick={ () => this.showIntegrationConfig(MOBX) } // integrated={ sentryIntegrated } /> - this.showIntegrationConfig(ASSIST) } // integrated={ sentryIntegrated } /> + this.showIntegrationConfig(AXIOS) } + // integrated={ sentryIntegrated } + />
)} From bc2cfad694d02c2c61338f3770746bba17c7109e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 29 Nov 2021 14:50:33 +0530 Subject: [PATCH 154/218] change(ui) - axios docs --- .../Client/Integrations/Integrations.js | 34 +++++++++---------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/frontend/app/components/Client/Integrations/Integrations.js b/frontend/app/components/Client/Integrations/Integrations.js index fa69d1861..855ff8511 100644 --- a/frontend/app/components/Client/Integrations/Integrations.js +++ b/frontend/app/components/Client/Integrations/Integrations.js @@ -300,7 +300,14 @@ export default class Integrations extends React.PureComponent { onClick={ () => this.showIntegrationConfig(NGRX) } // integrated={ sentryIntegrated } /> - + this.showIntegrationConfig(MOBX) } + // integrated={ sentryIntegrated } + /> this.showIntegrationConfig(FETCH) } // integrated={ sentryIntegrated } /> - - this.showIntegrationConfig(MOBX) } - // integrated={ sentryIntegrated } - /> this.showIntegrationConfig(PROFILER) } // integrated={ sentryIntegrated } /> - this.showIntegrationConfig(ASSIST) } - // integrated={ sentryIntegrated } - /> this.showIntegrationConfig(AXIOS) } // integrated={ sentryIntegrated } /> + this.showIntegrationConfig(ASSIST) } + // integrated={ sentryIntegrated } + />
)} From 31fc3cc07e3b10587f3517dded753fc89d8e8671 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 11:15:48 +0100 Subject: [PATCH 155/218] fix(tracker): 3.4.8: webpack 5 fully specified --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/index.ts | 18 +++---- tracker/tracker/src/main/app/observer.ts | 7 +-- tracker/tracker/src/main/app/ticker.ts | 2 +- tracker/tracker/src/main/index.ts | 51 ++++++++++--------- .../tracker/src/main/modules/connection.ts | 4 +- tracker/tracker/src/main/modules/console.ts | 6 +-- tracker/tracker/src/main/modules/cssrules.ts | 4 +- tracker/tracker/src/main/modules/exception.ts | 6 +-- tracker/tracker/src/main/modules/img.ts | 6 +-- tracker/tracker/src/main/modules/input.ts | 6 +-- tracker/tracker/src/main/modules/longtasks.ts | 4 +- tracker/tracker/src/main/modules/mouse.ts | 8 +-- .../tracker/src/main/modules/performance.ts | 6 +-- tracker/tracker/src/main/modules/scroll.ts | 4 +- tracker/tracker/src/main/modules/timing.ts | 8 +-- tracker/tracker/src/main/modules/viewport.ts | 4 +- tracker/tracker/src/main/tsconfig.json | 3 +- tracker/tracker/src/messages/index.ts | 4 +- tracker/tracker/src/messages/message.ts | 2 +- tracker/tracker/src/webworker/index.ts | 8 +-- .../tracker/src/webworker/transformer.js.temp | 21 -------- tracker/tracker/tsconfig-base.json | 2 +- 23 files changed, 85 insertions(+), 101 deletions(-) delete mode 100644 tracker/tracker/src/webworker/transformer.js.temp diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 5f13bde51..ab2a367b8 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.7", + "version": "3.4.8", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 3147cca14..e0bc47b2b 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -1,15 +1,15 @@ -import { timestamp, log, warn } from '../utils'; -import { Timestamp, PageClose } from '../../messages'; -import Message from '../../messages/message'; -import Nodes from './nodes'; -import Observer from './observer'; -import Ticker from './ticker'; +import { timestamp, log, warn } from "../utils.js"; +import { Timestamp, PageClose } from "../../messages/index.js"; +import Message from "../../messages/message.js"; +import Nodes from "./nodes.js"; +import Observer from "./observer.js"; +import Ticker from "./ticker.js"; -import { deviceMemory, jsHeapSizeLimit } from '../modules/performance'; +import { deviceMemory, jsHeapSizeLimit } from "../modules/performance.js"; -import type { Options as ObserverOptions } from './observer'; +import type { Options as ObserverOptions } from "./observer.js"; -import type { Options as WebworkerOptions, WorkerMessageData } from '../../messages/webworker'; +import type { Options as WebworkerOptions, WorkerMessageData } from "../../messages/webworker.js"; interface OnStartInfo { sessionID: string, diff --git a/tracker/tracker/src/main/app/observer.ts b/tracker/tracker/src/main/app/observer.ts index 189c56912..3ed5088af 100644 --- a/tracker/tracker/src/main/app/observer.ts +++ b/tracker/tracker/src/main/app/observer.ts @@ -1,4 +1,4 @@ -import { stars, hasOpenreplayAttribute } from '../utils'; +import { stars, hasOpenreplayAttribute } from "../utils.js"; import { CreateDocument, CreateElementNode, @@ -11,8 +11,8 @@ import { MoveNode, RemoveNode, 
CreateIFrameDocument, -} from '../../messages'; -import App from './index'; +} from "../../messages/index.js"; +import App from "./index.js"; interface Window extends WindowProxy { HTMLInputElement: typeof HTMLInputElement, @@ -292,6 +292,7 @@ export default class Observer { ? NodeFilter.FILTER_REJECT : NodeFilter.FILTER_ACCEPT, }, + // @ts-ignore false, ); while (walker.nextNode()) { diff --git a/tracker/tracker/src/main/app/ticker.ts b/tracker/tracker/src/main/app/ticker.ts index 51fa7f579..62ca69af9 100644 --- a/tracker/tracker/src/main/app/ticker.ts +++ b/tracker/tracker/src/main/app/ticker.ts @@ -1,4 +1,4 @@ -import App from './index'; +import App from "./index.js"; type Callback = () => void; function wrap(callback: Callback, n: number): Callback { diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts index b9fdad6cc..2ddfc6961 100644 --- a/tracker/tracker/src/main/index.ts +++ b/tracker/tracker/src/main/index.ts @@ -1,30 +1,30 @@ -import App, { DEFAULT_INGEST_POINT } from './app'; -export { default as App } from './app'; +import App, { DEFAULT_INGEST_POINT } from "./app/index.js"; +export { default as App } from './app/index.js'; -import { UserID, UserAnonymousID, Metadata, RawCustomEvent, CustomIssue } from '../messages'; -import * as _Messages from '../messages'; +import { UserID, UserAnonymousID, Metadata, RawCustomEvent, CustomIssue } from "../messages/index.js"; +import * as _Messages from "../messages/index.js"; export const Messages = _Messages; -import Connection from './modules/connection'; -import Console from './modules/console'; -import Exception, { getExceptionMessageFromEvent, getExceptionMessage } from './modules/exception'; -import Img from './modules/img'; -import Input from './modules/input'; -import Mouse from './modules/mouse'; -import Timing from './modules/timing'; -import Performance from './modules/performance'; -import Scroll from './modules/scroll'; -import Viewport from './modules/viewport'; -import Longtasks from './modules/longtasks'; -import CSSRules from './modules/cssrules'; -import { IN_BROWSER, deprecationWarn, DOCS_HOST } from './utils'; +import Connection from "./modules/connection.js"; +import Console from "./modules/console.js"; +import Exception, { getExceptionMessageFromEvent, getExceptionMessage } from "./modules/exception.js"; +import Img from "./modules/img.js"; +import Input from "./modules/input.js"; +import Mouse from "./modules/mouse.js"; +import Timing from "./modules/timing.js"; +import Performance from "./modules/performance.js"; +import Scroll from "./modules/scroll.js"; +import Viewport from "./modules/viewport.js"; +import Longtasks from "./modules/longtasks.js"; +import CSSRules from "./modules/cssrules.js"; +import { IN_BROWSER, deprecationWarn, DOCS_HOST } from "./utils.js"; -import { Options as AppOptions } from './app'; -import { Options as ConsoleOptions } from './modules/console'; -import { Options as ExceptionOptions } from './modules/exception'; -import { Options as InputOptions } from './modules/input'; -import { Options as PerformanceOptions } from './modules/performance'; -import { Options as TimingOptions } from './modules/timing'; +import { Options as AppOptions } from "./app/index.js"; +import { Options as ConsoleOptions } from "./modules/console.js"; +import { Options as ExceptionOptions } from "./modules/exception.js"; +import { Options as InputOptions } from "./modules/input.js"; +import { Options as PerformanceOptions } from "./modules/performance.js"; +import { Options as 
TimingOptions } from "./modules/timing.js"; export type Options = Partial< AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & PerformanceOptions & TimingOptions @@ -78,7 +78,10 @@ export default class API { console.error("OpenReplay: Your website must be publicly accessible and running on SSL in order for OpenReplay to properly capture and replay the user session. You can disable this check by setting `__DISABLE_SECURE_MODE` option to `true` if you are testing in localhost. Keep in mind, that asset files on a local machine are not available to the outside world. This might affect tracking if you use css files.") return; } - const doNotTrack = options.respectDoNotTrack && (navigator.doNotTrack == '1' || window.doNotTrack == '1'); + const doNotTrack = options.respectDoNotTrack && + (navigator.doNotTrack == '1' + // @ts-ignore + || window.doNotTrack == '1'); this.app = doNotTrack || !('Map' in window) || !('Set' in window) || diff --git a/tracker/tracker/src/main/modules/connection.ts b/tracker/tracker/src/main/modules/connection.ts index bd582954f..a2767790c 100644 --- a/tracker/tracker/src/main/modules/connection.ts +++ b/tracker/tracker/src/main/modules/connection.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { ConnectionInformation } from '../../messages'; +import App from "../app/index.js"; +import { ConnectionInformation } from "../../messages/index.js"; export default function(app: App): void { const connection: diff --git a/tracker/tracker/src/main/modules/console.ts b/tracker/tracker/src/main/modules/console.ts index 0db199319..bbf259735 100644 --- a/tracker/tracker/src/main/modules/console.ts +++ b/tracker/tracker/src/main/modules/console.ts @@ -1,6 +1,6 @@ -import App from '../app'; -import { IN_BROWSER } from '../utils'; -import { ConsoleLog } from '../../messages'; +import App from "../app/index.js"; +import { IN_BROWSER } from "../utils.js"; +import { ConsoleLog } from "../../messages/index.js"; const printError: (e: Error) => string = IN_BROWSER && 'InstallTrigger' in window // detect Firefox diff --git a/tracker/tracker/src/main/modules/cssrules.ts b/tracker/tracker/src/main/modules/cssrules.ts index 54166f717..18aa3f154 100644 --- a/tracker/tracker/src/main/modules/cssrules.ts +++ b/tracker/tracker/src/main/modules/cssrules.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { CSSInsertRuleURLBased, CSSDeleteRule, TechnicalInfo } from '../../messages'; +import App from "../app/index.js"; +import { CSSInsertRuleURLBased, CSSDeleteRule, TechnicalInfo } from "../../messages/index.js"; export default function(app: App | null) { if (app === null) { diff --git a/tracker/tracker/src/main/modules/exception.ts b/tracker/tracker/src/main/modules/exception.ts index 6a4720c35..45fe37465 100644 --- a/tracker/tracker/src/main/modules/exception.ts +++ b/tracker/tracker/src/main/modules/exception.ts @@ -1,6 +1,6 @@ -import App from '../app'; -import { JSException } from '../../messages'; -import Message from '../../messages/message'; +import App from "../app/index.js"; +import { JSException } from "../../messages/index.js"; +import Message from "../../messages/message.js"; import ErrorStackParser from 'error-stack-parser'; export interface Options { diff --git a/tracker/tracker/src/main/modules/img.ts b/tracker/tracker/src/main/modules/img.ts index e20a4d531..61e793b89 100644 --- a/tracker/tracker/src/main/modules/img.ts +++ b/tracker/tracker/src/main/modules/img.ts @@ -1,6 +1,6 @@ -import { timestamp, isURL } from '../utils'; -import App from '../app'; -import { 
ResourceTiming, SetNodeAttributeURLBased } from '../../messages'; +import { timestamp, isURL } from "../utils.js"; +import App from "../app/index.js"; +import { ResourceTiming, SetNodeAttributeURLBased } from "../../messages/index.js"; export default function (app: App): void { const sendImgSrc = app.safe(function (this: HTMLImageElement): void { diff --git a/tracker/tracker/src/main/modules/input.ts b/tracker/tracker/src/main/modules/input.ts index 96ca2f7c2..746c26f8f 100644 --- a/tracker/tracker/src/main/modules/input.ts +++ b/tracker/tracker/src/main/modules/input.ts @@ -1,6 +1,6 @@ -import { normSpaces, IN_BROWSER, getLabelAttribute, hasOpenreplayAttribute } from '../utils'; -import App from '../app'; -import { SetInputTarget, SetInputValue, SetInputChecked } from '../../messages'; +import { normSpaces, IN_BROWSER, getLabelAttribute, hasOpenreplayAttribute } from "../utils.js"; +import App from "../app/index.js"; +import { SetInputTarget, SetInputValue, SetInputChecked } from "../../messages/index.js"; function isInput(node: any): node is HTMLInputElement { if (!(node instanceof HTMLInputElement)) { diff --git a/tracker/tracker/src/main/modules/longtasks.ts b/tracker/tracker/src/main/modules/longtasks.ts index c7515c88f..0f3a7e82a 100644 --- a/tracker/tracker/src/main/modules/longtasks.ts +++ b/tracker/tracker/src/main/modules/longtasks.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { LongTask } from '../../messages'; +import App from "../app/index.js"; +import { LongTask } from "../../messages/index.js"; // https://w3c.github.io/performance-timeline/#the-performanceentry-interface interface TaskAttributionTiming extends PerformanceEntry { diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index a2b406806..3ec70e844 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -1,7 +1,7 @@ -import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils'; -import App from '../app'; -import { MouseMove, MouseClick } from '../../messages'; -import { getInputLabel } from './input'; +import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from "../utils.js"; +import App from "../app/index.js"; +import { MouseMove, MouseClick } from "../../messages/index.js"; +import { getInputLabel } from "./input.js"; function _getSelector(target: Element): string { let el: Element | null = target diff --git a/tracker/tracker/src/main/modules/performance.ts b/tracker/tracker/src/main/modules/performance.ts index 7deac3ef5..8eb7701eb 100644 --- a/tracker/tracker/src/main/modules/performance.ts +++ b/tracker/tracker/src/main/modules/performance.ts @@ -1,6 +1,6 @@ -import App from '../app'; -import { IN_BROWSER } from '../utils'; -import { PerformanceTrack } from '../../messages'; +import App from "../app/index.js"; +import { IN_BROWSER } from "../utils.js"; +import { PerformanceTrack } from "../../messages/index.js"; type Perf = { diff --git a/tracker/tracker/src/main/modules/scroll.ts b/tracker/tracker/src/main/modules/scroll.ts index c3e4b37cc..0f54ba8f9 100644 --- a/tracker/tracker/src/main/modules/scroll.ts +++ b/tracker/tracker/src/main/modules/scroll.ts @@ -1,5 +1,5 @@ -import App from '../app'; -import { SetViewportScroll, SetNodeScroll } from '../../messages'; +import App from "../app/index.js"; +import { SetViewportScroll, SetNodeScroll } from "../../messages/index.js"; export default function (app: App): void { let documentScroll = false; diff --git 
a/tracker/tracker/src/main/modules/timing.ts b/tracker/tracker/src/main/modules/timing.ts index 2466b7872..60e30019f 100644 --- a/tracker/tracker/src/main/modules/timing.ts +++ b/tracker/tracker/src/main/modules/timing.ts @@ -1,7 +1,7 @@ -import { isURL } from '../utils'; -import App from '../app'; -import { ResourceTiming, PageLoadTiming, PageRenderTiming } from '../../messages'; -import type Message from '../../messages/message'; +import { isURL } from "../utils.js"; +import App from "../app/index.js"; +import { ResourceTiming, PageLoadTiming, PageRenderTiming } from "../../messages/index.js"; +import type Message from "../../messages/message.js"; // Inspired by https://github.com/WPO-Foundation/RUM-SpeedIndex/blob/master/src/rum-speedindex.js diff --git a/tracker/tracker/src/main/modules/viewport.ts b/tracker/tracker/src/main/modules/viewport.ts index c780c65e4..626eadd12 100644 --- a/tracker/tracker/src/main/modules/viewport.ts +++ b/tracker/tracker/src/main/modules/viewport.ts @@ -1,9 +1,9 @@ -import App from '../app'; +import App from "../app/index.js"; import { SetPageLocation, SetViewportSize, SetPageVisibility, -} from '../../messages'; +} from "../../messages/index.js"; export default function (app: App): void { let url: string, width: number, height: number; diff --git a/tracker/tracker/src/main/tsconfig.json b/tracker/tracker/src/main/tsconfig.json index 0a92ed914..f6ac938a6 100644 --- a/tracker/tracker/src/main/tsconfig.json +++ b/tracker/tracker/src/main/tsconfig.json @@ -6,5 +6,6 @@ }, "references": [ { "path": "../messages" } - ] + ], + "exclude": ["app/observer"] } diff --git a/tracker/tracker/src/messages/index.ts b/tracker/tracker/src/messages/index.ts index 210f534cb..f3267bee0 100644 --- a/tracker/tracker/src/messages/index.ts +++ b/tracker/tracker/src/messages/index.ts @@ -1,6 +1,6 @@ // Auto-generated, do not edit -import Message from './message'; -import Writer from './writer'; +import Message from "./message.js"; +import Writer from "./writer.js"; function bindNew( Class: C & { new(...args: A): T } diff --git a/tracker/tracker/src/messages/message.ts b/tracker/tracker/src/messages/message.ts index a2bf0864d..aeb8619de 100644 --- a/tracker/tracker/src/messages/message.ts +++ b/tracker/tracker/src/messages/message.ts @@ -1,4 +1,4 @@ -import Writer from './writer'; +import Writer from "./writer.js"; export default interface Message { encode(w: Writer): boolean; diff --git a/tracker/tracker/src/webworker/index.ts b/tracker/tracker/src/webworker/index.ts index d680bfab3..d94d77577 100644 --- a/tracker/tracker/src/webworker/index.ts +++ b/tracker/tracker/src/webworker/index.ts @@ -1,8 +1,8 @@ -import { classes, BatchMeta, Timestamp, SetPageVisibility, CreateDocument } from '../messages'; -import Message from '../messages/message'; -import Writer from '../messages/writer'; +import { classes, BatchMeta, Timestamp, SetPageVisibility, CreateDocument } from "../messages/index.js"; +import Message from "../messages/message.js"; +import Writer from "../messages/writer.js"; -import type { WorkerMessageData } from '../messages/webworker'; +import type { WorkerMessageData } from "../messages/webworker.js"; const SEND_INTERVAL = 20 * 1000; diff --git a/tracker/tracker/src/webworker/transformer.js.temp b/tracker/tracker/src/webworker/transformer.js.temp deleted file mode 100644 index cf80d681b..000000000 --- a/tracker/tracker/src/webworker/transformer.js.temp +++ /dev/null @@ -1,21 +0,0 @@ -import Message from '../messages/message'; - - - - -class MessageTransformer { - private 
urlRewriter?: URLRewriter - - constructor() { - - } - - transform(m: Message): Message { - if (m instanceof SetNodeAttribute) { - if (m.name == "src" || m.name == "href") { - sendAssetForCache - } - } - } - -} \ No newline at end of file diff --git a/tracker/tracker/tsconfig-base.json b/tracker/tracker/tsconfig-base.json index 3d99959a9..9af9edb73 100644 --- a/tracker/tracker/tsconfig-base.json +++ b/tracker/tracker/tsconfig-base.json @@ -9,6 +9,6 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node" + "moduleResolution": "nodenext" } } From 2cad36168c019607714260f47e0f1b508a7fb272 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 11:51:02 +0100 Subject: [PATCH 156/218] feat(tracker-assist): 3.4.8: webpack 5 fully specified imports --- tracker/tracker-assist/package-lock.json | 14 ++++++------- tracker/tracker-assist/package.json | 8 ++++---- tracker/tracker-assist/src/CallWindow.ts | 10 +++++----- tracker/tracker-assist/src/index.ts | 25 ++++++++++++------------ tracker/tracker-assist/tsconfig.json | 2 +- 5 files changed, 30 insertions(+), 29 deletions(-) diff --git a/tracker/tracker-assist/package-lock.json b/tracker/tracker-assist/package-lock.json index e1e86f4bf..6a78905a8 100644 --- a/tracker/tracker-assist/package-lock.json +++ b/tracker/tracker-assist/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-assist", - "version": "3.4.6", + "version": "3.4.7", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.4.7", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.7.tgz", - "integrity": "sha512-E9ZwjPwo9WbThV9nAQbK8EKLwJcLBgQG51ND3LB+p21xaz0WcMETIaJDFFmHhhwvkCQ1Vi43gK3cjoOoHF4XFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -811,9 +811,9 @@ } }, "typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index 6f7bb536b..c4329c464 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "3.4.7", + "version": "3.4.8", "keywords": [ "WebRTC", "assistance", @@ -24,12 +24,12 @@ "peerjs": "^1.3.2" }, "peerDependencies": { - "@openreplay/tracker": "^3.4.3" + "@openreplay/tracker": "^3.4.8" }, "devDependencies": { - "@openreplay/tracker": "^3.4.3", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-assist/src/CallWindow.ts b/tracker/tracker-assist/src/CallWindow.ts index 3356449ce..909c35d09 100644 --- a/tracker/tracker-assist/src/CallWindow.ts +++ 
b/tracker/tracker-assist/src/CallWindow.ts @@ -1,4 +1,4 @@ -import type { LocalStream } from './LocalStream'; +import type { LocalStream } from './LocalStream.js'; const SS_START_TS_KEY = "__openreplay_assist_call_start_ts" @@ -149,14 +149,14 @@ export default class CallWindow { // Hack to determine if the remote video is enabled if (this.checkRemoteVideoInterval) { clearInterval(this.checkRemoteVideoInterval) } // just in case - let enable = false + let enabled = false this.checkRemoteVideoInterval = setInterval(() => { const settings = rStream.getVideoTracks()[0]?.getSettings() //console.log(settings) const isDummyVideoTrack = !!settings && (settings.width === 2 || settings.frameRate === 0) - const shouldEnable = !isDummyVideoTrack - if (enable !== shouldEnable) { - this.toggleRemoteVideoUI(enable=shouldEnable) + const shouldBeEnabled = !isDummyVideoTrack + if (enabled !== shouldBeEnabled) { + this.toggleRemoteVideoUI(enabled=shouldBeEnabled) } }, 1000) }) diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index d36faeb95..247fd407e 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -1,20 +1,20 @@ -import './_slim'; -import Peer, { MediaConnection } from 'peerjs'; +import './_slim.js'; +import Peer from 'peerjs'; import type { DataConnection } from 'peerjs'; import { App, Messages } from '@openreplay/tracker'; import type Message from '@openreplay/tracker'; -import BufferingConnection from './BufferingConnection'; -import Mouse from './Mouse'; -import CallWindow from './CallWindow'; -import ConfirmWindow from './ConfirmWindow'; -import RequestLocalStream from './LocalStream'; +import BufferingConnection from './BufferingConnection.js'; +import Mouse from './Mouse.js'; +import CallWindow from './CallWindow.js'; +import ConfirmWindow from './ConfirmWindow.js'; +import RequestLocalStream from './LocalStream.js'; export interface Options { confirmText: string, confirmStyle: Object, // Styles object session_calling_peer_key: string, - config: Object + config: RTCConfiguration, } enum CallingState { @@ -23,7 +23,7 @@ enum CallingState { False, }; -//@ts-ignore webpack5 hack (?!) +//@ts-ignore peerjs hack for webpack5 (?!) Peer = Peer.default || Peer; // type IncomeMessages = @@ -35,7 +35,7 @@ Peer = Peer.default || Peer; export default function(opts: Partial = {}) { const options: Options = Object.assign( { - confirmText: "You have a call. Do you want to answer?", + confirmText: "You have an incoming call. 
Do you want to answer?", confirmStyle: {}, session_calling_peer_key: "__openreplay_calling_peer", config: null @@ -198,18 +198,19 @@ export default function(opts: Partial = {}) { dataConn.on('data', (data: any) => { if (!data) { return } if (data === "call_end") { - //console.log('receiving callend on call') + log('"call_end" received') onCallEnd(); return; } if (data.name === 'string') { - //console.log("name",data) + log("name recieved: ", data) callUI.setAssistentName(data.name); } if (data.type === "click" && typeof data.x === 'number' && typeof data.y === 'number') { const el = document.elementFromPoint(data.x, data.y) if (el instanceof HTMLElement) { el.click() + el.focus() } return } diff --git a/tracker/tracker-assist/tsconfig.json b/tracker/tracker-assist/tsconfig.json index bb8f6a4c4..95d4f9408 100644 --- a/tracker/tracker-assist/tsconfig.json +++ b/tracker/tracker-assist/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "allowSyntheticDefaultImports": true, "declaration": true, "outDir": "./lib" From f4c65351d4c29b68de1a48a772e01dbd40163dab Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 16:16:05 +0100 Subject: [PATCH 157/218] upd(backend): confluent-kafka-go v1.7.0 --- backend/go.mod | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 8bd0386af..ab98ca444 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -8,7 +8,7 @@ require ( github.com/Masterminds/squirrel v1.5.0 github.com/aws/aws-sdk-go v1.35.23 github.com/btcsuite/btcutil v1.0.2 - github.com/confluentinc/confluent-kafka-go v1.5.2 // indirect + github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect github.com/elastic/go-elasticsearch/v7 v7.13.1 github.com/go-redis/redis v6.15.9+incompatible github.com/google/uuid v1.1.2 @@ -24,6 +24,6 @@ require ( github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe google.golang.org/api v0.50.0 - gopkg.in/confluentinc/confluent-kafka-go.v1 v1.5.2 + gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0 ) From 5d14a65d3d7669188407bf6199bdfb5d013b647b Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 19:09:38 +0100 Subject: [PATCH 158/218] feat(tracker-ngrx): 3.4.8: webpack 5 fully specified imports --- tracker/tracker-ngrx/package-lock.json | 12 ++++++------ tracker/tracker-ngrx/package.json | 8 ++++---- tracker/tracker-ngrx/src/index.ts | 2 +- tracker/tracker-ngrx/tsconfig.json | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tracker/tracker-ngrx/package-lock.json b/tracker/tracker-ngrx/package-lock.json index 7870facb0..60cfabefe 100644 --- a/tracker/tracker-ngrx/package-lock.json +++ b/tracker/tracker-ngrx/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": 
"sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-ngrx/package.json b/tracker/tracker-ngrx/package.json index d79dce81b..614c96f18 100644 --- a/tracker/tracker-ngrx/package.json +++ b/tracker/tracker-ngrx/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-ngrx", "description": "Tracker plugin for NgRx state recording", - "version": "3.0.0", + "version": "3.4.8", "keywords": [ "ngrx", "logging", @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^5.1.0", + "@openreplay/tracker": "^3.4.8", "@ngrx/store": ">=4" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-ngrx/src/index.ts b/tracker/tracker-ngrx/src/index.ts index 508cf24b0..e967bacc7 100644 --- a/tracker/tracker-ngrx/src/index.ts +++ b/tracker/tracker-ngrx/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from './syncod'; +import { Encoder, sha1 } from './syncod/index.js'; export interface Options { actionFilter: (action: any) => boolean; diff --git a/tracker/tracker-ngrx/tsconfig.json b/tracker/tracker-ngrx/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-ngrx/tsconfig.json +++ b/tracker/tracker-ngrx/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } From fe5ba060eb4fdf1bdd41e45e8b8c237a915edb82 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 19:12:46 +0100 Subject: [PATCH 159/218] feat(tracker-redux): 3.4.8: webpack 5 fully specified imports --- tracker/tracker-redux/package-lock.json | 12 ++++++------ tracker/tracker-redux/package.json | 8 ++++---- tracker/tracker-redux/src/index.ts | 2 +- tracker/tracker-redux/tsconfig.json | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tracker/tracker-redux/package-lock.json b/tracker/tracker-redux/package-lock.json index 4d070f717..3cb97282f 100644 --- a/tracker/tracker-redux/package-lock.json +++ b/tracker/tracker-redux/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "4.6.0-dev.20211126", + "resolved": 
"https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-redux/package.json b/tracker/tracker-redux/package.json index 62c806e2f..87a365754 100644 --- a/tracker/tracker-redux/package.json +++ b/tracker/tracker-redux/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-redux", "description": "Tracker plugin for Redux state recording", - "version": "3.0.0", + "version": "3.4.8", "keywords": [ "redux", "logging", @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "redux": "^4.0.0" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-redux/src/index.ts b/tracker/tracker-redux/src/index.ts index a3b5fee3a..5a4749e71 100644 --- a/tracker/tracker-redux/src/index.ts +++ b/tracker/tracker-redux/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from './syncod'; +import { Encoder, sha1 } from './syncod/index.js'; export interface Options { actionFilter: (action: any) => boolean; diff --git a/tracker/tracker-redux/tsconfig.json b/tracker/tracker-redux/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-redux/tsconfig.json +++ b/tracker/tracker-redux/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } From 31c62970792967a524d6968862687516652a24cf Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 19:14:51 +0100 Subject: [PATCH 160/218] feat(tracker-vuex): 3.4.8: webpack 5 fully specified imports --- tracker/tracker-vuex/package-lock.json | 12 ++++++------ tracker/tracker-vuex/package.json | 6 +++--- tracker/tracker-vuex/src/index.ts | 2 +- tracker/tracker-vuex/tsconfig.json | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tracker/tracker-vuex/package-lock.json b/tracker/tracker-vuex/package-lock.json index 04e6eb201..35fd251fe 100644 --- a/tracker/tracker-vuex/package-lock.json +++ b/tracker/tracker-vuex/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.0.tgz", - "integrity": "sha512-kuMnXxGFvieVsuQJJ70FVataAZgCT9//Vji/qrsTVjXStQuhPTe61iyUS4eAudaR/N3r5/yFt9Z0dEb3fJHDFg==", + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", + "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -767,9 +767,9 @@ } }, "typescript": { - "version": "3.8.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", - "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, 
"validate-npm-package-license": { diff --git a/tracker/tracker-vuex/package.json b/tracker/tracker-vuex/package.json index a9f09f100..005a9393c 100644 --- a/tracker/tracker-vuex/package.json +++ b/tracker/tracker-vuex/package.json @@ -23,13 +23,13 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "@ngrx/store": ">=4" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-vuex/src/index.ts b/tracker/tracker-vuex/src/index.ts index 20b1e2b83..30333904e 100644 --- a/tracker/tracker-vuex/src/index.ts +++ b/tracker/tracker-vuex/src/index.ts @@ -1,5 +1,5 @@ import { App, Messages } from '@openreplay/tracker'; -import { Encoder, sha1 } from "./syncod"; +import { Encoder, sha1 } from "./syncod/index.js"; export interface Options { filter: (mutation: any, state: any) => boolean; diff --git a/tracker/tracker-vuex/tsconfig.json b/tracker/tracker-vuex/tsconfig.json index ce07a685b..0c5b8d1b3 100644 --- a/tracker/tracker-vuex/tsconfig.json +++ b/tracker/tracker-vuex/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "node", + "moduleResolution": "nodenext", "declaration": true, "outDir": "./lib" } From 4332314200094abfe64fe085ce4262eaa054fe24 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Mon, 29 Nov 2021 19:27:25 +0100 Subject: [PATCH 161/218] feat(tracker):3.4.9:start returns promise --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/index.ts | 2 +- tracker/tracker/src/main/index.ts | 10 ++++++---- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index ab2a367b8..e128ee0e0 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.4.8", + "version": "3.4.9", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index e0bc47b2b..54fe9050f 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -11,7 +11,7 @@ import type { Options as ObserverOptions } from "./observer.js"; import type { Options as WebworkerOptions, WorkerMessageData } from "../../messages/webworker.js"; -interface OnStartInfo { +export interface OnStartInfo { sessionID: string, sessionToken: string, userUUID: string, diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts index 2ddfc6961..6af325e57 100644 --- a/tracker/tracker/src/main/index.ts +++ b/tracker/tracker/src/main/index.ts @@ -26,6 +26,8 @@ import { Options as InputOptions } from "./modules/input.js"; import { Options as PerformanceOptions } from "./modules/performance.js"; import { Options as TimingOptions } from "./modules/timing.js"; +export type { OnStartInfo } from './app/index.js'; + export type Options = Partial< AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & PerformanceOptions & TimingOptions > & { @@ -138,15 +140,15 @@ export default class API { return this.isActive(); } - start(): void { + start() /*: Promise*/ { if (!IN_BROWSER) { console.error(`OpenReplay: you are trying to start Tracker on a node.js environment. 
If you want to use OpenReplay with SSR, please, use componentDidMount or useEffect API for placing the \`tracker.start()\` line. Check documentation on ${DOCS_HOST}${DOCS_SETUP}`) - return; + return Promise.reject("Trying to start not in browser."); } if (this.app === null) { - return; + return Promise.reject("Browser doesn't support required api, or doNotTrack is active."); } - this.app.start(); + return this.app.start(); } stop(): void { if (this.app === null) { From 1c7f7b3f2fa89ee9d4bb17313a8ae2c958f4c374 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 30 Nov 2021 16:13:27 +0530 Subject: [PATCH 162/218] change - version number to 1.3.6 --- frontend/env.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/env.js b/frontend/env.js index 52f562031..ee5ba5c04 100644 --- a/frontend/env.js +++ b/frontend/env.js @@ -13,7 +13,7 @@ const oss = { ORIGIN: () => 'window.location.origin', API_EDP: () => 'window.location.origin + "/api"', ASSETS_HOST: () => 'window.location.origin + "/assets"', - VERSION: '1.4.0', + VERSION: '1.3.6', SOURCEMAP: true, MINIO_ENDPOINT: process.env.MINIO_ENDPOINT, MINIO_PORT: process.env.MINIO_PORT, From 7eddbec73cec9a387f8d2f0c433de54b8444fd75 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 14:04:39 +0100 Subject: [PATCH 163/218] feat(api): assist credentials generator --- ee/api/.chalice/config.json | 3 ++- ee/api/chalicelib/blueprints/bp_ee.py | 22 ++++++++++++++++++++++ scripts/helm/app/chalice.yaml | 1 + 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json index d4e130688..691429329 100644 --- a/ee/api/.chalice/config.json +++ b/ee/api/.chalice/config.json @@ -64,7 +64,8 @@ "invitation_link": "/api/users/invitation?token=%s", "change_password_link": "/reset-password?invitation=%s&&pass=%s", "iosBucket": "openreplay-ios-images", - "version_number": "1.4.0" + "version_number": "1.3.6", + "assist_secret": "" }, "lambda_timeout": 150, "lambda_memory_size": 400, diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index b32bd6f52..7c9bbd8bf 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -1,8 +1,16 @@ +import base64 +import hashlib +import hmac +from time import time + from chalice import Blueprint from chalicelib import _overrides +from chalicelib.blueprints import bp_authorizers from chalicelib.core import roles from chalicelib.core import unlock +from chalicelib.utils import helper +from chalicelib.utils.helper import environ app = Blueprint(__name__) _overrides.chalice_app(app) @@ -50,3 +58,17 @@ def delete_role(roleId, context): return { 'data': data } + + +@app.route('/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) +def get_assist_credentials(context): + user = helper.generate_salt() + secret = environ["assist_secret"] + ttl = int(environ.get("assist_ttl", 48)) * 3600 + timestamp = int(time()) + ttl + username = str(timestamp) + ':' + user + dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) + dig = dig.digest() + password = base64.b64encode(dig).decode() + + return {"data": {'username': username, 'password': password}} diff --git a/scripts/helm/app/chalice.yaml b/scripts/helm/app/chalice.yaml index 4ef1eecc7..98d580db9 100644 --- a/scripts/helm/app/chalice.yaml +++ b/scripts/helm/app/chalice.yaml @@ -63,3 +63,4 @@ env: idp_sso_url: '' idp_x509cert: '' idp_sls_url: '' + assist_secret: '' \ No 
newline at end of file From c7d530d2d450e9c6f2dd4a16be8286a7366d464a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 15:27:13 +0100 Subject: [PATCH 164/218] feat(api): fixed EE signup auth --- ee/api/chalicelib/core/users.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 5ba613875..452cfc303 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -619,11 +619,11 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): roles.name AS role_name, roles.permissions FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id) - LEFT JOIN public.roles USING (role_id) + LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id) WHERE users.email = %(email)s AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1) - AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenantId)s) + AND (roles.role_id IS NULL OR roles.deleted_at IS NULL) LIMIT 1;""", {"email": email, "password": password}) From 627a2eb22eee38f263d59468c7f0a664926115f0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 16:25:50 +0100 Subject: [PATCH 165/218] feat(api): EE v1/assis/credentials --- ee/api/chalicelib/blueprints/bp_ee.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index 7c9bbd8bf..9dae133a0 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -60,6 +60,7 @@ def delete_role(roleId, context): } +@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) @app.route('/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) def get_assist_credentials(context): user = helper.generate_salt() From 8c0b1ea6301e5cce66f750b3feb52c5341dd9946 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 16:48:23 +0100 Subject: [PATCH 166/218] feat(api): EE changed credentials endpoints --- ee/api/app.py | 6 ++--- ee/api/chalicelib/blueprints/app/v1_api_ee.py | 14 +++++++++++ ee/api/chalicelib/blueprints/bp_ee.py | 24 ++++--------------- ee/api/chalicelib/utils/assist_helper.py | 19 +++++++++++++++ 4 files changed, 40 insertions(+), 23 deletions(-) create mode 100644 ee/api/chalicelib/blueprints/app/v1_api_ee.py create mode 100644 ee/api/chalicelib/utils/assist_helper.py diff --git a/ee/api/app.py b/ee/api/app.py index cc901c6f8..e12b64e0b 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -5,15 +5,14 @@ from sentry_sdk import configure_scope from chalicelib import _overrides from chalicelib.blueprints import bp_authorizers from chalicelib.blueprints import bp_core, bp_core_crons -from chalicelib.blueprints.app import v1_api from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons +from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml +from chalicelib.blueprints.app import v1_api, v1_api_ee from chalicelib.blueprints.subs import bp_dashboard from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.helper import environ -from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml - app = Chalice(app_name='parrot') app.debug 
= not helper.is_production() or helper.is_local() @@ -123,6 +122,7 @@ app.register_blueprint(bp_core_dynamic.app) app.register_blueprint(bp_core_dynamic_crons.app) app.register_blueprint(bp_dashboard.app) app.register_blueprint(v1_api.app) +app.register_blueprint(v1_api_ee.app) # Enterprise app.register_blueprint(bp_ee.app) app.register_blueprint(bp_ee_crons.app) diff --git a/ee/api/chalicelib/blueprints/app/v1_api_ee.py b/ee/api/chalicelib/blueprints/app/v1_api_ee.py new file mode 100644 index 000000000..e6f3dc8f2 --- /dev/null +++ b/ee/api/chalicelib/blueprints/app/v1_api_ee.py @@ -0,0 +1,14 @@ +from chalice import Blueprint + +from chalicelib import _overrides +from chalicelib.blueprints import bp_authorizers +from chalicelib.utils import assist_helper + +app = Blueprint(__name__) +_overrides.chalice_app(app) + + +@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) +def get_assist_credentials(context): + username, credential = assist_helper.get_temporary_credentials() + return {"data": {'username': username, 'credential': credential}} diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index 9dae133a0..176e59455 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -1,16 +1,9 @@ -import base64 -import hashlib -import hmac -from time import time - from chalice import Blueprint from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers from chalicelib.core import roles from chalicelib.core import unlock -from chalicelib.utils import helper -from chalicelib.utils.helper import environ +from chalicelib.utils import assist_helper app = Blueprint(__name__) _overrides.chalice_app(app) @@ -60,16 +53,7 @@ def delete_role(roleId, context): } -@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -@app.route('/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) +@app.route('/assist/credentials', methods=['GET']) def get_assist_credentials(context): - user = helper.generate_salt() - secret = environ["assist_secret"] - ttl = int(environ.get("assist_ttl", 48)) * 3600 - timestamp = int(time()) + ttl - username = str(timestamp) + ':' + user - dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) - dig = dig.digest() - password = base64.b64encode(dig).decode() - - return {"data": {'username': username, 'password': password}} + username, credential = assist_helper.get_temporary_credentials() + return {"data": {'username': username, 'credential': credential}} diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py new file mode 100644 index 000000000..a709fdd2c --- /dev/null +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -0,0 +1,19 @@ +import base64 +import hashlib +import hmac +from time import time + +from chalicelib.utils import helper +from chalicelib.utils.helper import environ + + +def get_temporary_credentials(): + user = helper.generate_salt() + secret = environ["assist_secret"] + ttl = int(environ.get("assist_ttl", 48)) * 3600 + timestamp = int(time()) + ttl + username = str(timestamp) + ':' + user + dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) + dig = dig.digest() + password = base64.b64encode(dig).decode() + return user, password From 0419000b26988caa4adfce12a0c16ad4097f9356 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 18:02:21 +0100 
Subject: [PATCH 167/218] feat(api): EE changed credentials logic --- ee/api/chalicelib/blueprints/app/v1_api_ee.py | 6 ++-- ee/api/chalicelib/blueprints/bp_ee.py | 3 +- ee/api/chalicelib/utils/assist_helper.py | 31 +++++++++++++++++-- 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/ee/api/chalicelib/blueprints/app/v1_api_ee.py b/ee/api/chalicelib/blueprints/app/v1_api_ee.py index e6f3dc8f2..5682bf5b2 100644 --- a/ee/api/chalicelib/blueprints/app/v1_api_ee.py +++ b/ee/api/chalicelib/blueprints/app/v1_api_ee.py @@ -10,5 +10,7 @@ _overrides.chalice_app(app) @app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) def get_assist_credentials(context): - username, credential = assist_helper.get_temporary_credentials() - return {"data": {'username': username, 'credential': credential}} + credentials = assist_helper.get_temporary_credentials() + if "errors" in credentials: + return credentials + return {"data": credentials} diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py index 176e59455..c71668e36 100644 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ b/ee/api/chalicelib/blueprints/bp_ee.py @@ -55,5 +55,4 @@ def delete_role(roleId, context): @app.route('/assist/credentials', methods=['GET']) def get_assist_credentials(context): - username, credential = assist_helper.get_temporary_credentials() - return {"data": {'username': username, 'credential': credential}} + return {"data": assist_helper.get_full_config()} diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py index a709fdd2c..13d945b1a 100644 --- a/ee/api/chalicelib/utils/assist_helper.py +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -3,17 +3,42 @@ import hashlib import hmac from time import time +from chalicelib.core import assist from chalicelib.utils import helper from chalicelib.utils.helper import environ +def __get_secret(): + return environ["assist_secret"] if environ["assist_secret"] is not None and len( + environ["assist_secret"]) > 0 else None + + def get_temporary_credentials(): + secret = __get_secret() + if secret is None: + return {"errors": ["secret not defined"]} user = helper.generate_salt() - secret = environ["assist_secret"] ttl = int(environ.get("assist_ttl", 48)) * 3600 timestamp = int(time()) + ttl username = str(timestamp) + ':' + user dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) dig = dig.digest() - password = base64.b64encode(dig).decode() - return user, password + credential = base64.b64encode(dig).decode() + return {'username': username, 'credential': credential} + + +def get_full_config(): + servers = assist.get_ice_servers() + servers = servers.split("|") + credentials = get_temporary_credentials() + if __get_secret() is not None: + servers = [{"url": s.split(",")[0], **credentials} for s in servers] + else: + for i in range(len(servers)): + s = servers[i].split("|") + if len(s) == 3: + servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + else: + servers[i] = {"url": s[0]} + + return servers From 2c76c9f91d116845734026cadb6a2b51c0aee09d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 30 Nov 2021 19:01:25 +0100 Subject: [PATCH 168/218] feat(api): return none if not server is provided --- ee/api/chalicelib/utils/assist_helper.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py index 13d945b1a..d31cadd1f 100644 --- 
a/ee/api/chalicelib/utils/assist_helper.py +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -29,6 +29,8 @@ def get_temporary_credentials(): def get_full_config(): servers = assist.get_ice_servers() + if servers is None: + return None servers = servers.split("|") credentials = get_temporary_credentials() if __get_secret() is not None: From 799bf3cac32ca7a6a910c73e8de2cec4c046c46a Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 1 Dec 2021 00:04:00 +0530 Subject: [PATCH 169/218] Assist test cred (#225) * change(ui) - assist get credetials * change(ui) - removed type * change(ui) - removed type --- frontend/app/api_client.js | 1 + frontend/app/components/Session/LivePlayer.js | 46 ++++++++++++------- .../app/components/hocs/withPermissions.js | 2 +- .../MessageDistributor/MessageDistributor.ts | 4 +- .../managers/AssistManager.ts | 19 +++++--- frontend/app/player/singletone.js | 4 +- 6 files changed, 48 insertions(+), 28 deletions(-) diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index babb7a7c6..02e575033 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -80,6 +80,7 @@ export default class APIClient { path !== '/targets_temp' && !path.includes('/metadata/session_search') && !path.includes('/watchdogs/rules') && + !path.includes('/assist/credentials') && !!this.siteId && siteIdRequiredPaths.some(sidPath => path.startsWith(sidPath)) ) { diff --git a/frontend/app/components/Session/LivePlayer.js b/frontend/app/components/Session/LivePlayer.js index cf10dfbea..336ce9cea 100644 --- a/frontend/app/components/Session/LivePlayer.js +++ b/frontend/app/components/Session/LivePlayer.js @@ -2,6 +2,7 @@ import { useEffect } from 'react'; import { connect } from 'react-redux'; import { Loader } from 'UI'; import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player'; +import { withRequest } from 'HOCs' import { PlayerProvider, connectPlayer, @@ -30,17 +31,24 @@ const InitLoader = connectPlayer(state => ({ }))(Loader); -function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, live, fullscreen, jwt }) { +function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, live, fullscreen, jwt, loadingCredentials, assistCredendials, request }) { useEffect(() => { - initPlayer(session, jwt); + if (!loadingCredentials) { + initPlayer(session, jwt, assistCredendials); + } return () => cleanPlayer() - }, [ session.sessionId ]); + }, [ session.sessionId, loadingCredentials, assistCredendials ]); // LAYOUT (TODO: local layout state - useContext or something..) 
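// assistCredendials is the payload of GET /assist/credentials (withRequest
// above); per the EE API patches earlier in this series it is either null or
// a list of ICE-server entries shaped like
//   [{ url: 'turn:turn.example.net:3478',
//      username: '<unix-expiry>:<salt>', credential: '<base64 hmac>' }]
// (field names from assist_helper.get_full_config, values illustrative).
// The effect above gates initPlayer on !loadingCredentials, so the player,
// and with it the peer connection, is never created before the request settles.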
- useEffect(() => () => { - toggleFullscreen(false); - closeBottomBlock(); + useEffect(() => { + request(); + return () => { + toggleFullscreen(false); + closeBottomBlock(); + } }, []) + + return ( @@ -54,14 +62,18 @@ function WebPlayer ({ showAssist, session, toggleFullscreen, closeBottomBlock, l ); } - -export default connect(state => ({ - session: state.getIn([ 'sessions', 'current' ]), - showAssist: state.getIn([ 'sessions', 'showChatWindow' ]), - jwt: state.get('jwt'), - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), -}), { - toggleFullscreen, - closeBottomBlock, -})(WebPlayer) - +export default withRequest({ + initialData: null, + endpoint: '/assist/credentials', + dataWrapper: data => data, + dataName: 'assistCredendials', + loadingName: 'loadingCredentials', +})(connect( + state => ({ + session: state.getIn([ 'sessions', 'current' ]), + showAssist: state.getIn([ 'sessions', 'showChatWindow' ]), + jwt: state.get('jwt'), + fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), + }), + { toggleFullscreen, closeBottomBlock }, +)(WebPlayer)); \ No newline at end of file diff --git a/frontend/app/components/hocs/withPermissions.js b/frontend/app/components/hocs/withPermissions.js index c7a48609c..40597aa77 100644 --- a/frontend/app/components/hocs/withPermissions.js +++ b/frontend/app/components/hocs/withPermissions.js @@ -3,7 +3,7 @@ import { NoPermission } from 'UI'; export default (requiredPermissions, className) => BaseComponent => @connect((state, props) => ({ - permissions: state.getIn([ 'user', 'account', 'permissions' ]), + permissions: state.getIn([ 'user', 'account', 'permissions' ]) || [], isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', })) class extends React.PureComponent { diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index e8d807593..c742c10b5 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -118,11 +118,11 @@ export default class MessageDistributor extends StatedScreen { private navigationStartOffset: number = 0; private lastMessageTime: number = 0; - constructor(private readonly session: any /*Session*/, jwt: string) { + constructor(private readonly session: any /*Session*/, jwt: string, config) { super(); this.pagesManager = new PagesManager(this, this.session.isMobile) this.mouseManager = new MouseManager(this); - this.assistManager = new AssistManager(session, this); + this.assistManager = new AssistManager(session, this, config); this.sessionStart = this.session.startedAt; diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index 992cdc987..f9b708601 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -117,10 +117,8 @@ function resolveCSS(baseURL: string, css: string): string { return rewriteCSSLinks(css, rawurl => resolveURL(baseURL, rawurl)); } - export default class AssistManager { - constructor(private session, private md: MessageDistributor) {} - + constructor(private session, private md: MessageDistributor, private config) {} private setStatus(status: ConnectionStatus) { if (status === ConnectionStatus.Connecting) { @@ -150,13 +148,22 @@ export default class AssistManager { } this.setStatus(ConnectionStatus.Connecting) 
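// The entries injected into _config.config.iceServers below carry ephemeral
// credentials in the TURN REST-API style generated by assist_helper.py:
// username = "<unix-expiry>:<salt>" and
// credential = base64(HMAC-SHA1(assist_secret, username)).
// A TURN server holding the same shared secret (e.g. coturn with
// use-auth-secret; an inference from the shape, not stated in the patch)
// can verify them statelessly and reject expired usernames.
// With iceTransportPolicy: 'relay', every candidate is then forced through
// those TURN servers. An illustrative entry:
//   { url: 'turn:assist.example.com:3478',
//     username: '1638316800:ab12cd34', credential: 'Zm9vYmFyYmF6' }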
import('peerjs').then(({ default: Peer }) => { - // @ts-ignore - const peer = new Peer({ + const _config = { // @ts-ignore host: new URL(window.ENV.API_EDP).host, path: '/assist', port: location.protocol === 'https:' ? 443 : 80, - }); + } + + if (this.config) { + _config['config'] = { + iceServers: this.config, + sdpSemantics: 'unified-plan', + iceTransportPolicy: 'relay', + }; + } + + const peer = new Peer(_config); this.peer = peer; peer.on('error', e => { if (e.type !== 'peer-unavailable') { diff --git a/frontend/app/player/singletone.js b/frontend/app/player/singletone.js index adca40cf6..619f9b02b 100644 --- a/frontend/app/player/singletone.js +++ b/frontend/app/player/singletone.js @@ -28,11 +28,11 @@ document.addEventListener("visibilitychange", function() { } }); -export function init(session, jwt) { +export function init(session, jwt, config) { const live = session.live; const endTime = !live && session.duration.valueOf(); - instance = new Player(session, jwt); + instance = new Player(session, jwt, config); update({ initialized: true, live, From 2f7286d8f8a65384cd88e24458fccaf077ac66df Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 1 Dec 2021 18:52:23 +0530 Subject: [PATCH 170/218] fix the session nav --- frontend/app/components/Session_/Autoplay/Autoplay.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Session_/Autoplay/Autoplay.js b/frontend/app/components/Session_/Autoplay/Autoplay.js index 654626cff..ef501f3e6 100644 --- a/frontend/app/components/Session_/Autoplay/Autoplay.js +++ b/frontend/app/components/Session_/Autoplay/Autoplay.js @@ -30,7 +30,7 @@ function Autoplay(props) { tooltip={'Autoplay'} /> - + From cda6f197f2fdd90e4b791bd640e4a2c7ddf4917a Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 1 Dec 2021 20:22:14 +0530 Subject: [PATCH 171/218] fix(ui) - session auto play navigation, back nav --- .../app/components/BugFinder/BugFinder.js | 9 +++++++-- .../FunnelIssueDetails/FunnelIssueDetails.js | 2 +- .../FunnelSessionList/FunnelSessionList.js | 16 ++++++++++++---- .../FunnelSessionsHeader.js | 1 - .../components/Session_/PlayerBlockHeader.js | 19 ++++++++++++------- frontend/app/duck/sessions.js | 18 +++++++++++++++--- 6 files changed, 47 insertions(+), 18 deletions(-) diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js index bac9ea8e6..4f7a8b1c3 100644 --- a/frontend/app/components/BugFinder/BugFinder.js +++ b/frontend/app/components/BugFinder/BugFinder.js @@ -18,7 +18,7 @@ import withLocationHandlers from "HOCs/withLocationHandlers"; import { fetch as fetchFilterVariables } from 'Duck/sources'; import { fetchList as fetchIntegrationVariables, fetchSources } from 'Duck/customField'; import { RehydrateSlidePanel } from './WatchDogs/components'; -import { setActiveTab } from 'Duck/sessions'; +import { setActiveTab, setFunnelPage } from 'Duck/sessions'; import SessionsMenu from './SessionsMenu/SessionsMenu'; import SessionFlowList from './SessionFlowList/SessionFlowList'; import { LAST_7_DAYS } from 'Types/app/period'; @@ -58,7 +58,8 @@ const weakEqual = (val1, val2) => { fetchSiteList, fetchFunnelsList, resetFunnel, - resetFunnelFilters + resetFunnelFilters, + setFunnelPage }) @withPageTitle("Sessions - OpenReplay") export default class BugFinder extends React.PureComponent { @@ -94,6 +95,10 @@ export default class BugFinder extends React.PureComponent { props.fetchFunnelsList(LAST_7_DAYS) } + componentDidMount() { + this.props.setFunnelPage(false); + } + 
toggleRehydratePanel = () => { this.setState({ showRehydratePanel: !this.state.showRehydratePanel }) } diff --git a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js index 0bb049d24..6672bc580 100644 --- a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js +++ b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js @@ -28,7 +28,7 @@ function FunnelIssueDetails(props) {
[one-line JSX change lost in extraction: a - / + pair inside FunnelIssueDetails's render]
) diff --git a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js index 3aef18003..707049faa 100644 --- a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js +++ b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js @@ -2,19 +2,27 @@ import React, { useState, useEffect } from 'react' import { connect } from 'react-redux' import SessionItem from 'Shared/SessionItem' import { fetchSessions, fetchSessionsFiltered } from 'Duck/funnels' +import { setFunnelPage } from 'Duck/sessions' import { LoadMoreButton, NoContent, Loader } from 'UI' import FunnelSessionsHeader from '../FunnelSessionsHeader' const PER_PAGE = 10; function FunnelSessionList(props) { - const { list, sessionsTotal, sessionsSort, inDetails = false } = props; + const { funnelId, issueId, list, sessionsTotal, sessionsSort, inDetails = false } = props; const [showPages, setShowPages] = useState(1) const displayedCount = Math.min(showPages * PER_PAGE, list.size); const addPage = () => setShowPages(showPages + 1); + useEffect(() => { + props.setFunnelPage({ + funnelId, + issueId + }) + }, []) + return (
@@ -24,7 +32,7 @@ function FunnelSessionList(props) { subtext="Please try changing your search parameters." icon="exclamation-circle" show={ list.size === 0} - > + > { list.take(displayedCount).map(session => ( + />
) @@ -51,4 +59,4 @@ export default connect(state => ({ liveFilters: state.getIn(['funnelFilters', 'appliedFilter']), funnelFilters: state.getIn(['funnels', 'funnelFilters']), sessionsSort: state.getIn(['funnels', 'sessionsSort']), -}), { fetchSessions, fetchSessionsFiltered })(FunnelSessionList) +}), { fetchSessions, fetchSessionsFiltered, setFunnelPage })(FunnelSessionList) diff --git a/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js b/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js index c598b1543..3a58ccca0 100644 --- a/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js +++ b/frontend/app/components/Funnels/FunnelSessionsHeader/FunnelSessionsHeader.js @@ -11,7 +11,6 @@ const sortOptions = Object.entries(sortOptionsMap) .map(([ value, text ]) => ({ value, text })); function FunnelSessionsHeader({ sessionsCount, inDetails = false }) { - const onSort = () => {} return (
diff --git a/frontend/app/components/Session_/PlayerBlockHeader.js b/frontend/app/components/Session_/PlayerBlockHeader.js index dee980e00..9a14541b2 100644 --- a/frontend/app/components/Session_/PlayerBlockHeader.js +++ b/frontend/app/components/Session_/PlayerBlockHeader.js @@ -2,7 +2,7 @@ import { connect } from 'react-redux'; import { withRouter } from 'react-router-dom'; import { browserIcon, osIcon, deviceTypeIcon } from 'App/iconNames'; import { formatTimeOrDate } from 'App/date'; -import { sessions as sessionsRoute, funnelIssue as funnelIssueRoute, withSiteId } from 'App/routes'; +import { sessions as sessionsRoute, funnel as funnelRoute, funnelIssue as funnelIssueRoute, withSiteId } from 'App/routes'; import { Icon, CountryFlag, IconButton, BackLink } from 'UI'; import { toggleFavorite } from 'Duck/sessions'; import cn from 'classnames'; @@ -36,12 +36,13 @@ function capitalise(str) { local: state.getIn(['sessions', 'timezone']), funnelRef: state.getIn(['funnels', 'navRef']), siteId: state.getIn([ 'user', 'siteId' ]), + funnelPage: state.getIn(['sessions', 'funnelPage']), }), { toggleFavorite, fetchListIntegration }) @withRouter export default class PlayerBlockHeader extends React.PureComponent { - componentDidMount() { + componentDidMount() { if (!this.props.issuesFetched) this.props.fetchListIntegration('issues') } @@ -53,10 +54,13 @@ export default class PlayerBlockHeader extends React.PureComponent { ); backHandler = () => { - const { history, siteId } = this.props; - if (history.action !== 'POP') - history.goBack(); - else + const { history, siteId, funnelPage } = this.props; + if (funnelPage) { + if (funnelPage.get('issueId')) { + history.push(withSiteId(funnelIssueRoute(funnelPage.get('funnelId'), funnelPage.get('issueId')), siteId)) + } else + history.push(withSiteId(funnelRoute(funnelPage.get('funnelId')), siteId)); + } else history.push(withSiteId(SESSIONS_ROUTE), siteId); } @@ -87,6 +91,7 @@ export default class PlayerBlockHeader extends React.PureComponent { jiraConfig, fullscreen, } = this.props; + const { history, siteId } = this.props; return (
@@ -111,7 +116,7 @@ export default class PlayerBlockHeader extends React.PureComponent { { live && } { !live && ( <> - +
{ @@ -117,9 +119,11 @@ const reducer = (state = initialState, action = {}) => { } + const sessionIds = list.map(({ sessionId }) => sessionId ).toJS(); + return state .set('list', list) - .set('sessionIds', list.map(({ sessionId }) => sessionId ).toJS()) + .set('sessionIds', sessionIds) .set('favoriteList', list.filter(({ favorite }) => favorite)) .set('total', total) .set('keyMap', keyMap) @@ -236,7 +240,8 @@ const reducer = (state = initialState, action = {}) => { return state.set('showChatWindow', action.state) case FETCH_INSIGHTS.SUCCESS:  return state.set('insights', List(action.data).sort((a, b) => b.count - a.count)); - + case SET_FUNNEL_PAGE_FLAG: + return state.set('funnelPage', action.funnelPage ? Map(action.funnelPage) : false); default: return state; } @@ -364,3 +369,10 @@ export function setEventFilter(filter) { } } +export function setFunnelPage(funnelPage) { + return { + type: SET_FUNNEL_PAGE_FLAG, + funnelPage + } +} + From 1264b8e89bcbeb5db67920147e0e36c30ee15085 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 1 Dec 2021 18:24:39 +0100 Subject: [PATCH 172/218] feat(api): changed SAML blueprint feat(db): changed delta version --- ee/api/chalicelib/blueprints/bp_saml.py | 10 +++++----- ee/api/requirements.txt | 2 +- .../postgresql/{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} | 0 .../postgresql/{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} | 0 4 files changed, 6 insertions(+), 6 deletions(-) rename ee/scripts/helm/db/init_dbs/postgresql/{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} (100%) rename scripts/helm/db/init_dbs/postgresql/{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} (100%) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index d5a964211..7af6f5c58 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -15,7 +15,7 @@ from chalice import Response from chalicelib.core import users, tenants -@app.route("/saml2", methods=['GET'], authorizer=None) +@app.route('/sso/saml2', methods=['GET'], authorizer=None) def start_sso(): app.current_request.path = '' req = prepare_request(request=app.current_request) @@ -28,7 +28,7 @@ def start_sso(): headers={'Location': sso_built_url, 'Content-Type': 'text/plain'}) -@app.route('/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None) +@app.route('/sso/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None) def process_sso_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] @@ -100,7 +100,7 @@ def process_sso_assertion(): return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) -@app.route('/saml2/slo', methods=['GET']) +@app.route('/sso/saml2/slo', methods=['GET']) def process_slo_request(context): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] @@ -127,7 +127,7 @@ def process_slo_request(context): spnq=name_id_spnq), 'Content-Type': 'text/plain'}) -@app.route('/saml2/sls', methods=['GET'], authorizer=None) +@app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) def process_sls_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] @@ -169,7 +169,7 @@ def process_sls_assertion(): headers={'Location': environ["SITE_URL"], 'Content-Type': 'text/plain'}) -@app.route('/saml2/metadata', methods=['GET'], authorizer=None) +@app.route('/sso/saml2/metadata', methods=['GET'], authorizer=None) def 
saml2_metadata(): req = prepare_request(request=app.current_request) auth = init_saml_auth(req) diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 8dc6ce340..e241b5edd 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -10,4 +10,4 @@ jira==2.0.0 schedule==1.1.0 croniter==1.0.12 clickhouse-driver==0.1.5 -python3-saml==1.10.1 \ No newline at end of file +python3-saml==1.12.0 \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql similarity index 100% rename from ee/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql rename to ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql diff --git a/scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql b/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql similarity index 100% rename from scripts/helm/db/init_dbs/postgresql/1.4.0/1.4.0.sql rename to scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql From 78da31a5fef98de09fea94b4cc204ebb647ae02b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 1 Dec 2021 18:33:28 +0100 Subject: [PATCH 173/218] feat(api): changed SAML helper --- ee/api/chalicelib/utils/SAML2_helper.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index e3fc31f02..da4c9a4a6 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -9,13 +9,13 @@ SAML2 = { "strict": True, "debug": True, "sp": { - "entityId": environ["SITE_URL"] + "/api/saml2/metadata/", + "entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/", "assertionConsumerService": { - "url": environ["SITE_URL"] + "/api/saml2/acs", + "url": environ["SITE_URL"] + "/api/sso/saml2/acs", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "singleLogoutService": { - "url": environ["SITE_URL"] + "/api/saml2/sls", + "url": environ["SITE_URL"] + "/api/sso/saml2/sls", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", From 1809c2eda1e1e4070d8be94fca5901a13f759c5a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 1 Dec 2021 18:50:32 +0100 Subject: [PATCH 174/218] feat(api): changed SAML helper --- ee/api/chalicelib/utils/SAML2_helper.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index da4c9a4a6..5c0c0af9e 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -9,13 +9,13 @@ SAML2 = { "strict": True, "debug": True, "sp": { - "entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/", + "entityId": environ["SITE_URL"] + "/sso/saml2/metadata/", "assertionConsumerService": { - "url": environ["SITE_URL"] + "/api/sso/saml2/acs", + "url": environ["SITE_URL"] + "/sso/saml2/acs", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "singleLogoutService": { - "url": environ["SITE_URL"] + "/api/sso/saml2/sls", + "url": environ["SITE_URL"] + "/sso/saml2/sls", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", From f50355a67cf9cc1498cb3aca0bc47c3a63ebcb0c Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 21:14:11 +0100 Subject: [PATCH 175/218] feat (tracker-assist): 3.4.9: focus on remote click & remote scroll & less messages 
per send for some browsers --- tracker/tracker-assist/package-lock.json | 2 +- tracker/tracker-assist/package.json | 2 +- .../tracker-assist/src/BufferingConnection.ts | 15 ++++++-- tracker/tracker-assist/src/Mouse.ts | 10 ++++-- tracker/tracker-assist/src/_slim.ts | 2 +- tracker/tracker-assist/src/index.ts | 36 ++++++++++++++++--- 6 files changed, 53 insertions(+), 14 deletions(-) diff --git a/tracker/tracker-assist/package-lock.json b/tracker/tracker-assist/package-lock.json index 6a78905a8..cb3354060 100644 --- a/tracker/tracker-assist/package-lock.json +++ b/tracker/tracker-assist/package-lock.json @@ -1,6 +1,6 @@ { "name": "@openreplay/tracker-assist", - "version": "3.4.7", + "version": "3.4.8", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index c4329c464..345d42ecb 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "3.4.8", + "version": "3.4.9", "keywords": [ "WebRTC", "assistance", diff --git a/tracker/tracker-assist/src/BufferingConnection.ts b/tracker/tracker-assist/src/BufferingConnection.ts index 766bd7892..e90970c21 100644 --- a/tracker/tracker-assist/src/BufferingConnection.ts +++ b/tracker/tracker-assist/src/BufferingConnection.ts @@ -5,18 +5,27 @@ interface Message { encode(w: any): boolean; } +// 16kb should be max according to specification +const crOrFf: boolean = + typeof navigator !== "undefined" && + (navigator.userAgent.indexOf("Chrom") !== -1 || // Chrome && Chromium + navigator.userAgent.indexOf("Firefox") !== -1); + +const MESSAGES_PER_SEND = crOrFf ? 
500 : 100 + // Bffering required in case of webRTC export default class BufferingConnection { private readonly buffer: Message[][] = [] private buffering: boolean = false - constructor(readonly conn: DataConnection){} + constructor(readonly conn: DataConnection, + private readonly msgsPerSend: number = MESSAGES_PER_SEND){} private sendNext() { if (this.buffer.length) { setTimeout(() => { this.conn.send(this.buffer.shift()) this.sendNext() - }, 50) + }, 15) } else { this.buffering = false } @@ -26,7 +35,7 @@ export default class BufferingConnection { if (!this.conn.open) { return; } let i = 0; while (i < messages.length) { - this.buffer.push(messages.slice(i, i+=1000)) + this.buffer.push(messages.slice(i, i+=this.msgsPerSend)) } if (!this.buffering) { this.buffering = true diff --git a/tracker/tracker-assist/src/Mouse.ts b/tracker/tracker-assist/src/Mouse.ts index 03558ce1c..51fb67e8e 100644 --- a/tracker/tracker-assist/src/Mouse.ts +++ b/tracker/tracker-assist/src/Mouse.ts @@ -1,8 +1,7 @@ - - export default class Mouse { private mouse: HTMLDivElement + private position: [number,number] = [0,0] constructor() { this.mouse = document.createElement('div'); Object.assign(this.mouse.style, { @@ -17,13 +16,18 @@ export default class Mouse { document.body.appendChild(this.mouse); } - move({x, y}: {x?: number, y?: number}) { + move({x, y}: {x: number, y: number}) { + this.position = [x, y]; Object.assign(this.mouse.style, { left: `${x || 0}px`, top: `${y || 0}px` }) } + getPosition(): [ number, number] { + return this.position; + } + remove() { if (this.mouse.parentElement) { document.body.removeChild(this.mouse); diff --git a/tracker/tracker-assist/src/_slim.ts b/tracker/tracker-assist/src/_slim.ts index 72c52dbd3..ce86863be 100644 --- a/tracker/tracker-assist/src/_slim.ts +++ b/tracker/tracker-assist/src/_slim.ts @@ -5,4 +5,4 @@ */ // @ts-ignore -window.parcelRequire = window.parcelRequire || undefined; +typeof window !== "undefined" && (window.parcelRequire = window.parcelRequire || undefined); diff --git a/tracker/tracker-assist/src/index.ts b/tracker/tracker-assist/src/index.ts index 247fd407e..61cd7b418 100644 --- a/tracker/tracker-assist/src/index.ts +++ b/tracker/tracker-assist/src/index.ts @@ -15,6 +15,7 @@ export interface Options { confirmStyle: Object, // Styles object session_calling_peer_key: string, config: RTCConfiguration, + __messages_per_send?: number, } enum CallingState { @@ -23,7 +24,7 @@ enum CallingState { False, }; -//@ts-ignore peerjs hack for webpack5 (?!) +//@ts-ignore peerjs hack for webpack5 (?!) TODO: ES/node modules; Peer = Peer.default || Peer; // type IncomeMessages = @@ -32,13 +33,13 @@ Peer = Peer.default || Peer; // { type: "click", x: number, y: number } | // { x: number, y: number } -export default function(opts: Partial = {}) { +export default function(opts?: Partial) { const options: Options = Object.assign( { confirmText: "You have an incoming call. Do you want to answer?", confirmStyle: {}, session_calling_peer_key: "__openreplay_calling_peer", - config: null + config: null, }, opts, ); @@ -58,6 +59,7 @@ export default function(opts: Partial = {}) { let assistDemandedRestart = false let peer : Peer | null = null + // This is required because internal peerjs connection list is not stable. 
https://peerjs.com/docs.html#peerconnections const openDataConnections: Record = {} app.addCommitCallback(function(messages) { @@ -66,7 +68,10 @@ export default function(opts: Partial = {}) { app.attachStopCallback(function() { if (assistDemandedRestart) { return; } - peer && peer.destroy(); + if (peer) { + peer.destroy(); + log('Peer destroyed!') + } }); app.attachStartCallback(function() { @@ -77,6 +82,7 @@ export default function(opts: Partial = {}) { host: app.getHost(), path: '/assist', port: location.protocol === 'http:' && appOptions.__DISABLE_SECURE_MODE ? 80 : 443, + //debug: // 0 Print nothing //1 Prints only errors. / 2 Prints errors and warnings. / 3 Prints all logs. } if (options.config) { _opt['config'] = options.config @@ -92,7 +98,7 @@ export default function(opts: Partial = {}) { log('Connection opened.') assistDemandedRestart = true; app.stop(); - openDataConnections[conn.peer] = new BufferingConnection(conn) + openDataConnections[conn.peer] = new BufferingConnection(conn, options.__messages_per_send) conn.on('close', () => { log("Connection close: ", conn.peer) delete openDataConnections[conn.peer] // TODO: check if works properly @@ -206,6 +212,26 @@ export default function(opts: Partial = {}) { log("name recieved: ", data) callUI.setAssistentName(data.name); } + if (data.type === "scroll" && Array.isArray(data.delta)) { + const scrEl = document.scrollingElement || document.documentElement + const [mouseX, mouseY] = mouse.getPosition() + const [dX, dY] = data.delta; + const el = document.elementFromPoint(mouseX-scrEl.scrollLeft, mouseY-scrEl.scrollTop) + let scrolled = false // what would be the browser-like logic? + if (el) { + if(el.scrollWidth > el.clientWidth) { + el.scrollLeft += data.delta[0] + scrolled = true + } + if (el && el.scrollHeight > el.clientHeight) { + el.scrollTop += data.delta[1] + scrolled = true + } + } + if (!scrolled) { + window.scroll(scrEl.scrollLeft + data.delta[0], scrEl.scrollTop + data.delta[1]) + } + } if (data.type === "click" && typeof data.x === 'number' && typeof data.y === 'number') { const el = document.elementFromPoint(data.x, data.y) if (el instanceof HTMLElement) { From 4e27930c70dbb5f855ab6eb3ce53bf2eeda7fd09 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 21:36:53 +0100 Subject: [PATCH 176/218] fix(frontend-player): internal coordinates consider scroll --- .../StatedScreen/Screen/BaseScreen.ts | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts index 1ec4f5932..f3e332aa7 100644 --- a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts +++ b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts @@ -76,9 +76,8 @@ export default abstract class BaseScreen { //return this.boundingRect; } - getInternalCoordinates({ x, y }: Point): Point { + getInternalViewportCoordinates({ x, y }: Point): Point { const { x: overlayX, y: overlayY, width } = this.getBoundingClientRect(); - //console.log("x y ", x,y,'ovx y', overlayX, overlayY, width) const screenWidth = this.overlay.offsetWidth; @@ -89,7 +88,19 @@ export default abstract class BaseScreen { return { x: screenX, y: screenY }; } + getInternalCoordinates(p: Point): Point { + const { x, y } = this.getInternalViewportCoordinates(p); + + const docEl = this.document?.documentElement + const scrollX = docEl ? docEl.scrollLeft : 0 + const scrollY = docEl ? 
docEl.scrollTop : 0 + + return { x: x+scrollX, y: y+scrollY }; + } + getElementFromInternalPoint({ x, y }: Point): Element | null { + // elementFromPoint && elementFromPoints require viewpoint-related coordinates, + // not document-related return this.document?.elementFromPoint(x, y) || null; } @@ -108,11 +119,11 @@ export default abstract class BaseScreen { } getElementFromPoint(point: Point): Element | null { - return this.getElementFromInternalPoint(this.getInternalCoordinates(point)); + return this.getElementFromInternalPoint(this.getInternalViewportCoordinates(point)); } getElementsFromPoint(point: Point): Element[] { - return this.getElementsFromInternalPoint(this.getInternalCoordinates(point)); + return this.getElementsFromInternalPoint(this.getInternalViewportCoordinates(point)); } getElementBySelector(selector: string): Element | null { From c7e4461c1d6fa485f795797d8360faedaec8aedf Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 21:42:55 +0100 Subject: [PATCH 177/218] feat(frontend-assist): send scroll & local focus when remote control enabled --- .../managers/AssistManager.ts | 49 +++++++++++++++---- 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index f9b708601..239613c22 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -349,12 +349,29 @@ export default class AssistManager { } } - private onMouseMove = (e: MouseEvent ): void => { - const conn = this.dataConnection; - if (!conn) { return; } - // @ts-ignore ??? + // private mmtid?:ReturnType + private onMouseMove = (e: MouseEvent): void => { + // this.mmtid && clearTimeout(this.mmtid) + // this.mmtid = setTimeout(() => { const data = this.md.getInternalCoordinates(e); - conn.send({ x: Math.round(data.x), y: Math.round(data.y) }); + this.send({ x: Math.round(data.x), y: Math.round(data.y) }); + // }, 5) + } + + + // private wtid?: ReturnType + // private scrollDelta: [number, number] = [0,0] + private onWheel = (e: WheelEvent): void => { + e.preventDefault() + //throttling makes movements less smooth + // this.wtid && clearTimeout(this.wtid) + // this.scrollDelta[0] += e.deltaX + // this.scrollDelta[1] += e.deltaY + // this.wtid = setTimeout(() => { + this.send({ type: "scroll", delta: [ e.deltaX, e.deltaY ]})//this.scrollDelta }); + this.onMouseMove(e) + // this.scrollDelta = [0,0] + // }, 20) } private onMouseClick = (e: MouseEvent): void => { @@ -362,16 +379,27 @@ export default class AssistManager { if (!conn) { return; } const data = this.md.getInternalCoordinates(e); // const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager + const el = this.md.getElementFromInternalPoint(data) + if (el instanceof HTMLElement) { + el.focus() + el.oninput = e => e.preventDefault(); + el.onkeydown = e => e.preventDefault(); + } conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) }); } - private toggleRemoteControl = () => { - if (getState().remoteControl) { - this.md.overlay.removeEventListener("click", this.onMouseClick); - update({ remoteControl: false }) - } else { + private toggleRemoteControl = (flag?: boolean) => { + const state = getState().remoteControl; + const newState = typeof flag === 'boolean' ? 
flag : !state; + if (state === newState) { return } + if (newState) { this.md.overlay.addEventListener("click", this.onMouseClick); + this.md.overlay.addEventListener("wheel", this.onWheel) update({ remoteControl: true }) + } else { + this.md.overlay.removeEventListener("click", this.onMouseClick); + this.md.overlay.removeEventListener("wheel", this.onWheel); + update({ remoteControl: false }) } } @@ -390,6 +418,7 @@ export default class AssistManager { onStream, onCallEnd: () => { onCallEnd(); + this.toggleRemoteControl(false); this.md.overlay.removeEventListener("mousemove", this.onMouseMove); update({ calling: CallingState.False }); this.localCallData = null; From 29a7ce93742d5bb53bff6f5ce343264faaa7a95c Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 21:43:24 +0100 Subject: [PATCH 178/218] fix(frontend/player): DOMManager codefix --- .../MessageDistributor/managers/DOMManager.ts | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index e42041f91..c4a4b5ab0 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -120,16 +120,18 @@ export default class DOMManager extends ListWalker { private applyMessage = (msg: Message): void => { let node; + let doc: Document | null; switch (msg.tp) { case "create_document": - // @ts-ignore ?? - this.screen.document.open(); - // @ts-ignore ?? - this.screen.document.write(`${ msg.doctype || "" }`); - // @ts-ignore ?? - this.screen.document.close(); - // @ts-ignore ?? - const fRoot = this.screen.document.documentElement; + doc = this.screen.document; + if (!doc) { + logger.error("No iframe document found", msg) + return; + } + doc.open(); + doc.write(""); + doc.close(); + const fRoot = doc.documentElement; fRoot.innerText = ''; this.nl = [ fRoot ]; @@ -213,7 +215,7 @@ export default class DOMManager extends ListWalker { // @ts-ignore node.data = msg.data; if (node instanceof HTMLStyleElement) { - const doc = this.screen.document + doc = this.screen.document doc && rewriteNodeStyleSheet(doc, node) } break; @@ -255,7 +257,7 @@ export default class DOMManager extends ListWalker { } // await new Promise(resolve => { node.onload = resolve }) - const doc = node.contentDocument; + doc = node.contentDocument; if (!doc) { logger.warn("No iframe doc", msg, node, node.contentDocument); return; From 343bb1792d3425cdb97958c9966683c7c9068deb Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 21:56:52 +0100 Subject: [PATCH 179/218] fix(frontend-player): try-catch around insertRule and querySelector --- .../MessageDistributor/StatedScreen/Screen/BaseScreen.ts | 7 ++++++- .../app/player/MessageDistributor/managers/DOMManager.ts | 6 +++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts index f3e332aa7..e2cd635fd 100644 --- a/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts +++ b/frontend/app/player/MessageDistributor/StatedScreen/Screen/BaseScreen.ts @@ -128,7 +128,12 @@ export default abstract class BaseScreen { getElementBySelector(selector: string): Element | null { if (!selector) return null; - return this.document?.querySelector(selector) || null; + try { + return this.document?.querySelector(selector) || null; + } 
catch (e) { + console.error("Can not select element. ", e) + return null + } } display(flag: boolean = true) { diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index c4a4b5ab0..0db1a199d 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -231,7 +231,11 @@ export default class DOMManager extends ListWalker { node.sheet.insertRule(msg.rule, msg.index) } catch (e) { logger.warn(e, msg) - node.sheet.insertRule(msg.rule) + try { + node.sheet.insertRule(msg.rule) + } catch (e) { + logger.warn("Cannot insert rule.", e, msg) + } } break; case "css_delete_rule": From 354f091aaefa779834aa6a527c4f1e4b56471718 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Wed, 1 Dec 2021 22:24:41 +0100 Subject: [PATCH 180/218] feat(tracker-axios): 3.4.2: check if error is axios response & webpack 5 fully specified --- tracker/tracker-axios/package-lock.json | 12 ++++++------ tracker/tracker-axios/package.json | 8 ++++---- tracker/tracker-axios/src/index.ts | 18 +++++++++++++++--- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/tracker/tracker-axios/package-lock.json b/tracker/tracker-axios/package-lock.json index 244d16daf..9894d6bc8 100644 --- a/tracker/tracker-axios/package-lock.json +++ b/tracker/tracker-axios/package-lock.json @@ -57,9 +57,9 @@ } }, "@openreplay/tracker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.3.tgz", - "integrity": "sha512-50C2cwJFENeHNjXVV90uIA5YE1bxfGbhI8e76Nfw9Pg+GVN38DcvGhr3PJ3OKjioT9V4gXBbvtE/RDGRaJJWLA==", + "version": "3.4.9", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.9.tgz", + "integrity": "sha512-7w1ddAboWu6NN926ySMUsKG6kmlYM0BYelSRIPM1xdoddLMRKZT4XaggLYjFezNSi9UJ9WYI8qwMHFIkS9lhCQ==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -782,9 +782,9 @@ } }, "typescript": { - "version": "3.9.9", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", - "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", "dev": true }, "validate-npm-package-license": { diff --git a/tracker/tracker-axios/package.json b/tracker/tracker-axios/package.json index 0d6de4ca1..06d2b41a0 100644 --- a/tracker/tracker-axios/package.json +++ b/tracker/tracker-axios/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-axios", "description": "Tracker plugin for axios requests recording", - "version": "3.4.1", + "version": "3.4.2", "keywords": [ "axios", "logging", @@ -20,14 +20,14 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.8", "axios": "^0.21.2" }, "devDependencies": { - "@openreplay/tracker": "^3.0.0", + "@openreplay/tracker": "^3.4.9", "axios": "^0.21.2", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", - "typescript": "^3.6.4" + "typescript": "^4.6.0-dev.20211126" } } diff --git a/tracker/tracker-axios/src/index.ts b/tracker/tracker-axios/src/index.ts index 01eb79dca..f9d53e025 100644 --- a/tracker/tracker-axios/src/index.ts +++ b/tracker/tracker-axios/src/index.ts @@ -1,8 +1,8 @@ -import { AxiosInstance, 
AxiosRequestConfig, AxiosResponse } from 'axios'; +import type { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'; import axios from 'axios'; import { App, Messages } from '@openreplay/tracker'; -import { getExceptionMessage } from '@openreplay/tracker/lib/modules/exception'; // TODO: export from tracker root -import { buildFullPath } from './url'; +import { getExceptionMessage } from '@openreplay/tracker/lib/modules/exception.js'; // TODO: export from tracker root +import { buildFullPath } from './url.js'; export interface Options { sessionTokenHeader?: string; @@ -12,6 +12,13 @@ export interface Options { ignoreHeaders: Array | boolean; } + +function isAxiosResponse(r: any): r is AxiosResponse { + return typeof r === "object" && + typeof r.config === "object" && + typeof r.status === "number" +} + export default function(opts: Partial = {}) { const options: Options = Object.assign( { @@ -145,6 +152,11 @@ export default function(opts: Partial = {}) { app.send(getExceptionMessage(error, [])); } + // TODO: common case (selector) + if (isAxiosResponse(error)) { + sendFetchMessage(error) + } + return Promise.reject(error); }); } From 1ce1203e04f3d8eb82a73470a590f859b0a9db0c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 12:49:45 +0100 Subject: [PATCH 181/218] feat(api): changed SAML helper feat(api): changed helper feat(api): changed EE /signup response --- api/chalicelib/utils/helper.py | 6 +++- .../chalicelib/blueprints/bp_core_dynamic.py | 31 ++++++++++--------- ee/api/chalicelib/blueprints/bp_saml.py | 1 - ee/api/chalicelib/utils/SAML2_helper.py | 3 ++ .../{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} | 0 5 files changed, 24 insertions(+), 17 deletions(-) rename ee/scripts/helm/db/init_dbs/clickhouse/{1.4.0/1.4.0.sql => 1.3.6/1.3.6.sql} (100%) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index ecc2b920a..1a743a57c 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -1,8 +1,8 @@ -import math import random import re import string +import math import requests local_prefix = 'local-' @@ -367,3 +367,7 @@ def get_internal_project_id(project_id64): def has_smtp(): return environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0 + + +def get_edition(): + return "foss" if is_free_open_source_edition() else "ee" diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index bc2a9c5ef..093ea13db 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -1,24 +1,22 @@ from chalice import Blueprint, Response from chalicelib import _overrides -from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integration_github, \ - integrations_manager +from chalicelib.core import assist +from chalicelib.core import boarding +from chalicelib.core import errors +from chalicelib.core import license +from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager +from chalicelib.core import notifications +from chalicelib.core import projects +from chalicelib.core import signup +from chalicelib.core import tenants +from chalicelib.core import users +from chalicelib.core import webhook +from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import captcha, SAML2_helper from chalicelib.utils import helper from chalicelib.utils.helper import environ -from chalicelib.core import tenants -from chalicelib.core 
import signup -from chalicelib.core import users -from chalicelib.core import projects -from chalicelib.core import errors -from chalicelib.core import notifications -from chalicelib.core import boarding -from chalicelib.core import webhook -from chalicelib.core import license -from chalicelib.core import assist -from chalicelib.core.collaboration_slack import Slack - app = Blueprint(__name__) _overrides.chalice_app(app) @@ -148,7 +146,10 @@ def put_client(context): @app.route('/signup', methods=['GET'], authorizer=None) def get_all_signup(): - return {"data": tenants.tenants_exists()} + return {"data": {"tenants": tenants.tenants_exists(), + "sso": SAML2_helper.is_saml2_available(), + "ssoProvider": SAML2_helper.get_saml2_provider(), + "edition": helper.get_edition()}} @app.route('/signup', methods=['POST', 'PUT'], authorizer=None) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 7af6f5c58..fc2c42ff4 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -22,7 +22,6 @@ def start_sso(): auth = init_saml_auth(req) sso_built_url = auth.login() return Response( - # status_code=301, status_code=307, body='', headers={'Location': sso_built_url, 'Content-Type': 'text/plain'}) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index 5c0c0af9e..eb5244d7c 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -102,3 +102,6 @@ def prepare_request(request): def is_saml2_available(): return idp is not None + +def get_saml2_provider(): + return "Okta" diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.4.0/1.4.0.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql similarity index 100% rename from ee/scripts/helm/db/init_dbs/clickhouse/1.4.0/1.4.0.sql rename to ee/scripts/helm/db/init_dbs/clickhouse/1.3.6/1.3.6.sql From 2e88642d77ecf91c9e0d45209424b7b7db9e020b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 13:32:25 +0100 Subject: [PATCH 182/218] feat(api): changed EE entrypoint for on the fly dev --- ee/api/.gitignore | 2 +- ee/api/entrypoint.sh | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100755 ee/api/entrypoint.sh diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 06eb982a9..f4fc2bc33 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -236,7 +236,7 @@ Pipfile /chalicelib/utils/TimeUTC.py /chalicelib/core/heatmaps.py /entrypoint.bundle.sh -/entrypoint.sh +#/entrypoint.sh /env_handler.py /chalicelib/blueprints/app/v1_api.py /build.sh diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh new file mode 100755 index 000000000..57ef8e42f --- /dev/null +++ b/ee/api/entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/bash +python env_handler.py +#chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} +chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From 50f42253ce83926e45f8e31cd64126f21f8f4250 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 2 Dec 2021 12:33:59 +0530 Subject: [PATCH 183/218] fix(ui) - tenantKey display --- .../Client/ProfileSettings/TenantKey.js | 18 ++++++++---------- frontend/app/types/client/client.js | 1 + 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/frontend/app/components/Client/ProfileSettings/TenantKey.js b/frontend/app/components/Client/ProfileSettings/TenantKey.js index 8e76bdb43..e6c43591d 100644 --- a/frontend/app/components/Client/ProfileSettings/TenantKey.js +++ 
b/frontend/app/components/Client/ProfileSettings/TenantKey.js @@ -4,37 +4,35 @@ import { connect } from 'react-redux'; import styles from './profileSettings.css'; @connect(state => ({ - key: state.getIn([ 'user', 'client', 'tenantKey' ]), - loading: state.getIn([ 'user', 'updateAccountRequest', 'loading' ]) || - state.getIn([ 'user', 'putClientRequest', 'loading' ]), + tenantKey: state.getIn([ 'user', 'client', 'tenantKey' ]), })) export default class TenantKey extends React.PureComponent { state = { copied: false } copyHandler = () => { - const { key } = this.props; + const { tenantKey } = this.props; this.setState({ copied: true }); - copy(key); + copy(tenantKey); setTimeout(() => { this.setState({ copied: false }); }, 1000); }; render() { - const { key } = this.props; + const { tenantKey } = this.props; const { copied } = this.state; return (
[one-line JSX change lost in extraction inside TenantKey's render; per the surrounding hunk it rebinds the read-only input to the renamed tenantKey value]
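The TenantKey hunks above lost their JSX to extraction, but the pattern patch 183 applies survives in the handler code: copy the tenant key to the clipboard and flash a one-second "copied" confirmation. A minimal sketch of that pattern, assuming the copy-to-clipboard package the component already calls; the CopyField name and markup are illustrative, not the component's actual JSX:

    import React, { useCallback, useState } from 'react';
    import copy from 'copy-to-clipboard';

    // Illustrative stand-in for TenantKey's render output (the real markup was lost above)
    function CopyField({ value }: { value: string }) {
      const [copied, setCopied] = useState(false);

      const copyHandler = useCallback(() => {
        copy(value);                                // put the key on the clipboard
        setCopied(true);                            // switch the button label to "copied"
        setTimeout(() => setCopied(false), 1000);   // restore it after one second, as above
      }, [value]);

      return (
        <div>
          <input type="text" readOnly value={value} />
          <button onClick={copyHandler}>{copied ? 'copied' : 'copy'}</button>
        </div>
      );
    }

The rename from key to tenantKey is likely the actual fix here: React reserves the key prop for reconciliation and never delivers it to the component, so a connect mapping onto key could not display anything.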
Date: Thu, 2 Dec 2021 16:46:34 +0530 Subject: [PATCH 184/218] change(ui) - metadata popup --- .../Client/ProfileSettings/ProfileSettings.js | 23 +++++++----- .../Session_/EventsBlock/Metadata/Metadata.js | 37 +++++++++++++------ 2 files changed, 39 insertions(+), 21 deletions(-) diff --git a/frontend/app/components/Client/ProfileSettings/ProfileSettings.js b/frontend/app/components/Client/ProfileSettings/ProfileSettings.js index c6fbda5ec..5df0d1aec 100644 --- a/frontend/app/components/Client/ProfileSettings/ProfileSettings.js +++ b/frontend/app/components/Client/ProfileSettings/ProfileSettings.js @@ -11,10 +11,11 @@ import { connect } from 'react-redux'; @withPageTitle('Account - OpenReplay Preferences') @connect(state => ({ account: state.getIn([ 'user', 'account' ]), + isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', })) export default class ProfileSettings extends React.PureComponent { render() { - const { account } = this.props; + const { account, isEnterprise } = this.props; return (
@@ -55,15 +56,19 @@ export default class ProfileSettings extends React.PureComponent {
[hunk body damaged in extraction (JSX tags stripped); the recoverable change wraps the "Data Collection" block in an edition guard:]
+          { !isEnterprise && (
+            <>
+              [stripped JSX]
+                { 'Data Collection' }
+                { 'Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.' }
+              [stripped JSX]
+            </>
+          )}
-          [stripped JSX]
-            { 'Data Collection' }
-            { 'Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.' }
-          [stripped JSX]
{ account.license && ( <>
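The damaged hunk above, together with the withPermissions change in patch 169, repeats the same edition check: state.getIn(['user', 'client', 'edition']) === 'ee'. A minimal sketch of that gating pattern as a connected component; the component and prop names are illustrative, not code from the repository:

    import React from 'react';
    import { connect } from 'react-redux';

    // Single source of truth for the edition flag used by the hunks above
    const mapStateToProps = (state: any) => ({
      isEnterprise: state.getIn(['user', 'client', 'edition']) === 'ee',
    });

    // Renders the self-hosted-only "Data Collection" section, and nothing on EE
    function DataCollectionSection({ isEnterprise }: { isEnterprise: boolean }) {
      if (isEnterprise) return null;
      return (
        <div>
          <h4>{'Data Collection'}</h4>
          <span>{'Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.'}</span>
        </div>
      );
    }

    export default connect(mapStateToProps)(DataCollectionSection);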
diff --git a/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js b/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js index 6d24be57b..8b26459f3 100644 --- a/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js +++ b/frontend/app/components/Session_/EventsBlock/Metadata/Metadata.js @@ -1,6 +1,6 @@ import React, { useCallback, useState } from 'react'; import { connect } from 'react-redux'; -import { NoContent, IconButton } from 'UI'; +import { NoContent, IconButton, Popup } from 'UI'; import withToggle from 'HOCs/withToggle'; import MetadataItem from './MetadataItem'; import stl from './metadata.css'; @@ -9,19 +9,32 @@ export default connect(state => ({ metadata: state.getIn([ 'sessions', 'current', 'metadata' ]), }))(function Metadata ({ metadata }) { const [ visible, setVisible ] = useState(false); - const toggle = useCallback(() => setVisible(v => !v), []); + const toggle = useCallback(() => metadata.size > 0 && setVisible(v => !v), []); return ( <> - + } + content={ +
+ Metadata must be explicitly specified from the dashboard under Preferences > Metadata.
+ } + on="click" + disabled={metadata.length > 0} + size="tiny" + inverted + position="top center" /> { visible &&
From b236cd50c859c614cf622a295c6a3a2b2e25f503 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 2 Dec 2021 19:54:34 +0530 Subject: [PATCH 185/218] feat(ui) - sso login --- frontend/app/Router.js | 2 +- frontend/app/components/Login/Login.js | 15 ++++++++++++--- frontend/app/duck/user.js | 9 +++++++-- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/frontend/app/Router.js b/frontend/app/Router.js index bbbd98c3c..643351d67 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -69,7 +69,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); organisation: state.getIn([ 'user', 'client', 'name' ]), tenantId: state.getIn([ 'user', 'client', 'tenantId' ]), tenants: state.getIn(['user', 'tenants']), - existingTenant: state.getIn(['user', 'existingTenant']), + existingTenant: state.getIn(['user', 'authDetails', 'tenants']), onboarding: state.getIn([ 'user', 'onboarding' ]) }; }, { diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index ba5a6a410..a1350000b 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -15,7 +15,8 @@ const recaptchaRef = React.createRef(); state => ({ errors: state.getIn([ 'user', 'loginRequest', 'errors' ]), loading: state.getIn([ 'user', 'loginRequest', 'loading' ]), - existingTenant: state.getIn(['user', 'existingTenant']) + // existingTenant: state.getIn(['user', 'authDetails', 'tenants']), + authDetails: state.getIn(['user', 'authDetails']), }), { login, }, ) @@ -45,7 +46,7 @@ export default class Login extends React.Component { write = ({ target: { value, name } }) => this.setState({ [ name ]: value }) render() { - const { errors, loading, existingTenant } = this.props; + const { errors, loading, authDetails } = this.props; return (
@@ -63,7 +64,7 @@ export default class Login extends React.Component {

Login to OpenReplay

-          { !existingTenant && [stripped JSX] Don't have an account? Sign up [stripped JSX] }
+          { !authDetails.tenants && [stripped JSX] Don't have an account? Sign up [stripped JSX] }
{ window.ENV.CAPTCHA_ENABLED && ( @@ -126,6 +127,14 @@ export default class Login extends React.Component {
+ { authDetails.sso && ( + + )}
diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 5edd112ae..6f29ef945 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -31,7 +31,12 @@ const initialState = Map({ passwordRequestError: false, passwordErrors: List(), tenants: [], - existingTenant: true, + authDetails: Map({ + tenants: false, + sso: false, + ssoProvider: '', + edition: '' + }), onboarding: false }); @@ -70,7 +75,7 @@ const reducer = (state = initialState, action = {}) => { case FETCH_ACCOUNT.SUCCESS: return state.set('account', Account(action.data)).set('passwordErrors', List()); case FETCH_TENANTS.SUCCESS: - return state.set('existingTenant', action.data); + return state.set('authDetails', Map(action.data)); // return state.set('tenants', action.data.map(i => ({ text: i.name, value: i.tenantId}))); case UPDATE_PASSWORD.FAILURE: return state.set('passwordErrors', List(action.errors)) From b38d940ed029ac22ab5c83f566fee16edd908e2a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 16:24:18 +0100 Subject: [PATCH 186/218] feat(nginx): changed configmap to forward original protocol for api --- .../helm/nginx-ingress/nginx-ingress/templates/configmap.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml index 422171a73..ab6c51997 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml @@ -61,6 +61,7 @@ data: proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "Upgrade"; proxy_set_header Host $host; + proxy_set_header X-Forwarded-Proto $scheme; proxy_pass http://chalice-openreplay.app.svc.cluster.local:8000; } location /assist/ { From 5c55bd10ed825651a4f035cdf50fe7095b3b1826 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 16:25:54 +0100 Subject: [PATCH 187/218] feat(api): SAML helper add /api to script_name --- ee/api/chalicelib/utils/SAML2_helper.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index eb5244d7c..43b6c6bc7 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -9,13 +9,13 @@ SAML2 = { "strict": True, "debug": True, "sp": { - "entityId": environ["SITE_URL"] + "/sso/saml2/metadata/", + "entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/", "assertionConsumerService": { - "url": environ["SITE_URL"] + "/sso/saml2/acs", + "url": environ["SITE_URL"] + "/api/sso/saml2/acs", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "singleLogoutService": { - "url": environ["SITE_URL"] + "/sso/saml2/sls", + "url": environ["SITE_URL"] + "/api/sso/saml2/sls", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", @@ -90,7 +90,7 @@ def prepare_request(request): 'https': 'on' if request.headers.get('x-forwarded-proto', 'http') == 'https' else 'off', 'http_host': request.headers['host'], 'server_port': url_data.port, - 'script_name': request.path, + 'script_name': "/api"+request.path, 'get_data': request.args.copy(), # Uncomment if using ADFS as IdP, https://github.com/onelogin/python-saml/pull/144 # 'lowercase_urlencoding': True, From 7a4977931d3c6bdd395101ab9c6171ff6e61d8a3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 
2021 17:36:47 +0100 Subject: [PATCH 188/218] feat(db): changed users structure for SSO feat(api): changed user's origin feat(api): support SSO idp_name feat(api): SSO ignore relay state feat(api): allow invite users if SSO is enabled --- .../chalicelib/blueprints/bp_core_dynamic.py | 4 +-- ee/api/chalicelib/blueprints/bp_saml.py | 32 +++++++++++-------- ee/api/chalicelib/utils/SAML2_helper.py | 5 +-- .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 16 ++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 7 ++-- scripts/helm/app/chalice.yaml | 1 + 6 files changed, 45 insertions(+), 20 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 093ea13db..444cd3e68 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -354,8 +354,8 @@ def get_members(context): @app.route('/client/members', methods=['PUT', 'POST']) def add_member(context): - if SAML2_helper.is_saml2_available(): - return {"errors": ["please use your SSO server to add teammates"]} + # if SAML2_helper.is_saml2_available(): + # return {"errors": ["please use your SSO server to add teammates"]} data = app.current_request.json_body return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index fc2c42ff4..c5f869359 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -1,6 +1,7 @@ from chalice import Blueprint from chalicelib import _overrides +from chalicelib.utils import SAML2_helper from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth app = Blueprint(__name__) @@ -9,7 +10,6 @@ _overrides.chalice_app(app) from chalicelib.utils.helper import environ from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request -from onelogin.saml2.utils import OneLogin_Saml2_Utils from chalice import Response from chalicelib.core import users, tenants @@ -54,22 +54,27 @@ def process_sso_assertion(): # session['samlSessionExpiration'] = auth.get_session_expiration() # print('>>>>') # print(session) - self_url = OneLogin_Saml2_Utils.get_self_url(req) - if 'RelayState' in request.form and self_url != request.form['RelayState']: - print("====>redirect") - return Response( - status_code=307, - body='', - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) + + # ---- ignore relay-state + # self_url = OneLogin_Saml2_Utils.get_self_url(req) + # if 'RelayState' in request.form and self_url != request.form['RelayState']: + # print("====>redirect to") + # print("====>redirect to") + # return Response( + # status_code=307, + # body='', + # headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) elif auth.get_settings().is_debug_active(): error_reason = auth.get_last_error_reason() return {"errors": [error_reason]} email = auth.get_nameid() + print("received nameId:") + print(email) existing = users.get_by_email_only(auth.get_nameid()) internal_id = next(iter(user_data.get("internalId", [])), None) - if len(existing) == 0 or existing[0].get("origin") != 'saml': + if len(existing) == 0 or existing[0].get("origin") is None: tenant_key = user_data.get("tenantKey", []) if len(tenant_key) == 0: print("tenantKey not present in assertion") @@ -86,15 +91,16 @@ def process_sso_assertion(): headers={'Location': 
auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) if len(existing) == 0: print("== new user ==") - users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, origin='saml', + users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, + origin=SAML2_helper.get_saml2_provider(), name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), internal_id=internal_id) else: existing = existing[0] - if existing.get("origin") != 'saml': - print("== migrating user to SAML ==") + if existing.get("origin") is None: + print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") users.update(tenant_id=t['tenantId'], user_id=existing["id"], - changes={"origin": 'saml', "internal_id": internal_id}) + changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index 43b6c6bc7..acfb2df07 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -90,7 +90,7 @@ def prepare_request(request): 'https': 'on' if request.headers.get('x-forwarded-proto', 'http') == 'https' else 'off', 'http_host': request.headers['host'], 'server_port': url_data.port, - 'script_name': "/api"+request.path, + 'script_name': "/api" + request.path, 'get_data': request.args.copy(), # Uncomment if using ADFS as IdP, https://github.com/onelogin/python-saml/pull/144 # 'lowercase_urlencoding': True, @@ -103,5 +103,6 @@ def prepare_request(request): def is_saml2_available(): return idp is not None + def get_saml2_provider(): - return "Okta" + return environ.get("idp_name", "saml2") if is_saml2_available() else None diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql index ed2ce7672..ff88ea913 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -60,5 +60,21 @@ FROM (SELECT tenant_id, role_id WHERE users.tenant_id = r.tenant_id AND users.role = 'member'; +DO +$$ + BEGIN + IF NOT EXISTS(SELECT 1 FROM pg_type WHERE typname = 'user_origin') THEN + CREATE TYPE user_origin AS ENUM ('saml'); + END IF; + END +$$; +ALTER TABLE public.users + ADD COLUMN IF NOT EXISTS origin user_origin NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS internal_id text NULL DEFAULT NULL; + + +ALTER TABLE public.users + ALTER COLUMN origin TYPE text; +DROP TYPE IF EXISTS user_origin; COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 947f1282e..aea01d85b 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -129,7 +129,7 @@ $$ ); CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); - CREATE TYPE user_origin AS ENUM ('saml'); + CREATE TABLE users ( user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, @@ -204,8 +204,9 @@ $$ jwt_iat timestamp without time zone NULL DEFAULT NULL, data jsonb NOT NULL DEFAULT '{}'::jsonb, weekly_report boolean NOT NULL DEFAULT TRUE, - origin user_origin NULL DEFAULT NULL, - role_id integer REFERENCES roles (role_id) ON DELETE SET NULL + origin text NULL DEFAULT NULL, + role_id integer REFERENCES roles (role_id) ON DELETE SET NULL, + internal_id text NULL 
DEFAULT NULL ); diff --git a/scripts/helm/app/chalice.yaml b/scripts/helm/app/chalice.yaml index 98d580db9..807aaf646 100644 --- a/scripts/helm/app/chalice.yaml +++ b/scripts/helm/app/chalice.yaml @@ -63,4 +63,5 @@ env: idp_sso_url: '' idp_x509cert: '' idp_sls_url: '' + idp_name: '' assist_secret: '' \ No newline at end of file From 779d4fba44d363b20eb63ee62254d1a63e009e8c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 18:52:15 +0100 Subject: [PATCH 189/218] feat(api): user's SSO login check --- .../chalicelib/blueprints/bp_core_dynamic.py | 4 +--- ee/api/chalicelib/core/users.py | 19 +++++++++++++++---- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 444cd3e68..09be3df89 100644 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -26,9 +26,7 @@ def login(): data = app.current_request.json_body if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): return {"errors": ["Invalid captcha."]} - r = users.authenticate(data['email'], data['password'], - for_plugin=False - ) + r = users.authenticate(data['email'], data['password'], for_plugin=False) if r is None: return Response(status_code=401, body={ 'errors': ['You’ve entered invalid Email or Password.'] diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 452cfc303..7bbcda8c9 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -4,7 +4,7 @@ import secrets from chalicelib.core import assist from chalicelib.core import authorizers, metadata, projects from chalicelib.core import tenants -from chalicelib.utils import dev +from chalicelib.utils import dev, SAML2_helper from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -293,7 +293,7 @@ def generate_new_api_key(user_id): def edit(user_id_to_update, tenant_id, changes, editor_id): - ALLOW_EDIT = ["name", "email", "admin", "appearance","roleId"] + ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"] user = get(user_id=user_id_to_update, tenant_id=tenant_id) if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: admin = get(tenant_id=tenant_id, user_id=editor_id) @@ -629,9 +629,20 @@ def authenticate(email, password, for_change_password=False, for_plugin=False): cur.execute(query) r = cur.fetchone() + if r is None and SAML2_helper.is_saml2_available(): + query = cur.mogrify( + f"""SELECT 1 + FROM public.users + WHERE users.email = %(email)s + AND users.deleted_at IS NULL + AND users.origin IS NOT NULL + LIMIT 1;""", + {"email": email}) + cur.execute(query) + if cur.fetchone() is not None: + return {"errors": ["must sign-in with SSO"]} + if r is not None: - if r["origin"] is not None: - return {"errors": ["must sign-in with SSO"]} if for_change_password: return True r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) From 1216ed74ad0d5e50a65ddc76c04c8797dbc8548b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 19:35:09 +0100 Subject: [PATCH 190/218] feat(api): changed SSO assertion handler feat(api): removed SLO endpoint --- ee/api/chalicelib/blueprints/bp_saml.py | 97 ++++++------------------- ee/api/chalicelib/core/users.py | 10 +-- 2 files changed, 29 insertions(+), 78 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py 
b/ee/api/chalicelib/blueprints/bp_saml.py index c5f869359..8267f3140 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -45,25 +45,6 @@ def process_sso_assertion(): if 'AuthNRequestID' in session: del session['AuthNRequestID'] user_data = auth.get_attributes() - # session['samlUserdata'] = user_data - # session['samlNameId'] = auth.get_nameid() - # session['samlNameIdFormat'] = auth.get_nameid_format() - # session['samlNameIdNameQualifier'] = auth.get_nameid_nq() - # session['samlNameIdSPNameQualifier'] = auth.get_nameid_spnq() - # session['samlSessionIndex'] = auth.get_session_index() - # session['samlSessionExpiration'] = auth.get_session_expiration() - # print('>>>>') - # print(session) - - # ---- ignore relay-state - # self_url = OneLogin_Saml2_Utils.get_self_url(req) - # if 'RelayState' in request.form and self_url != request.form['RelayState']: - # print("====>redirect to") - # print("====>redirect to") - # return Response( - # status_code=307, - # body='', - # headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) elif auth.get_settings().is_debug_active(): error_reason = auth.get_last_error_reason() return {"errors": [error_reason]} @@ -74,64 +55,34 @@ def process_sso_assertion(): existing = users.get_by_email_only(auth.get_nameid()) internal_id = next(iter(user_data.get("internalId", [])), None) - if len(existing) == 0 or existing[0].get("origin") is None: - tenant_key = user_data.get("tenantKey", []) - if len(tenant_key) == 0: - print("tenantKey not present in assertion") - return Response( - status_code=307, - body={"errors": ["tenantKey not present in assertion"]}, - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) - else: - t = tenants.get_by_tenant_key(tenant_key[0]) - if t is None: - return Response( - status_code=307, - body={"errors": ["Unknown tenantKey"]}, - headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'}) - if len(existing) == 0: - print("== new user ==") - users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, - origin=SAML2_helper.get_saml2_provider(), - name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), - internal_id=internal_id) - else: - existing = existing[0] - if existing.get("origin") is None: - print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") - users.update(tenant_id=t['tenantId'], user_id=existing["id"], - changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) + tenant_key = user_data.get("tenantKey", []) + if len(tenant_key) == 0: + print("tenantKey not present in assertion, please check your SP-assertion-configuration") + return {"errors": ["tenantKey not present in assertion, please check your SP-assertion-configuration"]} + else: + t = tenants.get_by_tenant_key(tenant_key[0]) + if t is None: + print("invalid tenantKey, please copy the correct value from Preferences > Account") + return {"errors": ["invalid tenantKey, please copy the correct value from Preferences > Account"]} + + if existing is None: + print("== new user ==") + users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, + origin=SAML2_helper.get_saml2_provider(), + name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), + internal_id=internal_id) + else: + if existing.get("origin") is None: + print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") + 
users.update(tenant_id=t['tenantId'], user_id=existing[0]["id"], + changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) + elif t['tenantId'] != existing["tenantId"]: + print("user exists for a different tenant") + return {"errors": ["user exists for a different tenant"]} return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) -@app.route('/sso/saml2/slo', methods=['GET']) -def process_slo_request(context): - req = prepare_request(request=app.current_request) - session = req["cookie"]["session"] - request = req['request'] - auth = init_saml_auth(req) - - name_id = session_index = name_id_format = name_id_nq = name_id_spnq = None - if 'samlNameId' in session: - name_id = session['samlNameId'] - if 'samlSessionIndex' in session: - session_index = session['samlSessionIndex'] - if 'samlNameIdFormat' in session: - name_id_format = session['samlNameIdFormat'] - if 'samlNameIdNameQualifier' in session: - name_id_nq = session['samlNameIdNameQualifier'] - if 'samlNameIdSPNameQualifier' in session: - name_id_spnq = session['samlNameIdSPNameQualifier'] - users.change_jwt_iat(context["userId"]) - return Response( - status_code=307, - body='', - headers={'Location': auth.logout(name_id=name_id, session_index=session_index, nq=name_id_nq, - name_id_format=name_id_format, - spnq=name_id_spnq), 'Content-Type': 'text/plain'}) - - @app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) def process_sls_assertion(): req = prepare_request(request=app.current_request) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 7bbcda8c9..f6f1f28c8 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -339,13 +339,13 @@ def get_by_email_only(email): (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, origin FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id - WHERE - users.email = %(email)s - AND users.deleted_at IS NULL;""", + WHERE users.email = %(email)s + AND users.deleted_at IS NULL + LIMIT 1;""", {"email": email}) ) - r = cur.fetchall() - return helper.list_to_camel_case(r) + r = cur.fetchone() + return helper.dict_to_camel_case(r) def get_by_email_reset(email, reset_token): From f5cf2051888000ddf1881e4d188ad849316ab058 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 20:08:07 +0100 Subject: [PATCH 191/218] feat(api): changed SSO assertion handler feat(api): changed members list to support SSO users --- ee/api/.gitignore | 2 +- ee/api/chalicelib/blueprints/bp_saml.py | 10 +++++----- ee/api/chalicelib/core/users.py | 2 +- ee/api/entrypoint.sh | 4 ---- 4 files changed, 7 insertions(+), 11 deletions(-) delete mode 100755 ee/api/entrypoint.sh diff --git a/ee/api/.gitignore b/ee/api/.gitignore index f4fc2bc33..06eb982a9 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -236,7 +236,7 @@ Pipfile /chalicelib/utils/TimeUTC.py /chalicelib/core/heatmaps.py /entrypoint.bundle.sh -#/entrypoint.sh +/entrypoint.sh /env_handler.py /chalicelib/blueprints/app/v1_api.py /build.sh diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 8267f3140..5bb42fead 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -72,13 +72,13 @@ def process_sso_assertion(): name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), internal_id=internal_id) else: - if existing.get("origin") is None: 
- print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") - users.update(tenant_id=t['tenantId'], user_id=existing[0]["id"], - changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) - elif t['tenantId'] != existing["tenantId"]: + if t['tenantId'] != existing["tenantId"]: print("user exists for a different tenant") return {"errors": ["user exists for a different tenant"]} + if existing.get("origin") is None: + print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") + users.update(tenant_id=t['tenantId'], user_id=existing["id"], + changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index f6f1f28c8..baed9e487 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -388,7 +388,7 @@ def get_members(tenant_id): (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, DATE_PART('day',timezone('utc'::text, now()) \ - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, - basic_authentication.password IS NOT NULL AS joined, + basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined, invitation_token, role_id, roles.name AS role_name diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh deleted file mode 100755 index 57ef8e42f..000000000 --- a/ee/api/entrypoint.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -python env_handler.py -#chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} -chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From f45a9e511240eb12945dd55f4178680dcd17e64a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 20:31:37 +0100 Subject: [PATCH 192/218] feat(api): changed reset password EE --- ee/api/.chalice/config.json | 1 + ee/api/chalicelib/core/users.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json index 691429329..71548c9ce 100644 --- a/ee/api/.chalice/config.json +++ b/ee/api/.chalice/config.json @@ -61,6 +61,7 @@ "idp_sso_url": "", "idp_x509cert": "", "idp_sls_url": "", + "idp_name": "", "invitation_link": "/api/users/invitation?token=%s", "change_password_link": "/reset-password?invitation=%s&&pass=%s", "iosBucket": "openreplay-ios-images", diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index baed9e487..a6584385d 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -337,7 +337,8 @@ def get_by_email_only(email): (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - origin + origin, + basic_authentication.password IS NOT NULL AS has_password FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.email = %(email)s AND users.deleted_at IS NULL From 50a3c448f529b05a6e2ea5aa60b2bc85404f697d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 20:32:13 +0100 Subject: [PATCH 193/218] feat(api): changed reset password EE --- ee/api/chalicelib/core/reset_password.py | 31 ++++++++---------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git 
a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index 3a636c967..c1ac1fe65 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -1,9 +1,9 @@ -from chalicelib.utils import email_helper, captcha, helper from chalicelib.core import users +from chalicelib.utils import email_helper, captcha, helper -def step1(data): - print("====================== reset password 1 ===============") +def reset(data): + print("====================== reset password ===============") print(data) if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): print("error: Invalid captcha.") @@ -11,26 +11,15 @@ def step1(data): if "email" not in data: return {"errors": ["email not found in body"]} - a_users = users.get_by_email_only(data["email"]) - if len(a_users) > 1: - print(f"multiple users found for [{data['email']}] please contact our support") - return {"errors": ["multiple users, please contact our support"]} - elif len(a_users) == 1: - a_users = a_users[0] - invitation_link = users.generate_new_invitation(user_id=a_users["id"]) + a_user = users.get_by_email_only(data["email"]) + if a_user is not None: + # ---FOR SSO + if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False: + return {"errors": ["Please use your SSO to login"]} + # ---------- + invitation_link = users.generate_new_invitation(user_id=a_user["id"]) email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link) else: print(f"invalid email address [{data['email']}]") return {"errors": ["invalid email address"]} return {"data": {"state": "success"}} - -# def step2(data): -# print("====================== change password 2 ===============") -# user = users.get_by_email_reset(data["email"], data["code"]) -# if not user: -# print("error: wrong email or reset code") -# return {"errors": ["wrong email or reset code"]} -# users.update(tenant_id=user["tenantId"], user_id=user["id"], -# changes={"token": None, "password": data["password"], "generatedPassword": False, -# "verifiedEmail": True}) -# return {"data": {"state": "success"}} From 20104e8cd9a7bf9d12b33243cf9965eac39951fb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 2 Dec 2021 21:42:28 +0100 Subject: [PATCH 194/218] feat(api): changed SAML Helper --- ee/api/chalicelib/utils/SAML2_helper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index acfb2df07..06ab54f90 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -105,4 +105,5 @@ def is_saml2_available(): def get_saml2_provider(): - return environ.get("idp_name", "saml2") if is_saml2_available() else None + return environ.get("idp_name", "saml2") if is_saml2_available() and len( + environ.get("idp_name", "saml2")) > 0 else None From 15581360e35880db90f2648222411fd5168f61ae Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Thu, 2 Dec 2021 22:28:48 +0100 Subject: [PATCH 195/218] feat(frontend): support shadowDOM --- .../MessageDistributor/managers/DOMManager.ts | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index 0db1a199d..f226c1b4e 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ 
b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -149,7 +149,7 @@ export default class DOMManager extends ListWalker { this.insertNode(msg); break; case "create_element_node": - // console.log('elementnode', msg) + // console.log('elementnode', msg) if (msg.svg) { this.nl[ msg.id ] = document.createElementNS('http://www.w3.org/2000/svg', msg.tag); } else { @@ -253,20 +253,27 @@ export default class DOMManager extends ListWalker { } break; case "create_i_frame_document": - // console.log('ifr', msg) node = this.nl[ msg.frameID ]; - if (!(node instanceof HTMLIFrameElement)) { - logger.warn("create_i_frame_document message. Node is not iframe") - return; - } - // await new Promise(resolve => { node.onload = resolve }) + // console.log('ifr', msg, node) - doc = node.contentDocument; - if (!doc) { - logger.warn("No iframe doc", msg, node, node.contentDocument); + if (node instanceof HTMLIFrameElement) { + doc = node.contentDocument; + if (!doc) { + logger.warn("No iframe doc", msg, node, node.contentDocument); + return; + } + this.nl[ msg.id ] = doc.documentElement return; + } else if (node instanceof Element) { // shadow DOM + try { + this.nl[ msg.id ] = node.attachShadow({ mode: 'open' }) + } catch(e) { + logger.warn("Can not attach shadow dom", e, msg) + } + } else { + logger.warn("Context message host is not Element", msg) } - this.nl[ msg.id ] = doc.documentElement + break; //not sure what to do with this one //case "disconnected": From 63bc981dc8418e2e0779341f891180dea98c492c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 11:19:15 +0100 Subject: [PATCH 196/218] feat(api): reset password check for SMTP --- api/chalicelib/core/reset_password.py | 5 +++-- ee/api/chalicelib/core/reset_password.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/core/reset_password.py b/api/chalicelib/core/reset_password.py index 0b7302d5d..e4ee1b61d 100644 --- a/api/chalicelib/core/reset_password.py +++ b/api/chalicelib/core/reset_password.py @@ -1,5 +1,5 @@ -from chalicelib.utils import email_helper, captcha, helper from chalicelib.core import users +from chalicelib.utils import email_helper, captcha, helper def reset(data): @@ -10,7 +10,8 @@ def reset(data): return {"errors": ["Invalid captcha."]} if "email" not in data: return {"errors": ["email not found in body"]} - + if not helper.has_smtp(): + return {"errors": ["no SMTP configuration found"]} a_users = users.get_by_email_only(data["email"]) if len(a_users) > 1: print(f"multiple users found for [{data['email']}] please contact our support") diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index c1ac1fe65..fe8cdd15c 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -10,7 +10,8 @@ def reset(data): return {"errors": ["Invalid captcha."]} if "email" not in data: return {"errors": ["email not found in body"]} - + if not helper.has_smtp(): + return {"errors": ["no SMTP configuration found"]} a_user = users.get_by_email_only(data["email"]) if a_user is not None: # ---FOR SSO From 8a1c05697f1b137928b2d4b1eb8bfdaf32c3e6da Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 12:02:50 +0100 Subject: [PATCH 197/218] feat(api): allow change password for double-auth --- api/chalicelib/core/reset_password.py | 2 +- ee/api/chalicelib/core/reset_password.py | 2 +- ee/api/chalicelib/core/users.py | 5 +++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git 
a/api/chalicelib/core/reset_password.py b/api/chalicelib/core/reset_password.py index e4ee1b61d..a8dbabf31 100644 --- a/api/chalicelib/core/reset_password.py +++ b/api/chalicelib/core/reset_password.py @@ -11,7 +11,7 @@ def reset(data): if "email" not in data: return {"errors": ["email not found in body"]} if not helper.has_smtp(): - return {"errors": ["no SMTP configuration found"]} + return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} a_users = users.get_by_email_only(data["email"]) if len(a_users) > 1: print(f"multiple users found for [{data['email']}] please contact our support") diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index fe8cdd15c..e51816e85 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -11,7 +11,7 @@ def reset(data): if "email" not in data: return {"errors": ["email not found in body"]} if not helper.has_smtp(): - return {"errors": ["no SMTP configuration found"]} + return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} a_user = users.get_by_email_only(data["email"]) if a_user is not None: # ---FOR SSO diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index a6584385d..8b5ae591b 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -261,7 +261,8 @@ def get(user_id, tenant_id): origin, role_id, roles.name AS role_name, - roles.permissions + roles.permissions, + basic_authentication.password IS NOT NULL AS has_password FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) WHERE @@ -446,7 +447,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): item = get(tenant_id=tenant_id, user_id=user_id) if item is None: return {"errors": ["access denied"]} - if item["origin"] is not None: + if item["origin"] is not None and item["hasPassword"] is False: return {"errors": ["cannot change your password because you are logged-in form an SSO service"]} if old_password == new_password: return {"errors": ["old and new password are the same"]} From ecd631345b7a6e7fca6c239a9532c1d8ec71fe5b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 12:16:41 +0100 Subject: [PATCH 198/218] feat(api): SSO log received TTL --- ee/api/chalicelib/blueprints/bp_saml.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 5bb42fead..47ee1381c 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -79,8 +79,10 @@ def process_sso_assertion(): print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==") users.update(tenant_id=t['tenantId'], user_id=existing["id"], changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) - - return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration()) + expiration = auth.get_session_expiration() + print("TTL:") + print(auth.get_session_expiration()) + return users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration) @app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) From c97c1ada294b88ff3ed9d7b00645686b06df9c27 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 3 Dec 2021 15:20:01 +0530 Subject: [PATCH 199/218] feat(ui) - sso login jwt 
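
The UI side of the SSO handoff: after a successful assertion the API redirects back to the SPA with a freshly minted JWT in the query string, and Login.js stores it before reloading the app. A minimal sketch of the server side of that redirect, assuming the sso_landing and sso_exp_delta_seconds settings the next patch adds to the chalice config; the helper names here are illustrative, not the exact implementation:

    from os import environ
    from typing import Optional

    def pick_sso_ttl(idp_session_ttl: Optional[int]) -> int:
        # Prefer the session TTL asserted by the IdP; fall back to the
        # configured delta when it is missing or shorter than 10 minutes.
        default_ttl = int(environ.get("sso_exp_delta_seconds", 24 * 60 * 60))
        if idp_session_ttl is None or idp_session_ttl <= 10 * 60:
            return default_ttl
        return idp_session_ttl

    def sso_landing_url(jwt: str) -> str:
        # The SPA reads the token from /login?jwt=... (see componentDidMount in the diff below).
        return environ["SITE_URL"] + environ.get("sso_landing", "/login?jwt=%s") % jwt
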
--- frontend/app/components/Login/Login.js | 18 +++++++++++++++--- frontend/app/duck/jwt.js | 7 +++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index a1350000b..53ae9e7a6 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -4,29 +4,41 @@ import { Icon, Loader, Button, Link } from 'UI'; import { login } from 'Duck/user'; import { forgotPassword, signup } from 'App/routes'; import ReCAPTCHA from 'react-google-recaptcha'; +import { withRouter } from 'react-router-dom'; import stl from './login.css'; import cn from 'classnames'; +import { setJwt } from 'Duck/jwt'; const FORGOT_PASSWORD = forgotPassword(); const SIGNUP_ROUTE = signup(); const recaptchaRef = React.createRef(); @connect( - state => ({ + (state, props) => ({ errors: state.getIn([ 'user', 'loginRequest', 'errors' ]), loading: state.getIn([ 'user', 'loginRequest', 'loading' ]), - // existingTenant: state.getIn(['user', 'authDetails', 'tenants']), authDetails: state.getIn(['user', 'authDetails']), + params: new URLSearchParams(props.location.search) }), - { login, }, + { login, setJwt }, ) @withPageTitle('Login - OpenReplay') +@withRouter export default class Login extends React.Component { state = { email: '', password: '', }; + componentDidMount() { + const { params } = this.props; + const jwt = params.get('jwt') + if (jwt) { + this.props.setJwt(jwt); + window.location.href = '/'; + } + } + handleSubmit = (token) => { const { email, password } = this.state; this.props.login({ email: email.trim(), password, 'g-recaptcha-response': token }).then(() => { diff --git a/frontend/app/duck/jwt.js b/frontend/app/duck/jwt.js index 946dcaa6a..4d4147f34 100644 --- a/frontend/app/duck/jwt.js +++ b/frontend/app/duck/jwt.js @@ -10,3 +10,10 @@ export default (state = null, action = {}) => { } return state; }; + +export function setJwt(data) { + return { + type: UPDATE, + data, + }; +} From 5ebfd8ae6a22afafeac1b9ea738ff99ac0e5d574 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 12:48:13 +0100 Subject: [PATCH 200/218] feat(api): SSO landing page --- ee/api/.chalice/config.json | 2 ++ ee/api/chalicelib/blueprints/bp_saml.py | 14 +++++++++----- ee/api/chalicelib/core/users.py | 12 ++++-------- ee/api/chalicelib/utils/SAML2_helper.py | 4 ++++ 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json index 71548c9ce..81c63add8 100644 --- a/ee/api/.chalice/config.json +++ b/ee/api/.chalice/config.json @@ -62,6 +62,8 @@ "idp_x509cert": "", "idp_sls_url": "", "idp_name": "", + "sso_exp_delta_seconds": "86400", + "sso_landing": "/login?jwt=%s", "invitation_link": "/api/users/invitation?token=%s", "change_password_link": "/reset-password?invitation=%s&&pass=%s", "iosBucket": "openreplay-ios-images", diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 47ee1381c..4a0f057df 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -31,7 +31,6 @@ def start_sso(): def process_sso_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] - request = req['request'] auth = init_saml_auth(req) request_id = None @@ -80,16 +79,21 @@ def process_sso_assertion(): users.update(tenant_id=t['tenantId'], user_id=existing["id"], changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": 
internal_id}) expiration = auth.get_session_expiration() - print("TTL:") - print(auth.get_session_expiration()) - return users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration) + expiration = expiration if expiration is not None and expiration > 10 * 60 \ + else int(environ.get("sso_exp_delta_seconds", 24 * 60 * 60)) + jwt = users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration) + if jwt is None: + return {"errors": ["null JWT"]} + return Response( + status_code=307, + body='', + headers={'Location': SAML2_helper.get_landing_URL(jwt), 'Content-Type': 'text/plain'}) @app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) def process_sls_assertion(): req = prepare_request(request=app.current_request) session = req["cookie"]["session"] - request = req['request'] auth = init_saml_auth(req) request_id = None if 'LogoutRequestID' in session: diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 8b5ae591b..f6609aae4 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -690,14 +690,10 @@ def authenticate_sso(email, internal_id, exp=None): RETURNING jwt_iat;""", {"user_id": r["id"]}) cur.execute(query) - return { - "jwt": authorizers.generate_jwt(r['id'], r['tenantId'], - TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), - aud=f"front:{helper.get_stage_name()}", - exp=exp), - "email": email, - **r - } + return authorizers.generate_jwt(r['id'], r['tenantId'], + TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), + aud=f"front:{helper.get_stage_name()}", + exp=exp) return None diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index 06ab54f90..25f279d3a 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -107,3 +107,7 @@ def is_saml2_available(): def get_saml2_provider(): return environ.get("idp_name", "saml2") if is_saml2_available() and len( environ.get("idp_name", "saml2")) > 0 else None + + +def get_landing_URL(jwt): + return environ["SITE_URL"] + environ.get("sso_landing", "/login?jwt=%s") % jwt From 8329f5c3d2c9e0c760ac940a74826fd68518289d Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 3 Dec 2021 18:07:54 +0530 Subject: [PATCH 201/218] feat(ui) - sso login jwt --- frontend/app/components/Login/Login.js | 1 + frontend/app/duck/user.js | 9 ++------- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index 53ae9e7a6..183704ae4 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -59,6 +59,7 @@ export default class Login extends React.Component { render() { const { errors, loading, authDetails } = this.props; + return (
diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 6f29ef945..13ff44af5 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -31,12 +31,7 @@ const initialState = Map({ passwordRequestError: false, passwordErrors: List(), tenants: [], - authDetails: Map({ - tenants: false, - sso: false, - ssoProvider: '', - edition: '' - }), + authDetails: {}, onboarding: false }); @@ -75,7 +70,7 @@ const reducer = (state = initialState, action = {}) => { case FETCH_ACCOUNT.SUCCESS: return state.set('account', Account(action.data)).set('passwordErrors', List()); case FETCH_TENANTS.SUCCESS: - return state.set('authDetails', Map(action.data)); + return state.set('authDetails', action.data); // return state.set('tenants', action.data.map(i => ({ text: i.name, value: i.tenantId}))); case UPDATE_PASSWORD.FAILURE: return state.set('passwordErrors', List(action.errors)) From 266bc2ae724618db5dad60250d4f51755ddef2af Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 14:43:03 +0100 Subject: [PATCH 202/218] feat(api): allo on the fly editing --- ee/api/entrypoint.sh | 3 +++ 1 file changed, 3 insertions(+) create mode 100755 ee/api/entrypoint.sh diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh new file mode 100755 index 000000000..0b87bc311 --- /dev/null +++ b/ee/api/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/bash +python env_handler.py +chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From 0b94b629c0fe85ee7a6f3553af40f745b5ac0c98 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 3 Dec 2021 19:51:41 +0530 Subject: [PATCH 203/218] change(ui) - versions and login sso separator --- frontend/app/components/Login/Login.js | 2 +- frontend/app/components/Login/login.css | 4 ++++ frontend/env.js | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index 183704ae4..33074a390 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -141,7 +141,7 @@ export default class Login extends React.Component {
{ authDetails.sso && ( -
+
or
diff --git a/frontend/app/components/Login/login.css b/frontend/app/components/Login/login.css index 4ab843f18..04a0768c7 100644 --- a/frontend/app/components/Login/login.css +++ b/frontend/app/components/Login/login.css @@ -144,4 +144,8 @@ > & label { margin-bottom: 10px !important; } +} + +.sso { + border-top: solid thin $gray-light; } \ No newline at end of file diff --git a/frontend/env.js b/frontend/env.js index ee5ba5c04..f17a4d3dc 100644 --- a/frontend/env.js +++ b/frontend/env.js @@ -20,7 +20,7 @@ const oss = { MINIO_USE_SSL: process.env.MINIO_USE_SSL, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, - TRACKER_VERSION: '3.2.0', // trackerInfo.version, + TRACKER_VERSION: '3.4.10', // trackerInfo.version, } module.exports = { From c6157cee4ff59445fe580fb5ac6627c73c37a0c5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 15:27:13 +0100 Subject: [PATCH 204/218] feat(api): disable on the fly edit feat(api): fixed redirect response feat(api): fixed jwt expiration date --- ee/api/chalicelib/blueprints/bp_saml.py | 2 +- ee/api/chalicelib/core/users.py | 3 ++- ee/api/entrypoint.sh | 3 --- 3 files changed, 3 insertions(+), 5 deletions(-) delete mode 100755 ee/api/entrypoint.sh diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 4a0f057df..79adde8c7 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -85,7 +85,7 @@ def process_sso_assertion(): if jwt is None: return {"errors": ["null JWT"]} return Response( - status_code=307, + status_code=302, body='', headers={'Location': SAML2_helper.get_landing_URL(jwt), 'Content-Type': 'text/plain'}) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index f6609aae4..80fd5f0de 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -693,7 +693,8 @@ def authenticate_sso(email, internal_id, exp=None): return authorizers.generate_jwt(r['id'], r['tenantId'], TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), aud=f"front:{helper.get_stage_name()}", - exp=exp) + exp=exp + TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]) \ + if exp is not None else None) return None diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh deleted file mode 100755 index 0b87bc311..000000000 --- a/ee/api/entrypoint.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -python env_handler.py -chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From 34516abf9a6244e7a9b86143d2667ad29421823d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 15:33:36 +0100 Subject: [PATCH 205/218] feat(api): fixed jwt expiration date --- ee/api/chalicelib/core/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 80fd5f0de..4a26e5282 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -693,7 +693,7 @@ def authenticate_sso(email, internal_id, exp=None): return authorizers.generate_jwt(r['id'], r['tenantId'], TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), aud=f"front:{helper.get_stage_name()}", - exp=exp + TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]) \ + exp=(exp + TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"])) \ if exp is not None else None) return None From 232354d2fe2915a7ab79f1b58bea3e09a986f79e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 15:40:25 +0100 
Subject: [PATCH 206/218] feat(api): fixed null object

---
 ee/api/chalicelib/core/users.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index 4a26e5282..6187f1aa1 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -690,10 +690,11 @@ def authenticate_sso(email, internal_id, exp=None):
                                     RETURNING jwt_iat;""",
             {"user_id": r["id"]})
         cur.execute(query)
+        r = cur.fetchone()
         return authorizers.generate_jwt(r['id'], r['tenantId'],
-                                        TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
+                                        TimeUTC.datetime_to_timestamp(r["jwt_iat"]),
                                         aud=f"front:{helper.get_stage_name()}",
-                                        exp=(exp + TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"])) \
+                                        exp=(exp + TimeUTC.datetime_to_timestamp(r["jwt_iat"])) \
                                             if exp is not None else None)
     return None

From 10a3e9fac5fa64044064d838b737af0ac0b09b95 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 3 Dec 2021 15:46:28 +0100
Subject: [PATCH 207/218] feat(api): changed JWT

---
 ee/api/chalicelib/core/users.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index 6187f1aa1..97a90960e 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -690,12 +690,10 @@ def authenticate_sso(email, internal_id, exp=None):
                                     RETURNING jwt_iat;""",
             {"user_id": r["id"]})
         cur.execute(query)
-        r = cur.fetchone()
+        rt = TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"])
         return authorizers.generate_jwt(r['id'], r['tenantId'],
-                                        TimeUTC.datetime_to_timestamp(r["jwt_iat"]),
-                                        aud=f"front:{helper.get_stage_name()}",
-                                        exp=(exp + TimeUTC.datetime_to_timestamp(r["jwt_iat"])) \
-                                            if exp is not None else None)
+                                        rt, aud=f"front:{helper.get_stage_name()}",
+                                        exp=(exp + rt) if exp is not None else None)
     return None

From eaa515e0f21faeb0f3042394b28e16378d1266d8 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 3 Dec 2021 15:54:01 +0100
Subject: [PATCH 208/218] feat(api): changed JWT exp

---
 ee/api/chalicelib/core/users.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index 97a90960e..473c0aff0 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -693,7 +693,7 @@ def authenticate_sso(email, internal_id, exp=None):
         rt = TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"])
         return authorizers.generate_jwt(r['id'], r['tenantId'],
                                         rt, aud=f"front:{helper.get_stage_name()}",
-                                        exp=(exp + rt) if exp is not None else None)
+                                        exp=(exp + rt // 1000) if exp is not None else None)
     return None
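
Patches 206-208 all circle the same unit mismatch: jwt_iat comes back from Postgres as a datetime, TimeUTC.datetime_to_timestamp turns it into epoch milliseconds, and the JWT iat/exp claims are expressed in epoch seconds. A small sketch of the arithmetic the final version settles on (jwt_claim_times is an illustrative helper, not code from the repo):

    import time
    from typing import Optional, Tuple

    MS = 1000

    def jwt_claim_times(jwt_iat_ms: int, ttl_s: Optional[int]) -> Tuple[int, Optional[int]]:
        # iat claim: epoch seconds, derived from the millisecond timestamp.
        iat_s = jwt_iat_ms // MS
        # exp claim: the IdP-provided TTL (seconds) on top of iat, matching
        # patch 208's exp=(exp + rt // 1000); None defers to the default delta.
        exp_s = iat_s + ttl_s if ttl_s is not None else None
        return iat_s, exp_s

    # e.g. a 2-hour SSO session minted now:
    iat_s, exp_s = jwt_claim_times(int(time.time() * MS), ttl_s=2 * 60 * 60)
    assert exp_s - iat_s == 7200

From 31234413397f7f2d1e2ebc150f4fe3e3cf504107 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 3 Dec 2021 16:12:57 +0100
Subject: [PATCH 209/218] feat(api): changed JWT iat

---
 ee/api/chalicelib/core/authorizers.py | 8 ++++----
 ee/api/chalicelib/core/users.py | 13 +++----------
 ee/api/entrypoint.sh | 3 +++
 3 files changed, 10 insertions(+), 14 deletions(-)
 create mode 100755 ee/api/entrypoint.sh

diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py
index f7f50f52b..ea326c2a1 100644
--- a/ee/api/chalicelib/core/authorizers.py
+++ b/ee/api/chalicelib/core/authorizers.py
@@ -1,10 +1,10 @@
-from chalicelib.utils.helper import environ
 import jwt
-from chalicelib.utils import helper
-from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.core import tenants
 from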
chalicelib.core import users +from chalicelib.utils import helper +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.helper import environ def jwt_authorizer(token): @@ -44,7 +44,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None): "userId": id, "tenantId": tenant_id, "exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000 \ - if exp is None else exp, + if exp is None else exp + TimeUTC.get_utc_offset() // 1000, "iss": environ["jwt_issuer"], "iat": iat // 1000, "aud": aud diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 473c0aff0..a3516ae29 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -683,17 +683,10 @@ def authenticate_sso(email, internal_id, exp=None): if r is not None: r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) - query = cur.mogrify( - f"""UPDATE public.users - SET jwt_iat = timezone('utc'::text, now()) - WHERE user_id = %(user_id)s - RETURNING jwt_iat;""", - {"user_id": r["id"]}) - cur.execute(query) - rt = TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]) + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) return authorizers.generate_jwt(r['id'], r['tenantId'], - rt, aud=f"front:{helper.get_stage_name()}", - exp=(exp + rt // 1000) if exp is not None else None) + jwt_iat, aud=f"front:{helper.get_stage_name()}", + exp=(exp + jwt_iat // 1000) if exp is not None else None) return None diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh new file mode 100755 index 000000000..0b87bc311 --- /dev/null +++ b/ee/api/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/bash +python env_handler.py +chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From fa57e2fe74ed353ee53f002eaae64e22d3ea2514 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 16:44:07 +0100 Subject: [PATCH 210/218] feat(api): changed SSO new user handler --- ee/api/chalicelib/core/users.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index a3516ae29..801cb6ae2 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -681,12 +681,12 @@ def authenticate_sso(email, internal_id, exp=None): cur.execute(query) r = cur.fetchone() - if r is not None: - r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) - jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) - return authorizers.generate_jwt(r['id'], r['tenantId'], - jwt_iat, aud=f"front:{helper.get_stage_name()}", - exp=(exp + jwt_iat // 1000) if exp is not None else None) + if r is not None: + r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id'])) + return authorizers.generate_jwt(r['id'], r['tenantId'], + jwt_iat, aud=f"front:{helper.get_stage_name()}", + exp=(exp + jwt_iat // 1000) if exp is not None else None) return None @@ -697,6 +697,10 @@ def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None): INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id) VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s) RETURNING * + ), + au AS ( + INSERT INTO public.basic_authentication(user_id) + VALUES ((SELECT user_id FROM u)) ) SELECT u.user_id AS id, u.email, From 84de52a178fe1af4fded47ab41282c35a4e93ee3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 16:54:28 +0100 
Subject: [PATCH 211/218] feat(api): changed SSO exp delta --- ee/api/.chalice/config.json | 2 +- ee/api/entrypoint.sh | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) delete mode 100755 ee/api/entrypoint.sh diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json index 81c63add8..db58c76ba 100644 --- a/ee/api/.chalice/config.json +++ b/ee/api/.chalice/config.json @@ -62,7 +62,7 @@ "idp_x509cert": "", "idp_sls_url": "", "idp_name": "", - "sso_exp_delta_seconds": "86400", + "sso_exp_delta_seconds": "172800", "sso_landing": "/login?jwt=%s", "invitation_link": "/api/users/invitation?token=%s", "change_password_link": "/reset-password?invitation=%s&&pass=%s", diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh deleted file mode 100755 index 0b87bc311..000000000 --- a/ee/api/entrypoint.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -python env_handler.py -chalice local --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} From db4b2c5a6206818c3ccf7d5681810d6888f11ed6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 17:44:56 +0100 Subject: [PATCH 212/218] feat(api): changed roles permissions --- ee/api/chalicelib/core/signup.py | 5 ++--- ee/api/chalicelib/core/users.py | 2 +- .../db/init_dbs/postgresql/1.3.6/1.3.6.sql | 18 +++++------------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 9aa99f994..4650736a5 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -70,9 +70,8 @@ def create_step1(data): ), r AS ( INSERT INTO public.roles(tenant_id, name, description, permissions, protected) - VALUES ((SELECT tenant_id FROM t), 'Owner', 'The company''s owner', '{}'::text[], TRUE), - ((SELECT tenant_id FROM t), 'Admin', 'Admin member', '{}'::text[], TRUE), - ((SELECT tenant_id FROM t), 'Member', 'A member', '{}'::text[], TRUE) + VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE), + ((SELECT tenant_id FROM t), 'Member', 'Member', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE) RETURNING * ), u AS ( diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 801cb6ae2..b3353f2fc 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -448,7 +448,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): if item is None: return {"errors": ["access denied"]} if item["origin"] is not None and item["hasPassword"] is False: - return {"errors": ["cannot change your password because you are logged-in form an SSO service"]} + return {"errors": ["cannot change your password because you are logged-in from an SSO service"]} if old_password == new_password: return {"errors": ["old and new password are the same"]} auth = authenticate(email, old_password, for_change_password=True) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql index ff88ea913..15ce18803 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.3.6/1.3.6.sql @@ -24,9 +24,10 @@ CREATE TABLE roles INSERT INTO roles(tenant_id, name, description, permissions, protected) SELECT * FROM (SELECT tenant_id FROM tenants) AS tenants, - (VALUES ('Owner', 'The company''s owner', '{}'::text[], TRUE), - ('Admin', 'Admin member', '{}'::text[], 
TRUE), - ('Member', 'A member', '{}'::text[], TRUE) + (VALUES ('Owner', 'Owner', + '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE), + ('Member', 'Member', + '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE) ) AS default_roles(name, description, permissions, protected); @@ -42,15 +43,6 @@ FROM (SELECT tenant_id, role_id WHERE users.tenant_id = r.tenant_id AND users.role = 'owner'; -UPDATE users -SET role_id = r.role_id -FROM (SELECT tenant_id, role_id - FROM tenants - INNER JOIN roles USING (tenant_id) - WHERE roles.name = 'Admin') AS r(tenant_id, role_id) -WHERE users.tenant_id = r.tenant_id - AND users.role = 'admin'; - UPDATE users SET role_id = r.role_id FROM (SELECT tenant_id, role_id @@ -58,7 +50,7 @@ FROM (SELECT tenant_id, role_id INNER JOIN roles USING (tenant_id) WHERE roles.name = 'Member') AS r(tenant_id, role_id) WHERE users.tenant_id = r.tenant_id - AND users.role = 'member'; + AND users.role != 'owner'; DO $$ From 63946ad777b237d98c208fc5fa6e575e15985eb2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Dec 2021 18:24:50 +0100 Subject: [PATCH 213/218] feat(api): add role to SSO assertion --- ee/api/chalicelib/blueprints/bp_saml.py | 14 +++++++++++--- ee/api/chalicelib/core/roles.py | 19 ++++++++++++++++++- ee/api/chalicelib/core/users.py | 8 ++++---- 3 files changed, 33 insertions(+), 8 deletions(-) diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/chalicelib/blueprints/bp_saml.py index 79adde8c7..76e73b3b7 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/chalicelib/blueprints/bp_saml.py @@ -12,7 +12,7 @@ from chalicelib.utils.helper import environ from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request from chalice import Response -from chalicelib.core import users, tenants +from chalicelib.core import users, tenants, roles @app.route('/sso/saml2', methods=['GET'], authorizer=None) @@ -63,13 +63,21 @@ def process_sso_assertion(): if t is None: print("invalid tenantKey, please copy the correct value from Preferences > Account") return {"errors": ["invalid tenantKey, please copy the correct value from Preferences > Account"]} - + print(user_data) + role_name = user_data.get("role", []) + if len(role_name) == 0: + print("No role specified, setting role to member") + role_name = ["member"] + role_name = role_name[0] + role = roles.get_role_by_name(tenant_id=t['tenantId'], name=role_name) + if role is None: + return {"errors": [f"role {role_name} not found, please create it in openreplay first"]} if existing is None: print("== new user ==") users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, origin=SAML2_helper.get_saml2_provider(), name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])), - internal_id=internal_id) + internal_id=internal_id, role_id=role["roleId"]) else: if t['tenantId'] != existing["tenantId"]: print("user exists for a different tenant") diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py index 0a32d9d5e..8ba62091a 100644 --- a/ee/api/chalicelib/core/roles.py +++ b/ee/api/chalicelib/core/roles.py @@ -46,7 +46,7 @@ def create(tenant_id, user_id, name, description, permissions): RETURNING *;""", {"tenant_id": tenant_id, "name": name, "description": description, "permissions": permissions}) ) - row=cur.fetchone() + row = cur.fetchone() row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) return helper.dict_to_camel_case(row) @@ 
-67,6 +67,23 @@ def get_roles(tenant_id):
     return helper.list_to_camel_case(rows)
 
 
+def get_role_by_name(tenant_id, name):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify("""SELECT *
+                           FROM public.roles
+                           where tenant_id =%(tenant_id)s
+                             AND deleted_at IS NULL
+                             AND name ILIKE %(name)s
+                           ;""",
+                        {"tenant_id": tenant_id, "name": name})
+        )
+        row = cur.fetchone()
+        if row is not None:
+            row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+        return helper.dict_to_camel_case(row)
+
+
 def delete(tenant_id, user_id, role_id):
     admin = users.get(user_id=user_id, tenant_id=tenant_id)

diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index b3353f2fc..ad757c9c8 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -690,12 +690,12 @@ def authenticate_sso(email, internal_id, exp=None):
     return None
 
 
-def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None):
+def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=None):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""\
                 WITH u AS (
-                    INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id)
-                    VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s)
+                    INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id, role_id)
+                    VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s, %(role_id)s)
                     RETURNING *
                 ),
                 au AS (
@@ -715,7 +715,7 @@
                 FROM u;""",
                             {"tenantId": tenant_id, "email": email, "internal_id": internal_id,
                              "role": "admin" if admin else "member", "name": name, "origin": origin,
-                             "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
+                             "role_id": role_id, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
         cur.execute(
             query
         )
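
For clarity, here is how the new lookup is meant to be driven from the assertion handler in bp_saml.py above, as a condensed sketch (resolve_role_id is illustrative and not a function in the repo):

    from chalicelib.core import roles  # the module extended by this patch

    def resolve_role_id(tenant_id, user_data):
        # python3-saml exposes assertion attributes as lists of strings;
        # take the first "role" value, defaulting to "member" as the handler does.
        role_name = next(iter(user_data.get("role", [])), "member")
        role = roles.get_role_by_name(tenant_id=tenant_id, name=role_name)
        # Same contract as the endpoint: the role must already exist in
        # OpenReplay, otherwise the login is rejected.
        return role["roleId"] if role is not None else None

From ee077936898547b3de81c694bab2e6c5000e8ddb Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 3 Dec 2021 21:45:38 +0530
Subject: [PATCH 214/218] fix(ui) - fetch roles in manage users

---
 frontend/app/components/Client/ManageUsers/ManageUsers.js | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/frontend/app/components/Client/ManageUsers/ManageUsers.js b/frontend/app/components/Client/ManageUsers/ManageUsers.js
index 04902757e..ccac283dd 100644
--- a/frontend/app/components/Client/ManageUsers/ManageUsers.js
+++ b/frontend/app/components/Client/ManageUsers/ManageUsers.js
@@ -5,6 +5,7 @@ import {
   IconButton, SlideModal, Input, Button, Loader, NoContent, Popup, CopyButton, Dropdown
 } from 'UI';
 import { init, save, edit, remove as deleteMember, fetchList, generateInviteLink } from 'Duck/member';
+import { fetchList as fetchRoles } from 'Duck/roles';
 import styles from './manageUsers.css';
 import UserItem from './UserItem';
 import { confirm } from 'UI/Confirmation';
@@ -29,7 +30,8 @@ const LIMIT_WARNING = 'You have reached users limit.';
     edit,
     deleteMember,
     fetchList,
-    generateInviteLink
+    generateInviteLink,
+    fetchRoles
 })
 @withPageTitle('Users - OpenReplay Preferences')
 class ManageUsers extends React.PureComponent {
@@ -42,6 +44,7 @@ class ManageUsers extends React.PureComponent {
   closeModal = () => this.setState({ showModal: false });
   componentWillMount = () => {
     this.props.fetchList();
+    this.props.fetchRoles();
   }
 
   adminLabel = (user) => {

From 407931569edc151859058ff5b4f21254d466540d Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 3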
Dec 2021 21:48:18 +0530 Subject: [PATCH 215/218] fix(ui) - typo --- frontend/app/duck/roles.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/duck/roles.js b/frontend/app/duck/roles.js index abc3ce0f4..f1dd1ecec 100644 --- a/frontend/app/duck/roles.js +++ b/frontend/app/duck/roles.js @@ -10,7 +10,7 @@ const initialState = Map({ list: List(), permissions: List([ { name: 'Session Replay', value: 'SESSION_REPLAY' }, - { name: 'Develoepr Tools', value: 'DEV_TOOLS' }, + { name: 'Developer Tools', value: 'DEV_TOOLS' }, { name: 'Errors', value: 'ERRORS' }, { name: 'Metrics', value: 'METRICS' }, { name: 'Assist Live', value: 'ASSIST_LIVE' }, From 85ffa6fd8518318b510ece395298493726f3b550 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Sat, 4 Dec 2021 00:36:31 +0530 Subject: [PATCH 216/218] fix(ui) - roles review --- .../Client/ManageUsers/ManageUsers.js | 6 ++-- .../Client/PreferencesMenu/PreferencesMenu.js | 1 - .../app/components/Client/Roles/Roles.tsx | 30 +++++++++++++++---- .../Roles/components/RoleForm/RoleForm.tsx | 1 + .../Roles/components/RoleItem/RoleItem.tsx | 21 +++++++++++-- .../Roles/components/RoleItem/roleItem.css | 10 +++++++ frontend/app/types/member.js | 1 + 7 files changed, 57 insertions(+), 13 deletions(-) diff --git a/frontend/app/components/Client/ManageUsers/ManageUsers.js b/frontend/app/components/Client/ManageUsers/ManageUsers.js index ccac283dd..cd634a8cd 100644 --- a/frontend/app/components/Client/ManageUsers/ManageUsers.js +++ b/frontend/app/components/Client/ManageUsers/ManageUsers.js @@ -22,7 +22,7 @@ const LIMIT_WARNING = 'You have reached users limit.'; errors: state.getIn([ 'members', 'saveRequest', 'errors' ]), loading: state.getIn([ 'members', 'loading' ]), saving: state.getIn([ 'members', 'saveRequest', 'loading' ]), - roles: state.getIn(['roles', 'list']).map(r => ({ text: r.name, value: r.roleId })).toJS(), + roles: state.getIn(['roles', 'list']).filter(r => !r.protected).map(r => ({ text: r.name, value: r.roleId })).toJS(), isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee', }), { init, @@ -127,7 +127,7 @@ class ManageUsers extends React.PureComponent { onChange={ this.onChangeCheckbox } disabled={member.superAdmin} /> - { 'Admin' } + { 'Admin Privileges' }
{ 'Can manage Projects and team members.' }
@@ -220,7 +220,7 @@ class ManageUsers extends React.PureComponent {
             />
           }
-          // disabled={ canAddUsers }
+          disabled={ canAddUsers }
           content={ `${ !canAddUsers ? (!isAdmin ? PERMISSION_WARNING : LIMIT_WARNING) : 'Add team member' }` }
           size="tiny"
           inverted
diff --git a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
index f139afbe9..fa3d51db9 100644
--- a/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
+++ b/frontend/app/components/Client/PreferencesMenu/PreferencesMenu.js
@@ -50,7 +50,6 @@ function PreferencesMenu({ activeTab, appearance, history, isEnterprise }) {
         {
-
diff --git a/frontend/app/components/Client/Roles/Roles.tsx b/frontend/app/components/Client/Roles/Roles.tsx
 void, fetchList: () => Promise,
+  account: any,
+  permissionsMap: any
 }

 function Roles(props: Props) {
-  const { loading, instance, roles, init, edit, deleteRole } = props
+  const { loading, instance, roles, init, edit, deleteRole, account, permissionsMap } = props
   const [showModal, setShowmModal] = useState(false)
+  const isAdmin = account.admin || account.superAdmin;
+
+  console.log('permissionsMap', permissionsMap)
+
   useEffect(() => {
     props.fetchList()
@@ -69,10 +75,12 @@ function Roles(props: Props) {
           circle
           icon="plus"
           outline
+          disabled={ !isAdmin }
           onClick={ () => setShowmModal(true) }
         />
         }
+        disabled={ isAdmin }
         size="tiny"
         inverted
         position="top left"
@@ -90,6 +98,7 @@ function Roles(props: Props) {
             {roles.map(role => (
@@ -102,8 +111,17 @@ function Roles(props: Props) {
   )
 }

-export default connect(state => ({
-  instance: state.getIn(['roles', 'instance']) || null,
-  roles: state.getIn(['roles', 'list']),
-  loading: state.getIn(['roles', 'fetchRequest', 'loading']),
-}), { init, edit, fetchList, deleteRole })(Roles)
\ No newline at end of file
+export default connect(state => {
+  const permissions = state.getIn(['roles', 'permissions'])
+  const permissionsMap = {}
+  permissions.forEach(p => {
+    permissionsMap[p.value] = p.name
+  });
+  return {
+    instance: state.getIn(['roles', 'instance']) || null,
+    permissionsMap: permissionsMap,
+    roles: state.getIn(['roles', 'list']),
+    loading: state.getIn(['roles', 'fetchRequest', 'loading']),
+    account: state.getIn([ 'user', 'account' ])
+  }
+}, { init, edit, fetchList, deleteRole })(Roles)
\ No newline at end of file
diff --git a/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx
index d12b60269..7b79f6651 100644
--- a/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx
+++ b/frontend/app/components/Client/Roles/components/RoleForm/RoleForm.tsx
@@ -97,4 +97,5 @@ const RoleForm = ({ role, closeModal, edit, save, saving, permissions }: Props)
 export default connect(state => ({
   role: state.getIn(['roles', 'instance']),
   permissions: state.getIn(['roles', 'permissions']),
+  saving: state.getIn([ 'roles', 'saveRequest', 'loading' ]),
 }), { edit, save })(RoleForm);
\ No newline at end of file
diff --git a/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx
index a242ea6f2..c4cdb7a25 100644
--- a/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx
+++ b/frontend/app/components/Client/Roles/components/RoleItem/RoleItem.tsx
@@ -1,18 +1,33 @@
-import React from 'react';
+import React from 'react'
 import { Icon } from 'UI'
 import stl from './roleItem.css'
 import cn from 'classnames'

+function PermisionLabel({ permission }: any) {
+  return (
+
+    { permission }
+  );
+}
+
 interface Props {
   role: any,
   deleteHandler?: (role: any) => void,
   editHandler?: (role: any) => void,
+  permissions: any
 }

-function RoleItem({ role, deleteHandler, editHandler }: Props) {
+function RoleItem({ role, deleteHandler, editHandler, permissions }: Props) {
   return (
-      { role.name }
+
+      { role.name }
+      {role.permissions.map((permission: any) => (
+
+        // { permissions[permission].name }
+      ))}
+
+
       { !!deleteHandler &&
diff --git a/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css
index 50a56afb4..7173d1c33 100644
--- a/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css
+++ b/frontend/app/components/Client/Roles/components/RoleItem/roleItem.css
@@ -34,4 +34,14 @@
     pointer-events: none;
     opacity: 0.5;
   }
+}
+
+.label {
+  margin-left: 10px;
+  padding: 0 10px;
+  border-radius: 3px;
+  background-color: $gray-lightest;
+  font-size: 12px;
+  border: solid thin $gray-light;
+  width: fit-content;
 }
\ No newline at end of file
diff --git a/frontend/app/types/member.js b/frontend/app/types/member.js
index 4c064e90d..03495784c 100644
--- a/frontend/app/types/member.js
+++ b/frontend/app/types/member.js
@@ -12,6 +12,7 @@ export default Record({
   joined: false,
   expiredInvitation: false,
   roleId: undefined,
+  roleName: undefined,
   invitationLink: '',
 }, {
   idKey: 'id',

From e346a289ed822eb860ec21645630b29ee2c4bf30 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Sat, 4 Dec 2021 00:49:41 +0530
Subject: [PATCH 217/218] fix(ui) - user show role badge

---
 frontend/app/components/Client/ManageUsers/UserItem.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/frontend/app/components/Client/ManageUsers/UserItem.js b/frontend/app/components/Client/ManageUsers/UserItem.js
index 3c533080f..b40b5182a 100644
--- a/frontend/app/components/Client/ManageUsers/UserItem.js
+++ b/frontend/app/components/Client/ManageUsers/UserItem.js
@@ -7,6 +7,7 @@ const UserItem = ({ user, adminLabel, deleteHandler, editHandler, generateInvite
       { user.name || user.email }
       { adminLabel &&
         { adminLabel }
       }
+      { user.roleName && { user.roleName } }
       { user.expiredInvitation && !user.joined &&

From 41b23e0764dc03df63ce46e00f76154639cb68f3 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Sat, 4 Dec 2021 02:58:55 +0530
Subject: [PATCH 218/218] fix(ui) - compile errors after merge

---
 .../components/Assist/ChatControls/ChatControls.tsx |  1 -
 .../app/components/Assist/ChatWindow/ChatWindow.tsx | 12 ++++++------
 .../components/AssistActions/AssistActions.tsx      |  5 +++--
 .../MessageDistributor/managers/AssistManager.ts    | 10 +---------
 4 files changed, 10 insertions(+), 18 deletions(-)

diff --git a/frontend/app/components/Assist/ChatControls/ChatControls.tsx b/frontend/app/components/Assist/ChatControls/ChatControls.tsx
index 3ddee01dd..61803bc2f 100644
--- a/frontend/app/components/Assist/ChatControls/ChatControls.tsx
+++ b/frontend/app/components/Assist/ChatControls/ChatControls.tsx
@@ -13,7 +13,6 @@ interface Props {
 }

 function ChatControls({ stream, endCall, videoEnabled, setVideoEnabled } : Props) {
   const [audioEnabled, setAudioEnabled] = useState(true)
-  const [videoEnabled, setVideoEnabled] = useState(false)

   const toggleAudio = () => {
     if (!stream) { return; }
diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
index 1851b8a72..0327b8254 100644
--- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
+++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx
@@ -10,21 +10,21 @@ import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream

 export interface Props {
-  remoteStream: MediaStream | null,
+  incomeStream: MediaStream | null,
   localStream: LocalStream | null,
   userId: String,
   endCall: () => void
 }

-const ChatWindow: FC<Props> = function ChatWindow({ userId, remoteStream, localStream, endCall }) {
+const ChatWindow: FC<Props> = function ChatWindow({ userId, incomeStream, localStream, endCall }) {
   const [localVideoEnabled, setLocalVideoEnabled] = useState(false)
   const [remoteVideoEnabled, setRemoteVideoEnabled] = useState(false)

   useEffect(() => {
-    if (!remoteStream) { return }
+    if (!incomeStream) { return }
     const iid = setInterval(() => {
-      const settings = remoteStream.getVideoTracks()[0]?.getSettings()
+      const settings = incomeStream.getVideoTracks()[0]?.getSettings()
       const isDummyVideoTrack = !!settings ? (settings.width === 2 || settings.frameRate === 0) : true
       console.log(isDummyVideoTrack, settings)
       const shouldBeEnabled = !isDummyVideoTrack
@@ -33,7 +33,7 @@ const ChatWindow: FC = function ChatWindow({ userId, remoteStream, localS
       }
     }, 1000)
     return () => clearInterval(iid)
-  }, [ remoteStream, localVideoEnabled ])
+  }, [ incomeStream, localVideoEnabled ])

   const minimize = !localVideoEnabled && !remoteVideoEnabled
@@ -48,7 +48,7 @@ const ChatWindow: FC = function ChatWindow({ userId, remoteStream, localS
-
+
diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
index f75b7e740..fd0505e7f 100644
--- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
+++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx
@@ -38,7 +38,7 @@ interface Props {
 }

 function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled, hasPermission, isEnterprise }: Props) {
-  const [ remoteStream, setRemoteStream ] = useState(null);
+  const [ incomeStream, setIncomeStream ] = useState(null);
   const [ localStream, setLocalStream ] = useState(null);
   const [ callObject, setCallObject ] = useState<{ end: ()=>void, toggleRemoteControl: ()=>void } | null >(null);
@@ -63,6 +63,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
         onError
       ));
     }).catch(onError)
+  }

   const confirmCall = async () => {
     if (await confirm({
@@ -124,7 +125,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
       }
-      { inCall && callObject && }
+      { inCall && callObject && }
     )
 }
diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
index 421185bb0..f11c0c70a 100644
--- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
@@ -118,7 +118,7 @@ function resolveCSS(baseURL: string, css: string): string {
 }

 export default class AssistManager {
-  constructor(private session, private config, private md: MessageDistributor, private config) {}
+  constructor(private session, private md: MessageDistributor, private config) {}

   private setStatus(status: ConnectionStatus) {
     if (status === ConnectionStatus.Connecting) {
@@ -403,14 +403,6 @@ export default class AssistManager {
     }
   }

-  private onMouseClick = (e: MouseEvent): void => {
-    const conn = this.dataConnection;
-    if (!conn) { return; }
-    const data = this.md.getInternalCoordinates(e);
-    // const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager
-    conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) });
-  }
-
   private localCallData: {
     localStream: LocalStream,
     onStream: (s: MediaStream)=>void,
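The ChatWindow hunks in PATCH 218 rename the prop but keep the dummy-video-track heuristic intact: a peer with its camera off still publishes a placeholder track, which surfaces as a 2px-wide or 0 fps video. A self-contained TypeScript sketch of that check plus the 1-second polling loop the component uses (`isDummyVideoTrack` and `watchRemoteVideo` are illustrative names, not part of the patch):

```typescript
// Heuristic from ChatWindow: a placeholder ("dummy") video track reports
// width === 2 or frameRate === 0; a missing track also counts as dummy.
function isDummyVideoTrack(stream: MediaStream): boolean {
  const settings = stream.getVideoTracks()[0]?.getSettings();
  if (!settings) { return true; }
  return settings.width === 2 || settings.frameRate === 0;
}

// Polls the stream once a second (the same interval the component uses)
// and reports transitions; returns a cleanup function for unmount.
function watchRemoteVideo(
  stream: MediaStream,
  onChange: (enabled: boolean) => void,
): () => void {
  let enabled = false;
  const iid = setInterval(() => {
    const next = !isDummyVideoTrack(stream);
    if (next !== enabled) {
      enabled = next;
      onChange(enabled); // e.g. setRemoteVideoEnabled(next) in the component
    }
  }, 1000);
  return () => clearInterval(iid);
}
```

The component polls rather than listening for track events, presumably because swapping in the placeholder track does not surface a reliable event on the receiving side.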