From 5315ac66c3871ce4d2d04fd60911d5017071f76b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 26 Sep 2022 17:17:29 +0200 Subject: [PATCH 01/68] feat(chalice): optimized telemetry --- api/chalicelib/core/telemetry.py | 10 +++++++--- ee/api/chalicelib/core/telemetry.py | 19 ++++++++++--------- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 10 ++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 5 +++-- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 10 ++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 5 +++-- 6 files changed, 43 insertions(+), 16 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql create mode 100644 scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index 8098c9cd7..50fdc3c11 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -20,15 +20,19 @@ def process_data(data): def compute(): - with pg_client.PostgresClient() as cur: + with pg_client.PostgresClient(long_query=True) as cur: cur.execute( f"""UPDATE public.tenants SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) + (SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') + (SELECT COUNT(*) FROM public.jira_cloud), 0), t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0), - t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0), - t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0) + t_sessions=t_sessions + COALESCE((SELECT COUNT(*) + FROM public.sessions + WHERE start_ts >= (SELECT last_telemetry FROM tenants) + AND start_ts <=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)), 0), + t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0), + last_telemetry=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT) RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);""" ) diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index a002f8501..889c1b8f6 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -20,7 +20,7 @@ def process_data(data): def compute(): - with pg_client.PostgresClient() as cur: + with pg_client.PostgresClient(long_query=True) as cur: cur.execute( f"""UPDATE public.tenants SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) @@ -39,17 +39,18 @@ def compute(): FROM public.projects WHERE deleted_at ISNULL AND projects.tenant_id = all_tenants.tenant_id), 0), - t_sessions=COALESCE((SELECT COUNT(*) - FROM public.sessions - INNER JOIN public.projects USING (project_id) - WHERE projects.tenant_id = all_tenants.tenant_id), 0), + t_sessions=t_sessions + COALESCE((SELECT COUNT(*) + FROM public.sessions INNER JOIN public.projects USING (project_id) + WHERE projects.tenant_id = all_tenants.tenant_id + AND start_ts >= (SELECT last_telemetry FROM tenants) + AND start_ts <=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)), 0), t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL - AND users.tenant_id = all_tenants.tenant_id), 0) - FROM ( - SELECT tenant_id - FROM public.tenants + AND users.tenant_id = all_tenants.tenant_id), 0), + last_telemetry=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT) + FROM (SELECT 
tenant_id
+                          FROM public.tenants
                           ) AS all_tenants
        WHERE tenants.tenant_id = all_tenants.tenant_id
        RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
new file mode 100644
index 000000000..4eb88bd9e
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -0,0 +1,10 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.8.2-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+ALTER TABLE IF EXISTS public.tenants
+    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);
+COMMIT;
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 723b8eb09..2be29136b 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -153,7 +153,8 @@ $$
         t_projects     integer NOT NULL DEFAULT 1,
         t_sessions     bigint  NOT NULL DEFAULT 0,
         t_users        integer NOT NULL DEFAULT 1,
-        t_integrations integer NOT NULL DEFAULT 0
+        t_integrations integer NOT NULL DEFAULT 0,
+        last_telemetry bigint  NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)
     );
@@ -223,7 +224,7 @@ $$
         provider_user_id text NOT NULL,
         token            text NOT NULL
     );
-    CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
+    CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
     CREATE TABLE IF NOT EXISTS projects
     (
diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
new file mode 100644
index 000000000..57deb548d
--- /dev/null
+++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -0,0 +1,10 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.8.2'
+$$ LANGUAGE sql IMMUTABLE;
+
+ALTER TABLE IF EXISTS public.tenants
+    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);
+COMMIT;
\ No newline at end of file
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 94bfa04e2..ad435348e 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -128,7 +128,8 @@ $$
         t_sessions     bigint  NOT NULL DEFAULT 0,
         t_users        integer NOT NULL DEFAULT 1,
         t_integrations integer NOT NULL DEFAULT 0,
-        CONSTRAINT onerow_uni CHECK (tenant_id = 1)
+        last_telemetry bigint  NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
+        CONSTRAINT onerow_uni CHECK (tenant_id = 1)
     );
     CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member');
@@ -167,7 +168,7 @@ $$
         provider_user_id text NOT NULL,
         token            text NOT NULL
     );
-    CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
+    CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
     -- --- projects.sql ---
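A note on the telemetry change above: the previous version re-counted every row of public.sessions on each run, which is what made the job slow on large installations; the patch keeps a running total instead, gated by the new last_telemetry watermark. A minimal sketch of the same watermark pattern in isolation (table and column names follow the patch; the wrapper function is illustrative only, not the shipped code):

    # Sketch of the incremental-count pattern from PATCH 01 (illustrative only).
    from chalicelib.utils import pg_client

    def compute_incremental_telemetry():
        # long_query=True because the first run after the upgrade may still scan a wide window
        with pg_client.PostgresClient(long_query=True) as cur:
            cur.execute("""UPDATE public.tenants
                           SET t_sessions = t_sessions + COALESCE(
                                   (SELECT COUNT(*)
                                    FROM public.sessions
                                    WHERE start_ts >= (SELECT last_telemetry FROM tenants)
                                      AND start_ts <= CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)), 0),
                               last_telemetry = CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);""")

Truncating the upper bound to the start of the current day means sessions that started today roll into the next run instead of being counted twice.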
From 8baf050a006edd01b6f6af371b3a07c3276f133c Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 27 Sep 2022 12:38:56 +0200
Subject: [PATCH 02/68] feat(chalice): refactored code

feat(chalice): code cleanup
---
 api/chalicelib/core/authorizers.py |  2 +-
 api/chalicelib/core/signup.py      |  2 +-
 api/chalicelib/utils/helper.py     | 81 +-----------------------------
 3 files changed, 3 insertions(+), 82 deletions(-)

diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py
index c32f08208..a474fcb8d 100644
--- a/api/chalicelib/core/authorizers.py
+++ b/api/chalicelib/core/authorizers.py
@@ -15,7 +15,7 @@ def jwt_authorizer(token):
             token[1],
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
-            audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
+            audience=[f"front:{helper.get_stage_name()}"]
         )
     except jwt.ExpiredSignatureError:
         print("! JWT Expired signature")
diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py
index 23c2c8744..9106084ad 100644
--- a/api/chalicelib/core/signup.py
+++ b/api/chalicelib/core/signup.py
@@ -21,7 +21,7 @@ def create_step1(data: schemas.UserSignupSchema):
     password = data.password
     print("Verifying email validity")
-    if email is None or len(email) < 5 or not helper.is_valid_email(email):
+    if email is None or len(email) < 5:
         errors.append("Invalid email address.")
     else:
         print("Verifying email existence")
diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py
index 192c309f5..919ac9d9f 100644
--- a/api/chalicelib/utils/helper.py
+++ b/api/chalicelib/utils/helper.py
@@ -18,40 +18,13 @@ def get_version_number():
 def get_stage_name():
-    stage = config("STAGE")
-    return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage
-
-
-def is_production():
-    return get_stage_name() == "production"
-
-
-def is_staging():
-    return get_stage_name() == "staging"
-
-
-def is_onprem():
-    return not is_production() and not is_staging()
-
-
-def is_local():
-    return config("STAGE").startswith(local_prefix)
+    return "OpenReplay"


 def generate_salt():
     return "".join(random.choices(string.hexdigits, k=36))


-def unique_ordered_list(array):
-    uniq = []
-    [uniq.append(x) for x in array if x not in uniq]
-    return uniq
-
-
-def unique_unordered_list(array):
-    return list(set(array))
-
-
 def list_to_camel_case(items, flatten=False):
     for i in range(len(items)):
         if flatten:
@@ -130,12 +103,6 @@ def key_to_snake_case(name, delimiter='_', split_number=False):
 TRACK_TIME = True

-def __sbool_to_bool(value):
-    if value is None or not isinstance(value, str):
-        return False
-    return value.lower() in ["true", "yes", "1"]
-
-
 def allow_captcha():
     return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \
            and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
@@ -210,54 +177,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
     return value

-def is_valid_email(email):
-    return re.match(r"[^@]+@[^@]+\.[^@]+", email) is not None
-
-
-def is_valid_http_url(url):
-    regex = re.compile(
-        r'^(?:http|ftp)s?://'  # http:// or https://
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
-        r'localhost|'  # localhost...
-        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
-        r'(?::\d+)?'  # optional port
-        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
-
-    return re.match(regex, url) is not None
-
-
-def is_valid_url(url):
-    regex = re.compile(
-        # r'^(?:http|ftp)s?://' # http:// or https://
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
-        r'localhost|'  # localhost...
- r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip - r'(?::\d+)?' # optional port - r'(?:/?|[/?]\S+)$', re.IGNORECASE) - - return re.match(regex, url) is not None - - -def is_alphabet_space(word): - r = re.compile("^[a-zA-Z ]*$") - return r.match(word) is not None - - -def is_alphabet_latin_space(word): - r = re.compile("^[a-zA-Z\u00C0-\u00D6\u00D8-\u00f6\u00f8-\u00ff\s ]*$") - return r.match(word) is not None - - def is_alphabet_space_dash(word): r = re.compile("^[a-zA-Z -]*$") return r.match(word) is not None -def is_alphanumeric_space(word): - r = re.compile("^[a-zA-Z0-9._\- ]*$") - return r.match(word) is not None - - def merge_lists_by_key(l1, l2, key): merged = {} for item in l1 + l2: @@ -310,9 +234,6 @@ def explode_widget(data, key=None): return result -TEMP_PATH = "./" if is_local() else "/tmp/" - - def get_issue_title(issue_type): return {'click_rage': "Click Rage", 'dead_click': "Dead Click", From 976bf7713e326c16c25d48df18236f485dfccd61 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 27 Sep 2022 17:57:23 +0200 Subject: [PATCH 03/68] feat(chalice): session notes --- api/auth/auth_project.py | 4 +- api/chalicelib/core/sessions_notes.py | 105 ++++++++++++++++++++++++++ api/routers/core_dynamic.py | 56 +++++++++++++- api/schemas.py | 28 +++++++ ee/api/.gitignore | 1 + ee/api/clean.sh | 1 + ee/api/routers/core_dynamic.py | 61 ++++++++++++++- 7 files changed, 252 insertions(+), 4 deletions(-) create mode 100644 api/chalicelib/core/sessions_notes.py diff --git a/api/auth/auth_project.py b/api/auth/auth_project.py index 6f842916b..0f28b4162 100644 --- a/api/auth/auth_project.py +++ b/api/auth/auth_project.py @@ -17,8 +17,8 @@ class ProjectAuthorizer: current_user: schemas.CurrentContext = await OR_context(request) value = request.path_params[self.project_identifier] if (self.project_identifier == "projectId" \ - and not (isinstance(value, int) or isinstance(value, str) and value.isnumeric()) - and projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None) \ + and (not (isinstance(value, int) or isinstance(value, str) and value.isnumeric()) + or projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None)) \ or (self.project_identifier == "projectKey" \ and projects.get_internal_project_id(project_key=value) is None): print("project not found") diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py new file mode 100644 index 000000000..916af221a --- /dev/null +++ b/api/chalicelib/core/sessions_notes.py @@ -0,0 +1,105 @@ +import json + +import schemas +from chalicelib.core import users +from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils.TimeUTC import TimeUTC + + +def get_session_notes(tenant_id, project_id, session_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND sessions_notes.session_id = %(session_id)s + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, + "tenant_id": tenant_id, "session_id": session_id}) + + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def get_all_notes(tenant_id, project_id, 
user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + RETURNING *;""", + {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) + cur.execute(query) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): + sub_query = [] + if data.message is not None: + sub_query.append("message = %(message)s") + if data.tags is not None: + sub_query.append("tags = %(tags)s") + if data.is_public is not None: + sub_query.append("is_public = %(is_public)s") + if data.timestamp is not None: + sub_query.append("timestamp = %(timestamp)s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.sessions_notes + SET + {" ,".join(sub_query)} + WHERE + project_id = %(project_id)s + AND user_id = %(user_id)s + AND note_id = %(note_id)s + AND deleted_at ISNULL + RETURNING *;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()}) + ) + row = helper.dict_to_camel_case(cur.fetchone()) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + +def delete(tenant_id, user_id, project_id, note_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.sessions_notes + SET + deleted_at = timezone('utc'::text, now()) + WHERE + note_id = %(note_id)s + AND project_id = %(project_id)s\ + AND user_id = %(user_id)s + AND deleted_at ISNULL;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id}) + ) + return {"data": {"state": "success"}} diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index a9b50b4dc..d2357b319 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ - sessions_favorite, assist + sessions_favorite, assist, sessions_notes from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -372,3 +372,57 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem return { 'data': data } + + +@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +def create_note(projectId: int, sessionId: int, data: 
schemas.SessionNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId) + return data + + +@app.get('/{projectId}/notes', tags=["sessions", "notes"]) +def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } diff --git a/api/schemas.py b/api/schemas.py index f6dc8b34b..9be29e84a 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1084,3 +1084,31 @@ class IntegrationType(str, Enum): stackdriver = "STACKDRIVER" cloudwatch = "CLOUDWATCH" newrelic = "NEWRELIC" + + +class SessionNoteSchema(BaseModel): + message: str = Field(..., min_length=2) + tags: List[str] = Field(default=[]) + timestamp: int = Field(default=-1) + is_public: bool = Field(default=False) + + class Config: + alias_generator = attribute_to_camel_case + + +class SessionUpdateNoteSchema(SessionNoteSchema): + message: Optional[str] = Field(default=None, min_length=2) + tags: Optional[List[str]] = Field(default=None) + timestamp: Optional[int] = Field(default=None, ge=-1) + is_public: Optional[bool] = Field(default=None) + + @root_validator + def validator(cls, values): + assert len(values.keys()) > 0, "at least 1 attribute should be provided for update" + c = 0 + for v in values.values(): + if v is not None and (not isinstance(v, str) or len(v) > 0): + c += 1 + break + assert c > 0, "at least 1 value should be provided for update" + return values diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 811b00301..924060617 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -213,6 +213,7 @@ Pipfile /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py +/chalicelib/core/sessions_notes.py #exp /chalicelib/core/significance.py /chalicelib/core/slack.py /chalicelib/core/socket_ios.py diff --git a/ee/api/clean.sh b/ee/api/clean.sh index ce58fe45e..53607cb25 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -35,6 +35,7 @@ rm -rf 
./chalicelib/core/mobile.py
 rm -rf ./chalicelib/core/sessions_assignments.py
 rm -rf ./chalicelib/core/sessions_metas.py
 rm -rf ./chalicelib/core/sessions_mobs.py
+rm -rf ./chalicelib/core/sessions_notes.py #exp
 rm -rf ./chalicelib/core/significance.py
 rm -rf ./chalicelib/core/slack.py
 rm -rf ./chalicelib/core/socket_ios.py
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index ed31fd56c..ee3c3a83f 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse
 import schemas
 import schemas_ee
 from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
-    errors_favorite
+    errors_favorite, sessions_notes
 from chalicelib.core import sessions_viewed
 from chalicelib.core import tenants, users, projects, license
 from chalicelib.core import webhook
@@ -396,3 +396,62 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
     return {
         'data': data
     }
+
+
+@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
+          dependencies=[OR_scope(Permissions.session_replay)])
+@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
+         dependencies=[OR_scope(Permissions.session_replay)])
+def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
+                context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
+                                 session_id=sessionId, user_id=context.user_id, data=data)
+    if "errors" in data.keys():
+        return data
+    return {
+        'data': data
+    }
+
+
+@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
+         dependencies=[OR_scope(Permissions.session_replay)])
+def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
+                                            session_id=sessionId, user_id=context.user_id)
+    if "errors" in data:
+        return data
+    return {
+        'data': data
+    }
+
+
+@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
+          dependencies=[OR_scope(Permissions.session_replay)])
+@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
+def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
+              context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
+                               note_id=noteId, data=data)
+    if "errors" in data.keys():
+        return data
+    return {
+        'data': data
+    }
+
+
+@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
+            dependencies=[OR_scope(Permissions.session_replay)])
+def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
+                                 note_id=noteId)
+    return data
+
+
+@app.get('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
+def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id)
+    if "errors" in data:
+        return data
+    return {
+        'data': data
+    }
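For orientation before the next patch: the five routes added above form a small CRUD surface for session notes. A hypothetical client-side sketch of how they compose (base URL, token, and IDs are made up; the paths and camelCase payload fields follow the routes and SessionNoteSchema defined in this patch):

    # Hypothetical walkthrough of the session-notes endpoints added in PATCH 03.
    # BASE and the bearer token are placeholders, not part of the patch.
    import requests

    BASE = "https://openreplay.example.com/api"   # assumption: wherever the chalice API is served
    AUTH = {"Authorization": "Bearer <JWT>"}      # assumption: the usual bearer token

    # Create a note pinned 4.5s into session 123 of project 1
    created = requests.post(f"{BASE}/1/sessions/123/notes", headers=AUTH,
                            json={"message": "user rage-clicked here", "tags": ["ux"],
                                  "timestamp": 4500, "isPublic": True}).json()["data"]

    # Edit it; SessionUpdateNoteSchema insists on at least one provided value
    requests.post(f"{BASE}/1/notes/{created['noteId']}", headers=AUTH,
                  json={"message": "updated wording"})

    # List notes visible to the current user, then clean up
    requests.get(f"{BASE}/1/notes", headers=AUTH)
    requests.delete(f"{BASE}/1/notes/{created['noteId']}", headers=AUTH)

Note that GET /{projectId}/notes later becomes a POST carrying a SearchNoteSchema body in PATCH 13, once pagination and sorting are added.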
From ab450b7945025e9a52df51486b0970692d0f405b Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 28 Sep 2022 11:36:10 +0200
Subject: [PATCH 04/68] feat(chalice): session mobsUrl
---
 api/chalicelib/core/sessions.py      |  1 +
 api/chalicelib/core/sessions_mobs.py | 20 ++++++++++++++++++++
 ee/api/chalicelib/core/sessions.py   |  1 +
 3 files changed, 22 insertions(+)

diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 4a27d0b13..5b43ddbd1 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -95,6 +95,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
             data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                      session_id=session_id)
             data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
+            data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
             data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id)
             data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                             start_ts=data["startTs"], duration=data["duration"])
diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py
index 1107ee6d4..53385f6a6 100644
--- a/api/chalicelib/core/sessions_mobs.py
+++ b/api/chalicelib/core/sessions_mobs.py
@@ -26,6 +26,26 @@ def get_urls(project_id, session_id):
     return results


+def get_urls_depercated(sessionId):
+    return [
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(sessionId)
+            },
+            ExpiresIn=100000
+        ),
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(sessionId) + "e"
+            },
+            ExpiresIn=100000
+        )]
+
+
 def get_ios(session_id):
     return client.generate_presigned_url(
         'get_object',
diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py
index 92c6e8f74..0c908c500 100644
--- a/ee/api/chalicelib/core/sessions.py
+++ b/ee/api/chalicelib/core/sessions.py
@@ -96,6 +96,7 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo
             data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                      session_id=session_id)
             data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
+            data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
             data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                             context=context)
             data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,

From 968d25e29e4cecc3bbe9a3d85c71e7e0f1fe3e18 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 28 Sep 2022 12:23:40 +0200
Subject: [PATCH 05/68] feat(chalice): session mobsUrl
---
 api/chalicelib/core/sessions_mobs.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py
index 53385f6a6..3d966a47c 100644
--- a/api/chalicelib/core/sessions_mobs.py
+++ b/api/chalicelib/core/sessions_mobs.py
@@ -26,13 +26,13 @@ def get_urls(project_id, session_id):
     return results


-def get_urls_depercated(sessionId):
+def get_urls_depercated(session_id):
     return [
         client.generate_presigned_url(
             'get_object',
             Params={
                 'Bucket': config("sessions_bucket"),
-                'Key': str(sessionId)
+                'Key': str(session_id)
             },
             ExpiresIn=100000
         ),
         client.generate_presigned_url(
             'get_object',
             Params={
                 'Bucket': config("sessions_bucket"),
-                'Key': str(sessionId) + "e"
+                'Key': str(session_id) + "e"
             },
             ExpiresIn=100000
         )]
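A side note on the two mobsUrl entries returned above: they are S3 presigned GET URLs, so whoever holds them can download the mob files without any AWS credentials, and ExpiresIn=100000 keeps them valid for roughly 28 hours. A hedged sketch of a consumer (the two-key layout, "<session_id>" and "<session_id>e", comes from the code above; the existence check is an assumption, since short sessions may only have the first file):

    # Hypothetical consumer of the presigned URLs returned by get_urls_depercated().
    # The signature lives in the URL itself, so a plain HTTP GET is enough.
    import requests

    def fetch_mob_files(mobs_urls):
        payloads = []
        for url in mobs_urls:
            resp = requests.get(url, timeout=30)
            if resp.status_code == 200:   # assumption: a missing second file simply errors out
                payloads.append(resp.content)
        return payloads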
From 21632f8c466f3bc3917efbdd7df7134f249d275f Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 28 Sep 2022 13:40:22 +0200
Subject: [PATCH 06/68] feat(DB): sessions_notes structure
---
 .../helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 15 +++++++++++++++
 .../helm/db/init_dbs/postgresql/init_schema.sql | 17 ++++++++++++++++-
 .../helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 15 +++++++++++++++
 .../helm/db/init_dbs/postgresql/init_schema.sql | 14 ++++++++++++++
 4 files changed, 60 insertions(+), 1 deletion(-)

diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
index 4eb88bd9e..3586ff63a 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -7,4 +7,19 @@ $$ LANGUAGE sql IMMUTABLE;
 ALTER TABLE IF EXISTS public.tenants
     ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);
+
+CREATE TABLE IF NOT EXISTS sessions_notes
+(
+    note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    message    text                        NOT NULL,
+    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+    user_id    integer                     NULL REFERENCES users (user_id) ON DELETE SET NULL,
+    deleted_at timestamp without time zone NULL DEFAULT NULL,
+    tags       text[]                      NOT NULL DEFAULT '{}',
+    session_id bigint                      NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    project_id integer                     NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    timestamp  integer                     NOT NULL DEFAULT -1,
+    is_public  boolean                     NOT NULL DEFAULT FALSE
+);
+
 COMMIT;
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 2be29136b..792003dab 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -129,7 +129,8 @@ $$
              ('user_viewed_errors'),
              ('user_viewed_sessions'),
              ('users'),
-             ('webhooks'))
+             ('webhooks'),
+             ('sessions_notes'))
        select bool_and(exists(select *
                               from information_schema.tables t
                               where table_schema = 'public'
@@ -857,6 +858,20 @@ $$
         FOR EACH ROW
     EXECUTE PROCEDURE notify_alert();

+    CREATE TABLE IF NOT EXISTS sessions_notes
+    (
+        note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        message    text                        NOT NULL,
+        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        user_id    integer                     NULL REFERENCES users (user_id) ON DELETE SET NULL,
+        deleted_at timestamp without time zone NULL DEFAULT NULL,
+        tags       text[]                      NOT NULL DEFAULT '{}',
+        session_id bigint                      NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        project_id integer                     NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+        timestamp  integer                     NOT NULL DEFAULT -1,
+        is_public  boolean                     NOT NULL DEFAULT FALSE
+    );
+
     RAISE NOTICE 'Created missing public schema tables';
     END IF;
 END;
diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
index 57deb548d..4ce5009a3 100644
--- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
+++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -7,4 +7,19 @@ $$ LANGUAGE sql IMMUTABLE;
 ALTER TABLE IF EXISTS public.tenants
     ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM
date_trunc('day', now())) * 1000 AS BIGINT); + +CREATE TABLE IF NOT EXISTS sessions_notes +( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE +); + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index ad435348e..9e3a0f924 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1000,6 +1000,20 @@ $$ FOR EACH ROW EXECUTE PROCEDURE notify_alert(); + CREATE TABLE sessions_notes + ( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE + ); + raise notice 'DB created'; END IF; END; From fa9c96d1394df6d1911bb3ce81a7fda46e99ef40 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:17:11 +0200 Subject: [PATCH 07/68] feat(chalice): changed assist agent secret --- api/chalicelib/core/assist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 3cd3cec25..b4fc3a9f8 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -93,7 +93,7 @@ def __get_agent_token(project_id, project_key, session_id): "iss": config("JWT_ISSUER"), "aud": f"openreplay:agent" }, - key=config("jwt_secret"), + key=config("ASSIST_JWT_SECRET"), algorithm=config("jwt_algorithm") ) From 6cb1f82d7d6b1a935a62b8aca5c3a7040d69ef07 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:34:05 +0200 Subject: [PATCH 08/68] feat(chalice): changed notes --- api/chalicelib/core/sessions_notes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 916af221a..6c66ebcf5 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -20,6 +20,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, "session_id": session_id}) + cur.execute(query=query) rows = cur.fetchall() rows = helper.list_to_camel_case(rows) for row in rows: From f2b10bceb4f3b82ec236cf96e3512f1e84c2a237 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:44:23 +0200 Subject: [PATCH 09/68] feat(chalice): changed notes --- api/chalicelib/core/sessions_notes.py | 4 +- ee/api/.gitignore | 1 - ee/api/chalicelib/core/sessions_notes.py | 106 +++++++++++++++++++++++ ee/api/clean.sh 
| 1 - 4 files changed, 108 insertions(+), 4 deletions(-) create mode 100644 ee/api/chalicelib/core/sessions_notes.py diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 6c66ebcf5..c1eec5dfc 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -15,7 +15,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): AND sessions_notes.deleted_at IS NULL AND sessions_notes.session_id = %(session_id)s AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + OR sessions_notes.is_public) ORDER BY created_at DESC;""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, "session_id": session_id}) @@ -36,7 +36,7 @@ def get_all_notes(tenant_id, project_id, user_id): WHERE sessions_notes.project_id = %(project_id)s AND sessions_notes.deleted_at IS NULL AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + OR sessions_notes.is_public) ORDER BY created_at DESC;""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 924060617..811b00301 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -213,7 +213,6 @@ Pipfile /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py -/chalicelib/core/sessions_notes.py #exp /chalicelib/core/significance.py /chalicelib/core/slack.py /chalicelib/core/socket_ios.py diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py new file mode 100644 index 000000000..6c66ebcf5 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -0,0 +1,106 @@ +import json + +import schemas +from chalicelib.core import users +from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils.TimeUTC import TimeUTC + + +def get_session_notes(tenant_id, project_id, session_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND sessions_notes.session_id = %(session_id)s + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, + "tenant_id": tenant_id, "session_id": session_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def get_all_notes(tenant_id, project_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def create(tenant_id, user_id, project_id, session_id, data: 
schemas.SessionNoteSchema): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + RETURNING *;""", + {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) + cur.execute(query) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): + sub_query = [] + if data.message is not None: + sub_query.append("message = %(message)s") + if data.tags is not None: + sub_query.append("tags = %(tags)s") + if data.is_public is not None: + sub_query.append("is_public = %(is_public)s") + if data.timestamp is not None: + sub_query.append("timestamp = %(timestamp)s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.sessions_notes + SET + {" ,".join(sub_query)} + WHERE + project_id = %(project_id)s + AND user_id = %(user_id)s + AND note_id = %(note_id)s + AND deleted_at ISNULL + RETURNING *;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()}) + ) + row = helper.dict_to_camel_case(cur.fetchone()) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + +def delete(tenant_id, user_id, project_id, note_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.sessions_notes + SET + deleted_at = timezone('utc'::text, now()) + WHERE + note_id = %(note_id)s + AND project_id = %(project_id)s\ + AND user_id = %(user_id)s + AND deleted_at ISNULL;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id}) + ) + return {"data": {"state": "success"}} diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 53607cb25..ce58fe45e 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -35,7 +35,6 @@ rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/sessions_assignments.py rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py -rm -rf ./chalicelib/core/sessions_notes.py #exp rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/slack.py rm -rf ./chalicelib/core/socket_ios.py From eda3bb3dcbcf112f143522b9fd32ff66cd266744 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 17:14:22 +0200 Subject: [PATCH 10/68] feat(assist): enhanced code --- utilities/utils/assistHelper.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utilities/utils/assistHelper.js b/utilities/utils/assistHelper.js index 006223d25..ece1ea0f3 100644 --- a/utilities/utils/assistHelper.js +++ b/utilities/utils/assistHelper.js @@ -134,7 +134,7 @@ function check(socket, next) { debug && console.error(`projectKey:${projectKey}, sessionId:${sessionId}`); return next(new Error('Authentication error')); } - if (projectKey !== decoded.projectKey || sessionId !== decoded.sessionId) { + if (String(projectKey) !== String(decoded.projectKey) || String(sessionId) !== String(decoded.sessionId)) { debug && console.error(`Trying to access projectKey:${projectKey} instead of ${decoded.projectKey}\nor`); debug && console.error(`Trying to access sessionId:${sessionId} instead of ${decoded.sessionId}`); return next(new Error('Authorization error')); From 0d26b1cc9a1fcadffaedf775268985480eef14c7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 
2022 17:44:05 +0200 Subject: [PATCH 11/68] feat(chalice): session mobsUrl exp feat(chalice): notes in replay response --- api/chalicelib/core/sessions.py | 8 ++++--- api/chalicelib/core/sessions_favorite.py | 19 ++++++++------- api/routers/core_dynamic.py | 13 ++++++----- ee/api/chalicelib/core/sessions.py | 8 ++++--- ee/api/chalicelib/core/sessions_exp.py | 16 +++++++++---- ee/api/chalicelib/core/sessions_favorite.py | 26 ++++++++++----------- ee/api/routers/core_dynamic.py | 9 ++++--- 7 files changed, 55 insertions(+), 44 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 5b43ddbd1..7189146a6 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -3,7 +3,7 @@ from typing import List import schemas from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ - sessions_devtool + sessions_devtool, sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, @@ -40,8 +40,8 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False, - live=True): +def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] if include_fav_viewed: @@ -100,6 +100,8 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=tenant_id, project_id=project_id, + session_id=session_id, user_id=user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 691e5ec3e..41d241b4d 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -2,7 +2,7 @@ from chalicelib.core import sessions from chalicelib.utils import pg_client -def add_favorite_session(project_id, user_id, session_id): +def add_favorite_session(tenant_id, project_id, user_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -10,11 +10,11 @@ def add_favorite_session(project_id, user_id, session_id): VALUES (%(userId)s,%(session_id)s);""", {"userId": user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + return sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + full_data=False, include_fav_viewed=True) -def remove_favorite_session(project_id, user_id, session_id): +def remove_favorite_session(tenant_id, project_id, user_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -23,15 +23,16 @@ def remove_favorite_session(project_id, user_id, session_id): AND session_id = %(session_id)s;""", {"userId": user_id, "session_id": session_id}) ) - 
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + return sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + full_data=False, include_fav_viewed=True) -def favorite_session(project_id, user_id, session_id): +def favorite_session(tenant_id, project_id, user_id, session_id): if favorite_session_exists(user_id=user_id, session_id=session_id): - return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return remove_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, + session_id=session_id) - return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return add_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, session_id=session_id) def favorite_session_exists(user_id, session_id): diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index d2357b319..326d31ab9 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -175,8 +175,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, - include_fav_viewed=True, group_metadata=True) + data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, + full_data=True, user_id=context.user_id, include_fav_viewed=True, group_metadata=True) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -265,8 +265,9 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, + full_data=True, user_id=context.user_id, include_fav_viewed=True, + group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -328,8 +329,8 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return { - "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId)} + "data": sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id, session_id=sessionId)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 0c908c500..18da3e200 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -4,7 +4,7 @@ import schemas import schemas_ee from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ - sessions_devtool + sessions_devtool, 
sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, @@ -41,7 +41,7 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentContext, full_data=False, +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] @@ -65,7 +65,7 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -102,6 +102,8 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 81953bcc5..20c0db9dd 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -3,7 +3,8 @@ from typing import List, Union import schemas import schemas_ee from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics + sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics, sessions_devtool, \ + sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper SESSION_PROJECTION_COLS_CH = """\ @@ -58,8 +59,8 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False, - live=True): +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] if include_fav_viewed: @@ -82,7 +83,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -112,11 +113,16 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ :500] # limit the number of errors to reduce the response-body size data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) - 
data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index c3128cd03..c1616e0f9 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -5,35 +5,35 @@ from chalicelib.core import sessions, sessions_favorite_exp from chalicelib.utils import pg_client, s3_extra -def add_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def add_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) VALUES (%(userId)s,%(sessionId)s);""", - {"userId": user_id, "sessionId": session_id}) + {"userId": context.user_id, "sessionId": session_id}) ) - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True, context=context) + sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, + full_data=False, include_fav_viewed=True, context=context) -def remove_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def remove_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s AND session_id = %(sessionId)s;""", - {"userId": user_id, "sessionId": session_id}) + {"userId": context.user_id, "sessionId": session_id}) ) - sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True, context=context) + sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, + full_data=False, include_fav_viewed=True, context=context) -def favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def favorite_session(tenant_id, project_id, user_id, session_id, context: schemas_ee.CurrentContext): if favorite_session_exists(user_id=user_id, session_id=session_id): key = str(session_id) try: @@ -47,7 +47,7 @@ def 
favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren except Exception as e: print(f"!!!Error while tagging: {key} to default") print(str(e)) - return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return remove_favorite_session(project_id=project_id, session_id=session_id, context=context) key = str(session_id) try: s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault')) @@ -60,7 +60,7 @@ def favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren except Exception as e: print(f"!!!Error while tagging: {key} to vault") print(str(e)) - return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, context=context) + return add_favorite_session(project_id=project_id, session_id=session_id, context=context) def favorite_session_exists(user_id, session_id): diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index ee3c3a83f..d06467cdd 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -183,7 +183,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} @@ -275,8 +275,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False, - context=context) + include_fav_viewed=True, group_metadata=True, live=False, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -344,8 +343,8 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas_ee.CurrentContext = Depends(OR_context)): return { - "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId, context=context)} + "data": sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id, session_id=sessionId, context=context)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], From c3913e6dca3fc42638d24ed4bc9736f0229d179f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 29 Sep 2022 15:34:37 +0200 Subject: [PATCH 12/68] feat(chalice): changes --- api/chalicelib/core/sessions.py | 11 +++++------ api/chalicelib/core/sessions_notes.py | 9 +++++---- api/routers/core_dynamic.py | 4 ++-- ee/api/chalicelib/core/sessions_notes.py | 6 ++++-- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 7189146a6..6361c08c7 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -40,7 +40,7 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(tenant_id, project_id, session_id, 
user_id, full_data=False, include_fav_viewed=False, +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] @@ -64,7 +64,7 @@ def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, i FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -100,12 +100,11 @@ def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, i data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) - data['notes'] = sessions_notes.get_session_notes(tenant_id=tenant_id, project_id=project_id, - session_id=session_id, user_id=user_id) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, - session_id=session_id, + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) data["inDB"] = True return data diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index c1eec5dfc..da2305981 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -55,8 +55,10 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) - result = cur.fetchone() - return helper.dict_to_camel_case(result) + result = helper.dict_to_camel_case(cur.fetchone()) + if result: + result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"]) + return result def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): @@ -94,8 +96,7 @@ def delete(tenant_id, user_id, project_id, note_id): cur.execute( cur.mogrify("""\ UPDATE public.sessions_notes - SET - deleted_at = timezone('utc'::text, now()) + SET deleted_at = timezone('utc'::text, now()) WHERE note_id = %(note_id)s AND project_id = %(project_id)s\ diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 326d31ab9..2cb1a7c51 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -175,8 +175,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, - full_data=True, user_id=context.user_id, include_fav_viewed=True, group_metadata=True) + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not 
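
Note: create() now normalizes the returned row before handing it to the client: the Postgres timestamp in createdAt becomes epoch milliseconds. A minimal sketch of that conversion, assuming TimeUTC.datetime_to_timestamp receives naive UTC datetimes (the function name is reused here purely for illustration):

    from datetime import datetime, timezone
    from typing import Optional

    def datetime_to_timestamp(dt: Optional[datetime]) -> Optional[int]:
        # Postgres hands back naive UTC datetimes; clients expect epoch ms.
        if dt is None:
            return None
        return int(dt.replace(tzinfo=timezone.utc).timestamp() * 1000)

    print(datetime_to_timestamp(datetime(2022, 9, 29, 15, 34, 37)))  # 1664465677000
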
found"]} if data.get("inDB"): diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 6c66ebcf5..8dc8411ca 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -55,8 +55,10 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) - result = cur.fetchone() - return helper.dict_to_camel_case(result) + result = helper.dict_to_camel_case(cur.fetchone()) + if result: + result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"]) + return result def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): From 224541a0c2987037e69f3546ec6593fa48081256 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 29 Sep 2022 18:06:51 +0200 Subject: [PATCH 13/68] feat(chalice): notes pagination feat(chalice): notes sort --- api/chalicelib/core/sessions_notes.py | 5 +++-- api/routers/core_dynamic.py | 7 ++++--- api/schemas.py | 8 ++++++++ ee/api/chalicelib/core/sessions_notes.py | 5 +++-- ee/api/routers/core_dynamic.py | 8 +++++--- 5 files changed, 23 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index da2305981..ab3dbd5a1 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -28,7 +28,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): return rows -def get_all_notes(tenant_id, project_id, user_id): +def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes @@ -37,7 +37,8 @@ def get_all_notes(tenant_id, project_id, user_id): AND sessions_notes.deleted_at IS NULL AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public) - ORDER BY created_at DESC;""", + ORDER BY created_at {data.order} + LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) cur.execute(query=query) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 2cb1a7c51..2695a6b09 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -419,9 +419,10 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data -@app.get('/{projectId}/notes', tags=["sessions", "notes"]) -def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) +@app.post('/{projectId}/notes', tags=["sessions", "notes"]) +def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,data=data) if "errors" in data: return data return { diff --git a/api/schemas.py b/api/schemas.py index 9be29e84a..b18550dcd 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1086,6 +1086,14 @@ class IntegrationType(str, Enum): newrelic = "NEWRELIC" +class SearchNoteSchema(_PaginatedSchema): + sort: str = Field(default="createdAt") + order: SortOrderType = Field(default=SortOrderType.desc) + + class Config: + 
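
Note: the new LIMIT/OFFSET clause is driven by SearchNoteSchema, which extends the paginated schema with a sort field and order. The page-to-offset arithmetic, as a standalone sketch (names hypothetical):

    def paginate_clause(limit: int, page: int) -> str:
        # Pages are 1-based, so page 1 starts at offset 0.
        return f"LIMIT {limit} OFFSET {limit * (page - 1)}"

    assert paginate_clause(10, 1) == "LIMIT 10 OFFSET 0"
    assert paginate_clause(10, 3) == "LIMIT 10 OFFSET 20"
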
+        alias_generator = attribute_to_camel_case
+
+
 class SessionNoteSchema(BaseModel):
     message: str = Field(..., min_length=2)
     tags: List[str] = Field(default=[])
diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py
index 8dc8411ca..918b5da0a 100644
--- a/ee/api/chalicelib/core/sessions_notes.py
+++ b/ee/api/chalicelib/core/sessions_notes.py
@@ -28,7 +28,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
     return rows
 
 
-def get_all_notes(tenant_id, project_id, user_id):
+def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""SELECT sessions_notes.*
                                 FROM sessions_notes
@@ -37,7 +37,8 @@ def get_all_notes(tenant_id, project_id, user_id):
                                   AND sessions_notes.deleted_at IS NULL
                                   AND (sessions_notes.user_id = %(user_id)s
                                     OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)
-                                ORDER BY created_at DESC;""",
+                                ORDER BY created_at {data.order}
+                                LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
                             {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id})
         cur.execute(query=query)
 
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index d06467cdd..176896ebb 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -446,9 +446,11 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D
     return data
 
 
-@app.get('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
-def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id)
+@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
+def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
+                  context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
+                                                      user_id=context.user_id, data=data)
     if "errors" in data:
         return data
     return {

From cc24b1368652f10055369d5211e5932f7a0942a5 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 30 Sep 2022 15:41:47 +0200
Subject: [PATCH 14/68] feat(chalice): search notes by tags

---
 api/chalicelib/core/sessions_notes.py    | 17 ++++++++++-------
 api/schemas.py                           |  1 +
 ee/api/chalicelib/core/sessions_notes.py | 15 ++++++++++-----
 3 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py
index ab3dbd5a1..77e487df3 100644
--- a/api/chalicelib/core/sessions_notes.py
+++ b/api/chalicelib/core/sessions_notes.py
@@ -2,6 +2,7 @@ import json
 
 import schemas
 from chalicelib.core import users
+from chalicelib.core.sessions import _multiple_conditions, _multiple_values
 from chalicelib.utils import pg_client, helper, dev
 from chalicelib.utils.TimeUTC import TimeUTC
 
@@ -10,7 +11,6 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""SELECT sessions_notes.*
                                 FROM sessions_notes
-                                INNER JOIN users USING (user_id)
                                 WHERE sessions_notes.project_id = %(project_id)s
                                   AND sessions_notes.deleted_at IS NULL
                                   AND sessions_notes.session_id = %(session_id)s
@@ -30,16 +30,19 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
 
 
 def 
get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", + "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)"] + extra_params = {} + if data.tags and len(data.tags) > 0: + k = "tag" + conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = _multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes - INNER JOIN users USING (user_id) - WHERE sessions_notes.project_id = %(project_id)s - AND sessions_notes.deleted_at IS NULL - AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public) + WHERE {" AND ".join(conditions)} ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", - {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) cur.execute(query=query) rows = cur.fetchall() diff --git a/api/schemas.py b/api/schemas.py index b18550dcd..db5c0cf82 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1089,6 +1089,7 @@ class IntegrationType(str, Enum): class SearchNoteSchema(_PaginatedSchema): sort: str = Field(default="createdAt") order: SortOrderType = Field(default=SortOrderType.desc) + tags: Optional[List[str]] = Field(default=[]) class Config: alias_generator = attribute_to_camel_case diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 918b5da0a..baf71526d 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -2,6 +2,7 @@ import json import schemas from chalicelib.core import users +from chalicelib.core.sessions import _multiple_conditions, _multiple_values from chalicelib.utils import pg_client, helper, dev from chalicelib.utils.TimeUTC import TimeUTC @@ -30,16 +31,20 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", + "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)"] + extra_params = {} + if data.tags and len(data.tags) > 0: + k = "tag" + conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = _multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes INNER JOIN users USING (user_id) - WHERE sessions_notes.project_id = %(project_id)s - AND sessions_notes.deleted_at IS NULL - AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + WHERE {" AND ".join(conditions)} ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", - {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) cur.execute(query=query) rows = cur.fetchall() From c21aa47f6e470fb07fa1c4da0d784069e752c5fa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 15:54:02 +0200 Subject: [PATCH 15/68] feat(chalice): search notes by tags --- 
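
Note: the tag filter leans on two helpers imported from chalicelib.core.sessions. Their bodies are not part of this series; judging from the call sites, they expand one parameterized condition into an OR-group with numbered placeholders, roughly like this sketch:

    def _multiple_conditions(condition: str, values: list, value_key: str = "value") -> str:
        # One numbered placeholder per value, OR-ed together.
        parts = [condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
                 for i in range(len(values))]
        return "(" + " OR ".join(parts) + ")"

    def _multiple_values(values: list, value_key: str = "value") -> dict:
        # The matching parameter dict for cur.mogrify().
        return {f"{value_key}_{i}": v for i, v in enumerate(values)}

    # _multiple_conditions("%(tag)s = ANY (s.issue_types)", ["a", "b"], value_key="tag")
    # -> "(%(tag_0)s = ANY (s.issue_types) OR %(tag_1)s = ANY (s.issue_types))"
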
api/chalicelib/core/sessions_notes.py | 11 ++++------- ee/api/chalicelib/core/sessions_notes.py | 11 ++++------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 77e487df3..d7b29fbb3 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,6 @@ -import json - import schemas -from chalicelib.core import users -from chalicelib.core.sessions import _multiple_conditions, _multiple_values -from chalicelib.utils import pg_client, helper, dev +from chalicelib.core import sessions +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -35,8 +32,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) - extra_params = _multiple_values(data.tags, value_key=k) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes WHERE {" AND ".join(conditions)} diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index baf71526d..fe26e23ac 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,6 @@ -import json - import schemas -from chalicelib.core import users -from chalicelib.core.sessions import _multiple_conditions, _multiple_values -from chalicelib.utils import pg_client, helper, dev +from chalicelib.core import sessions +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -36,8 +33,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) - extra_params = _multiple_values(data.tags, value_key=k) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes INNER JOIN users USING (user_id) From a1cfc3db8fccbc0c48fdc93f0fa8eac6563ba4ba Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 16:02:11 +0200 Subject: [PATCH 16/68] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index d7b29fbb3..04855c591 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes diff --git 
a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index fe26e23ac..36aff24c2 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -33,7 +33,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes From 91202cfa38478a7bc334c44bf338055f93a18acf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 16:06:05 +0200 Subject: [PATCH 17/68] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 04855c591..a82ff9975 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -31,7 +31,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)"] extra_params = {} if data.tags and len(data.tags) > 0: - k = "tag" + k = "tag_value" conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 36aff24c2..4b2826dfd 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)"] extra_params = {} if data.tags and len(data.tags) > 0: - k = "tag" + k = "tag_value" conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* From 6f190f3a81d30751b19aa20335216eae28bbd642 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 17:02:17 +0200 Subject: [PATCH 18/68] feat(chalice): jwt expiration changes --- api/chalicelib/core/assist.py | 2 +- api/chalicelib/core/authorizers.py | 2 +- api/env.default | 4 ++-- ee/api/env.default | 6 ++++-- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index b4fc3a9f8..ebf1b7ab8 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -89,7 +89,7 @@ def __get_agent_token(project_id, project_key, session_id): "projectId": project_id, "sessionId": session_id, "iat": iat // 1000, - "exp": iat // 1000 + config("JWT_EXP_DELTA_SECONDS", cast=int) + TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("JWT_ISSUER"), "aud": f"openreplay:agent" }, diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index a474fcb8d..2ec3fa01f 
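
Note: the agent token now takes its lifetime from ASSIST_JWT_EXPIRATION rather than the generic JWT_EXP_DELTA_SECONDS. The exp claim is derived from a millisecond iat, as in this condensed sketch:

    from decouple import config

    def exp_claim(iat_ms: int, utc_offset_ms: int = 0) -> int:
        # JWT iat/exp are in seconds; the app tracks time in milliseconds.
        return iat_ms // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + utc_offset_ms // 1000
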
100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -42,7 +42,7 @@ def generate_jwt(id, tenant_id, iat, aud): payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("JWT_ISSUER"), "iat": iat // 1000, "aud": aud diff --git a/api/env.default b/api/env.default index 703cdc887..676feb541 100644 --- a/api/env.default +++ b/api/env.default @@ -18,7 +18,7 @@ change_password_link=/reset-password?invitation=%s&&pass=%s invitation_link=/api/users/invitation?token=%s js_cache_bucket=sessions-assets jwt_algorithm=HS512 -JWT_EXP_DELTA_SECONDS=2592000 +JWT_EXPIRATION=2592000 JWT_ISSUER=openreplay-oss jwt_secret="SET A RANDOM STRING HERE" ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s @@ -48,5 +48,5 @@ SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 -ASSIST_JWT_EXPIRATION=1800 +ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file diff --git a/ee/api/env.default b/ee/api/env.default index 8f0765d92..94037cf1a 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -28,7 +28,7 @@ idp_x509cert= invitation_link=/api/users/invitation?token=%s js_cache_bucket=sessions-assets jwt_algorithm=HS512 -JWT_EXP_DELTA_SECONDS=2592000 +JWT_EXPIRATION=2592000 JWT_ISSUER=openreplay-ee jwt_secret="SET A RANDOM STRING HERE" ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s @@ -66,4 +66,6 @@ EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob -PRESIGNED_URL_EXPIRATION=3600 \ No newline at end of file +PRESIGNED_URL_EXPIRATION=3600 +ASSIST_JWT_EXPIRATION=144000 +ASSIST_JWT_SECRET= \ No newline at end of file From 71fb4e9e83989aabcadb5426a4ad64b7f1f76e81 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 17:19:56 +0200 Subject: [PATCH 19/68] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index a82ff9975..f0e7bfb48 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 4b2826dfd..df0e8bfa6 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -33,7 +33,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - 
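
Note: JWT_EXP_DELTA_SECONDS is renamed to JWT_EXPIRATION in both env.default files, so deployments that only define the old name will fail the lookup after upgrading. A tolerant read during rollout could look like this (a suggestion, not part of the patch):

    from decouple import config

    def jwt_expiration_seconds() -> int:
        # Prefer the new name, fall back to the pre-1.8.2 spelling.
        return config("JWT_EXPIRATION", cast=int,
                      default=config("JWT_EXP_DELTA_SECONDS", cast=int, default=2592000))
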
conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes From 3484e128840bbc8b375919c587642c0d498d0b2d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 18:25:08 +0200 Subject: [PATCH 20/68] feat(chalice): changed devtools URL pattern --- api/env.default | 2 +- ee/api/env.default | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/env.default b/api/env.default index 676feb541..2dcafbc8a 100644 --- a/api/env.default +++ b/api/env.default @@ -46,7 +46,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file diff --git a/ee/api/env.default b/ee/api/env.default index 94037cf1a..98c94c9b5 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -65,7 +65,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file From 223d3ea751bdb28dec1054629c6078315e9b9e2e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 19:21:45 +0200 Subject: [PATCH 21/68] feat(chalice): changed update member --- api/chalicelib/core/users.py | 73 +++++++++++++++++++++++++-- api/routers/core_dynamic.py | 2 +- ee/api/chalicelib/core/users.py | 87 +++++++++++++++++++++++++++++++-- ee/api/routers/core_dynamic.py | 4 +- 4 files changed, 153 insertions(+), 13 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 9af070fc5..b8b3e9898 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -252,9 +252,8 @@ def generate_new_api_key(user_id): cur.mogrify( f"""UPDATE public.users SET api_key=generate_api_key(20) - WHERE - users.user_id = %(userId)s - AND deleted_at IS NULL + WHERE users.user_id = %(userId)s + AND deleted_at IS NULL RETURNING api_key;""", {"userId": user_id}) ) @@ -295,6 +294,39 @@ def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_i return {"data": user} +def edit_member(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id): + user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: + admin = get(tenant_id=tenant_id, user_id=editor_id) + if not admin["superAdmin"] and not admin["admin"]: + return {"errors": ["unauthorized"]} + _changes = {} + if editor_id == user_id_to_update: + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email 
already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email + + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name + + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if len(_changes.keys()) > 0: + update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": get_member(user_id=user_id_to_update, tenant_id=tenant_id)} + return {"data": user} + + def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -342,11 +374,42 @@ def get_by_email_reset(email, reset_token): return helper.dict_to_camel_case(r) +def get_member(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify( + f"""SELECT + users.user_id, + users.email, + users.role, + users.name, + users.created_at, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, + DATE_PART('day',timezone('utc'::text, now()) \ + - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, + basic_authentication.password IS NOT NULL AS joined, + invitation_token + FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + WHERE users.deleted_at IS NULL AND users.user_id=%(user_id)s + ORDER BY name, user_id""", {"user_id": user_id}) + ) + u = helper.dict_to_camel_case(cur.fetchone()) + if u: + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) + if u["invitationToken"]: + u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) + else: + u["invitationLink"] = None + + return u + + def get_members(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( f"""SELECT - users.user_id AS id, + users.user_id, users.email, users.role, users.name, @@ -360,7 +423,7 @@ def get_members(tenant_id): invitation_token FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.deleted_at IS NULL - ORDER BY name, id""" + ORDER BY name, user_id""" ) r = cur.fetchall() if len(r): diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 2695a6b09..7bb02461a 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -140,7 +140,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, + return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 661194bbb..f533fa698 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -293,9 +293,8 @@ def generate_new_api_key(user_id): cur.mogrify( f"""UPDATE public.users SET api_key=generate_api_key(20) - WHERE - users.user_id = %(userId)s - AND deleted_at IS NULL + WHERE users.user_id = %(userId)s + AND deleted_at IS NULL RETURNING api_key;""", {"userId": user_id}) ) @@ -344,6 +343,47 @@ def edit(user_id_to_update, tenant_id, changes: 
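
Note: edit_member concentrates the authorization rules that edit() used to share with other flows. Condensed to a predicate (a sketch; the EE variant applies the same rule to roleId as well):

    def may_edit_member(editor: dict, target: dict, changing_role: bool) -> bool:
        editing_self = editor["userId"] == target["userId"]
        if (not editing_self or changing_role) and not (editor["superAdmin"] or editor["admin"]):
            return False  # cross-user edits and role changes need admin rights
        if editing_self and changing_role:
            # the owner's role flags are silently dropped; anyone else is
            # refused a self role change
            return target["superAdmin"]
        return True
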
schemas_ee.EditUserSchema, edito return {"data": user} +def edit_member(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id): + user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: + admin = get(tenant_id=tenant_id, user_id=editor_id) + if not admin["superAdmin"] and not admin["admin"]: + return {"errors": ["unauthorized"]} + _changes = {} + if editor_id == user_id_to_update: + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + if changes.roleId is not None: + if user["superAdmin"]: + changes.roleId = None + elif changes.roleId != user["roleId"]: + return {"errors": ["cannot change your own role"]} + + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email + + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name + + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if changes.roleId is not None: + _changes["roleId"] = changes.roleId + + if len(_changes.keys()) > 0: + update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": get_member(tenant_id=tenant_id, user_id=user_id_to_update)} + return {"data": user} + + def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -393,12 +433,49 @@ def get_by_email_reset(email, reset_token): return helper.dict_to_camel_case(r) +def get_member(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + f"""SELECT + users.user_id, + users.email, + users.role, + users.name, + users.created_at, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, + DATE_PART('day',timezone('utc'::text, now()) \ + - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, + basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined, + invitation_token, + role_id, + roles.name AS role_name + FROM public.users + LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) + WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL AND users.user_id = %(user_id)s + ORDER BY name, user_id""", + {"tenant_id": tenant_id, "user_id": user_id}) + ) + u = helper.dict_to_camel_case(cur.fetchone()) + if u: + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) + if u["invitationToken"]: + u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) + else: + u["invitationLink"] = None + + return u + + def get_members(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.email, users.role, users.name, @@ -416,7 +493,7 @@ def get_members(tenant_id): LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL - 
ORDER BY name, id""", + ORDER BY name, user_id""", {"tenant_id": tenant_id}) ) r = cur.fetchall() diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 176896ebb..b3dac897d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -144,8 +144,8 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, - user_id_to_update=memberId) + return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, + user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) From 843969f2ea165cdc0923fa68d0faf78a72dbe50c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 3 Oct 2022 16:06:22 +0200 Subject: [PATCH 22/68] feat(chalice): changed unprocessed endpoints --- ee/api/routers/core_dynamic.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index b3dac897d..f1d4ef6df 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -284,9 +284,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun return {'data': data} -@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) -@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"], +@app.get('/{projectId}/assist/sessions/{sessionId}/dom.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): @@ -307,9 +305,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) -@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"], +@app.get('/{projectId}/assist/sessions/{sessionId}/devtools.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): From db2509456db1ab55cbca5351eaaa002d45b47ca7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 3 Oct 2022 16:09:09 +0200 Subject: [PATCH 23/68] feat(chalice): changed unprocessed endpoints --- api/routers/core_dynamic.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 7bb02461a..5a434c288 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -141,7 +141,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, - user_id_to_update=memberId) + 
user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) @@ -276,8 +276,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun return {'data': data} -@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"]) -@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"]) +@app.get('/{projectId}/assist/sessions/{sessionId}/dom.mob', tags=["assist"]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} @@ -297,8 +296,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"]) -@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"]) +@app.get('/{projectId}/assist/sessions/{sessionId}/devtools.mob', tags=["assist"]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} @@ -422,7 +420,8 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D @app.post('/{projectId}/notes', tags=["sessions", "notes"]) def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,data=data) + data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id, data=data) if "errors" in data: return data return { From 4a66d3641661037f26ed9ab1ba3bef598c4835a6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 3 Oct 2022 16:12:33 +0200 Subject: [PATCH 24/68] feat(chalice): changed unprocessed endpoints --- api/routers/core_dynamic.py | 4 ++-- ee/api/routers/core_dynamic.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 5a434c288..93664a4ff 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -276,7 +276,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun return {'data': data} -@app.get('/{projectId}/assist/sessions/{sessionId}/dom.mob', tags=["assist"]) +@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} @@ -296,7 +296,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.get('/{projectId}/assist/sessions/{sessionId}/devtools.mob', tags=["assist"]) +@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index f1d4ef6df..1ce471c23 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -284,7 +284,7 @@ def get_live_session(projectId: int, 
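
Note: the live-replay file routes are renamed so the URL ends with the stored file's name (dom.mob, devtools.mob) under /unprocessed/. The handler shape, reduced to its essentials (the local path here is hypothetical; the real handlers resolve it from the EFS_* patterns in env.default):

    from fastapi import FastAPI
    from fastapi.responses import FileResponse

    app = FastAPI()

    @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"])
    def get_unprocessed_dom(projectId: int, sessionId: int):
        path = f"/mnt/efs/{sessionId}/dom.mob"  # hypothetical; resolved from config in the real code
        return FileResponse(path=path, media_type="application/octet-stream")
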
sessionId: str, background_tasks: Backgroun return {'data': data} -@app.get('/{projectId}/assist/sessions/{sessionId}/dom.mob', tags=["assist"], +@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): @@ -305,7 +305,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.get('/{projectId}/assist/sessions/{sessionId}/devtools.mob', tags=["assist"], +@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): From 66126924b790517db84788f6f4a53f848b5a4ce3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 3 Oct 2022 16:50:58 +0200 Subject: [PATCH 25/68] feat(chalice): error-tags --- api/chalicelib/core/errors.py | 8 ++++++-- ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 10 ++++++++++ ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql | 10 ++++++++++ scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 10 ++++++++++ scripts/helm/db/init_dbs/postgresql/init_schema.sql | 9 +++++++++ 5 files changed, 45 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index b20853646..85e063f74 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -121,7 +121,8 @@ def get_details(project_id, error_id, user_id, **data): device_partition, country_partition, chart24, - chart30 + chart30, + tags AS custom_tags FROM (SELECT error_id, name, message, @@ -234,7 +235,10 @@ def get_details(project_id, error_id, user_id, **data): WHERE {" AND ".join(pg_sub_query30)}) AS chart_details ON (TRUE) GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); + ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE) + LEFT JOIN (SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) + FROM errors_tags INNER JOIN errors USING(error_id) + WHERE {" AND ".join(pg_basic_query)}) AS raw_tags(tags) ON (TRUE); """ # print("--------------------") diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 3586ff63a..68376168d 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -22,4 +22,14 @@ CREATE TABLE IF NOT EXISTS sessions_notes is_public boolean NOT NULL DEFAULT FALSE ); +CREATE TABLE IF NOT EXISTS errors_tags +( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE +); + +CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 792003dab..02c6e135a 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -497,6 +497,16 @@ $$ CREATE INDEX 
IF NOT EXISTS user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); CREATE INDEX IF NOT EXISTS user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + CREATE TABLE IF NOT EXISTS errors_tags + ( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + ); + + CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); + IF NOT EXISTS(SELECT * FROM pg_type typ WHERE typ.typname = 'platform') THEN diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 4ce5009a3..c27fa3e7a 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -22,4 +22,14 @@ CREATE TABLE IF NOT EXISTS sessions_notes is_public boolean NOT NULL DEFAULT FALSE ); +CREATE TABLE IF NOT EXISTS errors_tags +( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE +); + +CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 9e3a0f924..5cf4ed75e 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -404,6 +404,15 @@ $$ CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + CREATE TABLE errors_tags + ( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + ); + + CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id); -- --- sessions.sql --- CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); From 3a41ca102b9cf4ea61f82d2d39eff4d8036a35a7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 3 Oct 2022 16:59:11 +0200 Subject: [PATCH 26/68] feat(chalice): error-tags --- api/chalicelib/core/errors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 85e063f74..372de1ae4 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -122,7 +122,7 @@ def get_details(project_id, error_id, user_id, **data): country_partition, chart24, chart30, - tags AS custom_tags + COALESCE(tags,'{{}}')::jsonb AS custom_tags FROM (SELECT error_id, name, message, From ece726eea08d590562ea23f0f1a51f246d70e504 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 4 Oct 2022 13:25:36 +0200 Subject: [PATCH 27/68] feat(chalice): session's notes single tag feat(chalice): session's notes search public only --- api/chalicelib/core/sessions_notes.py | 18 +++++++++------ api/schemas.py | 4 ++-- ee/api/chalicelib/core/sessions_notes.py | 22 +++++++++++-------- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 2 +- .../db/init_dbs/postgresql/init_schema.sql | 2 +- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 2 +- .../db/init_dbs/postgresql/init_schema.sql | 2 +- 7 files changed, 30 insertions(+), 22 deletions(-) diff --git 
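
Note: custom_tags is built with jsonb_agg(jsonb_build_object(key, value)), one object per errors_tags row, and the COALESCE keeps the column non-null when an error has no tags. A Python rendering of the same shape; observe that the SQL fallback '{}'::jsonb is an empty object while the aggregate itself is an array:

    def aggregate_tags(rows: list):
        # jsonb_agg(jsonb_build_object(key, value)) -> [{"k1": "v1"}, ...];
        # with no rows, COALESCE substitutes '{}'::jsonb (an empty object).
        return [{r["key"]: r["value"]} for r in rows] if rows else {}

    # aggregate_tags([{"key": "release", "value": "v2"}]) -> [{"release": "v2"}]
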
a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index f0e7bfb48..a586dd75c 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -27,13 +27,17 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: - conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", - "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)"] + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"] extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) + conditions.append( + sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) + if data.shared_only: + conditions.append("sessions_notes.is_public") + else: + conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)") query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes WHERE {" AND ".join(conditions)} @@ -51,8 +55,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) - VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) @@ -66,8 +70,8 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot sub_query = [] if data.message is not None: sub_query.append("message = %(message)s") - if data.tags is not None: - sub_query.append("tags = %(tags)s") + if data.tag is not None and len(data.tag) > 0: + sub_query.append("tag = %(tag)s") if data.is_public is not None: sub_query.append("is_public = %(is_public)s") if data.timestamp is not None: diff --git a/api/schemas.py b/api/schemas.py index db5c0cf82..0b54906a1 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1090,6 +1090,7 @@ class SearchNoteSchema(_PaginatedSchema): sort: str = Field(default="createdAt") order: SortOrderType = Field(default=SortOrderType.desc) tags: Optional[List[str]] = Field(default=[]) + shared_only: bool = Field(default=False) class Config: alias_generator = attribute_to_camel_case @@ -1097,7 +1098,7 @@ class SearchNoteSchema(_PaginatedSchema): class SessionNoteSchema(BaseModel): message: str = Field(..., min_length=2) - tags: List[str] = Field(default=[]) + tag: Optional[str] = Field(default=None) timestamp: int = Field(default=-1) is_public: bool = Field(default=False) @@ -1107,7 +1108,6 @@ class SessionNoteSchema(BaseModel): class SessionUpdateNoteSchema(SessionNoteSchema): message: Optional[str] = Field(default=None, min_length=2) - tags: Optional[List[str]] = Field(default=None) timestamp: 
Optional[int] = Field(default=None, ge=-1) is_public: Optional[bool] = Field(default=None) diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index df0e8bfa6..dd47fd227 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -28,13 +28,18 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: - conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", - "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)"] + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"] extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) + conditions.append( + sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) + if data.shared_only: + conditions.append("sessions_notes.is_public AND users.tenant_id = %(tenant_id)s") + else: + conditions.append( + "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)") query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes INNER JOIN users USING (user_id) @@ -53,8 +58,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) - VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) @@ -68,8 +73,8 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot sub_query = [] if data.message is not None: sub_query.append("message = %(message)s") - if data.tags is not None: - sub_query.append("tags = %(tags)s") + if data.tag is not None and len(data.tag) > 0: + sub_query.append("tag = %(tag)s") if data.is_public is not None: sub_query.append("is_public = %(is_public)s") if data.timestamp is not None: @@ -99,8 +104,7 @@ def delete(tenant_id, user_id, project_id, note_id): cur.execute( cur.mogrify("""\ UPDATE public.sessions_notes - SET - deleted_at = timezone('utc'::text, now()) + SET deleted_at = timezone('utc'::text, now()) WHERE note_id = %(note_id)s AND project_id = %(project_id)s\ diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 68376168d..1fd27342f 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS sessions_notes created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), user_id 
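
Note: with the 1.8.2 schema a note carries a single text tag instead of a text[] array, and edit() only touches the columns that were actually supplied. The SET-clause assembly in isolation (sketch):

    def build_set_clause(data) -> str:
        # Only fields that were actually sent end up in the UPDATE.
        sub_query = []
        if data.message is not None:
            sub_query.append("message = %(message)s")
        if data.tag:  # a single text column since 1.8.2, not text[]
            sub_query.append("tag = %(tag)s")
        if data.is_public is not None:
            sub_query.append("is_public = %(is_public)s")
        return ", ".join(sub_query)
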
integer NULL REFERENCES users (user_id) ON DELETE SET NULL, deleted_at timestamp without time zone NULL DEFAULT NULL, - tags text[] NOT NULL DEFAULT '{}', + tag text NULL, session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, timestamp integer NOT NULL DEFAULT -1, diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 02c6e135a..13f2db5cf 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -875,7 +875,7 @@ $$ created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, deleted_at timestamp without time zone NULL DEFAULT NULL, - tags text[] NOT NULL DEFAULT '{}', + tag text NULL, session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, timestamp integer NOT NULL DEFAULT -1, diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index c27fa3e7a..5abda1259 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS sessions_notes created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, deleted_at timestamp without time zone NULL DEFAULT NULL, - tags text[] NOT NULL DEFAULT '{}', + tag text NULL, session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, timestamp integer NOT NULL DEFAULT -1, diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 5cf4ed75e..8c3c07d24 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1016,7 +1016,7 @@ $$ created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, deleted_at timestamp without time zone NULL DEFAULT NULL, - tags text[] NOT NULL DEFAULT '{}', + tag text NULL, session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, timestamp integer NOT NULL DEFAULT -1, From 146234da99e763af488d55bbd94c162b5786b154 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 4 Oct 2022 13:55:35 +0200 Subject: [PATCH 28/68] feat(chalice): session's notes search mine only --- api/chalicelib/core/sessions_notes.py | 2 ++ api/schemas.py | 1 + ee/api/chalicelib/core/sessions_notes.py | 2 ++ 3 files changed, 5 insertions(+) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index a586dd75c..ecf2ddd3d 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -36,6 +36,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = sessions._multiple_values(data.tags, value_key=k) if data.shared_only: conditions.append("sessions_notes.is_public") + elif data.mine_only: + conditions.append("sessions_notes.user_id = 
%(user_id)s") else: conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)") query = cur.mogrify(f"""SELECT sessions_notes.* diff --git a/api/schemas.py b/api/schemas.py index 0b54906a1..ce6b00439 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1091,6 +1091,7 @@ class SearchNoteSchema(_PaginatedSchema): order: SortOrderType = Field(default=SortOrderType.desc) tags: Optional[List[str]] = Field(default=[]) shared_only: bool = Field(default=False) + mine_only: bool = Field(default=False) class Config: alias_generator = attribute_to_camel_case diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index dd47fd227..f5ac05722 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -37,6 +37,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = sessions._multiple_values(data.tags, value_key=k) if data.shared_only: conditions.append("sessions_notes.is_public AND users.tenant_id = %(tenant_id)s") + elif data.mine_only: + conditions.append("sessions_notes.user_id = %(user_id)s") else: conditions.append( "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)") From 2492867e9c3c6064f27e412125f9ab48d46424f3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 5 Oct 2022 20:05:11 +0200 Subject: [PATCH 29/68] feat(chalice): error tags feat(DB): new table for issues feat(DB): new columns for events --- ee/api/chalicelib/core/__init__.py | 4 +++ ee/api/chalicelib/core/errors.py | 8 ++++-- ee/api/env.default | 1 + .../db/init_dbs/clickhouse/1.8.2/1.8.2.sql | 28 +++++++++++++++++++ .../clickhouse/create/init_schema.sql | 24 +++++++++++++++- 5 files changed, 62 insertions(+), 3 deletions(-) create mode 100644 ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql diff --git a/ee/api/chalicelib/core/__init__.py b/ee/api/chalicelib/core/__init__.py index 550327129..41c437c93 100644 --- a/ee/api/chalicelib/core/__init__.py +++ b/ee/api/chalicelib/core/__init__.py @@ -18,7 +18,11 @@ else: if config("EXP_ERRORS_SEARCH", cast=bool, default=False): print(">>> Using experimental error search") + from . import errors as errors_legacy from . import errors_exp as errors + + if config("EXP_ERRORS_GET", cast=bool, default=False): + print(">>> Using experimental error get") else: from . 
import errors as errors diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 1db619b15..a1db0c798 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -128,7 +128,8 @@ def get_details(project_id, error_id, user_id, **data): device_partition, country_partition, chart24, - chart30 + chart30, + COALESCE(tags,'{{}}')::jsonb AS custom_tags FROM (SELECT error_id, name, message, @@ -241,7 +242,10 @@ def get_details(project_id, error_id, user_id, **data): WHERE {" AND ".join(pg_sub_query30)}) AS chart_details ON (TRUE) GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); + ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE) + LEFT JOIN (SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) + FROM errors_tags INNER JOIN errors USING(error_id) + WHERE {" AND ".join(pg_basic_query)}) AS raw_tags(tags) ON (TRUE); """ # print("--------------------") diff --git a/ee/api/env.default b/ee/api/env.default index 98c94c9b5..673454853 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -55,6 +55,7 @@ FS_DIR=/mnt/efs EXP_SESSIONS_SEARCH=false EXP_AUTOCOMPLETE=false EXP_ERRORS_SEARCH=false +EXP_ERRORS_GET=false EXP_METRICS=true EXP_7D_MV=false EXP_ALERTS=false diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql new file mode 100644 index 000000000..65acd9fb5 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql @@ -0,0 +1,28 @@ +ALTER TABLE experimental.events + ADD COLUMN IF NOT EXISTS error_tags_keys Array(String); +ALTER TABLE experimental.events + ADD COLUMN IF NOT EXISTS error_tags_values Array(Nullable(String)); + +ALTER TABLE experimental.events + ADD COLUMN IF NOT EXISTS issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)); +ALTER TABLE experimental.events + ADD COLUMN IF NOT EXISTS issue_id Nullable(String); +ALTER TABLE experimental.events + MODIFY COLUMN event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9); + + +CREATE TABLE IF NOT EXISTS experimental.issues +( + project_id UInt16, + issue_id String, + type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), + context_string text NOT NULL, + context_keys Array(String), + context_values Array(Nullable(String)), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id, issue_id, type) + TTL _timestamp + INTERVAL 3 MONTH; + +-- TODO: find a way to update materialized views; or drop and re-create them diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql index 01f3a9968..6ebcfc319 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql +++ 
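The new custom_tags field above is assembled by jsonb_agg(jsonb_build_object(key, value)) over errors_tags, with COALESCE supplying an empty JSON value when the LEFT JOIN finds no tags. In Python terms, the shape handed back to the client is roughly:

rows = [("env", "staging"), ("release", "v1.8.2")]  # errors_tags (key, value) pairs

# jsonb_agg produces one single-key object per row; COALESCE supplies the
# empty default when the error has no tags at all.
custom_tags = [{k: v} for k, v in rows] or {}
print(custom_tags)  # [{'env': 'staging'}, {'release': 'v1.8.2'}]
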
b/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql @@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS experimental.events ( session_id UInt64, project_id UInt16, - event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8), + event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9), datetime DateTime, label Nullable(String), hesitation_time Nullable(UInt32), @@ -78,6 +78,10 @@ CREATE TABLE IF NOT EXISTS experimental.events success Nullable(UInt8), request_body Nullable(String), response_body Nullable(String), + issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)), + issue_id Nullable(String), + error_tags_keys Array(String), + error_tags_values Array(Nullable(String)), message_id UInt64 DEFAULT 0, _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) @@ -192,6 +196,20 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors ORDER BY (project_id, user_id, error_id) TTL _timestamp + INTERVAL 3 MONTH; +CREATE TABLE IF NOT EXISTS experimental.issues +( + project_id UInt16, + issue_id String, + type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), + context_string text NOT NULL, + context_keys Array(String), + context_values Array(Nullable(String)), + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(_timestamp) + ORDER BY (project_id, issue_id, type) + TTL _timestamp + INTERVAL 3 MONTH; + CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMM(datetime) @@ -256,6 +274,10 @@ SELECT session_id, success, request_body, response_body, + issue_type, + issue_id, + error_tags_keys, + error_tags_values, message_id, _timestamp FROM experimental.events From e8a288ba8aa02f9d573ba4de8a4efe922ce7d20a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 16:04:07 +0200 Subject: [PATCH 30/68] feat(chalice): refactored&cleaned endpoints feat(chalice): notes slack notification --- api/chalicelib/core/assist.py | 16 +++--- api/chalicelib/core/collaboration_slack.py | 63 +++++++++++++++----- api/chalicelib/core/sessions_notes.py | 66 ++++++++++++++++++--- api/chalicelib/core/slack.py | 11 ---- api/routers/core.py | 53 +++-------------- api/routers/core_dynamic.py | 24 ++++---- ee/api/chalicelib/core/sessions_notes.py | 67 +++++++++++++++++++--- ee/api/routers/core_dynamic.py | 27 ++++----- 8 files changed, 204 insertions(+), 123 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index ebf1b7ab8..0a5e5e59d 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -61,10 +61,10 @@ def __get_live_sessions_ws(project_id, data): return {"total": 0, "sessions": []} live_peers = results.json().get("data", []) except requests.exceptions.Timeout: - 
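The 1.8.2 ClickHouse script ends on a TODO because materialized views cannot simply be ALTERed to pick up the new events columns. One option it names is dropping and re-creating them; a sketch using clickhouse-driver follows. How OpenReplay actually applies ClickHouse migrations is an assumption here, and the full CREATE statement is elided:

from clickhouse_driver import Client

STATEMENTS = [
    "DROP TABLE IF EXISTS experimental.events_l7d_mv",
    # Re-create with issue_type, issue_id and the error_tags_* columns,
    # using the CREATE MATERIALIZED VIEW ... AS SELECT from init_schema.sql.
]

def recreate_views(host="localhost"):
    client = Client(host=host)
    for stmt in STATEMENTS:
        client.execute(stmt)
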
print("Timeout getting Assist response") + print("!! Timeout getting Assist response") live_peers = {"total": 0, "sessions": []} except Exception as e: - print("issue getting Live-Assist response") + print("!! Issue getting Live-Assist response") print(str(e)) print("expected JSON, received:") try: @@ -116,7 +116,7 @@ def get_live_session_by_id(project_id, session_id): print("!! Timeout getting Assist response") return None except Exception as e: - print("issue getting Assist response") + print("!! Issue getting Assist response") print(str(e)) print("expected JSON, received:") try: @@ -139,10 +139,10 @@ def is_live(project_id, session_id, project_key=None): return False results = results.json().get("data") except requests.exceptions.Timeout: - print("Timeout getting Assist response") + print("!! Timeout getting Assist response") return False except Exception as e: - print("issue getting Assist response") + print("!! Issue getting Assist response") print(str(e)) print("expected JSON, received:") try: @@ -168,10 +168,10 @@ def autocomplete(project_id, q: str, key: str = None): return {"errors": [f"Something went wrong wile calling assist:{results.text}"]} results = results.json().get("data", []) except requests.exceptions.Timeout: - print("Timeout getting Assist response") + print("!! Timeout getting Assist response") return {"errors": ["Assist request timeout"]} except Exception as e: - print("issue getting Assist response") + print("!! Issue getting Assist response") print(str(e)) print("expected JSON, received:") try: @@ -250,7 +250,7 @@ def session_exists(project_id, session_id): print("!! Timeout getting Assist response") return False except Exception as e: - print("issue getting Assist response") + print("!! Issue getting Assist response") print(str(e)) print("expected JSON, received:") try: diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py index bd0ae7f21..15f090f5d 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaboration_slack.py @@ -35,24 +35,57 @@ class Slack: return True @classmethod - def send_text(cls, tenant_id, webhook_id, text, **args): + def send_text_attachments(cls, tenant_id, webhook_id, text, **args): integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id) if integration is None: return {"errors": ["slack integration not found"]} - print("====> sending slack notification") - r = requests.post( - url=integration["endpoint"], - json={ - "attachments": [ - { - "text": text, - "ts": datetime.now().timestamp(), - **args - } - ] - }) - print(r) - print(r.text) + try: + r = requests.post( + url=integration["endpoint"], + json={ + "attachments": [ + { + "text": text, + "ts": datetime.now().timestamp(), + **args + } + ] + }, + timeout=5) + if r.status_code != 200: + print(f"!! issue sending slack text attachments; webhookId:{webhook_id} code:{r.status_code}") + print(r.text) + return None + except requests.exceptions.Timeout: + print(f"!! Timeout sending slack text attachments webhookId:{webhook_id}") + return None + except Exception as e: + print(f"!! 
Issue sending slack text attachments webhookId:{webhook_id}") + print(str(e)) + return None + return {"data": r.text} + + @classmethod + def send_raw(cls, tenant_id, webhook_id, body): + integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id) + if integration is None: + return {"errors": ["slack integration not found"]} + try: + r = requests.post( + url=integration["endpoint"], + json=body, + timeout=5) + if r.status_code != 200: + print(f"!! issue sending slack raw; webhookId:{webhook_id} code:{r.status_code}") + print(r.text) + return None + except requests.exceptions.Timeout: + print(f"!! Timeout sending slack raw webhookId:{webhook_id}") + return None + except Exception as e: + print(f"!! Issue sending slack raw webhookId:{webhook_id}") + print(str(e)) + return None return {"data": r.text} @classmethod diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index ecf2ddd3d..420cbdf11 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,34 @@ +from urllib.parse import urljoin + +from decouple import config + import schemas from chalicelib.core import sessions +from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC +def get_note(tenant_id, project_id, user_id, note_id, share=None): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name + {",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""} + FROM sessions_notes INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.note_id = %(note_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, + "note_id": note_id, "share": share}) + + cur.execute(query=query) + row = cur.fetchone() + row = helper.dict_to_camel_case(row) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + def get_session_notes(tenant_id, project_id, session_id, user_id): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* @@ -80,8 +105,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot sub_query.append("timestamp = %(timestamp)s") with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify(f"""\ - UPDATE public.sessions_notes + cur.mogrify(f"""UPDATE public.sessions_notes SET {" ,".join(sub_query)} WHERE @@ -101,14 +125,42 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot def delete(tenant_id, user_id, project_id, note_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("""\ - UPDATE public.sessions_notes + cur.mogrify(""" UPDATE public.sessions_notes SET deleted_at = timezone('utc'::text, now()) - WHERE - note_id = %(note_id)s - AND project_id = %(project_id)s\ + WHERE note_id = %(note_id)s + AND project_id = %(project_id)s AND user_id = %(user_id)s AND deleted_at ISNULL;""", {"project_id": project_id, "user_id": user_id, "note_id": note_id}) ) return {"data": {"state": "success"}} + + +def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id): + note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) + if note is None: + return {"errors": 
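send_text_attachments and send_raw now share the same defensive shape: a 5-second timeout, non-200 responses logged and swallowed, and exceptions never propagated to the caller. The pattern in isolation, with the integration lookup stripped away:

import requests

def post_webhook(endpoint: str, body: dict):
    try:
        r = requests.post(url=endpoint, json=body, timeout=5)
        if r.status_code != 200:
            print(f"!! issue sending slack payload; code:{r.status_code}")
            return None
    except requests.exceptions.Timeout:
        print("!! Timeout sending slack payload")
        return None
    except Exception as e:
        print("!! Issue sending slack payload")
        print(str(e))
        return None
    return {"data": r.text}
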
["Note not found"]} + session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/sessions/{note['sessionId']}") + title = f"<{session_url}|Note for session {note['sessionId']}>" + + blocks = [{"type": "section", + "fields": [{"type": "mrkdwn", + "text": title}]}, + {"type": "section", + "fields": [{"type": "plain_text", + "text": note["message"]}]}] + if note["tag"]: + blocks.append({"type": "context", + "elements": [{"type": "plain_text", + "text": f"Tag: *{note['tag']}*"}]}) + bottom = f"Created by {note['creatorName'].capitalize()}" + if user_id != note["userId"]: + bottom += f"\nSent by {note['shareName']}: " + blocks.append({"type": "context", + "elements": [{"type": "plain_text", + "text": bottom}]}) + return Slack.send_raw( + tenant_id=tenant_id, + webhook_id=webhook_id, + body={"blocks": blocks} + ) diff --git a/api/chalicelib/core/slack.py b/api/chalicelib/core/slack.py index 0bd715f5e..76bf40163 100644 --- a/api/chalicelib/core/slack.py +++ b/api/chalicelib/core/slack.py @@ -4,17 +4,6 @@ from decouple import config from chalicelib.core.collaboration_slack import Slack -def send(notification, destination): - if notification is None: - return - return Slack.send_text(tenant_id=notification["tenantId"], - webhook_id=destination, - text=notification["description"] \ - + f"\n<{config('SITE_URL')}{notification['buttonUrl']}|{notification['buttonText']}>", - title=notification["title"], - title_link=notification["buttonUrl"], ) - - def send_batch(notifications_list): if notifications_list is None or len(notifications_list) == 0: return diff --git a/api/routers/core.py b/api/routers/core.py index 18e459dd2..935eac873 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1,22 +1,19 @@ from typing import Union from decouple import config -from fastapi import Depends, Body, BackgroundTasks, HTTPException -from fastapi.responses import FileResponse +from fastapi import Depends, Body, HTTPException from starlette import status import schemas from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \ alerts, funnels, issues, integrations_manager, metadata, \ log_tool_elasticsearch, log_tool_datadog, \ - log_tool_stackdriver, reset_password, sessions_favorite, \ - log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \ + log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \ log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \ - assist, heatmaps, mobile, signup, tenants, errors_viewed, boarding, notifications, webhook, users, \ - custom_metrics, saved_search, integrations_global, sessions_viewed, errors_favorite + assist, mobile, signup, tenants, boarding, notifications, webhook, users, \ + custom_metrics, saved_search, integrations_global from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import email_helper, helper, captcha -from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils import helper, captcha from or_dependencies import OR_context from routers.base import get_routers @@ -52,7 +49,6 @@ def login(data: schemas.UserLoginSchema = Body(...)): @app.post('/{projectId}/sessions/search', tags=["sessions"]) -@app.post('/{projectId}/sessions/search2', tags=["sessions"]) def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = 
sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id) @@ -100,7 +96,6 @@ def get_integrations_status(projectId: int, context: schemas.CurrentContext = De @app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) -@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str, data: schemas.IntegrationNotificationSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -129,7 +124,6 @@ def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_cont @app.post('/{projectId}/integrations/sentry', tags=["integrations"]) -@app.put('/{projectId}/integrations/sentry', tags=["integrations"]) def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -156,7 +150,6 @@ def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/datadog', tags=["integrations"]) -@app.put('/{projectId}/integrations/datadog', tags=["integrations"]) def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -178,7 +171,6 @@ def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) -@app.put('/{projectId}/integrations/stackdriver', tags=["integrations"]) def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -200,7 +192,6 @@ def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_co @app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) -@app.put('/{projectId}/integrations/newrelic', tags=["integrations"]) def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -222,7 +213,6 @@ def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) -@app.put('/{projectId}/integrations/rollbar', tags=["integrations"]) def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -250,7 +240,6 @@ def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) -@app.put('/{projectId}/integrations/bugsnag', tags=["integrations"]) def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, 
project_id=projectId, data=data.dict())} @@ -280,7 +269,6 @@ def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_ @app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) -@app.put('/{projectId}/integrations/cloudwatch', tags=["integrations"]) def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -308,7 +296,6 @@ def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body( @app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) -@app.put('/{projectId}/integrations/elasticsearch', tags=["integrations"]) def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { @@ -331,7 +318,6 @@ def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_c @app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) -@app.put('/{projectId}/integrations/sumologic', tags=["integrations"]) def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -372,7 +358,6 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c @app.post('/integrations/jira', tags=["integrations"]) -@app.put('/integrations/jira', tags=["integrations"]) def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if not data.url.endswith('atlassian.net'): @@ -386,7 +371,6 @@ def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), @app.post('/integrations/github', tags=["integrations"]) -@app.put('/integrations/github', tags=["integrations"]) def add_edit_github(data: schemas.GithubSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, @@ -461,7 +445,6 @@ def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depend @app.post('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) -@app.put('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId, data: schemas.AssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -484,14 +467,12 @@ def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_contex @app.post('/{projectId}/gdpr', tags=["projects", "gdpr"]) -@app.put('/{projectId}/gdpr', tags=["projects", "gdpr"]) def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data.dict())} @public_app.post('/password/reset-link', tags=["reset password"]) -@public_app.put('/password/reset-link', tags=["reset password"]) def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)): if len(data.email) < 5: return {"errors": ["please provide a valid email address"]} @@ -504,21 +485,18 @@ def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_co 
@app.post('/{projectId}/metadata/list', tags=["metadata"]) -@app.put('/{projectId}/metadata/list', tags=["metadata"]) def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list) @app.post('/{projectId}/metadata', tags=["metadata"]) -@app.put('/{projectId}/metadata', tags=["metadata"]) def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key) @app.post('/{projectId}/metadata/{index}', tags=["metadata"]) -@app.put('/{projectId}/metadata/{index}', tags=["metadata"]) def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index, @@ -552,7 +530,6 @@ def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends @app.post('/{projectId}/sample_rate', tags=["projects"]) -@app.put('/{projectId}/sample_rate', tags=["projects"]) def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())} @@ -574,7 +551,6 @@ def errors_merge(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/{projectId}/alerts', tags=["alerts"]) -@app.put('/{projectId}/alerts', tags=["alerts"]) def create_alert(projectId: int, data: schemas.AlertSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return alerts.create(projectId, data) @@ -597,7 +573,6 @@ def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = De @app.post('/{projectId}/alerts/{alertId}', tags=["alerts"]) -@app.put('/{projectId}/alerts/{alertId}', tags=["alerts"]) def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return alerts.update(alertId, data) @@ -609,7 +584,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = @app.post('/{projectId}/funnels', tags=["funnels"]) -@app.put('/{projectId}/funnels', tags=["funnels"]) def add_funnel(projectId: int, data: schemas.FunnelSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return funnels.create(project_id=projectId, @@ -653,7 +627,6 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s @app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) -@app.put('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, @@ -668,7 +641,6 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat @app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) -@app.put('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), context: 
schemas.CurrentContext = Depends(OR_context)): return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, @@ -685,7 +657,6 @@ def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, s @app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) -@app.put('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, @@ -705,7 +676,6 @@ def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = Non @app.post('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"]) -@app.put('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"]) def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str, data: schemas.FunnelSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -729,7 +699,6 @@ def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = @app.post('/{projectId}/funnels/{funnelId}', tags=["funnels"]) -@app.put('/{projectId}/funnels/{funnelId}', tags=["funnels"]) def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return funnels.update(funnel_id=funnelId, @@ -762,7 +731,6 @@ def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_contex @app.post('/config/weekly_report', tags=["weekly report config"]) -@app.put('/config/weekly_report', tags=["weekly report config"]) def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)} @@ -797,21 +765,19 @@ def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayload return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)} -@public_app.put('/signup', tags=['signup']) @public_app.post('/signup', tags=['signup']) +@public_app.put('/signup', tags=['signup']) def signup_handler(data: schemas.UserSignupSchema = Body(...)): return signup.create_step1(data) @app.post('/projects', tags=['projects']) -@app.put('/projects', tags=['projects']) def create_project(data: schemas.CreateProjectSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) @app.post('/projects/{projectId}', tags=['projects']) -@app.put('/projects/{projectId}', tags=['projects']) def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId) @@ -829,8 +795,8 @@ def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_conte } -@app.put('/client', tags=['client']) @app.post('/client', tags=['client']) +@app.put('/client', tags=['client']) def edit_client(data: schemas.UpdateTenantSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data) @@ -852,7 
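The long run of removed @app.put lines works because FastAPI route decorators return the handler unchanged, so stacking @app.post and @app.put used to register the same function under both verbs; the cleanup keeps a single POST registration per endpoint. A toy illustration (not an OpenReplay route):

from fastapi import FastAPI

app = FastAPI()

# Before: two stacked decorators meant two registrations of one handler.
@app.post("/things")
@app.put("/things")
def upsert_thing_before():
    return {"ok": True}

# After: a single POST registration.
@app.post("/things/v2")
def upsert_thing_after():
    return {"ok": True}
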
+818,6 @@ def view_notifications(notificationId: int, context: schemas.CurrentContext = De @app.post('/notifications/view', tags=['notifications']) -@app.put('/notifications/view', tags=['notifications']) def batch_view_notifications(data: schemas.NotificationsViewSchema, context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=data.ids, @@ -903,7 +868,6 @@ def delete_slack_integration(integrationId: int, context: schemas.CurrentContext @app.post('/webhooks', tags=["webhooks"]) -@app.put('/webhooks', tags=["webhooks"]) def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)} @@ -940,7 +904,6 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context @app.post('/account/password', tags=["account"]) -@app.put('/account/password', tags=["account"]) def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.change_password(email=context.email, old_password=data.old_password, @@ -949,7 +912,6 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), @app.post('/{projectId}/saved_search', tags=["savedSearch"]) -@app.put('/{projectId}/saved_search', tags=["savedSearch"]) def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return saved_search.create(project_id=projectId, user_id=context.user_id, data=data) @@ -966,7 +928,6 @@ def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentCon @app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) -@app.put('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)} diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 93664a4ff..f8c602faf 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -46,7 +46,6 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) -@app.put('/account', tags=["account"]) def edit_account(data: schemas.EditUserSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, @@ -70,8 +69,8 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con return {"data": data} -@app.put('/integrations/slack', tags=['integrations']) @app.post('/integrations/slack', tags=['integrations']) +@app.put('/integrations/slack', tags=['integrations']) def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name) if n is None: @@ -81,7 +80,6 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte return {"data": n} -@app.put('/integrations/slack/{integrationId}', tags=['integrations']) @app.post('/integrations/slack/{integrationId}', tags=['integrations']) def edit_slack_integration(integrationId: int, data: 
schemas.EditSlackSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -98,7 +96,6 @@ def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = B @app.post('/client/members', tags=["client"]) -@app.put('/client/members', tags=["client"]) def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), @@ -123,7 +120,6 @@ def process_invitation_link(token: str): @public_app.post('/password/reset', tags=["users"]) -@public_app.put('/password/reset', tags=["users"]) def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: return {"errors": ["please provide a valid invitation & pass"]} @@ -136,7 +132,6 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = return users.set_password_invitation(new_password=data.password, user_id=user["userId"]) -@app.put('/client/members/{memberId}', tags=["client"]) @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): @@ -358,9 +353,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) @app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, @@ -374,9 +367,10 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) -@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): + if not sessions.session_exists(project_id=projectId, session_id=sessionId): + return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, user_id=context.user_id, data=data) if "errors" in data.keys(): @@ -398,7 +392,6 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) -@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, @@ -417,6 +410,13 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data +@app.post('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) +def 
share_note_to_slack(projectId: int, noteId: int, webhookId: int, + context: schemas.CurrentContext = Depends(OR_context)): + return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId, webhook_id=webhookId) + + @app.post('/{projectId}/notes', tags=["sessions", "notes"]) def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -424,6 +424,4 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), user_id=context.user_id, data=data) if "errors" in data: return data - return { - 'data': data - } + return {'data': data} diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index f5ac05722..ce0420023 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,35 @@ +from urllib.parse import urljoin + +from decouple import config + import schemas from chalicelib.core import sessions +from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC +def get_note(tenant_id, project_id, user_id, note_id, share=None): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name + {",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s) AS share_name" if share else ""} + FROM sessions_notes INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.note_id = %(note_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s);""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, + "note_id": note_id, "share": share}) + + cur.execute(query=query) + row = cur.fetchone() + row = helper.dict_to_camel_case(row) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + def get_session_notes(tenant_id, project_id, session_id, user_id): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* @@ -83,8 +109,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot sub_query.append("timestamp = %(timestamp)s") with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify(f"""\ - UPDATE public.sessions_notes + cur.mogrify(f"""UPDATE public.sessions_notes SET {" ,".join(sub_query)} WHERE @@ -104,14 +129,42 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot def delete(tenant_id, user_id, project_id, note_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("""\ - UPDATE public.sessions_notes + cur.mogrify(""" UPDATE public.sessions_notes SET deleted_at = timezone('utc'::text, now()) - WHERE - note_id = %(note_id)s - AND project_id = %(project_id)s\ + WHERE note_id = %(note_id)s + AND project_id = %(project_id)s AND user_id = %(user_id)s AND deleted_at ISNULL;""", {"project_id": project_id, "user_id": user_id, "note_id": note_id}) ) return {"data": {"state": "success"}} + + +def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id): + note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) + if note is None: + return {"errors": ["Note not found"]} + session_url = urljoin(config('SITE_URL'), 
f"{note['projectId']}/sessions/{note['sessionId']}") + title = f"<{session_url}|Note for session {note['sessionId']}>" + + blocks = [{"type": "section", + "fields": [{"type": "mrkdwn", + "text": title}]}, + {"type": "section", + "fields": [{"type": "plain_text", + "text": note["message"]}]}] + if note["tag"]: + blocks.append({"type": "context", + "elements": [{"type": "plain_text", + "text": f"Tag: *{note['tag']}*"}]}) + bottom = f"Created by {note['creatorName'].capitalize()}" + if user_id != note["userId"]: + bottom += f"\nSent by {note['shareName']}: " + blocks.append({"type": "context", + "elements": [{"type": "plain_text", + "text": bottom}]}) + return Slack.send_raw( + tenant_id=tenant_id, + webhook_id=webhook_id, + body={"blocks": blocks} + ) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 1ce471c23..9734a9e2c 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -50,7 +50,6 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) -@app.put('/account', tags=["account"]) def edit_account(data: schemas_ee.EditUserSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, @@ -74,8 +73,8 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con return {"data": data} -@app.put('/integrations/slack', tags=['integrations']) @app.post('/integrations/slack', tags=['integrations']) +@app.put('/integrations/slack', tags=['integrations']) def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name) if n is None: @@ -85,7 +84,6 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte return {"data": n} -@app.put('/integrations/slack/{integrationId}', tags=['integrations']) @app.post('/integrations/slack/{integrationId}', tags=['integrations']) def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -102,7 +100,6 @@ def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = B @app.post('/client/members', tags=["client"]) -@app.put('/client/members', tags=["client"]) def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), @@ -127,7 +124,6 @@ def process_invitation_link(token: str): @public_app.post('/password/reset', tags=["users"]) -@public_app.put('/password/reset', tags=["users"]) def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: return {"errors": ["please provide a valid invitation & pass"]} @@ -140,7 +136,6 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = return users.set_password_invitation(new_password=data.password, user_id=user["userId"], tenant_id=user["tenantId"]) -@app.put('/client/members/{memberId}', tags=["client"]) @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): @@ -375,12 +370,8 @@ def 
assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) -@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[OR_scope(Permissions.session_replay)]) @app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) -@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[OR_scope(Permissions.session_replay)]) def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, @@ -395,10 +386,10 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) -@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): + if not sessions.session_exists(project_id=projectId, session_id=sessionId): + return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, user_id=context.user_id, data=data) if "errors" in data.keys(): @@ -422,7 +413,6 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) -@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, @@ -442,6 +432,13 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data +@app.post('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) +def share_note_to_slack(projectId: int, noteId: int, webhookId: int, + context: schemas.CurrentContext = Depends(OR_context)): + return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId, webhook_id=webhookId) + + @app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -449,6 +446,4 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), user_id=context.user_id) if "errors" in data: return data - return { - 'data': data - } + return {'data': data} From 766d76202d55eb52ba671da78e6d0c156f9f8406 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 16:20:11 +0200 Subject: [PATCH 31/68] feat(chalice): changed share note to slack endpoint --- 
api/routers/core_dynamic.py | 2 +- ee/api/routers/core_dynamic.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index f8c602faf..8e127dc29 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -410,7 +410,7 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data -@app.post('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) +@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) def share_note_to_slack(projectId: int, noteId: int, webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 9734a9e2c..facbaedd1 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -432,7 +432,7 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data -@app.post('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) +@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) def share_note_to_slack(projectId: int, noteId: int, webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, From 7dd902987bd251f0e261c52833fb5232b89fd557 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 17:42:33 +0200 Subject: [PATCH 32/68] feat(chalice): get affected users by errors for the past 30days feat(chalice): get affected sessions by errors for the past 30days --- api/chalicelib/core/errors.py | 9 ++++++++- ee/api/chalicelib/core/errors.py | 15 +++++++++++---- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 372de1ae4..059e96640 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -83,6 +83,13 @@ def __process_tags(row): def get_details(project_id, error_id, user_id, **data): pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") pg_sub_query24.append("error_id = %(error_id)s") + pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", + endTime_arg_name="endDate30", project_key="errors.project_id") + pg_sub_query30_err.append("sessions.project_id = %(project_id)s") + pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_err.append("error_id = %(error_id)s") + pg_sub_query30_err.append("source ='js_exception'") pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30") pg_sub_query30.append("error_id = %(error_id)s") pg_basic_query = __get_basic_constraints(time_constraint=False) @@ -131,7 +138,7 @@ def get_details(project_id, error_id, user_id, **data): FROM public.errors INNER JOIN events.errors AS s_errors USING (error_id) INNER JOIN public.sessions USING (session_id) - WHERE error_id = %(error_id)s + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY error_id, name, message) AS details INNER JOIN (SELECT error_id, MAX(timestamp) AS last_occurrence, diff --git a/ee/api/chalicelib/core/errors.py 
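The PATCH 31 switch of the share endpoint from POST to GET fits because every input travels in the path and the handler takes no body. A hedged client-side sketch of calling it (the base URL and bearer-token auth are assumptions for illustration):

import requests

def share_note_to_slack(base_url, token, project_id, note_id, webhook_id):
    url = f"{base_url}/{project_id}/notes/{note_id}/slack/{webhook_id}"
    r = requests.get(url, headers={"Authorization": f"Bearer {token}"}, timeout=5)
    r.raise_for_status()
    return r.json()
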
b/ee/api/chalicelib/core/errors.py index a1db0c798..a629926ab 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -90,6 +90,13 @@ def __process_tags(row): def get_details(project_id, error_id, user_id, **data): pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") pg_sub_query24.append("error_id = %(error_id)s") + pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", + endTime_arg_name="endDate30",project_key="errors.project_id") + pg_sub_query30_err.append("sessions.project_id = %(project_id)s") + pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_err.append("error_id = %(error_id)s") + pg_sub_query30_err.append("source ='js_exception'") pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30") pg_sub_query30.append("error_id = %(error_id)s") pg_basic_query = __get_basic_constraints(time_constraint=False) @@ -138,7 +145,7 @@ def get_details(project_id, error_id, user_id, **data): FROM public.errors INNER JOIN events.errors AS s_errors USING (error_id) INNER JOIN public.sessions USING (session_id) - WHERE error_id = %(error_id)s + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY error_id, name, message) AS details INNER JOIN (SELECT error_id, MAX(timestamp) AS last_occurrence, @@ -248,9 +255,9 @@ def get_details(project_id, error_id, user_id, **data): WHERE {" AND ".join(pg_basic_query)}) AS raw_tags(tags) ON (TRUE); """ - # print("--------------------") - # print(cur.mogrify(main_pg_query, params)) - # print("--------------------") + print("--------------------") + print(cur.mogrify(main_pg_query, params)) + print("--------------------") cur.execute(cur.mogrify(main_pg_query, params)) row = cur.fetchone() if row is None: From b3ad9ad5eb7a15d05ac593f409e02aa27a9ef1cd Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 19:08:05 +0200 Subject: [PATCH 33/68] feat(chalice): get affected browsers by errors for the past 30days feat(chalice): get affected browsers-versions by errors for the past 30days feat(chalice): get affected os by errors for the past 30days feat(chalice): get affected os-versions by errors for the past 30days feat(chalice): get affected countries by errors for the past 30days --- api/chalicelib/core/errors.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 059e96640..3db72e4b1 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -83,6 +83,12 @@ def __process_tags(row): def get_details(project_id, error_id, user_id, **data): pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") pg_sub_query24.append("error_id = %(error_id)s") + pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, + startTime_arg_name="startDate30", + endTime_arg_name="endDate30", project_key="sessions.project_id") + pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_session.append("error_id = %(error_id)s") pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", endTime_arg_name="endDate30", 
project_key="errors.project_id") pg_sub_query30_err.append("sessions.project_id = %(project_id)s") @@ -165,14 +171,14 @@ def get_details(project_id, error_id, user_id, **data): COUNT(session_id) AS count FROM events.errors INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_browser ORDER BY count DESC) AS count_per_browser_query INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition FROM (SELECT user_browser_version AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_browser = count_per_browser_query.name GROUP BY user_browser_version ORDER BY count DESC) AS version_details @@ -182,13 +188,13 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_os AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_os ORDER BY count DESC) AS count_per_os_details INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_os = count_per_os_details.name GROUP BY user_os_version ORDER BY count DESC) AS count_per_version_details @@ -199,7 +205,7 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_device_type AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_device_type ORDER BY count DESC) AS count_per_device_details INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition @@ -209,7 +215,7 @@ def get_details(project_id, error_id, user_id, **data): ELSE user_device END AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_device_type = count_per_device_details.name GROUP BY user_device ORDER BY count DESC) AS count_per_device_v_details @@ -219,7 +225,7 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_country AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_country ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE) INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24 From b7a757b9cde0a0e227874a10c468c958f1d5e8f8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 19:08:56 +0200 Subject: [PATCH 34/68] feat(chalice): get affected browsers by errors for the past 30days feat(chalice): get affected browsers-versions by errors for the past 30days feat(chalice): get affected os by errors for the past 30days feat(chalice): get affected os-versions by errors for the past 30days feat(chalice): get affected countries by errors for the past 30days --- ee/api/chalicelib/core/errors.py | 19 ++++++++++++------- ee/api/chalicelib/core/metrics.py | 4 ++-- 
ee/api/chalicelib/core/resources.py | 6 +++--- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index a629926ab..14a20e61f 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -90,6 +90,11 @@ def __process_tags(row): def get_details(project_id, error_id, user_id, **data): pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") pg_sub_query24.append("error_id = %(error_id)s") + pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", + endTime_arg_name="endDate30",project_key="sessions.project_id") + pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_session.append("error_id = %(error_id)s") pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", endTime_arg_name="endDate30",project_key="errors.project_id") pg_sub_query30_err.append("sessions.project_id = %(project_id)s") @@ -172,14 +177,14 @@ def get_details(project_id, error_id, user_id, **data): COUNT(session_id) AS count FROM events.errors INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_browser ORDER BY count DESC) AS count_per_browser_query INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition FROM (SELECT user_browser_version AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_browser = count_per_browser_query.name GROUP BY user_browser_version ORDER BY count DESC) AS version_details @@ -189,13 +194,13 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_os AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_os ORDER BY count DESC) AS count_per_os_details INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_os = count_per_os_details.name GROUP BY user_os_version ORDER BY count DESC) AS count_per_version_details @@ -206,7 +211,7 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_device_type AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_device_type ORDER BY count DESC) AS count_per_device_details INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition @@ -216,7 +221,7 @@ def get_details(project_id, error_id, user_id, **data): ELSE user_device END AS version, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} AND sessions.user_device_type = count_per_device_details.name GROUP BY user_device ORDER BY count DESC) AS 
count_per_device_v_details @@ -226,7 +231,7 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_country AS name, COUNT(session_id) AS count FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_basic_query)} + WHERE {" AND ".join(pg_sub_query30_session)} GROUP BY user_country ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE) INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24 diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 3861f48aa..452566194 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -15,7 +15,7 @@ def __get_basic_constraints(table_name=None, time_constraint=True, round_start=F table_name += "." else: table_name = "" - ch_sub_query = [f"{table_name}{identifier} =toUInt32(%({identifier})s)"] + ch_sub_query = [f"{table_name}{identifier} =toUInt16(%({identifier})s)"] if time_constraint: if round_start: ch_sub_query.append( @@ -2012,7 +2012,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1) ch_sub_query = __get_basic_constraints(table_name="resources", round_start=True, data=args) ch_sub_query.append("resources.success = 0") ch_sub_query.append("resources.type IN ('fetch','script')") - sch_sub_query = ["rs.project_id =toUInt32(%(project_id)s)", "rs.type IN ('fetch','script')"] + sch_sub_query = ["rs.project_id =toUInt16(%(project_id)s)", "rs.type IN ('fetch','script')"] meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition # sch_sub_query += meta_condition diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py index e5d7ee126..f2dbe6786 100644 --- a/ee/api/chalicelib/core/resources.py +++ b/ee/api/chalicelib/core/resources.py @@ -15,7 +15,7 @@ def get_by_session_id(session_id, project_id, start_ts, duration): encoded_body_size,decoded_body_size,success, if(success, 200, 400) AS status FROM {exp_ch_helper.get_main_resources_table(start_ts)} - WHERE session_id = toUInt64(%(session_id)s) + WHERE session_id = toUInt16(%(session_id)s) AND project_id = toUInt16(%(project_id)s) AND datetime >= toDateTime(%(res_start_ts)s / 1000) AND datetime <= toDateTime(%(res_end_ts)s / 1000);""" @@ -25,8 +25,8 @@ def get_by_session_id(session_id, project_id, start_ts, duration): encoded_body_size,decoded_body_size,success, coalesce(status,if(success, 200, status)) AS status FROM resources - WHERE session_id = toUInt64(%(session_id)s) - AND project_id = toUInt64(%(project_id)s) + WHERE session_id = toUInt16(%(session_id)s) + AND project_id = toUInt16(%(project_id)s) AND datetime >= toDateTime(%(res_start_ts)s / 1000) AND datetime <= toDateTime(%(res_end_ts)s / 1000);""" params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, From df60d36ad50495db805bad9fb3ea9aa7781253a4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 7 Oct 2022 19:24:31 +0200 Subject: [PATCH 35/68] feat(chalice): experimental error details --- ee/api/chalicelib/core/errors_exp.py | 117 ++++++++++++++++----------- 1 file changed, 71 insertions(+), 46 deletions(-) diff --git a/ee/api/chalicelib/core/errors_exp.py b/ee/api/chalicelib/core/errors_exp.py index 7014a16e0..1f0566b37 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/ee/api/chalicelib/core/errors_exp.py @@ -2,10 +2,12 @@ import json import schemas from chalicelib.core import metrics, metadata +from chalicelib.core import errors_legacy from 
chalicelib.core import sourcemaps, sessions from chalicelib.utils import ch_client, metrics_helper, exp_ch_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC +from decouple import config def _multiple_values(values, value_key="value"): @@ -147,20 +149,42 @@ def __process_tags(row): {"name": "country", "partitions": __flatten_sort_key_count(data=row.pop("country_partition"))} ] - +# TODO: solve memory issue def get_details(project_id, error_id, user_id, **data): - # now=TimeUTC.now() + if not config("EXP_ERRORS_GET", cast=bool, default=False): + return errors_legacy.get_details(project_id, error_id, user_id, **data) + + MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0) + MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) + MAIN_EVENTS_TABLE_24 = exp_ch_helper.get_main_events_table(TimeUTC.now()) + ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24") ch_sub_query24.append("error_id = %(error_id)s") - ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30") + pg_sub_query30_err = __get_basic_constraints(time_constraint=True, startTime_arg_name="startDate30", + endTime_arg_name="endDate30", project_key="errors.project_id", + table_name="errors") + pg_sub_query30_err.append("sessions.project_id = toUInt16(%(project_id)s)") + pg_sub_query30_err.append("sessions.datetime >= toDateTime(%(startDate30)s/1000)") + pg_sub_query30_err.append("sessions.datetime <= toDateTime(%(endDate30)s/1000)") + pg_sub_query30_err.append("error_id = %(error_id)s") + pg_sub_query30_err.append("source ='js_exception'") + ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30", + project_key="errors.project_id") ch_sub_query30.append("error_id = %(error_id)s") ch_basic_query = __get_basic_constraints(time_constraint=False) ch_basic_query.append("error_id = %(error_id)s") + ch_basic_query_session = ch_basic_query[:] + ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)") with ch_client.ClickHouseClient() as ch: data["startDate24"] = TimeUTC.now(-1) data["endDate24"] = TimeUTC.now() data["startDate30"] = TimeUTC.now(-30) data["endDate30"] = TimeUTC.now() + # # TODO: remove time limits + # data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 + # data["endDate24"] = 1650470729000 + # data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 + # data["endDate30"] = 1650470729000 density24 = int(data.get("density24", 24)) step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) density30 = int(data.get("density30", 30)) @@ -196,28 +220,27 @@ def get_details(project_id, error_id, user_id, **data): message, COUNT(DISTINCT user_uuid) AS users, COUNT(DISTINCT session_id) AS sessions - FROM errors - WHERE error_id = %(error_id)s + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY error_id, name, message) AS details INNER JOIN (SELECT error_id, - toUnixTimestamp(MAX(datetime)) * 1000 AS last_occurrence, - toUnixTimestamp(MIN(datetime)) * 1000 AS first_occurrence - FROM errors - WHERE error_id = %(error_id)s + toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence, + toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence + FROM {MAIN_EVENTS_TABLE} AS errors + WHERE {" AND ".join(ch_basic_query)} GROUP BY error_id) AS time_details ON details.error_id = 
time_details.error_id INNER JOIN (SELECT error_id, session_id AS last_session_id, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_uuid - FROM errors - WHERE error_id = %(error_id)s - ORDER BY datetime DESC + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(ch_basic_query_session)} + ORDER BY errors.datetime DESC LIMIT 1) AS last_session_details ON last_session_details.error_id = details.error_id - INNER JOIN (SELECT %(error_id)s AS error_id, - groupArray( - [[[user_browser]], [[toString(count_per_browser)]],versions_partition]) AS browsers_partition + INNER JOIN (SELECT %(error_id)s AS error_id, + groupArray([[[user_browser]], [[toString(count_per_browser)]],versions_partition]) AS browsers_partition FROM (SELECT user_browser, COUNT(session_id) AS count_per_browser - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_browser ORDER BY count_per_browser DESC) AS count_per_browser_query INNER JOIN (SELECT user_browser, @@ -225,36 +248,35 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_browser, user_browser_version, COUNT(session_id) AS count_per_version - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_browser, user_browser_version ORDER BY count_per_version DESC) AS version_details GROUP BY user_browser ) AS browser_version_details USING (user_browser)) AS browser_details ON browser_details.error_id = details.error_id - INNER JOIN (SELECT %(error_id)s AS error_id, + INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([[[user_os]], [[toString(count_per_os)]],versions_partition]) AS os_partition FROM (SELECT user_os, COUNT(session_id) AS count_per_os - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_os ORDER BY count_per_os DESC) AS count_per_os_details INNER JOIN (SELECT user_os, groupArray([user_os_version, toString(count_per_version)]) AS versions_partition FROM (SELECT user_os, user_os_version, COUNT(session_id) AS count_per_version - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_os, user_os_version ORDER BY count_per_version DESC) AS count_per_version_details GROUP BY user_os ) AS os_version_details USING (user_os)) AS os_details ON os_details.error_id = details.error_id - INNER JOIN (SELECT %(error_id)s AS error_id, - groupArray( - [[[toString(user_device_type)]], [[toString(count_per_device)]],versions_partition]) AS device_partition + INNER JOIN (SELECT %(error_id)s AS error_id, + groupArray([[[toString(user_device_type)]], [[toString(count_per_device)]],versions_partition]) AS device_partition FROM (SELECT user_device_type, COUNT(session_id) AS count_per_device - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_device_type ORDER BY 
count_per_device DESC) AS count_per_device_details INNER JOIN (SELECT user_device_type, @@ -262,26 +284,25 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT user_device_type, COALESCE(user_device,'unknown') AS user_device, COUNT(session_id) AS count_per_device - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_device_type, user_device ORDER BY count_per_device DESC) AS count_per_device_details GROUP BY user_device_type ) AS device_version_details USING (user_device_type)) AS device_details ON device_details.error_id = details.error_id - INNER JOIN (SELECT %(error_id)s AS error_id, - groupArray( - [[[toString(user_country)]], [[toString(count_per_country)]]]) AS country_partition + INNER JOIN (SELECT %(error_id)s AS error_id, + groupArray([[[toString(user_country)]], [[toString(count_per_country)]]]) AS country_partition FROM (SELECT user_country, COUNT(session_id) AS count_per_country - FROM errors - WHERE {" AND ".join(ch_basic_query)} + FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY user_country ORDER BY count_per_country DESC) AS count_per_country_details) AS country_details ON country_details.error_id = details.error_id INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([timestamp, count]) AS chart24 FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size24)s second)) * 1000 AS timestamp, COUNT(DISTINCT session_id) AS count - FROM errors + FROM {MAIN_EVENTS_TABLE_24} AS errors WHERE {" AND ".join(ch_sub_query24)} GROUP BY timestamp ORDER BY timestamp) AS chart_details) AS chart_details24 @@ -289,15 +310,15 @@ def get_details(project_id, error_id, user_id, **data): INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([timestamp, count]) AS chart30 FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size30)s second)) * 1000 AS timestamp, COUNT(DISTINCT session_id) AS count - FROM errors + FROM {MAIN_EVENTS_TABLE} AS errors WHERE {" AND ".join(ch_sub_query30)} GROUP BY timestamp ORDER BY timestamp) AS chart_details) AS chart_details30 ON details.error_id = chart_details30.error_id;""" - # print("--------------------") - # print(main_ch_query % params) - # print("--------------------") + print("--------------------") + print(ch.format(main_ch_query, params)) + print("--------------------") row = ch.execute(query=main_ch_query, params=params) if len(row) == 0: return {"errors": ["error not found"]} @@ -463,13 +484,17 @@ def get_details_chart(project_id, error_id, user_id, **data): def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", - endTime_arg_name="endDate", type_condition=True): - ch_sub_query = ["project_id =toUInt32(%(project_id)s)"] + endTime_arg_name="endDate", type_condition=True, project_key="project_id", table_name=None): + ch_sub_query = [f"{project_key} =toUInt16(%(project_id)s)"] + if table_name is not None: + table_name = table_name + "." 
+    else:
+        table_name = ""
     if type_condition:
-        ch_sub_query.append("event_type='ERROR'")
+        ch_sub_query.append(f"{table_name}event_type='ERROR'")
     if time_constraint:
-        ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
-                         f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
+        ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)",
+                         f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"]
     if platform == schemas.PlatformType.mobile:
         ch_sub_query.append("user_device_type = 'mobile'")
     elif platform == schemas.PlatformType.desktop:

From f789c91e17bf7f89652795acd31cde4c1aacdd6b Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Mon, 10 Oct 2022 09:49:33 +0200
Subject: [PATCH 36/68] feat(chalice): refactored permissions

---
 ee/api/chalicelib/core/signup.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py
index 72317859f..86299c838 100644
--- a/ee/api/chalicelib/core/signup.py
+++ b/ee/api/chalicelib/core/signup.py
@@ -1,6 +1,7 @@
 import json
 
 import schemas
+import schemas_ee
 from chalicelib.core import users, telemetry, tenants
 from chalicelib.utils import captcha
 from chalicelib.utils import helper
@@ -61,7 +62,8 @@ def create_step1(data: schemas.UserSignupSchema):
     params = {"email": email, "password": password,
               "fullname": fullname, "companyName": company_name,
               "projectName": project_name,
-              "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})}
+              "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
+              "permissions": [p.value for p in schemas_ee.Permissions]}
     query = """\
     WITH t AS (
         INSERT INTO public.tenants (name, version_number)
@@ -70,8 +72,8 @@
         ),
         r AS (
             INSERT INTO public.roles(tenant_id, name, description, permissions, protected)
-            VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', '{"SESSION_REPLAY", "DEV_TOOLS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE),
-                   ((SELECT tenant_id FROM t), 'Member', 'Member', '{"SESSION_REPLAY", "DEV_TOOLS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE)
+            VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', %(permissions)s::text[], TRUE),
+                   ((SELECT tenant_id FROM t), 'Member', 'Member', %(permissions)s::text[], FALSE)
             RETURNING *
         ),
         u AS (
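The effect of this patch: the default Owner/Member roles no longer hard-code the permission list; they build it from the schemas_ee.Permissions enum, so a permission added to the enum automatically reaches newly created tenants. A minimal sketch of the binding mechanics (hypothetical enum members; psycopg2 adapts a Python list of strings to a PostgreSQL ARRAY, which %(permissions)s::text[] then casts):

    import enum

    class Permissions(enum.Enum):  # stand-in for schemas_ee.Permissions
        SESSION_REPLAY = "SESSION_REPLAY"
        DEV_TOOLS = "DEV_TOOLS"
        METRICS = "METRICS"

    # enum iteration follows definition order, so the array is deterministic
    params = {"permissions": [p.value for p in Permissions]}
    assert params["permissions"] == ["SESSION_REPLAY", "DEV_TOOLS", "METRICS"]
    # cur.execute("INSERT INTO public.roles ... VALUES (%(permissions)s::text[], ...)", params)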
print("Verifying email validity") if email is None or len(email) < 5: errors.append("Invalid email address.") else: - print("Verifying email existance") if users.email_exists(email): errors.append("Email address already in use.") if users.get_deleted_user_by_email(email) is not None: errors.append("Email address previously deleted.") - print("Verifying captcha") if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): errors.append("Invalid captcha.") - print("Verifying password validity") if len(password) < 6: errors.append("Password is too short, it must be at least 6 characters long.") - print("Verifying fullname validity") fullname = data.fullname if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname): errors.append("Invalid full name.") - print("Verifying company's name validity") - company_name = data.organizationName - if company_name is None or len(company_name) < 1: - errors.append("invalid organization's name") - - print("Verifying project's name validity") - project_name = data.projectName - if project_name is None or len(project_name) < 1: - project_name = "my first project" + organization_name = data.organizationName + if organization_name is None or len(organization_name) < 1: + errors.append("Invalid organization name.") if len(errors) > 0: - print("==> error") + print(f"==> error for email:{data.email}, fullname:{data.fullname}, organizationName:{data.organizationName}") print(errors) return {"errors": errors} - print("No errors detected") + + project_name = "my first project" params = { - "email": email, "password": password, - "fullname": fullname, - "projectName": project_name, - "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), - "organizationName": company_name + "email": email, "password": password, "fullname": fullname, "projectName": project_name, + "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name } - query = f"""\ - WITH t AS ( - INSERT INTO public.tenants (name, version_number) - VALUES (%(organizationName)s, (SELECT openreplay_version())) + query = f"""WITH t AS ( + INSERT INTO public.tenants (name) + VALUES (%(organizationName)s) RETURNING api_key ), u AS ( @@ -106,7 +90,7 @@ def create_step1(data: schemas.UserSignupSchema): } c = { "tenantId": 1, - "name": company_name, + "name": organization_name, "apiKey": api_key, "remainingTrial": 14, "trialEnded": False, diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index e5b8cc63c..f61456de5 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -14,7 +14,7 @@ def get_by_tenant_id(tenant_id): api_key, created_at, '{license.EDITION}' AS edition, - version_number, + openreplay_version() AS version_number, opt_out FROM public.tenants LIMIT 1;""", diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 919ac9d9f..7639f1950 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -4,25 +4,18 @@ import re import string from typing import Union -import requests +from decouple import config import schemas from chalicelib.utils.TimeUTC import TimeUTC -local_prefix = 'local-' -from decouple import config - - -def get_version_number(): - return config("version") - def get_stage_name(): return "OpenReplay" -def generate_salt(): - return "".join(random.choices(string.hexdigits, k=36)) +def random_string(length=36): + return "".join(random.choices(string.hexdigits, k=length)) def list_to_camel_case(items, 
flatten=False): diff --git a/api/schemas.py b/api/schemas.py index ce6b00439..f1f3d9cb7 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -28,7 +28,6 @@ class UserLoginSchema(_Grecaptcha): class UserSignupSchema(UserLoginSchema): fullname: str = Field(...) organizationName: str = Field(...) - projectName: str = Field(default="my first project") class Config: alias_generator = attribute_to_camel_case diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 86299c838..402477245 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -19,55 +19,42 @@ def create_step1(data: schemas.UserSignupSchema): print(f"=====================> {email}") password = data.password - print("Verifying email validity") - if email is None or len(email) < 5 or not helper.is_valid_email(email): + if email is None or len(email) < 5: errors.append("Invalid email address.") else: - print("Verifying email existance") if users.email_exists(email): errors.append("Email address already in use.") if users.get_deleted_user_by_email(email) is not None: errors.append("Email address previously deleted.") - print("Verifying captcha") if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): errors.append("Invalid captcha.") - print("Verifying password validity") if len(password) < 6: errors.append("Password is too short, it must be at least 6 characters long.") - print("Verifying fullname validity") fullname = data.fullname if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname): errors.append("Invalid full name.") - print("Verifying company's name validity") - company_name = data.organizationName - if company_name is None or len(company_name) < 1: - errors.append("invalid organization's name") - - print("Verifying project's name validity") - project_name = data.projectName - if project_name is None or len(project_name) < 1: - project_name = "my first project" + organization_name = data.organizationName + if organization_name is None or len(organization_name) < 1: + errors.append("Invalid organization name.") if len(errors) > 0: - print("==> error") + print(f"==> error for email:{data.email}, fullname:{data.fullname}, organizationName:{data.organizationName}") print(errors) return {"errors": errors} - print("No errors detected") - print("Decomposed infos") - params = {"email": email, "password": password, - "fullname": fullname, "companyName": company_name, - "projectName": project_name, - "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), - "permissions": [p.value for p in schemas_ee.Permissions]} - query = """\ - WITH t AS ( - INSERT INTO public.tenants (name, version_number) - VALUES (%(companyName)s, (SELECT openreplay_version())) + project_name = "my first project" + params = { + "email": email, "password": password, "fullname": fullname, "projectName": project_name, + "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name, + "permissions": [p.value for p in schemas_ee.Permissions] + } + query = """WITH t AS ( + INSERT INTO public.tenants (name) + VALUES (%(organizationName)s) RETURNING tenant_id, api_key ), r AS ( @@ -111,7 +98,7 @@ def create_step1(data: schemas.UserSignupSchema): } c = { "tenantId": 1, - "name": company_name, + "name": organization_name, "apiKey": api_key, "remainingTrial": 14, "trialEnded": False, diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index 71119fd13..c50deaea7 100644 --- 
a/ee/api/chalicelib/core/tenants.py
+++ b/ee/api/chalicelib/core/tenants.py
@@ -13,7 +13,7 @@ def get_by_tenant_key(tenant_key):
                            t.api_key,
                            t.created_at,
                            '{license.EDITION}' AS edition,
-                           t.version_number,
+                           openreplay_version() AS version_number,
                            t.opt_out
                     FROM public.tenants AS t
                     WHERE t.tenant_key = %(tenant_key)s AND t.deleted_at ISNULL
@@ -33,7 +33,7 @@ def get_by_tenant_id(tenant_id):
                            t.api_key,
                            t.created_at,
                            '{license.EDITION}' AS edition,
-                           t.version_number,
+                           openreplay_version() AS version_number,
                            t.opt_out,
                            t.tenant_key
                     FROM public.tenants AS t
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
index 1fd27342f..5e0856f7e 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -6,7 +6,8 @@ SELECT 'v1.8.2-ee'
 $$ LANGUAGE sql IMMUTABLE;
 
 ALTER TABLE IF EXISTS public.tenants
-    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);
+    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
+    DROP COLUMN IF EXISTS version_number;
 
 CREATE TABLE IF NOT EXISTS sessions_notes
 (
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 13f2db5cf..1a11bb8a9 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -148,7 +148,6 @@ $$
     api_key        text UNIQUE default generate_api_key(20) not null,
     created_at     timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
     deleted_at     timestamp without time zone NULL DEFAULT NULL,
-    version_number text NOT NULL,
     license        text NULL,
     opt_out        bool NOT NULL DEFAULT FALSE,
     t_projects     integer NOT NULL DEFAULT 1,
diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
index 5abda1259..b89eb2343 100644
--- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
+++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql
@@ -6,7 +6,8 @@ SELECT 'v1.8.2'
 $$ LANGUAGE sql IMMUTABLE;
 
 ALTER TABLE IF EXISTS public.tenants
-    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT);
+    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
+    DROP COLUMN IF EXISTS version_number;
 
 CREATE TABLE IF NOT EXISTS sessions_notes
 (
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 8c3c07d24..fd534900d 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -121,7 +121,6 @@ $$
     name           text NOT NULL,
     api_key        text NOT NULL DEFAULT generate_api_key(20),
     created_at     timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
-    version_number text NOT NULL,
     license        text NULL,
     opt_out        bool NOT NULL DEFAULT FALSE,
     t_projects     integer NOT NULL DEFAULT 1,
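With version_number dropped from public.tenants on both editions, the version is now read exclusively through the openreplay_version() SQL function created in the 1.8.2 migrations. A quick deployment sanity check (a sketch only, assuming this repo's pg_client module is importable and configured):

    from chalicelib.utils import pg_client

    with pg_client.PostgresClient() as cur:
        cur.execute("SELECT openreplay_version() AS version_number;")
        # expected 'v1.8.2' on CE or 'v1.8.2-ee' on EE, per the migrations above
        print(cur.fetchone()["version_number"])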
From b68c954aff2c08fd90f89ed6c23fd2ecfb35f825 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 12 Oct 2022 12:05:09 +0200
Subject: [PATCH 38/68] feat(chalice): fixed get favorite session

---
 api/chalicelib/core/sessions_favorite.py    | 2 +-
 ee/api/chalicelib/core/sessions_favorite.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py
index 41d241b4d..11992cf5b 100644
--- a/api/chalicelib/core/sessions_favorite.py
+++ b/api/chalicelib/core/sessions_favorite.py
@@ -62,4 +62,4 @@ def get_start_end_timestamp(project_id, user_id):
                                 {"userId": user_id, "project_id": project_id})
         )
         r = cur.fetchone()
-    return (0, 0) if r is None else (r["max_start_ts"], r["min_start_ts"])
+    return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])
diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py
index c1616e0f9..ec6d51555 100644
--- a/ee/api/chalicelib/core/sessions_favorite.py
+++ b/ee/api/chalicelib/core/sessions_favorite.py
@@ -90,4 +90,4 @@ def get_start_end_timestamp(project_id, user_id):
                                 {"userId": user_id, "project_id": project_id})
         )
         r = cur.fetchone()
-    return (0, 0) if r is None else (r["max_start_ts"], r["min_start_ts"])
+    return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])

From 3543c970543bded58c0ad3b5d0ebbe8b777be1c0 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 12 Oct 2022 17:15:11 +0200
Subject: [PATCH 39/68] feat(chalice): changed authorizer

---
 ee/api/chalicelib/core/authorizers.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py
index b080d94ef..3f6767d40 100644
--- a/ee/api/chalicelib/core/authorizers.py
+++ b/ee/api/chalicelib/core/authorizers.py
@@ -16,7 +16,7 @@ def jwt_authorizer(token):
             token[1],
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
-            audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
+            audience=[f"front:{helper.get_stage_name()}"]
         )
     except jwt.ExpiredSignatureError:
         print("! JWT Expired signature")
@@ -38,13 +38,12 @@ def jwt_context(context):
     }
 
 
-def generate_jwt(id, tenant_id, iat, aud, exp=None):
+def generate_jwt(id, tenant_id, iat, aud):
     token = jwt.encode(
         payload={
             "userId": id,
             "tenantId": tenant_id,
-            "exp": iat // 1000 + int(config("JWT_EXP_DELTA_SECONDS")) + TimeUTC.get_utc_offset() // 1000 \
-                if exp is None else exp+ TimeUTC.get_utc_offset() // 1000,
+            "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
             "iss": config("JWT_ISSUER"),
             "iat": iat // 1000,
             "aud": aud
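After this patch a token's exp is always iat (converted to seconds) plus the JWT_EXPIRATION setting, and only the front:... audience is accepted. A self-contained round-trip sketch with PyJWT 2.x (the secret, issuer, and lifetime are illustrative placeholders, and the TimeUTC.get_utc_offset() adjustment used above is omitted for brevity):

    import time

    import jwt  # PyJWT

    SECRET = "change-me"            # stands in for config("jwt_secret")
    AUDIENCE = "front:OpenReplay"   # "plugin:..." is no longer accepted

    iat_ms = int(time.time() * 1000)  # the project passes iat in milliseconds
    token = jwt.encode(
        payload={
            "userId": 1,
            "tenantId": 1,
            "exp": iat_ms // 1000 + 3600,  # e.g. JWT_EXPIRATION=3600 seconds
            "iss": "openreplay",
            "iat": iat_ms // 1000,
            "aud": AUDIENCE,
        },
        key=SECRET,
        algorithm="HS256",
    )
    # validation mirrors jwt_authorizer: signature, expiry, and audience checks
    decoded = jwt.decode(token, SECRET, algorithms=["HS256"], audience=AUDIENCE)
    assert decoded["userId"] == 1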
JWT Expired signature") @@ -38,13 +38,12 @@ def jwt_context(context): } -def generate_jwt(id, tenant_id, iat, aud, exp=None): +def generate_jwt(id, tenant_id, iat, aud): token = jwt.encode( payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + int(config("JWT_EXP_DELTA_SECONDS")) + TimeUTC.get_utc_offset() // 1000 \ - if exp is None else exp+ TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("JWT_ISSUER"), "iat": iat // 1000, "aud": aud From 378f2196dfe4f2d21d0497f9b10a89ca2521bbf3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 12 Oct 2022 17:28:26 +0200 Subject: [PATCH 40/68] feat(chalice): file_key for session replay --- ee/api/chalicelib/core/sessions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 18da3e200..4a94d6e21 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -59,7 +59,8 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu SELECT s.*, s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, + encode(file_key,'hex') AS file_key {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''} FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} From 419f834b9ae783d825b300a2eb2450bc9dac12ed Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 13 Oct 2022 18:13:07 +0200 Subject: [PATCH 41/68] feat(chalice): changed pg_helper default timeout for no-pool connexion --- api/chalicelib/utils/pg_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index fe71f997b..c77a33fcf 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -109,7 +109,7 @@ class PostgresClient: elif not config('PG_POOL', cast=bool, default=True): single_config = dict(_PG_CONFIG) single_config["application_name"] += "-NOPOOL" - single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=3 * 60) * 1000}" + single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}" self.connection = psycopg2.connect(**single_config) else: self.connection = postgreSQL_pool.getconn() From 742c1ad7fe27e3ce848268f740edbaa643eb8f5d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 14 Oct 2022 19:42:49 +0200 Subject: [PATCH 42/68] feat(chalice): get error details experimental --- api/chalicelib/core/errors.py | 2 +- ee/api/chalicelib/core/errors.py | 2 +- ee/api/chalicelib/core/errors_exp.py | 294 ++++++++++++++++-- ee/api/chalicelib/utils/exp_ch_helper.py | 6 + .../db/init_dbs/clickhouse/1.8.2/1.8.2.sql | 33 +- .../clickhouse/create/init_schema.sql | 36 ++- 6 files changed, 345 insertions(+), 28 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 3db72e4b1..ba7ce4085 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -139,7 +139,7 @@ def get_details(project_id, error_id, user_id, **data): 
FROM (SELECT error_id, name, message, - COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT user_id) AS users, COUNT(DISTINCT session_id) AS sessions FROM public.errors INNER JOIN events.errors AS s_errors USING (error_id) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 14a20e61f..c037c2364 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -145,7 +145,7 @@ def get_details(project_id, error_id, user_id, **data): FROM (SELECT error_id, name, message, - COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT user_id) AS users, COUNT(DISTINCT session_id) AS sessions FROM public.errors INNER JOIN events.errors AS s_errors USING (error_id) diff --git a/ee/api/chalicelib/core/errors_exp.py b/ee/api/chalicelib/core/errors_exp.py index 1f0566b37..721108b71 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/ee/api/chalicelib/core/errors_exp.py @@ -115,6 +115,18 @@ def __flatten_sort_key_count_version(data, merge_nested=False): ] +def __transform_map_to_tag(data, key1, key2, requested_key): + result = [] + for i in data: + if requested_key == 0 and i.get(key1) is None and i.get(key2) is None: + result.append({"name": "all", "count": int(i.get("count"))}) + elif requested_key == 1 and i.get(key1) is not None and i.get(key2) is None: + result.append({"name": i.get(key1), "count": int(i.get("count"))}) + elif requested_key == 2 and i.get(key1) is not None and i.get(key2) is not None: + result.append({"name": i.get(key2), "count": int(i.get("count"))}) + return result + + def __flatten_sort_key_count(data): if data is None: return [] @@ -149,8 +161,52 @@ def __process_tags(row): {"name": "country", "partitions": __flatten_sort_key_count(data=row.pop("country_partition"))} ] -# TODO: solve memory issue -def get_details(project_id, error_id, user_id, **data): + +def __process_tags_map(row): + browsers_partition = row.pop("browsers_partition") + os_partition = row.pop("os_partition") + device_partition = row.pop("device_partition") + country_partition = row.pop("country_partition") + return [ + {"name": "browser", + "partitions": __transform_map_to_tag(data=browsers_partition, + key1="browser", + key2="browser_version", + requested_key=1)}, + {"name": "browser.ver", + "partitions": __transform_map_to_tag(data=browsers_partition, + key1="browser", + key2="browser_version", + requested_key=2)}, + {"name": "OS", + "partitions": __transform_map_to_tag(data=os_partition, + key1="os", + key2="os_version", + requested_key=1) + }, + {"name": "OS.ver", + "partitions": __transform_map_to_tag(data=os_partition, + key1="os", + key2="os_version", + requested_key=2)}, + {"name": "device.family", + "partitions": __transform_map_to_tag(data=device_partition, + key1="device_type", + key2="device", + requested_key=1)}, + {"name": "device", + "partitions": __transform_map_to_tag(data=device_partition, + key1="device_type", + key2="device", + requested_key=2)}, + {"name": "country", "partitions": __transform_map_to_tag(data=country_partition, + key1="country", + key2="", + requested_key=1)} + ] + + +def get_details_deprecated(project_id, error_id, user_id, **data): if not config("EXP_ERRORS_GET", cast=bool, default=False): return errors_legacy.get_details(project_id, error_id, user_id, **data) @@ -176,15 +232,15 @@ def get_details(project_id, error_id, user_id, **data): ch_basic_query_session = ch_basic_query[:] ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)") with ch_client.ClickHouseClient() as ch: - 
data["startDate24"] = TimeUTC.now(-1) - data["endDate24"] = TimeUTC.now() - data["startDate30"] = TimeUTC.now(-30) - data["endDate30"] = TimeUTC.now() - # # TODO: remove time limits - # data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 - # data["endDate24"] = 1650470729000 - # data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 - # data["endDate30"] = 1650470729000 + # data["startDate24"] = TimeUTC.now(-1) + # data["endDate24"] = TimeUTC.now() + # data["startDate30"] = TimeUTC.now(-30) + # data["endDate30"] = TimeUTC.now() + # TODO: remove time limits + data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 + data["endDate24"] = 1650470729000 + data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 + data["endDate30"] = 1650470729000 density24 = int(data.get("density24", 24)) step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) density30 = int(data.get("density30", 30)) @@ -282,7 +338,7 @@ def get_details(project_id, error_id, user_id, **data): INNER JOIN (SELECT user_device_type, groupArray([user_device, toString(count_per_device)]) AS versions_partition FROM (SELECT user_device_type, - COALESCE(user_device,'unknown') AS user_device, + coalesce(user_device,'unknown') AS user_device, COUNT(session_id) AS count_per_device FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id) WHERE {" AND ".join(pg_sub_query30_err)} @@ -316,9 +372,9 @@ def get_details(project_id, error_id, user_id, **data): ORDER BY timestamp) AS chart_details) AS chart_details30 ON details.error_id = chart_details30.error_id;""" - print("--------------------") - print(ch.format(main_ch_query, params)) - print("--------------------") + # print("--------------------") + # print(ch.format(main_ch_query, params)) + # print("--------------------") row = ch.execute(query=main_ch_query, params=params) if len(row) == 0: return {"errors": ["error not found"]} @@ -327,7 +383,7 @@ def get_details(project_id, error_id, user_id, **data): with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""SELECT error_id, status, session_id, start_ts, - parent_error_id,session_id, user_anonymous_id, + parent_error_id, user_anonymous_id, user_id, user_uuid, user_browser, user_browser_version, user_os, user_os_version, user_device, payload, FALSE AS favorite, @@ -359,11 +415,203 @@ def get_details(project_id, error_id, user_id, **data): row["favorite"] = False row["viewed"] = False row["chart24"] = __rearrange_chart_details(start_at=data["startDate24"], end_at=data["endDate24"], - density=density24, - chart=row["chart24"]) + density=density24, chart=row["chart24"]) row["chart30"] = __rearrange_chart_details(start_at=data["startDate30"], end_at=data["endDate30"], - density=density30, - chart=row["chart30"]) + density=density30, chart=row["chart30"]) + return {"data": helper.dict_to_camel_case(row)} + + +def get_details(project_id, error_id, user_id, **data): + if not config("EXP_ERRORS_GET", cast=bool, default=False): + return errors_legacy.get_details(project_id, error_id, user_id, **data) + + MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0) + MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0) + MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) + MAIN_EVENTS_TABLE_24 = exp_ch_helper.get_main_events_table(TimeUTC.now()) + + ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24") + ch_sub_query24.append("error_id = %(error_id)s") + # pg_sub_query30_err = 
__get_basic_constraints(time_constraint=True, startTime_arg_name="startDate30", + # endTime_arg_name="endDate30", project_key="errors.project_id", + # table_name="errors") + # pg_sub_query30_err.append("sessions.project_id = toUInt16(%(project_id)s)") + # pg_sub_query30_err.append("sessions.datetime >= toDateTime(%(startDate30)s/1000)") + # pg_sub_query30_err.append("sessions.datetime <= toDateTime(%(endDate30)s/1000)") + # pg_sub_query30_err.append("error_id = %(error_id)s") + # pg_sub_query30_err.append("source ='js_exception'") + ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30", + project_key="errors.project_id") + ch_sub_query30.append("error_id = %(error_id)s") + ch_basic_query = __get_basic_constraints(time_constraint=False) + ch_basic_query.append("error_id = %(error_id)s") + # ch_basic_query_session = ch_basic_query[:] + # ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)") + with ch_client.ClickHouseClient() as ch: + # data["startDate24"] = TimeUTC.now(-1) + # data["endDate24"] = TimeUTC.now() + # data["startDate30"] = TimeUTC.now(-30) + # data["endDate30"] = TimeUTC.now() + # TODO: remove time limits + data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 + data["endDate24"] = 1650470729000 + data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 + data["endDate30"] = 1650470729000 + density24 = int(data.get("density24", 24)) + step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) + density30 = int(data.get("density30", 30)) + step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30) + params = { + "startDate24": data['startDate24'], + "endDate24": data['endDate24'], + "startDate30": data['startDate30'], + "endDate30": data['endDate30'], + "project_id": project_id, + "userId": user_id, + "step_size24": step_size24, + "step_size30": step_size30, + "error_id": error_id} + + main_ch_query = f"""\ + WITH pre_processed AS (SELECT error_id, + name, + message, + session_id, + datetime, + user_id, + user_browser, + user_browser_version, + user_os, + user_os_version, + user_device_type, + user_device, + user_country + FROM {MAIN_ERR_SESS_TABLE} AS errors + WHERE {" AND ".join(ch_basic_query)} + ) + SELECT %(error_id)s AS error_id, name, message,users, + first_occurrence,last_occurrence,last_session_id, + sessions,browsers_partition,os_partition,device_partition, + country_partition,chart24,chart30 + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_id) AS users, + COUNT(DISTINCT session_id) AS sessions + FROM pre_processed + WHERE datetime >= toDateTime(%(startDate30)s / 1000) + AND datetime <= toDateTime(%(endDate30)s / 1000) + GROUP BY error_id, name, message) AS details + INNER JOIN (SELECT toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence, + toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence + FROM pre_processed) AS time_details ON TRUE + INNER JOIN (SELECT session_id AS last_session_id + FROM pre_processed + ORDER BY datetime DESC + LIMIT 1) AS last_session_details ON TRUE + INNER JOIN (SELECT groupArray(details) AS browsers_partition + FROM (SELECT COUNT(1) AS count, + coalesce(nullIf(user_browser,''),toNullable('unknown')) AS browser, + coalesce(nullIf(user_browser_version,''),toNullable('unknown')) AS browser_version, + map('browser', browser, + 'browser_version', browser_version, + 'count', toString(count)) AS details + FROM pre_processed + GROUP BY ROLLUP(browser, browser_version) + ORDER BY browser nulls first, 
browser_version nulls first, count DESC) AS mapped_browser_details + ) AS browser_details ON TRUE + INNER JOIN (SELECT groupArray(details) AS os_partition + FROM (SELECT COUNT(1) AS count, + coalesce(nullIf(user_os,''),toNullable('unknown')) AS os, + coalesce(nullIf(user_os_version,''),toNullable('unknown')) AS os_version, + map('os', os, + 'os_version', os_version, + 'count', toString(count)) AS details + FROM pre_processed + GROUP BY ROLLUP(os, os_version) + ORDER BY os nulls first, os_version nulls first, count DESC) AS mapped_os_details + ) AS os_details ON TRUE + INNER JOIN (SELECT groupArray(details) AS device_partition + FROM (SELECT COUNT(1) AS count, + coalesce(nullIf(user_device,''),toNullable('unknown')) AS user_device, + map('device_type', toString(user_device_type), + 'device', user_device, + 'count', toString(count)) AS details + FROM pre_processed + GROUP BY ROLLUP(user_device_type, user_device) + ORDER BY user_device_type nulls first, user_device nulls first, count DESC + ) AS count_per_device_details + ) AS mapped_device_details ON TRUE + INNER JOIN (SELECT groupArray(details) AS country_partition + FROM (SELECT COUNT(1) AS count, + map('country', toString(user_country), + 'count', toString(count)) AS details + FROM pre_processed + GROUP BY user_country + ORDER BY count DESC) AS count_per_country_details + ) AS mapped_country_details ON TRUE + INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart24 + FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3756 second)) * + 1000 AS timestamp, + COUNT(DISTINCT session_id) AS count + FROM {MAIN_EVENTS_TABLE} AS errors + WHERE {" AND ".join(ch_sub_query24)} + GROUP BY timestamp + ORDER BY timestamp) AS chart_details + ) AS chart_details24 ON TRUE + INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart30 + FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3724 second)) * + 1000 AS timestamp, + COUNT(DISTINCT session_id) AS count + FROM {MAIN_EVENTS_TABLE} AS errors + WHERE {" AND ".join(ch_sub_query30)} + GROUP BY timestamp + ORDER BY timestamp) AS chart_details + ) AS chart_details30 ON TRUE;""" + + # print("--------------------") + # print(ch.format(main_ch_query, params)) + # print("--------------------") + row = ch.execute(query=main_ch_query, params=params) + if len(row) == 0: + return {"errors": ["error not found"]} + row = row[0] + + row["tags"] = __process_tags_map(row) + + query = f"""SELECT session_id, toUnixTimestamp(datetime) * 1000 AS start_ts, + user_anonymous_id,user_id, user_uuid, user_browser, user_browser_version, + user_os, user_os_version, user_device, FALSE AS favorite, True AS viewed + FROM {MAIN_SESSIONS_TABLE} AS sessions + WHERE project_id = toUInt16(%(project_id)s) + AND session_id = %(session_id)s + ORDER BY datetime DESC + LIMIT 1;""" + params = {"project_id": project_id, "session_id": row["last_session_id"], "userId": user_id} + # print("--------------------") + # print(ch.format(query, params)) + # print("--------------------") + status = ch.execute(query=query, params=params) + + if status is not None: + status = status[0] + # row["stack"] = format_first_stack_frame(status).pop("stack") + # row["status"] = status.pop("status") + # row["parent_error_id"] = status.pop("parent_error_id") + row["favorite"] = status.pop("favorite") + row["viewed"] = status.pop("viewed") + row["last_hydrated_session"] = status + else: + # row["stack"] = [] + row["last_hydrated_session"] = None + # row["status"] = "untracked" + # 
row["parent_error_id"] = None + row["favorite"] = False + row["viewed"] = False + row["chart24"] = metrics.__complete_missing_steps(start_time=data["startDate24"], end_time=data["endDate24"], + density=density24, rows=row["chart24"], neutral={"count": 0}) + row["chart30"] = metrics.__complete_missing_steps(start_time=data["startDate30"], end_time=data["endDate30"], + density=density30, rows=row["chart30"], neutral={"count": 0}) return {"data": helper.dict_to_camel_case(row)} @@ -444,7 +692,7 @@ def get_details_chart(project_id, error_id, user_id, **data): INNER JOIN (SELECT user_device_type, groupArray([user_device, toString(count_per_device)]) AS versions_partition FROM (SELECT user_device_type, - COALESCE(user_device,'unknown') AS user_device, + coalesce(user_device,'unknown') AS user_device, COUNT(session_id) AS count_per_device FROM errors WHERE {" AND ".join(ch_sub_query)} @@ -904,11 +1152,11 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id): s.pages_count, s.errors_count, s.issue_types, - COALESCE((SELECT TRUE + coalesce((SELECT TRUE FROM public.user_favorite_sessions AS fs WHERE s.session_id = fs.session_id AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite, - COALESCE((SELECT TRUE + coalesce((SELECT TRUE FROM public.user_viewed_sessions AS fs WHERE s.session_id = fs.session_id AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed diff --git a/ee/api/chalicelib/utils/exp_ch_helper.py b/ee/api/chalicelib/utils/exp_ch_helper.py index 709b5e926..02de9addd 100644 --- a/ee/api/chalicelib/utils/exp_ch_helper.py +++ b/ee/api/chalicelib/utils/exp_ch_helper.py @@ -40,3 +40,9 @@ def get_user_viewed_sessions_table(timestamp=0): def get_user_viewed_errors_table(timestamp=0): return "experimental.user_viewed_errors" + + +def get_main_js_errors_sessions_table(timestamp=0): + return "experimental.js_errors_sessions_mv" # \ + # if config("EXP_7D_MV", cast=bool, default=True) \ + # and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events" diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql index 65acd9fb5..134ec3af6 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql @@ -16,7 +16,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues project_id UInt16, issue_id String, type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), - context_string text NOT NULL, + context_string String, context_keys Array(String), context_values Array(Nullable(String)), _timestamp DateTime DEFAULT now() @@ -25,4 +25,35 @@ CREATE TABLE IF NOT EXISTS experimental.issues ORDER BY (project_id, issue_id, type) TTL _timestamp + INTERVAL 3 MONTH; +CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, error_id, session_id) + TTL _timestamp + INTERVAL 35 DAY + POPULATE +AS +SELECT session_id, + project_id, + events.datetime AS datetime, + event_type, + assumeNotNull(error_id) AS error_id, + source, + name, + message, + error_tags_keys, + error_tags_values, + message_id, + user_browser, + user_browser_version, + user_os, + 
user_os_version, + user_device_type, + user_device, + user_country, + _timestamp +FROM experimental.events + INNER JOIN experimental.sessions USING (session_id) +WHERE event_type = 'ERROR' + AND source = 'js_exception'; + -- TODO: find a way to update materialized views; or drop and re-create them diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql index 6ebcfc319..b172c0080 100644 --- a/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/clickhouse/create/init_schema.sql @@ -201,7 +201,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues project_id UInt16, issue_id String, type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), - context_string text NOT NULL, + context_string String, context_keys Array(String), context_values Array(Nullable(String)), _timestamp DateTime DEFAULT now() @@ -360,4 +360,36 @@ SELECT session_id, FROM experimental.sessions WHERE datetime >= now() - INTERVAL 7 DAY AND isNotNull(duration) - AND duration > 0; \ No newline at end of file + AND duration > 0; + +CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMM(datetime) + ORDER BY (project_id, datetime, event_type, error_id, session_id) + TTL _timestamp + INTERVAL 35 DAY + POPULATE +AS +SELECT session_id, + project_id, + events.datetime AS datetime, + event_type, + assumeNotNull(error_id) AS error_id, + source, + name, + message, + error_tags_keys, + error_tags_values, + message_id, + user_id, + user_browser, + user_browser_version, + user_os, + user_os_version, + user_device_type, + user_device, + user_country, + _timestamp +FROM experimental.events + INNER JOIN experimental.sessions USING (session_id) +WHERE event_type = 'ERROR' + AND source = 'js_exception'; \ No newline at end of file From b56d39d57fa1ab127ea2e8bec5ac9a0a0cfa96c4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 17 Oct 2022 11:03:49 +0200 Subject: [PATCH 43/68] feat(chalice): get error details experimental changes and fixes --- ee/api/chalicelib/core/errors_exp.py | 59 +++++++++++++++------------- 1 file changed, 32 insertions(+), 27 deletions(-) diff --git a/ee/api/chalicelib/core/errors_exp.py b/ee/api/chalicelib/core/errors_exp.py index 721108b71..1fb201492 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/ee/api/chalicelib/core/errors_exp.py @@ -232,15 +232,15 @@ def get_details_deprecated(project_id, error_id, user_id, **data): ch_basic_query_session = ch_basic_query[:] ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)") with ch_client.ClickHouseClient() as ch: - # data["startDate24"] = TimeUTC.now(-1) - # data["endDate24"] = TimeUTC.now() - # data["startDate30"] = TimeUTC.now(-30) - # data["endDate30"] = TimeUTC.now() - # TODO: remove time limits - data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 - data["endDate24"] = 1650470729000 - data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 - data["endDate30"] = 1650470729000 + data["startDate24"] = TimeUTC.now(-1) + data["endDate24"] = TimeUTC.now() + data["startDate30"] = TimeUTC.now(-30) + data["endDate30"] = TimeUTC.now() + # # TODO: 
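
With the hard-coded test timestamps above swapped back to TimeUTC.now(), the 24-hour and 30-day chart windows are live again, and __get_step_size turns each window plus its density into a bucket width. A rough sketch of that computation, assuming epoch-millisecond inputs; the real helper's rounding may differ:

    import math

    # Bucket width in ms for `density` points across [start_ts, end_ts].
    def get_step_size(start_ts, end_ts, density):
        return int(math.ceil((end_ts - start_ts) / float(density)))

The INTERVAL ... second literals in the captured ClickHouse queries earlier appear to be this value expressed in seconds.
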
remove time limits + # data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 + # data["endDate24"] = 1650470729000 + # data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 + # data["endDate30"] = 1650470729000 density24 = int(data.get("density24", 24)) step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) density30 = int(data.get("density30", 30)) @@ -448,15 +448,15 @@ def get_details(project_id, error_id, user_id, **data): # ch_basic_query_session = ch_basic_query[:] # ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)") with ch_client.ClickHouseClient() as ch: - # data["startDate24"] = TimeUTC.now(-1) - # data["endDate24"] = TimeUTC.now() - # data["startDate30"] = TimeUTC.now(-30) - # data["endDate30"] = TimeUTC.now() - # TODO: remove time limits - data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 - data["endDate24"] = 1650470729000 - data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 - data["endDate30"] = 1650470729000 + data["startDate24"] = TimeUTC.now(-1) + data["endDate24"] = TimeUTC.now() + data["startDate30"] = TimeUTC.now(-30) + data["endDate30"] = TimeUTC.now() + # # TODO: remove time limits + # data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000 + # data["endDate24"] = 1650470729000 + # data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000 + # data["endDate30"] = 1650470729000 density24 = int(data.get("density24", 24)) step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) density30 = int(data.get("density30", 30)) @@ -485,27 +485,32 @@ def get_details(project_id, error_id, user_id, **data): user_os_version, user_device_type, user_device, - user_country + user_country, + error_tags_keys, + error_tags_values FROM {MAIN_ERR_SESS_TABLE} AS errors WHERE {" AND ".join(ch_basic_query)} ) SELECT %(error_id)s AS error_id, name, message,users, first_occurrence,last_occurrence,last_session_id, sessions,browsers_partition,os_partition,device_partition, - country_partition,chart24,chart30 + country_partition,chart24,chart30,custom_tags FROM (SELECT error_id, name, - message, - COUNT(DISTINCT user_id) AS users, - COUNT(DISTINCT session_id) AS sessions + message FROM pre_processed - WHERE datetime >= toDateTime(%(startDate30)s / 1000) - AND datetime <= toDateTime(%(endDate30)s / 1000) - GROUP BY error_id, name, message) AS details + LIMIT 1) AS details + INNER JOIN (SELECT COUNT(DISTINCT user_id) AS users, + COUNT(DISTINCT session_id) AS sessions + FROM pre_processed + WHERE datetime >= toDateTime(%(startDate30)s / 1000) + AND datetime <= toDateTime(%(endDate30)s / 1000) + ) AS last_month_stats ON TRUE INNER JOIN (SELECT toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence, toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence FROM pre_processed) AS time_details ON TRUE - INNER JOIN (SELECT session_id AS last_session_id + INNER JOIN (SELECT session_id AS last_session_id, + arrayMap((key, value)->(map(key, value)), error_tags_keys, error_tags_values) AS custom_tags FROM pre_processed ORDER BY datetime DESC LIMIT 1) AS last_session_details ON TRUE From 34b229ee15cfd7fbe29ba8de4b01b052a5e2d8e7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 17 Oct 2022 11:14:14 +0200 Subject: [PATCH 44/68] feat(chalice): get error details fixed empty-default-value for error tags --- api/chalicelib/core/errors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index ba7ce4085..07a7fe200 100644 --- 
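
A note on the custom_tags expression above: arrayMap over the parallel error_tags_keys/error_tags_values arrays zips them into one single-entry map per tag, the same array-of-objects shape the PostgreSQL version builds with jsonb_agg(jsonb_build_object(...)). The equivalent transform in Python, for illustration only:

    # keys ['env', 'release'] + values ['prod', '1.8.2']
    #   -> [{'env': 'prod'}, {'release': '1.8.2'}]
    def pair_tags(keys, values):
        return [{k: v} for k, v in zip(keys, values)]
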
a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -135,7 +135,7 @@ def get_details(project_id, error_id, user_id, **data): country_partition, chart24, chart30, - COALESCE(tags,'{{}}')::jsonb AS custom_tags + COALESCE(tags,'[]')::jsonb AS custom_tags FROM (SELECT error_id, name, message, From 9c3596a2ed12e6ec895b1a946fe897a1d859ac97 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 17 Oct 2022 19:25:01 +0200 Subject: [PATCH 45/68] feat(chalice): event-level-error tags --- api/chalicelib/core/errors.py | 32 +++++++--------- ee/api/chalicelib/core/errors.py | 38 ++++++++----------- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 7 +++- .../db/init_dbs/postgresql/init_schema.sql | 7 +++- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 7 +++- .../db/init_dbs/postgresql/init_schema.sql | 7 +++- 6 files changed, 53 insertions(+), 45 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 07a7fe200..20c53a07a 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -135,7 +135,7 @@ def get_details(project_id, error_id, user_id, **data): country_partition, chart24, chart30, - COALESCE(tags,'[]')::jsonb AS custom_tags + custom_tags FROM (SELECT error_id, name, message, @@ -146,25 +146,22 @@ def get_details(project_id, error_id, user_id, **data): INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY error_id, name, message) AS details - INNER JOIN (SELECT error_id, - MAX(timestamp) AS last_occurrence, + INNER JOIN (SELECT MAX(timestamp) AS last_occurrence, MIN(timestamp) AS first_occurrence FROM events.errors WHERE error_id = %(error_id)s - GROUP BY error_id) AS time_details USING (error_id) - INNER JOIN (SELECT error_id, - session_id AS last_session_id, - user_os, - user_os_version, - user_browser, - user_browser_version, - user_device, - user_device_type, - user_uuid - FROM events.errors INNER JOIN public.sessions USING (session_id) + GROUP BY error_id) AS time_details ON (TRUE) + INNER JOIN (SELECT session_id AS last_session_id, + coalesce(custom_tags, '[]')::jsonb AS custom_tags + FROM events.errors + LEFT JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags + FROM errors_tags + WHERE errors_tags.error_id = %(error_id)s + AND errors_tags.session_id = errors.session_id) AS errors_tags ON (TRUE) WHERE error_id = %(error_id)s ORDER BY errors.timestamp DESC - LIMIT 1) AS last_session_details USING (error_id) + LIMIT 1) AS last_session_details ON (TRUE) INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition FROM (SELECT * FROM (SELECT user_browser AS name, @@ -248,10 +245,7 @@ def get_details(project_id, error_id, user_id, **data): WHERE {" AND ".join(pg_sub_query30)}) AS chart_details ON (TRUE) GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE) - LEFT JOIN (SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) - FROM errors_tags INNER JOIN errors USING(error_id) - WHERE {" AND ".join(pg_basic_query)}) AS raw_tags(tags) ON (TRUE); + ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); """ # print("--------------------") diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index c037c2364..e9ff405f8 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -141,7 +141,7 @@ def get_details(project_id, error_id, user_id, **data): country_partition, chart24, chart30, - 
COALESCE(tags,'{{}}')::jsonb AS custom_tags + custom_tags FROM (SELECT error_id, name, message, @@ -152,25 +152,22 @@ def get_details(project_id, error_id, user_id, **data): INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query30_err)} GROUP BY error_id, name, message) AS details - INNER JOIN (SELECT error_id, - MAX(timestamp) AS last_occurrence, + INNER JOIN (SELECT MAX(timestamp) AS last_occurrence, MIN(timestamp) AS first_occurrence FROM events.errors WHERE error_id = %(error_id)s - GROUP BY error_id) AS time_details USING (error_id) - INNER JOIN (SELECT error_id, - session_id AS last_session_id, - user_os, - user_os_version, - user_browser, - user_browser_version, - user_device, - user_device_type, - user_uuid - FROM events.errors INNER JOIN public.sessions USING (session_id) + GROUP BY error_id) AS time_details ON (TRUE) + INNER JOIN (SELECT session_id AS last_session_id, + coalesce(custom_tags, '[]')::jsonb AS custom_tags + FROM events.errors + LEFT JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags + FROM errors_tags + WHERE errors_tags.error_id = %(error_id)s + AND errors_tags.session_id = errors.session_id) AS errors_tags ON (TRUE) WHERE error_id = %(error_id)s ORDER BY errors.timestamp DESC - LIMIT 1) AS last_session_details USING (error_id) + LIMIT 1) AS last_session_details ON (TRUE) INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition FROM (SELECT * FROM (SELECT user_browser AS name, @@ -254,15 +251,12 @@ def get_details(project_id, error_id, user_id, **data): WHERE {" AND ".join(pg_sub_query30)}) AS chart_details ON (TRUE) GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE) - LEFT JOIN (SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) - FROM errors_tags INNER JOIN errors USING(error_id) - WHERE {" AND ".join(pg_basic_query)}) AS raw_tags(tags) ON (TRUE); + ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); """ - print("--------------------") - print(cur.mogrify(main_pg_query, params)) - print("--------------------") + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") cur.execute(cur.mogrify(main_pg_query, params)) row = cur.fetchone() if row is None: diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 5e0856f7e..63b3344b6 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -28,9 +28,14 @@ CREATE TABLE IF NOT EXISTS errors_tags key text NOT NULL, value text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); +CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); +CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 1a11bb8a9..cd8f88cb1 100644 
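
With errors_tags now carrying session_id and message_id (see the 1.8.2 migrations and schema updates in this patch), tags resolve per occurrence rather than per error. A standalone sketch of that lookup in psycopg2, separate from the big details query; the connection handling and the FILTER clause are illustrative, not part of the patch:

    import psycopg2.extras

    # Tags attached to the most recent occurrence of an error.
    def last_occurrence_tags(conn, error_id):
        with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
            cur.execute(
                """SELECT COALESCE(jsonb_agg(jsonb_build_object(t.key, t.value))
                                     FILTER (WHERE t.key IS NOT NULL),
                                   '[]'::jsonb) AS custom_tags
                   FROM (SELECT error_id, session_id, message_id
                         FROM events.errors
                         WHERE error_id = %(error_id)s
                         ORDER BY timestamp DESC
                         LIMIT 1) AS last_occ
                        LEFT JOIN errors_tags AS t
                                  USING (error_id, session_id, message_id)""",
                {"error_id": error_id})
            return cur.fetchone()["custom_tags"]
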
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -501,10 +501,15 @@ $$ key text NOT NULL, value text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); + CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); + CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); IF NOT EXISTS(SELECT * FROM pg_type typ diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index b89eb2343..f2ad7e68e 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -28,9 +28,14 @@ CREATE TABLE IF NOT EXISTS errors_tags key text NOT NULL, value text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); +CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); +CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index fd534900d..9fe80c718 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -408,10 +408,15 @@ $$ key text NOT NULL, value text NOT NULL, created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE ); CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id); + CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id); + CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id); -- --- sessions.sql --- CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); From 61d0445317b1f2be7dea9c9080e54e7743aa6004 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 18 Oct 2022 14:09:54 +0200 Subject: [PATCH 46/68] feat(chalice): fixed get last errors-tags --- api/chalicelib/core/errors.py | 3 ++- ee/api/chalicelib/core/errors.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 20c53a07a..9c89844f9 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -158,7 +158,8 @@ def get_details(project_id, error_id, user_id, **data): 
SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags FROM errors_tags WHERE errors_tags.error_id = %(error_id)s - AND errors_tags.session_id = errors.session_id) AS errors_tags ON (TRUE) + AND errors_tags.session_id = errors.session_id + AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE) WHERE error_id = %(error_id)s ORDER BY errors.timestamp DESC LIMIT 1) AS last_session_details ON (TRUE) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index e9ff405f8..66c3a195d 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -164,7 +164,8 @@ def get_details(project_id, error_id, user_id, **data): SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags FROM errors_tags WHERE errors_tags.error_id = %(error_id)s - AND errors_tags.session_id = errors.session_id) AS errors_tags ON (TRUE) + AND errors_tags.session_id = errors.session_id + AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE) WHERE error_id = %(error_id)s ORDER BY errors.timestamp DESC LIMIT 1) AS last_session_details ON (TRUE) From f1a158e2a3f4a53c685f9902cf50f914e7f9614e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 19 Oct 2022 20:27:50 +0200 Subject: [PATCH 47/68] feat(sourcemaps-reader): read sourcemap from URL feat(chalice): look for sourcemap in URL if not found in bucket --- api/chalicelib/core/sourcemaps.py | 35 +- api/chalicelib/core/sourcemaps_parser.py | 6 +- sourcemap-reader/package-lock.json | 712 ++++++++++++++++++ sourcemap-reader/package.json | 1 + sourcemap-reader/run-dev.sh | 2 +- .../servers/sourcemaps-handler.js | 224 +++--- sourcemap-reader/servers/sourcemaps-server.js | 4 +- 7 files changed, 876 insertions(+), 108 deletions(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 8714b9ee2..4cf4f39c5 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -1,11 +1,11 @@ -from decouple import config -from chalicelib.utils import helper - -from chalicelib.utils import s3 import hashlib from urllib.parse import urlparse +import requests +from decouple import config + from chalicelib.core import sourcemaps_parser +from chalicelib.utils import s3 def __get_key(project_id, url): @@ -73,6 +73,11 @@ def format_payload(p, truncate_to_first=False): return [] +def url_exists(url): + r = requests.head(url, allow_redirects=False) + return r.status_code == 200 # and r.get("Content-Type")=="application/json" + + def get_traces_group(project_id, payload): frames = format_payload(payload) @@ -80,25 +85,35 @@ def get_traces_group(project_id, payload): payloads = {} all_exists = True for i, u in enumerate(frames): - key = __get_key(project_id, u["absPath"]) # use filename instead? + file_url_exists = False + file_url = u["absPath"] + key = __get_key(project_id, file_url) # use filename instead? 
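
One caveat in url_exists above: the requests.head probe is issued without a timeout, so a slow host can stall trace processing. A slightly hardened variant, offered only as a suggestion; the timeout value and exception handling are not part of the patch:

    import requests

    def url_exists(url, timeout=5):
        # HEAD probe; treat network errors (and anything but 200) as absent.
        try:
            r = requests.head(url, allow_redirects=False, timeout=timeout)
        except requests.RequestException:
            return False
        return r.status_code == 200
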
if key not in payloads: file_exists = s3.exists(config('sourcemaps_bucket'), key) - all_exists = all_exists and file_exists if not file_exists: - print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3") + if not file_url.endswith(".map"): + file_url += '.map' + file_url_exists = url_exists(file_url) + file_exists = file_url_exists + all_exists = all_exists and file_exists + if not file_exists and not file_url_exists: + print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 nor server") payloads[key] = None else: payloads[key] = [] results[i] = dict(u) results[i]["frame"] = dict(u) if payloads[key] is not None: - payloads[key].append({"resultIndex": i, + payloads[key].append({"resultIndex": i, "frame": dict(u), "URL": file_url, "position": {"line": u["lineNo"], "column": u["colNo"]}, - "frame": dict(u)}) + "isURL": not file_exists and file_url_exists}) + for key in payloads.keys(): if payloads[key] is None: continue - key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]]) + key_results = sourcemaps_parser.get_original_trace( + key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key, + positions=[o["position"] for o in payloads[key]]) if key_results is None: all_exists = False continue diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py index c8918cace..df9dcef1d 100644 --- a/api/chalicelib/core/sourcemaps_parser.py +++ b/api/chalicelib/core/sourcemaps_parser.py @@ -11,14 +11,14 @@ if '%s' in SMR_URL: SMR_URL = SMR_URL % "smr" -def get_original_trace(key, positions): +def get_original_trace(key, positions, is_url=False): payload = { "key": key, "positions": positions, "padding": 5, - "bucket": config('sourcemaps_bucket') + "bucket": config('sourcemaps_bucket'), + "isURL": is_url } - try: r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5)) if r.status_code != 200: diff --git a/sourcemap-reader/package-lock.json b/sourcemap-reader/package-lock.json index eb866a247..ef9d38162 100644 --- a/sourcemap-reader/package-lock.json +++ b/sourcemap-reader/package-lock.json @@ -11,6 +11,7 @@ "dependencies": { "aws-sdk": "^2.1172.0", "express": "^4.18.1", + "request": "^2.88.2", "source-map": "^0.7.4" } }, @@ -26,11 +27,47 @@ "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "engines": 
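
For reference, the request body get_original_trace now sends to the sourcemap-reader with the new isURL flag. The endpoint, URL, and timeout below are illustrative stand-ins for the configured SMR_URL and sourcemapTimeout values:

    import requests

    payload = {
        "key": "https://example.com/static/app.js.map",  # S3 key, or a full URL
        "positions": [{"line": 12, "column": 40}],
        "padding": 5,
        "bucket": "sourcemaps",  # unused by the reader when isURL is true
        "isURL": True,
    }
    r = requests.post("http://smr:9000/sourcemaps", json=payload, timeout=5)
    frames = r.json() if r.status_code == 200 else None
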
{ + "node": ">=0.8" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "node_modules/aws-sdk": { "version": "2.1172.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1172.0.tgz", @@ -50,6 +87,19 @@ "node": ">= 10.0.0" } }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -69,6 +119,14 @@ } ] }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, "node_modules/body-parser": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", @@ -122,6 +180,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -154,6 +228,22 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, "node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -162,6 +252,14 @@ "ms": "2.0.0" } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + 
} + }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -179,6 +277,15 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -254,6 +361,29 @@ "node": ">= 0.10.0" } }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, "node_modules/finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -271,6 +401,27 @@ "node": ">= 0.8" } }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -305,6 +456,35 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==", + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": 
"sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "deprecated": "this library is no longer supported", + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -342,6 +522,20 @@ "node": ">= 0.8" } }, + "node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -371,11 +565,21 @@ "node": ">= 0.10" } }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + }, "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" + }, "node_modules/jmespath": { "version": "0.16.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", @@ -384,6 +588,40 @@ "node": ">= 0.6.0" } }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + }, + "node_modules/jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -448,6 +686,14 @@ "node": ">= 0.6" } }, + "node_modules/oauth-sign": { + "version": "0.9.0", + "resolved": 
"https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "engines": { + "node": "*" + } + }, "node_modules/object-inspect": { "version": "1.12.2", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", @@ -480,6 +726,11 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" + }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -492,6 +743,11 @@ "node": ">= 0.10" } }, + "node_modules/psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==" + }, "node_modules/punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", @@ -542,6 +798,54 @@ "node": ">= 0.8" } }, + "node_modules/request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/request/node_modules/qs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/request/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -639,6 +943,30 @@ "node": ">= 8" } }, + "node_modules/sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -655,6 +983,42 @@ "node": ">=0.6" } }, + "node_modules/tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tough-cookie/node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -675,6 +1039,22 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/uri-js/node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, "node_modules/url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", @@ -708,6 +1088,19 @@ "node": ">= 0.8" } }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": 
"1.0.2", + "extsprintf": "^1.2.0" + } + }, "node_modules/xml2js": { "version": "0.4.19", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", @@ -736,11 +1129,40 @@ "negotiator": "0.6.3" } }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, + "asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "aws-sdk": { "version": "2.1172.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1172.0.tgz", @@ -757,11 +1179,29 @@ "xml2js": "0.4.19" } }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" + }, + "aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "requires": { + "tweetnacl": "^0.14.3" + } + }, "body-parser": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", @@ -805,6 +1245,19 @@ "get-intrinsic": "^1.0.2" } }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, "content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -828,6 +1281,19 @@ "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "requires": { + "assert-plus": "^1.0.0" + } + }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -836,6 +1302,11 @@ "ms": "2.0.0" } }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, "depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -846,6 +1317,15 @@ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, "ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -909,6 +1389,26 @@ "vary": "~1.1.2" } }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, "finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -923,6 +1423,21 @@ "unpipe": "~1.0.0" } }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, "forwarded": { "version": 
"0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -948,6 +1463,28 @@ "has-symbols": "^1.0.3" } }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "requires": { + "assert-plus": "^1.0.0" + } + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" + }, + "har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "requires": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + } + }, "has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -973,6 +1510,16 @@ "toidentifier": "1.0.1" } }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, "iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -996,16 +1543,57 @@ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" + }, "jmespath": { "version": "0.16.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==" }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" + }, + "json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": 
"sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + }, + "jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -1049,6 +1637,11 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, "object-inspect": { "version": "1.12.2", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", @@ -1072,6 +1665,11 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" + }, "proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -1081,6 +1679,11 @@ "ipaddr.js": "1.9.1" } }, + "psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==" + }, "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", @@ -1115,6 +1718,45 @@ "unpipe": "1.0.0" } }, + "request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "dependencies": { + "qs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==" + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -1188,6 +1830,22 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", "integrity": 
"sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==" }, + "sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, "statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -1198,6 +1856,35 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "dependencies": { + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + } + } + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + }, "type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1212,6 +1899,21 @@ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "requires": { + "punycode": "^2.1.0" + }, + "dependencies": { + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + } + } + }, "url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", @@ -1236,6 +1938,16 @@ "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, "xml2js": { "version": "0.4.19", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", diff --git a/sourcemap-reader/package.json b/sourcemap-reader/package.json index 
ad24a481d..bf0a60d44 100644 --- a/sourcemap-reader/package.json +++ b/sourcemap-reader/package.json @@ -20,6 +20,7 @@ "dependencies": { "aws-sdk": "^2.1172.0", "express": "^4.18.1", + "request": "^2.88.2", "source-map": "^0.7.4" } } diff --git a/sourcemap-reader/run-dev.sh b/sourcemap-reader/run-dev.sh index 3c80807dd..19ba781ce 100755 --- a/sourcemap-reader/run-dev.sh +++ b/sourcemap-reader/run-dev.sh @@ -1,3 +1,3 @@ #!/bin/zsh -MAPPING_WASM=./mappings.wasm npm start \ No newline at end of file +MAPPING_WASM=./mappings.wasm PREFIX=/abc npm start \ No newline at end of file diff --git a/sourcemap-reader/servers/sourcemaps-handler.js b/sourcemap-reader/servers/sourcemaps-handler.js index 96e9efe01..86c93df7b 100644 --- a/sourcemap-reader/servers/sourcemaps-handler.js +++ b/sourcemap-reader/servers/sourcemaps-handler.js @@ -3,6 +3,7 @@ const fs = require('fs'); const sourceMap = require('source-map'); const AWS = require('aws-sdk'); const URL = require('url'); +const request = require('request'); const wasm = fs.readFileSync(process.env.MAPPING_WASM || '/mappings.wasm'); sourceMap.SourceMapConsumer.initialize({ "lib/mappings.wasm": wasm @@ -10,102 +11,141 @@ sourceMap.SourceMapConsumer.initialize({ console.log(`>sourceMap initialised using ${process.env.MAPPING_WASM || '/mappings.wasm'}`); -module.exports.sourcemapReader = async event => { - let s3; - - if (process.env.S3_HOST) { - s3 = new AWS.S3({ - endpoint: process.env.S3_HOST, - accessKeyId: process.env.S3_KEY, - secretAccessKey: process.env.S3_SECRET, - s3ForcePathStyle: true, // needed with minio? - signatureVersion: 'v4' - }); - } else { - s3 = new AWS.S3({ - 'AccessKeyID': process.env.aws_access_key_id, - 'SecretAccessKey': process.env.aws_secret_access_key, - 'Region': process.env.aws_region - }); - } - - var options = { - Bucket: event.bucket, - Key: event.key - }; - return new Promise(function (resolve, reject) { - const getObjectStart = Date.now(); - s3.getObject(options, (err, data) => { - if (err) { - console.error("[SR] Get S3 object failed"); - console.error(err); - return reject(err); - } - const getObjectEnd = Date.now(); - const fileSize = (data.ContentLength / 1024) / 1024; - options.fileSize = `${fileSize} Mb`; - const downloadTime = (getObjectEnd - getObjectStart) / 1000; - options.downloadTime = `${downloadTime} s`; - if (fileSize >= 3) { - console.log("[SR] large file:" + JSON.stringify(options)); - } - let sourcemap = data.Body.toString(); - - return new sourceMap.SourceMapConsumer(sourcemap) - .then(consumer => { - let results = []; - for (let i = 0; i < event.positions.length; i++) { - let original = consumer.originalPositionFor({ +function parseSourcemap(sourcemap, event, options, resolve, reject) { + const getObjectEnd = Date.now(); + try { + return new sourceMap.SourceMapConsumer(sourcemap) + .then(consumer => { + let results = []; + for (let i = 0; i < event.positions.length; i++) { + let original = consumer.originalPositionFor({ + line: event.positions[i].line, + column: event.positions[i].column + }); + let url = URL.parse(""); + let preview = []; + if (original.source) { + preview = consumer.sourceContentFor(original.source, true); + if (preview !== null) { + preview = preview.split("\n") + .map((line, i) => [i + 1, line]); + if (event.padding) { + let start = original.line < event.padding ? 
0 : original.line - event.padding; + preview = preview.slice(start, original.line + event.padding); + } + } else { + console.log(`[SR] source not found, null preview for: ${original.source}`); + preview = [] + } + url = URL.parse(original.source); + } else { + console.log("[SR] couldn't find original position of: " + JSON.stringify({ line: event.positions[i].line, column: event.positions[i].column - }); - let url = URL.parse(""); - let preview = []; - if (original.source) { - preview = consumer.sourceContentFor(original.source, true); - if (preview !== null) { - preview = preview.split("\n") - .map((line, i) => [i + 1, line]); - if (event.padding) { - let start = original.line < event.padding ? 0 : original.line - event.padding; - preview = preview.slice(start, original.line + event.padding); - } - } else { - console.log(`[SR] source not found, null preview for: ${original.source}`); - preview = [] - } - url = URL.parse(original.source); - } else { - console.log("[SR] couldn't find original position of: " + JSON.stringify({ - line: event.positions[i].line, - column: event.positions[i].column - })); - } - let result = { - "absPath": url.href, - "filename": url.pathname, - "lineNo": original.line, - "colNo": original.column, - "function": original.name, - "context": preview - }; - // console.log(result); - results.push(result); + })); } - consumer = undefined; + let result = { + "absPath": url.href, + "filename": url.pathname, + "lineNo": original.line, + "colNo": original.column, + "function": original.name, + "context": preview + }; + // console.log(result); + results.push(result); + } + consumer = undefined; - const sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000; - options.sourcemapProcessingTime = `${sourcemapProcessingTime} s` - if (fileSize >= 3 || sourcemapProcessingTime > 2) { - console.log("[SR] " + JSON.stringify(options)); + options.sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000; + options.sourcemapProcessingTimeUnit = 's'; + if (options.fileSize >= 3 || options.sourcemapProcessingTime > 2) { + console.log("[SR] " + JSON.stringify(options)); + } + // Use this code if you don't use the http event with the LAMBDA-PROXY integration + return resolve(results); + }) + .catch(err => { + return reject(err); + }) + .finally(() => { + sourcemap = undefined; + }); + } catch (err) { + reject(err); + } +} + +module.exports.sourcemapReader = async event => { + if (event.isURL) { + let options = { + URL: event.key + }; + return new Promise(function (resolve, reject) { + const getObjectStart = Date.now(); + return request.get(options.URL, (err, response, sourcemap) => { + if (err || response.statusCode !== 200) { + console.error("[SR] Getting file from URL failed"); + console.error("err:"); + console.error(err); + console.error("response:"); + if (err) { + return reject(err); } - // Use this code if you don't use the http event with the LAMBDA-PROXY integration - return resolve(results); - }) - .finally(() => { - sourcemap = undefined; - }) - + return reject(response); + } + const getObjectEnd = Date.now(); + options.fileSize = (response.headers['content-length'] / 1024) / 1024; + options.fileSizeUnit = 'Mb'; + options.downloadTime = (getObjectEnd - getObjectStart) / 1000; + options.downloadTimeUnit = 's'; + if (options.fileSize >= 3) { + console.log("[SR] large file:" + JSON.stringify(options)); + } + return parseSourcemap(sourcemap, event, options, resolve, reject); + }); }); - }); + } else { + let s3; + if (process.env.S3_HOST) { + s3 = new AWS.S3({ + endpoint: 
process.env.S3_HOST, + accessKeyId: process.env.S3_KEY, + secretAccessKey: process.env.S3_SECRET, + s3ForcePathStyle: true, // needed with minio? + signatureVersion: 'v4' + }); + } else { + s3 = new AWS.S3({ + 'AccessKeyID': process.env.aws_access_key_id, + 'SecretAccessKey': process.env.aws_secret_access_key, + 'Region': process.env.aws_region + }); + } + + let options = { + Bucket: event.bucket, + Key: event.key + }; + return new Promise(function (resolve, reject) { + const getObjectStart = Date.now(); + s3.getObject(options, (err, data) => { + if (err) { + console.error("[SR] Get S3 object failed"); + console.error(err); + return reject(err); + } + const getObjectEnd = Date.now(); + options.fileSize = (data.ContentLength / 1024) / 1024; + options.fileSizeUnit = 'Mb'; + options.downloadTime = (getObjectEnd - getObjectStart) / 1000; + options.downloadTimeUnit = 's'; + if (options.fileSize >= 3) { + console.log("[SR] large file:" + JSON.stringify(options)); + } + let sourcemap = data.Body.toString(); + return parseSourcemap(sourcemap, event, options, resolve, reject); + }); + }); + } }; \ No newline at end of file diff --git a/sourcemap-reader/servers/sourcemaps-server.js b/sourcemap-reader/servers/sourcemaps-server.js index 7ac6da992..6415cdd2c 100644 --- a/sourcemap-reader/servers/sourcemaps-server.js +++ b/sourcemap-reader/servers/sourcemaps-server.js @@ -20,8 +20,8 @@ router.post('/', (req, res) => { .catch((e) => { console.error("[SR] Something went wrong"); console.error(e); - res.statusCode(500); - res.end(e); + res.statusCode = 500; + res.end(e.toString()); }); }) From d350f29f54414a226c23951d2cddbefe7a1a7555 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 09:42:26 +0200 Subject: [PATCH 48/68] feat(chalice): fixed favorite session --- api/chalicelib/core/sessions_favorite.py | 21 +++++++++++---------- api/routers/core_dynamic.py | 12 ++++-------- ee/api/chalicelib/core/sessions_favorite.py | 12 ++++++------ ee/api/routers/core_dynamic.py | 11 ++++------- 4 files changed, 25 insertions(+), 31 deletions(-) diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 11992cf5b..4c456d385 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -1,38 +1,39 @@ +import schemas from chalicelib.core import sessions from chalicelib.utils import pg_client -def add_favorite_session(tenant_id, project_id, user_id, session_id): +def add_favorite_session(context: schemas.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) VALUES (%(userId)s,%(session_id)s);""", - {"userId": user_id, "session_id": session_id}) + {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, full_data=False, include_fav_viewed=True) -def remove_favorite_session(tenant_id, project_id, user_id, session_id): +def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s AND session_id = %(session_id)s;""", - {"userId": user_id, "session_id": session_id}) + {"userId": context.user_id, "session_id": session_id}) ) - return 
sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, full_data=False, include_fav_viewed=True) -def favorite_session(tenant_id, project_id, user_id, session_id): - if favorite_session_exists(user_id=user_id, session_id=session_id): - return remove_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, +def favorite_session(context: schemas.CurrentContext, project_id, session_id): + if favorite_session_exists(user_id=context.user_id, session_id=session_id): + return remove_favorite_session(context=context, project_id=project_id, session_id=session_id) - return add_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, session_id=session_id) + return add_favorite_session(context=context, project_id=project_id, session_id=session_id) def favorite_session_exists(user_id, session_id): diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 8e127dc29..58d29793a 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -208,8 +208,7 @@ def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, @app.get('/{projectId}/errors/{errorId}', tags=['errors']) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, - context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -260,9 +259,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, - full_data=True, user_id=context.user_id, include_fav_viewed=True, - group_metadata=True, live=False) + data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -318,12 +316,10 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) -@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"]) def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return { - "data": sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, - user_id=context.user_id, session_id=sessionId)} + "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index ec6d51555..3d6496424 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -5,7 +5,7 @@ from chalicelib.core import sessions, sessions_favorite_exp from chalicelib.utils import pg_client, s3_extra -def 
add_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): +def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -19,7 +19,7 @@ def add_favorite_session(project_id, session_id, context: schemas_ee.CurrentCont full_data=False, include_fav_viewed=True, context=context) -def remove_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): +def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -33,8 +33,8 @@ def remove_favorite_session(project_id, session_id, context: schemas_ee.CurrentC full_data=False, include_fav_viewed=True, context=context) -def favorite_session(tenant_id, project_id, user_id, session_id, context: schemas_ee.CurrentContext): - if favorite_session_exists(user_id=user_id, session_id=session_id): +def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): + if favorite_session_exists(user_id=context.user_id, session_id=session_id): key = str(session_id) try: s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_D_VALUE', default='default')) @@ -47,7 +47,7 @@ def favorite_session(tenant_id, project_id, user_id, session_id, context: schema except Exception as e: print(f"!!!Error while tagging: {key} to default") print(str(e)) - return remove_favorite_session(project_id=project_id, session_id=session_id, context=context) + return remove_favorite_session(context=context, project_id=project_id, session_id=session_id) key = str(session_id) try: s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault')) @@ -60,7 +60,7 @@ def favorite_session(tenant_id, project_id, user_id, session_id, context: schema except Exception as e: print(f"!!!Error while tagging: {key} to vault") print(str(e)) - return add_favorite_session(project_id=project_id, session_id=session_id, context=context) + return add_favorite_session(context=context, project_id=project_id, session_id=session_id) def favorite_session_exists(user_id, session_id): diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index facbaedd1..047ecaca4 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -269,8 +269,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, live=False, context=context) + data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -329,13 +329,10 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) -@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"], - dependencies=[OR_scope(Permissions.session_replay)]) def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas_ee.CurrentContext = Depends(OR_context)): return { - "data": 
sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, - user_id=context.user_id, session_id=sessionId, context=context)} + "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], @@ -443,7 +440,7 @@ def share_note_to_slack(projectId: int, noteId: int, webhookId: int, def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, - user_id=context.user_id) + user_id=context.user_id, data=data) if "errors" in data: return data return {'data': data} From 7c6ee551157bb01ba89c0077cd0ef7658c79c86d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 11:02:56 +0200 Subject: [PATCH 49/68] feat(chalice): changes --- api/chalicelib/utils/s3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index f3c580e90..3a86924ea 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -70,7 +70,7 @@ def get_file(source_bucket, source_key): ) except ClientError as ex: if ex.response['Error']['Code'] == 'NoSuchKey': - print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}') + print(f'======> No object found - returning None for bucket:{source_bucket} key:{source_key}') return None else: raise ex From 0c6d65da0cc6aa82e405d137587bedfdf845598b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 16:12:24 +0200 Subject: [PATCH 50/68] feat(chalice): fixed get sourcemap from server if not in bucket --- api/chalicelib/core/sourcemaps.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 4cf4f39c5..7e99b25c3 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -85,18 +85,20 @@ def get_traces_group(project_id, payload): payloads = {} all_exists = True for i, u in enumerate(frames): - file_url_exists = False + file_exists_in_bucket = False + file_exists_in_server = False file_url = u["absPath"] key = __get_key(project_id, file_url) # use filename instead? 
if key not in payloads: - file_exists = s3.exists(config('sourcemaps_bucket'), key) - if not file_exists: + file_exists_in_bucket = s3.exists(config('sourcemaps_bucket'), key) + if not file_exists_in_bucket: + print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 looking in server") if not file_url.endswith(".map"): file_url += '.map' - file_url_exists = url_exists(file_url) - file_exists = file_url_exists - all_exists = all_exists and file_exists - if not file_exists and not file_url_exists: + file_exists_in_server = url_exists(file_url) + file_exists_in_bucket = file_exists_in_server + all_exists = all_exists and file_exists_in_bucket + if not file_exists_in_bucket and not file_exists_in_server: print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 nor server") payloads[key] = None else: @@ -106,14 +108,15 @@ def get_traces_group(project_id, payload): if payloads[key] is not None: payloads[key].append({"resultIndex": i, "frame": dict(u), "URL": file_url, "position": {"line": u["lineNo"], "column": u["colNo"]}, - "isURL": not file_exists and file_url_exists}) + "isURL": not file_exists_in_bucket and file_exists_in_server}) for key in payloads.keys(): if payloads[key] is None: continue key_results = sourcemaps_parser.get_original_trace( key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key, - positions=[o["position"] for o in payloads[key]]) + positions=[o["position"] for o in payloads[key]], + is_url=payloads[key][0]["isURL"]) if key_results is None: all_exists = False continue @@ -143,10 +146,10 @@ def fetch_missed_contexts(frames): if frames[i]["frame"]["absPath"] in source_cache: file = source_cache[frames[i]["frame"]["absPath"]] else: - file = s3.get_file(config('js_cache_bucket'), get_js_cache_path(frames[i]["frame"]["absPath"])) + file_path = get_js_cache_path(frames[i]["frame"]["absPath"]) + file = s3.get_file(config('js_cache_bucket'), file_path) if file is None: - print( - f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {config('js_cache_bucket')}") + print(f"File {file_path} not found in {config('js_cache_bucket')}") source_cache[frames[i]["frame"]["absPath"]] = file if file is None: continue From daa3e5151101ce7577083fe0b244a6ce9deeb836 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 18:18:11 +0200 Subject: [PATCH 51/68] feat(chalice): enhanced get sourcemap from server if not in bucket --- api/chalicelib/core/sourcemaps.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 7e99b25c3..4ec973886 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -75,7 +75,7 @@ def format_payload(p, truncate_to_first=False): def url_exists(url): r = requests.head(url, allow_redirects=False) - return r.status_code == 200 # and r.get("Content-Type")=="application/json" + return r.status_code == 200 and r.headers.get("Content-Type") != "text/html" def get_traces_group(project_id, payload): @@ -89,6 +89,11 @@ def get_traces_group(project_id, payload): file_exists_in_server = False file_url = u["absPath"] key = __get_key(project_id, file_url) # use filename instead? 
+ if file_url and len(file_url) > 0 and not file_url[:file_url.index("?")].endswith(".js"): + print(f"{u['absPath']} sourcemap is not a JS file") + payloads[key] = None + continue + if key not in payloads: file_exists_in_bucket = s3.exists(config('sourcemaps_bucket'), key) if not file_exists_in_bucket: From c08e88a0a18a44534f5e37290a0f8bfe3cbdb4e9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 18:20:06 +0200 Subject: [PATCH 52/68] feat(sourcemaps-reader): changed logging --- sourcemap-reader/servers/sourcemaps-server.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sourcemap-reader/servers/sourcemaps-server.js b/sourcemap-reader/servers/sourcemaps-server.js index 6415cdd2c..da70f6b3b 100644 --- a/sourcemap-reader/servers/sourcemaps-server.js +++ b/sourcemap-reader/servers/sourcemaps-server.js @@ -9,7 +9,7 @@ router.post('/', (req, res) => { }); req.on('end', function () { data = JSON.parse(data); - console.log("[SR] Starting parser for: " + data.key); + console.log(`[SR] Starting parser for ${data.isURL ? "URL: " : "file: "}${data.key}`); // process.env = {...process.env, ...data.bucket_config}; handler.sourcemapReader(data) .then((results) => { From 38402f526f802a77ab66663c047262b8744fa51b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 19:04:07 +0200 Subject: [PATCH 53/68] feat(chalice): fixed get sourcemap from server if not in bucket --- api/chalicelib/core/sourcemaps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 4ec973886..12c2bd96a 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -89,7 +89,7 @@ def get_traces_group(project_id, payload): file_exists_in_server = False file_url = u["absPath"] key = __get_key(project_id, file_url) # use filename instead? - if file_url and len(file_url) > 0 and not file_url[:file_url.index("?")].endswith(".js"): + if file_url and len(file_url) > 0 and not file_url[:file_url.find("?")].endswith(".js"): print(f"{u['absPath']} sourcemap is not a JS file") payloads[key] = None continue From 9ff2cc81116d3e0c2aa1ee9998d8f6b52b4b7ee1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 19:27:16 +0200 Subject: [PATCH 54/68] feat(chalice): fixed get sourcemap from server if not in bucket --- api/chalicelib/core/sourcemaps.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 12c2bd96a..24a2c8220 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -89,7 +89,9 @@ def get_traces_group(project_id, payload): file_exists_in_server = False file_url = u["absPath"] key = __get_key(project_id, file_url) # use filename instead? 
- if file_url and len(file_url) > 0 and not file_url[:file_url.find("?")].endswith(".js"): + params_idx = file_url.find("?") + if file_url and len(file_url) > 0 \ + and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"): print(f"{u['absPath']} sourcemap is not a JS file") payloads[key] = None continue From ef456145e015e12ab629c3a278fc6fcecb5f9ae2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 20 Oct 2022 20:20:37 +0200 Subject: [PATCH 55/68] feat(chalice): fixed get sourcemap from server if not in bucket --- api/chalicelib/core/sourcemaps.py | 15 ++++++++------- api/chalicelib/utils/s3.py | 1 - 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 24a2c8220..31c8d884e 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -115,7 +115,7 @@ def get_traces_group(project_id, payload): if payloads[key] is not None: payloads[key].append({"resultIndex": i, "frame": dict(u), "URL": file_url, "position": {"line": u["lineNo"], "column": u["colNo"]}, - "isURL": not file_exists_in_bucket and file_exists_in_server}) + "isURL": file_exists_in_server}) for key in payloads.keys(): if payloads[key] is None: @@ -148,16 +148,17 @@ MAX_COLUMN_OFFSET = 60 def fetch_missed_contexts(frames): source_cache = {} for i in range(len(frames)): - if len(frames[i]["context"]) != 0: + if len(frames[i]["context"]) > 0: continue - if frames[i]["frame"]["absPath"] in source_cache: - file = source_cache[frames[i]["frame"]["absPath"]] + file_abs_path = frames[i]["frame"]["absPath"] + if file_abs_path in source_cache: + file = source_cache[file_abs_path] else: - file_path = get_js_cache_path(frames[i]["frame"]["absPath"]) + file_path = get_js_cache_path(file_abs_path) file = s3.get_file(config('js_cache_bucket'), file_path) if file is None: - print(f"File {file_path} not found in {config('js_cache_bucket')}") - source_cache[frames[i]["frame"]["absPath"]] = file + print(f"Missing abs_path: {file_abs_path}, file {file_path} not found in {config('js_cache_bucket')}") + source_cache[file_abs_path] = file if file is None: continue lines = file.split("\n") diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index f3c580e90..5458c8f14 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -70,7 +70,6 @@ def get_file(source_bucket, source_key): ) except ClientError as ex: if ex.response['Error']['Code'] == 'NoSuchKey': - print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}') return None else: raise ex From d1da53709f59cce1cbb9cf4fe6113b3ae68d28bf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 21 Oct 2022 15:26:07 +0200 Subject: [PATCH 56/68] feat(chalice): recreate cursor --- api/chalicelib/utils/pg_client.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 8b9001649..1aa846669 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -141,6 +141,13 @@ class PostgresClient: and not self.unlimited_query: postgreSQL_pool.putconn(self.connection) + def recreate_cursor(self): + try: + self.cursor.close() + except Exception as error: + logging.error("Error while closing cursor for recreation", error) + self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + def close(): pass From 801324204d3e63af6aafe898df163ce7a635d830 Mon Sep 17 00:00:00 2001 
From: Taha Yassine Kraiem Date: Fri, 21 Oct 2022 16:09:52 +0200 Subject: [PATCH 57/68] feat(chalice): change funnel cols-config --- ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 8 ++++++++ scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 63b3344b6..11d7e912e 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -38,4 +38,12 @@ CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); +UPDATE metrics +SET default_config=default_config || '{"col":4}' +WHERE metric_type = 'funnel'; + +UPDATE dashboard_widgets +SET config=config || '{"col":4}' +WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel'); + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index f2ad7e68e..6332a2b52 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -38,4 +38,12 @@ CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); +UPDATE metrics +SET default_config=default_config || '{"col":4}' +WHERE metric_type = 'funnel'; + +UPDATE dashboard_widgets +SET config=config || '{"col":4}' +WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel'); + COMMIT; \ No newline at end of file From afeccbbc26b067c9294677c62e0432b977ebcef8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 21 Oct 2022 16:36:10 +0200 Subject: [PATCH 58/68] feat(chalice): allow recreate cursor --- api/chalicelib/utils/pg_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 1aa846669..8da809418 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -117,6 +117,7 @@ class PostgresClient: def __enter__(self): if self.cursor is None: self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + self.cursor.recreate = self.recreate_cursor return self.cursor def __exit__(self, *args): From a93ac392f0e0441198877ad9b96d06714635983b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 21 Oct 2022 16:58:24 +0200 Subject: [PATCH 59/68] feat(chalice): handle Github bad credentials --- api/chalicelib/utils/github_client_v3.py | 7 +++++-- api/chalicelib/utils/jira_client.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/utils/github_client_v3.py b/api/chalicelib/utils/github_client_v3.py index 692e878ae..650aeb4fe 100644 --- a/api/chalicelib/utils/github_client_v3.py +++ b/api/chalicelib/utils/github_client_v3.py @@ -1,6 +1,9 @@ import requests from datetime import datetime +from fastapi import HTTPException +from starlette import status + class github_formatters: @@ -120,9 +123,9 @@ class githubV3Request: pages = get_response_links(response) result = response.json() if response.status_code != 200: - print("!-------- 
error") + print(f"=>GITHUB Exception") print(result) - raise Exception(result["message"]) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"GITHUB: {result['message']}") if isinstance(result, dict): return result results += result diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py index 4306cfab2..a820d4aa9 100644 --- a/api/chalicelib/utils/jira_client.py +++ b/api/chalicelib/utils/jira_client.py @@ -35,7 +35,7 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_projects() - print(f"=>Exception {e.text}") + print(f"=>JIRA Exception {e.text}") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") projects_dict_list = [] for project in projects: From dafb88e54283141ea15287dc98579e1f3155997d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 21 Oct 2022 17:42:45 +0200 Subject: [PATCH 60/68] feat(chalice): changed recreate cursor for pg_client --- api/chalicelib/utils/pg_client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 8da809418..b11ba4079 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -147,7 +147,8 @@ class PostgresClient: self.cursor.close() except Exception as error: logging.error("Error while closing cursor for recreation", error) - self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + self.cursor = None + self.__enter__() def close(): From 218b0d92995239ce15b6c6a3cc347efdd7a7045f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 16:42:36 +0200 Subject: [PATCH 61/68] feat(chalice): graceful shutdown --- api/app.py | 61 +++++++++++++++++++------------ api/app_alerts.py | 36 +++++++++++++----- api/chalicelib/utils/pg_client.py | 22 +++++++---- 3 files changed, 78 insertions(+), 41 deletions(-) diff --git a/api/app.py b/api/app.py index 974d7d8d9..4d5080ced 100644 --- a/api/app.py +++ b/api/app.py @@ -20,22 +20,14 @@ app.add_middleware(GZipMiddleware, minimum_size=1000) @app.middleware('http') async def or_middleware(request: Request, call_next): - global OR_SESSION_TOKEN - OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid')) - - try: - if helper.TRACK_TIME: - import time - now = int(time.time() * 1000) - response: StreamingResponse = await call_next(request) - if helper.TRACK_TIME: - now = int(time.time() * 1000) - now - if now > 500: - print(f"Execution time: {now} ms") - except Exception as e: - pg_client.close() - raise e - pg_client.close() + if helper.TRACK_TIME: + import time + now = int(time.time() * 1000) + response: StreamingResponse = await call_next(request) + if helper.TRACK_TIME: + now = int(time.time() * 1000) - now + if now > 500: + logging.info(f"Execution time: {now} ms") return response @@ -61,14 +53,35 @@ app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) -Schedule = AsyncIOScheduler() -Schedule.start() +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) +app.schedule = AsyncIOScheduler() -for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: - Schedule.add_job(id=job["func"].__name__, **job) -for job in Schedule.get_jobs(): - print({"Name": str(job.id), 
"Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) +@app.on_event("startup") +async def startup(): + await pg_client.init() + app.schedule.start() -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) -logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO)) + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + +@app.on_event("shutdown") +async def shutdown(): + print(">>>>> shutting down") + app.schedule.shutdown(wait=False) + await pg_client.terminate() + + +@app.get('/private/suicide', tags=["private"]) +async def stop_server(): + import os, signal + os.kill(1, signal.SIGTERM) diff --git a/api/app_alerts.py b/api/app_alerts.py index 4e05ab1a8..0d8ad0f04 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -3,11 +3,12 @@ import logging from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config from fastapi import FastAPI +from chalicelib.utils import pg_client from chalicelib.core import alerts_processor app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) -print("============= ALERTS =============") +logging.info("============= ALERTS =============") @app.get("/") @@ -16,12 +17,29 @@ async def root(): app.schedule = AsyncIOScheduler() -app.schedule.start() -app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval", - "minutes": config("ALERTS_INTERVAL", cast=int, default=5), - "misfire_grace_time": 20}) -for job in app.schedule.get_jobs(): - print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) -logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO)) +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) +app.schedule = AsyncIOScheduler() + + +@app.on_event("startup") +async def startup(): + await pg_client.init() + app.schedule.start() + app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval", + "minutes": config("ALERTS_INTERVAL", cast=int, default=5), + "misfire_grace_time": 20}) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + +@app.on_event("shutdown") +async def shutdown(): + app.schedule.shutdown(wait=False) + await pg_client.terminate() diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 4ff1efe4b..1bfb7aa09 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -20,8 +20,6 @@ PG_CONFIG = dict(_PG_CONFIG) if config("PG_TIMEOUT", cast=int, default=0) > 0: PG_CONFIG["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int) * 1000}" -logging.info(f">PG_POOL:{config('PG_POOL', default=None)}") - class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool): def __init__(self, minconn, maxconn, *args, **kwargs): @@ -83,10 +81,6 @@ def make_pool(): raise error -if 
config('PG_POOL', cast=bool, default=True): - make_pool() - - class PostgresClient: connection = None cursor = None @@ -151,5 +145,17 @@ class PostgresClient: self.__enter__() -def close(): - pass +async def init(): + logging.info(f">PG_POOL:{config('PG_POOL', default=None)}") + if config('PG_POOL', cast=bool, default=True): + make_pool() + + +async def terminate(): + global postgreSQL_pool + if postgreSQL_pool is not None: + try: + postgreSQL_pool.closeall() + logging.info("Closed all connections to PostgreSQL") + except (Exception, psycopg2.DatabaseError) as error: + logging.error("Error while closing all connections to PostgreSQL", error) From 1935988433a18d900e1b295c0518a6ab36019365 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 16:42:58 +0200 Subject: [PATCH 62/68] feat(DB): changed integrations trigger --- .../helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 14 ++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 4 +++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 6332a2b52..2671593ae 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -46,4 +46,18 @@ UPDATE dashboard_widgets SET config=config || '{"col":4}' WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel'); +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM pg_notify('integration', + jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options', + null)::text); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 9fe80c718..645696eeb 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -60,7 +60,9 @@ CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS $$ BEGIN IF NEW IS NULL THEN - PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + PERFORM pg_notify('integration', + jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options', + null)::text); ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN PERFORM pg_notify('integration', row_to_json(NEW)::text); END IF; RETURN NULL; From bb1d0cf1b6522d48fdc5b2a405bbd65e374cf921 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 16:47:33 +0200 Subject: [PATCH 63/68] feat(chalice): graceful shutdown --- api/app.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/app.py b/api/app.py index 4d5080ced..2625e8f64 100644 --- a/api/app.py +++ b/api/app.py @@ -81,7 +81,8 @@ async def shutdown(): await pg_client.terminate() -@app.get('/private/suicide', tags=["private"]) +@app.get('/private/shutdown', tags=["private"]) async def stop_server(): + logging.info("Requested shutdown") await shutdown() import os, signal os.kill(1, signal.SIGTERM) From eb7c2b15dba5b9d22175ddeac89d0613073ff877 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 17:01:52 +0200 Subject: [PATCH 64/68] feat(chalice): graceful shutdown feat(DB): changed integrations trigger --- api/app.py | 4 +- api/app_alerts.py | 10
+++ ee/api/app.py | 67 ++++++++++++------- .../db/init_dbs/postgresql/1.8.2/1.8.2.sql | 14 ++++ .../db/init_dbs/postgresql/init_schema.sql | 3 +- 5 files changed, 71 insertions(+), 27 deletions(-) diff --git a/api/app.py b/api/app.py index 2625e8f64..26342484c 100644 --- a/api/app.py +++ b/api/app.py @@ -63,6 +63,7 @@ app.schedule = AsyncIOScheduler() @app.on_event("startup") async def startup(): + logging.info(">>>>> starting up <<<<<") await pg_client.init() app.schedule.start() @@ -76,13 +77,14 @@ async def startup(): @app.on_event("shutdown") async def shutdown(): - print(">>>>> shutting down") + logging.info(">>>>> shutting down <<<<<") app.schedule.shutdown(wait=False) await pg_client.terminate() @app.get('/private/shutdown', tags=["private"]) async def stop_server(): + logging.info("Requested shutdown") await shutdown() import os, signal os.kill(1, signal.SIGTERM) diff --git a/api/app_alerts.py b/api/app_alerts.py index 0d8ad0f04..7107423de 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -28,6 +28,7 @@ app.schedule = AsyncIOScheduler() @app.on_event("startup") async def startup(): + logging.info(">>>>> starting up <<<<<") await pg_client.init() app.schedule.start() app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval", @@ -41,5 +42,14 @@ async def startup(): @app.on_event("shutdown") async def shutdown(): + logging.info(">>>>> shutting down <<<<<") app.schedule.shutdown(wait=False) await pg_client.terminate() + + +@app.get('/private/shutdown', tags=["private"]) +async def stop_server(): + logging.info("Requested shutdown") + await shutdown() + import os, signal + os.kill(1, signal.SIGTERM) diff --git a/ee/api/app.py b/ee/api/app.py index ad9310f95..055706792 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -9,6 +9,7 @@ from fastapi.middleware.gzip import GZipMiddleware from starlette import status from starlette.responses import StreamingResponse, JSONResponse +from chalicelib.core import traces from chalicelib.utils import helper from chalicelib.utils import pg_client from routers import core, core_dynamic, ee, saml @@ -27,21 +28,14 @@ async def or_middleware(request: Request, call_next): if not unlock.is_valid(): return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN) - global OR_SESSION_TOKEN - OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid')) - try: - if helper.TRACK_TIME: - import time - now = int(time.time() * 1000) - response: StreamingResponse = await call_next(request) - if helper.TRACK_TIME: - now = int(time.time() * 1000) - now - if now > 500: - print(f"Execution time: {now} ms") - except Exception as e: - pg_client.close() - raise e - pg_client.close() + if helper.TRACK_TIME: + import time + now = int(time.time() * 1000) + response: StreamingResponse = await call_next(request) + if helper.TRACK_TIME: + now = int(time.time() * 1000) - now + if now > 500: + logging.info(f"Execution time: {now} ms") return response @@ -74,18 +68,41 @@ app.include_router(insights.app) app.include_router(v1_api.app_apikey) app.include_router(v1_api_ee.app_apikey) -app.queue_system = queue.Queue() +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) app.schedule = AsyncIOScheduler() -app.schedule.start() +app.queue_system = queue.Queue() -for job in core_crons.cron_jobs + 
core_dynamic_crons.cron_jobs: - app.schedule.add_job(id=job["func"].__name__, **job) -from chalicelib.core import traces -app.schedule.add_job(id="trace_worker", **traces.cron_jobs[0]) +@app.on_event("startup") +async def startup(): + logging.info(">>>>> starting up <<<<<") + await pg_client.init() + app.schedule.start() -for job in app.schedule.get_jobs(): - print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + +@app.on_event("shutdown") +async def shutdown(): + logging.info(">>>>> shutting down <<<<<") + app.schedule.shutdown(wait=True) + await traces.process_traces_queue() + await pg_client.terminate() + + +@app.get('/private/shutdown', tags=["private"]) +async def stop_server(): + logging.info("Requested shutdown") + await shutdown() + import os, signal + os.kill(1, signal.SIGTERM) -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) -logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO)) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 11d7e912e..b796cc6f3 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -46,4 +46,18 @@ UPDATE dashboard_widgets SET config=config || '{"col":4}' WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel'); +CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS +$$ +BEGIN + IF NEW IS NULL THEN + PERFORM pg_notify('integration', + jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options', + null)::text); + ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN + PERFORM pg_notify('integration', row_to_json(NEW)::text); + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index cd8f88cb1..c3a732fc8 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -61,7 +61,8 @@ $$ BEGIN IF NEW IS NULL THEN PERFORM pg_notify('integration', - (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text)); + jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options', + null)::text); ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN PERFORM pg_notify('integration', row_to_json(NEW)::text); END IF; From 2a514da536fd889a499920137683ae3aa9477872 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 17:29:03 +0200 Subject: [PATCH 65/68] feat(sourcemaps-reader): support shutdown feat(assist): support shutdown feat(peers): support shutdown --- ee/utilities/server.js | 17 +++++++++++++++++ sourcemap-reader/server.js | 10 +++++++++- utilities/server.js | 10 +++++++++- 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 48799e279..5ab424ad4 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -112,4 +112,21 @@ if (process.env.uws !== "true") { // process.exit(1); 
}); module.exports = {uapp}; } + +if (process.env.uws !== "true") { + wsapp.get('/private/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } + ); +} else { + uapp.get('/private/shutdown', (res, req) => { + console.log("Requested shutdown"); + res.writeStatus('200 OK').end("ok!"); + process.kill(1, "SIGTERM"); + } + ); } \ No newline at end of file diff --git a/sourcemap-reader/server.js b/sourcemap-reader/server.js index b58128992..02f63475b 100644 --- a/sourcemap-reader/server.js +++ b/sourcemap-reader/server.js @@ -23,4 +23,12 @@ const server = app.listen(PORT, HOST, () => { console.log(`SR App listening on http://${HOST}:${PORT}`); console.log('Press Ctrl+C to quit.'); }); -module.exports = {server}; \ No newline at end of file +module.exports = {server}; + +app.get('/private/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); \ No newline at end of file diff --git a/utilities/server.js b/utilities/server.js index 0b300f47b..d71aca65d 100644 --- a/utilities/server.js +++ b/utilities/server.js @@ -31,4 +31,12 @@ const wsserver = wsapp.listen(PORT, HOST, () => { }); wsapp.enable('trust proxy'); socket.start(wsserver); -module.exports = {wsserver}; \ No newline at end of file +module.exports = {wsserver}; + +wsapp.get('/private/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); \ No newline at end of file From 5cf294799120b11f2674919fb2d322bd29d0fddf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 17:29:09 +0200 Subject: [PATCH 66/68] feat(sourcemaps-reader): support shutdown feat(assist): support shutdown feat(peers): support shutdown --- peers/server.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/peers/server.js b/peers/server.js index 0eb99a9de..fcf78611b 100644 --- a/peers/server.js +++ b/peers/server.js @@ -40,4 +40,12 @@ process.on('uncaughtException', err => { console.log(`Uncaught Exception: ${err.message}`); debug && console.log(err.stack); // process.exit(1); -}); \ No newline at end of file +}); + +app.get('/private/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); \ No newline at end of file From e4befed9b677cd8087acee6a56c6d1e971bf5f10 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 24 Oct 2022 17:37:49 +0200 Subject: [PATCH 67/68] feat(peers): fixed assertion --- peers/server.js | 1 + 1 file changed, 1 insertion(+) diff --git a/peers/server.js b/peers/server.js index fcf78611b..e553513ad 100644 --- a/peers/server.js +++ b/peers/server.js @@ -1,5 +1,6 @@ const dumps = require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); +const assert = require('assert').strict; const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server'); const express = require('express'); const {ExpressPeerServer} = require('peer'); From 5e312e5ec321a0313e32fbe745e6d5f1ebe72483 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 25 Oct 2022 13:30:19 +0200 Subject: [PATCH 68/68] feat(sourcemaps-reader): standalone feat(chalice): use sourcemaps reader standalone --- api/Dockerfile | 10 ++------ api/build.sh | 2 -- api/entrypoint.sh | 4 +--- api/env.default | 2 +- ee/api/Dockerfile | 10 ++------
ee/api/entrypoint.sh | 4 +--- ee/api/env.default | 2 +- sourcemap-reader/Dockerfile | 23 ++++++++++++++++++ sourcemap-reader/build.sh | 44 ++++++++++++++++++++++++++++++++++ sourcemap-reader/entrypoint.sh | 2 ++ 10 files changed, 77 insertions(+), 26 deletions(-) create mode 100644 sourcemap-reader/Dockerfile create mode 100644 sourcemap-reader/build.sh create mode 100755 sourcemap-reader/entrypoint.sh diff --git a/api/Dockerfile b/api/Dockerfile index d02dda8ba..637293c3a 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,28 +1,22 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -RUN apk add --no-cache build-base nodejs npm tini +RUN apk add --no-cache build-base tini ARG envarg # Add Tini # Startup daemon ENV SOURCE_MAP_VERSION=0.7.4 \ APP_NAME=chalice \ LISTEN_PORT=8000 \ - MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \ ENTERPRISE_BUILD=${envarg} -ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm - WORKDIR /work_tmp COPY requirements.txt /work_tmp/requirements.txt RUN pip install --no-cache-dir --upgrade -r /work_tmp/requirements.txt -COPY sourcemap-reader/*.json /work_tmp/ -RUN cd /work_tmp && npm install WORKDIR /work COPY . . -RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \ - && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM} +RUN mv env.default .env RUN adduser -u 1001 openreplay -D USER 1001 diff --git a/api/build.sh b/api/build.sh index 46c54ab2e..58689f85d 100644 --- a/api/build.sh +++ b/api/build.sh @@ -20,8 +20,6 @@ check_prereq() { function build_api(){ cp -R ../api ../_api cd ../_api - cp -R ../utilities/utils ../sourcemap-reader/. - cp -R ../sourcemap-reader . tag="" # Copy enterprise code [[ $1 == "ee" ]] && { diff --git a/api/entrypoint.sh b/api/entrypoint.sh index 7342426c2..e140268ef 100755 --- a/api/entrypoint.sh +++ b/api/entrypoint.sh @@ -1,5 +1,3 @@ #!/bin/sh -cd sourcemap-reader -nohup npm start & -cd .. + uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers diff --git a/api/env.default b/api/env.default index 2dcafbc8a..64243ed0b 100644 --- a/api/env.default +++ b/api/env.default @@ -38,7 +38,7 @@ PG_POOL=true sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps +sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps STAGE=default-foss version_number=1.4.0 FS_DIR=/mnt/efs diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index 1b23fc6d4..4f6b739ba 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,26 +1,20 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" -RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec nodejs npm tini +RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec tini ARG envarg ENV SOURCE_MAP_VERSION=0.7.4 \ APP_NAME=chalice \ LISTEN_PORT=8000 \ - MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \ ENTERPRISE_BUILD=${envarg} -ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm - WORKDIR /work_tmp COPY requirements.txt /work_tmp/requirements.txt RUN pip install --no-cache-dir --upgrade -r /work_tmp/requirements.txt -COPY sourcemap-reader/*.json /work_tmp/ -RUN cd /work_tmp && npm install WORKDIR /work COPY . . 
-RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \ - && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM} +RUN mv env.default .env RUN adduser -u 1001 openreplay -D USER 1001 diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh index b5997ee3b..ebd646a7d 100755 --- a/ee/api/entrypoint.sh +++ b/ee/api/entrypoint.sh @@ -1,7 +1,5 @@ #!/bin/sh sh env_vars.sh source /tmp/.env.override -cd sourcemap-reader -nohup npm start & -cd .. + uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers diff --git a/ee/api/env.default b/ee/api/env.default index 673454853..68cbafdce 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -48,7 +48,7 @@ PG_POOL=true sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps +sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps stage=default-ee version_number=1.0.0 FS_DIR=/mnt/efs diff --git a/sourcemap-reader/Dockerfile b/sourcemap-reader/Dockerfile new file mode 100644 index 000000000..6b31970d6 --- /dev/null +++ b/sourcemap-reader/Dockerfile @@ -0,0 +1,23 @@ +FROM node:18-alpine +LABEL Maintainer="KRAIEM Taha Yassine" +RUN apk add --no-cache tini + +ARG envarg +ENV SOURCE_MAP_VERSION=0.7.4 \ + APP_NAME=sourcemaps-reader \ + LISTEN_PORT=9000 \ + MAPPING_WASM=/work/mappings.wasm \ + ENTERPRISE_BUILD=${envarg} + +ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm ${MAPPING_WASM} +WORKDIR /work +COPY *.json ./ +RUN npm install && chmod 644 ${MAPPING_WASM} + +COPY . . + +RUN adduser -u 1001 openreplay -D +USER 1001 + +ENTRYPOINT ["/sbin/tini", "--"] +CMD ./entrypoint.sh diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh new file mode 100644 index 000000000..8d679622e --- /dev/null +++ b/sourcemap-reader/build.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# Script to build sourcemaps-reader module +# flags to accept: +# envarg: build for enterprise edition. +# Default will be OSS build. + +# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh + +git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +envarg="default-foss" +check_prereq() { + which docker || { + echo "Docker not installed, please install docker." + exit 1 + } + return +} + +function build_api(){ + cp -R ../sourcemap-reader ../_smr + cd ../_smr + cp -R ../utilities/utils . + tag="" + # Copy enterprise code + [[ $1 == "ee" ]] && { + cp -rf ../ee/sourcemap-reader/* ./ + envarg="default-ee" + tag="ee-" + } + docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/sourcemaps-reader:${git_sha1} . + cd ../sourcemap-reader + rm -rf ../_smr + [[ $PUSH_IMAGE -eq 1 ]] && { + docker push ${DOCKER_REPO:-'local'}/sourcemaps-reader:${git_sha1} + docker tag ${DOCKER_REPO:-'local'}/sourcemaps-reader:${git_sha1} ${DOCKER_REPO:-'local'}/sourcemaps-reader:${tag}latest + docker push ${DOCKER_REPO:-'local'}/sourcemaps-reader:${tag}latest + } + echo "sourcemaps-reader docker build completed" +} + check_prereq +build_api $1 +echo build_complete \ No newline at end of file diff --git a/sourcemap-reader/entrypoint.sh b/sourcemap-reader/entrypoint.sh new file mode 100755 index 000000000..d1bcb4adf --- /dev/null +++ b/sourcemap-reader/entrypoint.sh @@ -0,0 +1,2 @@ +#!/bin/sh +npm start \ No newline at end of file
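
Note on the startup/shutdown wiring that patches 61-64 above converge on: the PostgreSQL pool is opened (pg_client.init) before the scheduler starts, teardown runs in reverse order (stop the scheduler, then drain the pool), and /private/shutdown drives the same path as a SIGTERM to PID 1. A minimal sketch of that pattern, assuming FastAPI and APScheduler as in the patches; the commented pg_client calls and the "heartbeat" job are illustrative stand-ins, not part of this series:

    import logging
    import os
    import signal

    from apscheduler.schedulers.asyncio import AsyncIOScheduler
    from fastapi import FastAPI

    app = FastAPI()
    app.schedule = AsyncIOScheduler()


    def heartbeat():
        # Illustrative job; the real apps register jobs from core_crons and friends.
        logging.info("tick")


    @app.on_event("startup")
    async def startup():
        logging.info(">>>>> starting up <<<<<")
        # await pg_client.init()  # open the PG pool before any job can fire
        app.schedule.start()
        app.schedule.add_job(id="heartbeat", func=heartbeat, trigger="interval",
                             minutes=5, misfire_grace_time=20)


    @app.on_event("shutdown")
    async def shutdown():
        logging.info(">>>>> shutting down <<<<<")
        app.schedule.shutdown(wait=False)  # stop scheduled jobs first
        # await pg_client.terminate()     # then close all pooled connections


    @app.get('/private/shutdown', tags=["private"])
    async def stop_server():
        logging.info("Requested shutdown")
        await shutdown()
        os.kill(1, signal.SIGTERM)  # the service runs as PID 1 inside its container

The EE variant in PATCH 64 additionally waits for running jobs (shutdown(wait=True)) and flushes the trace queue (await traces.process_traces_queue()) before terminating the pool, and the Node services in patches 65-66 expose the same /private/shutdown endpoint, likewise ending in process.kill(1, "SIGTERM").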