diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py
index ba334e101..00ab6028b 100644
--- a/api/chalicelib/core/projects.py
+++ b/api/chalicelib/core/projects.py
@@ -125,7 +125,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
                 {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
                 {',s.gdpr' if include_gdpr else ''}
                 FROM public.projects AS s
-                where s.project_id =%(project_id)s
+                WHERE s.project_id =%(project_id)s
                   AND s.deleted_at IS NULL
                 LIMIT 1;""",
                {"project_id": project_id})
@@ -146,7 +146,7 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu
                 {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
                 {',s.gdpr' if include_gdpr else ''}
                 FROM public.projects AS s
-                where s.project_key =%(project_key)s
+                WHERE s.project_key =%(project_key)s
                   AND s.deleted_at IS NULL
                 LIMIT 1;""",
                {"project_key": project_key})
@@ -199,7 +199,7 @@ def count_by_tenant(tenant_id):
                 SELECT count(s.project_id)
                 FROM public.projects AS s
-                where s.deleted_at IS NULL;""")
+                WHERE s.deleted_at IS NULL;""")
         return cur.fetchone()["count"]
@@ -210,7 +210,7 @@ def get_gdpr(project_id):
                 SELECT gdpr
                 FROM public.projects AS s
-                where s.project_id =%(project_id)s
+                WHERE s.project_id =%(project_id)s
                   AND s.deleted_at IS NULL;""",
                {"project_id": project_id})
         )
@@ -239,7 +239,7 @@ def get_internal_project_id(project_key):
             cur.mogrify("""\
                 SELECT project_id
                 FROM public.projects
-                where project_key =%(project_key)s AND deleted_at ISNULL;""",
+                WHERE project_key =%(project_key)s AND deleted_at ISNULL;""",
                {"project_key": project_key})
         )
         row = cur.fetchone()
@@ -252,7 +252,7 @@ def get_project_key(project_id):
             cur.mogrify("""\
                 SELECT project_key
                 FROM public.projects
-                where project_id =%(project_id)s AND deleted_at ISNULL;""",
+                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                {"project_id": project_id})
         )
         project = cur.fetchone()
@@ -266,7 +266,7 @@ def get_capture_status(project_id):
                 SELECT sample_rate AS rate, sample_rate=100 AS capture_all
                 FROM public.projects
-                where project_id =%(project_id)s AND deleted_at ISNULL;""",
+                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                {"project_id": project_id})
         )
         return helper.dict_to_camel_case(cur.fetchone())
diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index d31b8aea0..7d77bf53d 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -298,7 +298,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
     transitions ::: if transited from the first stage to the last
                         - 1
                     else
                         - 0
-    errors ::: a dictionary where the keys are all unique issues (currently context-wise)
+    errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
                the values are lists
                     if an issue happened between the first stage to the last
                         - 1
                     else
                         - 0
diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py
index d0b3e2adc..9dbee0e15 100644
--- a/api/chalicelib/core/webhook.py
+++ b/api/chalicelib/core/webhook.py
@@ -10,10 +10,9 @@ def get_by_id(webhook_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
-                        SELECT
-                               w.*
+                        SELECT w.*
                         FROM public.webhooks AS w
-                        where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
+                        WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
                         {"webhook_id": webhook_id})
         )
         w = helper.dict_to_camel_case(cur.fetchone())
@@ -25,11 +24,10 @@ def get_by_id(webhook_id):
 def get(tenant_id, webhook_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
-            cur.mogrify("""\
-                        SELECT
-                               webhook_id AS integration_id, webhook_id AS id, w.*
-                        FROM public.webhooks AS w
-                        where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
+            cur.mogrify("""SELECT w.*
+                           FROM public.webhooks AS w
+                           WHERE w.webhook_id =%(webhook_id)s
+                             AND deleted_at ISNULL AND type='webhook';""",
                         {"webhook_id": webhook_id})
         )
         w = helper.dict_to_camel_case(cur.fetchone())
@@ -41,11 +39,9 @@ def get_by_type(tenant_id, webhook_type):
     with pg_client.PostgresClient() as cur:
         cur.execute(
-            cur.mogrify("""\
-                        SELECT
-                               w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
-                        FROM public.webhooks AS w
-                        WHERE w.type =%(type)s AND deleted_at ISNULL;""",
+            cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
+                           FROM public.webhooks AS w
+                           WHERE w.type =%(type)s AND deleted_at ISNULL;""",
                         {"type": webhook_type})
         )
         webhooks = helper.list_to_camel_case(cur.fetchall())
@@ -56,22 +52,12 @@ def get_by_tenant(tenant_id, replace_none=False):
     with pg_client.PostgresClient() as cur:
-        cur.execute("""\
-                    SELECT
-                           webhook_id AS integration_id, webhook_id AS id, w.*
-                    FROM public.webhooks AS w
-                    WHERE deleted_at ISNULL;"""
-                    )
+        cur.execute("""SELECT w.*
+                       FROM public.webhooks AS w
+                       WHERE deleted_at ISNULL AND type='webhook';""")
         all = helper.list_to_camel_case(cur.fetchall())
-        if replace_none:
-            for w in all:
-                w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
-                for k in w.keys():
-                    if w[k] is None:
-                        w[k] = ''
-        else:
-            for w in all:
-                w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
+        for w in all:
+            w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
         return all
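After the community api/chalicelib/core/webhook.py changes above, get and get_by_tenant return only rows whose type is 'webhook', and the replace_none branch is gone (the parameter is still accepted but no longer rewrites NULL columns to ''). A minimal usage sketch of the post-change behaviour, not part of the diff; it assumes the patched module is importable with a configured Postgres connection, and the tenant id and printed fields are invented for illustration:

# Illustrative sketch only (not part of the diff): exercises the patched
# community chalicelib/core/webhook.py; values below are hypothetical.
from chalicelib.core import webhook

# Only rows with type='webhook' are returned now; slack/email/msteams rows are filtered out.
hooks = webhook.get_by_tenant(tenant_id=1, replace_none=True)
for h in hooks:
    # createdAt is always converted to an epoch-millisecond timestamp, while
    # replace_none no longer substitutes '' for NULLs, so authHeader may be None.
    print(h["webhookId"], h["endpoint"], h["authHeader"], h["createdAt"])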
diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py
index c5ab7c800..a8aae009f 100644
--- a/ee/api/chalicelib/core/projects.py
+++ b/ee/api/chalicelib/core/projects.py
@@ -137,7 +137,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
                 {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
                 {',s.gdpr' if include_gdpr else ''}
                 FROM public.projects AS s
-                where s.tenant_id =%(tenant_id)s
+                WHERE s.tenant_id =%(tenant_id)s
                   AND s.project_id =%(project_id)s
                   AND s.deleted_at IS NULL
                 LIMIT 1;""",
@@ -168,7 +168,7 @@ def is_authorized(project_id, tenant_id, user_id=None):
                 SELECT project_id
                 FROM public.projects AS s
                 {role_query if user_id is not None else ""}
-                where s.tenant_id =%(tenant_id)s
+                WHERE s.tenant_id =%(tenant_id)s
                   AND s.project_id =%(project_id)s
                   AND s.deleted_at IS NULL
                 LIMIT 1;""",
@@ -234,7 +234,7 @@ def get_gdpr(project_id):
                 SELECT gdpr
                 FROM public.projects AS s
-                where s.project_id =%(project_id)s
+                WHERE s.project_id =%(project_id)s
                   AND s.deleted_at IS NULL;""",
                {"project_id": project_id})
         )
@@ -263,7 +263,7 @@ def get_internal_project_id(project_key):
             cur.mogrify("""\
                 SELECT project_id
                 FROM public.projects
-                where project_key =%(project_key)s AND deleted_at ISNULL;""",
+                WHERE project_key =%(project_key)s AND deleted_at ISNULL;""",
                {"project_key": project_key})
         )
         row = cur.fetchone()
@@ -276,7 +276,7 @@ def get_project_key(project_id):
             cur.mogrify("""\
                 SELECT project_key
                 FROM public.projects
-                where project_id =%(project_id)s AND deleted_at ISNULL;""",
+                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                {"project_id": project_id})
         )
         project = cur.fetchone()
@@ -290,7 +290,7 @@ def get_capture_status(project_id):
                 SELECT sample_rate AS rate, sample_rate=100 AS capture_all
                 FROM public.projects
-                where project_id =%(project_id)s AND deleted_at ISNULL;""",
+                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                {"project_id": project_id})
         )
         return helper.dict_to_camel_case(cur.fetchone())
@@ -327,7 +327,7 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu
                 {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
                 {',s.gdpr' if include_gdpr else ''}
                 FROM public.projects AS s
-                where s.project_key =%(project_key)s
+                WHERE s.project_key =%(project_key)s
                   AND s.tenant_id =%(tenant_id)s
                   AND s.deleted_at IS NULL
                 LIMIT 1;""",
diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py
index 5bd80dc06..cbc11e1f6 100644
--- a/ee/api/chalicelib/core/roles.py
+++ b/ee/api/chalicelib/core/roles.py
@@ -119,7 +119,7 @@ def get_role_by_name(tenant_id, name):
         cur.execute(
             cur.mogrify("""SELECT *
                            FROM public.roles
-                           where tenant_id =%(tenant_id)s
+                           WHERE tenant_id =%(tenant_id)s
                              AND deleted_at IS NULL
                              AND name ILIKE %(name)s;""",
                         {"tenant_id": tenant_id, "name": name})
diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py
index 3aa701f97..36fb5533c 100644
--- a/ee/api/chalicelib/core/significance.py
+++ b/ee/api/chalicelib/core/significance.py
@@ -305,7 +305,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
     transitions ::: if transited from the first stage to the last
                         - 1
                     else
                         - 0
-    errors ::: a dictionary where the keys are all unique issues (currently context-wise)
+    errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
                the values are lists
                     if an issue happened between the first stage to the last
                         - 1
                     else
                         - 0
diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py
index 1f845ec06..da65ddaf4 100644
--- a/ee/api/chalicelib/core/significance_exp.py
+++ b/ee/api/chalicelib/core/significance_exp.py
@@ -299,7 +299,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
     transitions ::: if transited from the first stage to the last
                         - 1
                     else
                         - 0
-    errors ::: a dictionary where the keys are all unique issues (currently context-wise)
+    errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
                the values are lists
                     if an issue happened between the first stage to the last
                         - 1
                     else
                         - 0
diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py
index cb7cf509e..3aaae8b1c 100644
--- a/ee/api/chalicelib/core/webhook.py
+++ b/ee/api/chalicelib/core/webhook.py
@@ -1,3 +1,5 @@
+import logging
+
 import requests
 
 from chalicelib.utils import pg_client, helper
@@ -8,10 +10,9 @@ def get_by_id(webhook_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
-                        SELECT
-                               webhook_id AS integration_id, webhook_id AS id, w.*
+                        SELECT w.*
                         FROM public.webhooks AS w
-                        where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
+                        WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
                         {"webhook_id": webhook_id})
         )
         w = helper.dict_to_camel_case(cur.fetchone())
@@ -24,10 +25,9 @@ def get(tenant_id, webhook_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
-                        SELECT
-                               webhook_id AS integration_id, webhook_id AS id, w.*
+                        SELECT w.*
                         FROM public.webhooks AS w
-                        where w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s AND deleted_at ISNULL;""",
+                        WHERE w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s AND deleted_at ISNULL AND type='webhook';""",
                         {"webhook_id": webhook_id, "tenant_id": tenant_id})
         )
         w = helper.dict_to_camel_case(cur.fetchone())
@@ -39,14 +39,11 @@ def get_by_type(tenant_id, webhook_type):
     with pg_client.PostgresClient() as cur:
         cur.execute(
-            cur.mogrify("""\
-                        SELECT
-                               w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
-                        FROM public.webhooks AS w
-                        where
-                            w.tenant_id =%(tenant_id)s
-                            AND w.type =%(type)s
-                            AND deleted_at ISNULL;""",
+            cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
+                           FROM public.webhooks AS w
+                           WHERE w.tenant_id =%(tenant_id)s
+                             AND w.type =%(type)s
+                             AND deleted_at ISNULL;""",
                         {"type": webhook_type, "tenant_id": tenant_id})
         )
         webhooks = helper.list_to_camel_case(cur.fetchall())
@@ -58,25 +55,16 @@ def get_by_tenant(tenant_id, replace_none=False):
     with pg_client.PostgresClient() as cur:
         cur.execute(
-            cur.mogrify("""\
-                        SELECT
-                               webhook_id AS integration_id, webhook_id AS id,w.*
-                        FROM public.webhooks AS w
-                        where
-                            w.tenant_id =%(tenant_id)s
-                            AND deleted_at ISNULL;""",
+            cur.mogrify("""SELECT w.*
+                           FROM public.webhooks AS w
+                           WHERE w.tenant_id =%(tenant_id)s
+                             AND deleted_at ISNULL
+                             AND type='webhook';""",
                         {"tenant_id": tenant_id})
         )
         all = helper.list_to_camel_case(cur.fetchall())
-        if replace_none:
-            for w in all:
-                w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
-                for k in w.keys():
-                    if w[k] is None:
-                        w[k] = ''
-        else:
-            for w in all:
-                w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
+        for w in all:
+            w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
         return all
@@ -152,28 +140,24 @@ def trigger_batch(data_list):
     for w in data_list:
         if w["destination"] not in webhooks_map:
             webhooks_map[w["destination"]] = get_by_id(webhook_id=w["destination"])
-        __trigger(hook=webhooks_map[w["destination"]], data=w["data"])
+        if webhooks_map[w["destination"]] is None:
+            logging.error(f"!!Error webhook not found: webhook_id={w['destination']}")
+        else:
+            __trigger(hook=webhooks_map[w["destination"]], data=w["data"])
 
 
 def __trigger(hook, data):
-    if hook["type"] == 'webhook':
+    if hook is not None and hook["type"] == 'webhook':
         headers = {}
         if hook["authHeader"] is not None and len(hook["authHeader"]) > 0:
             headers = {"Authorization": hook["authHeader"]}
-        # body = {
-        #     "webhookId": hook["id"],
-        #     "createdAt": TimeUTC.now(),
-        #     "event": event,
-        #     "data": data
-        # }
-
         r = requests.post(url=hook["endpoint"], json=data, headers=headers)
         if r.status_code != 200:
-            print("=======> webhook: something went wrong")
-            print(r)
-            print(r.status_code)
-            print(r.text)
+            logging.error("=======> webhook: something went wrong")
+            logging.error(r)
+            logging.error(r.status_code)
+            logging.error(r.text)
             return
         response = None
         try:
@@ -182,5 +166,5 @@
         try:
             response = r.text
         except:
-            print("no response found")
+            logging.info("no response found")
     return response
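The EE webhook.py changes above make trigger_batch tolerant of destinations that get_by_id cannot resolve: the missing hook is now logged and skipped instead of failing when the None hook is dereferenced. A short, illustrative sketch of the batch shape a caller passes in; the webhook ids and payload contents are hypothetical, not taken from the diff:

# Illustrative sketch only (not part of the diff): webhook ids and payloads are hypothetical.
import logging

from chalicelib.core import webhook

logging.basicConfig(level=logging.INFO)

webhook.trigger_batch([
    # Each entry names a destination webhook_id and the JSON body POSTed to its endpoint.
    {"destination": 12, "data": {"title": "Funnel alert", "projectId": 1}},
    # A destination that no longer exists is logged ("!!Error webhook not found: ...") and skipped.
    {"destination": 9999, "data": {"title": "Orphaned alert"}},
])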
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql
new file mode 100644
index 000000000..950f4e179
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql
@@ -0,0 +1,13 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.8.3-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+ALTER TABLE IF EXISTS public.webhooks
+    ALTER COLUMN type SET DEFAULT 'webhook';
+
+ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
+
+COMMIT;
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index c3a732fc8..d2e5a9330 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -146,7 +146,7 @@ $$
         tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
         tenant_key text NOT NULL DEFAULT generate_api_key(20),
         name text NOT NULL,
-        api_key text UNIQUE default generate_api_key(20) not null,
+        api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
         created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         deleted_at timestamp without time zone NULL DEFAULT NULL,
         license text NULL,
@@ -186,9 +186,9 @@ $$
         email text NOT NULL UNIQUE,
         role user_role NOT NULL DEFAULT 'member',
         name text NOT NULL,
-        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         deleted_at timestamp without time zone NULL DEFAULT NULL,
-        api_key text UNIQUE default generate_api_key(20) not null,
+        api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
         jwt_iat timestamp without time zone NULL DEFAULT NULL,
         data jsonb NOT NULL DEFAULT'{}'::jsonb,
         weekly_report boolean NOT NULL DEFAULT TRUE,
@@ -283,25 +283,25 @@ $$
         IF NOT EXISTS(SELECT *
                       FROM pg_type typ
                       WHERE typ.typname = 'webhook_type') THEN
-            create type webhook_type as enum ('webhook','slack','email');
+            CREATE TYPE webhook_type AS ENUM ('webhook','slack','email','msteams');
         END IF;
 
-        create table IF NOT EXISTS webhooks
+        CREATE TABLE IF NOT EXISTS webhooks
         (
-            webhook_id integer generated by default as identity
+            webhook_id integer generated by DEFAULT as identity
                 constraint webhooks_pkey
                     primary key,
-            tenant_id integer not null
+            tenant_id integer NOT NULL
                 constraint webhooks_tenant_id_fkey
                     references tenants
                         on delete cascade,
-            endpoint text not null,
-            created_at timestamp default timezone('utc'::text, now()) not null,
+            endpoint text NOT NULL,
+            created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
             deleted_at timestamp,
             auth_header text,
-            type webhook_type not null,
-            index integer default 0 not null,
+            type webhook_type NOT NULL DEFAULT 'webhook',
+            index integer DEFAULT 0 NOT NULL,
             name varchar(100)
         );
@@ -339,9 +339,9 @@ $$
         funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
         project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
         user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
-        name text not null,
-        filter jsonb not null,
-        created_at timestamp default timezone('utc'::text, now()) not null,
+        name text NOT NULL,
+        filter jsonb NOT NULL,
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
         deleted_at timestamp,
         is_public boolean NOT NULL DEFAULT False
     );
@@ -352,21 +352,21 @@ $$
         IF NOT EXISTS(SELECT *
                       FROM pg_type typ
                       WHERE typ.typname = 'announcement_type') THEN
-            create type announcement_type as enum ('notification','alert');
+            CREATE TYPE announcement_type AS ENUM ('notification','alert');
         END IF;
 
-        create table IF NOT EXISTS announcements
+        CREATE TABLE IF NOT EXISTS announcements
         (
-            announcement_id serial not null
+            announcement_id serial NOT NULL
                 constraint announcements_pk
                     primary key,
-            title text not null,
-            description text not null,
+            title text NOT NULL,
+            description text NOT NULL,
             button_text varchar(30),
             button_url text,
             image_url text,
-            created_at timestamp default timezone('utc'::text, now()) not null,
-            type announcement_type default 'notification'::announcement_type not null
+            created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+            type announcement_type DEFAULT 'notification'::announcement_type NOT NULL
         );
 
         IF NOT EXISTS(SELECT *
@@ -395,14 +395,14 @@ $$
         CREATE TABLE IF NOT EXISTS jira_cloud
         (
-            user_id integer not null
+            user_id integer NOT NULL
                 constraint jira_cloud_pk
                     primary key
                 constraint jira_cloud_users_fkey
                     references users
                         on delete cascade,
-            username text not null,
-            token text not null,
+            username text NOT NULL,
+            token text NOT NULL,
             url text
         );
@@ -501,7 +501,7 @@ $$
     (
         key text NOT NULL,
         value text NOT NULL,
-        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
         session_id bigint NOT NULL,
         message_id bigint NOT NULL,
@@ -666,8 +666,8 @@ $$
         issue_id text NOT NULL,
         provider oauth_provider NOT NULL,
         created_by integer NOT NULL,
-        created_at timestamp default timezone('utc'::text, now()) NOT NULL,
-        provider_data jsonb default'{}'::jsonb NOT NULL
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+        provider_data jsonb DEFAULT'{}'::jsonb NOT NULL
     );
     CREATE INDEX IF NOT EXISTS assigned_sessions_session_id_idx ON assigned_sessions (session_id);
@@ -720,8 +720,8 @@ $$
         project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
         action job_action NOT NULL,
         reference_id text NOT NULL,
-        created_at timestamp default timezone('utc'::text, now()) NOT NULL,
-        updated_at timestamp default timezone('utc'::text, now()) NULL,
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+        updated_at timestamp DEFAULT timezone('utc'::text, now()) NULL,
         start_at timestamp NOT NULL,
         errors text NULL
     );
@@ -823,9 +823,9 @@ $$
         search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
         project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
         user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
-        name text not null,
-        filter jsonb not null,
-        created_at timestamp default timezone('utc'::text, now()) not null,
+        name text NOT NULL,
+        filter jsonb NOT NULL,
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
         deleted_at timestamp,
         is_public boolean NOT NULL DEFAULT False
     );
@@ -877,7 +877,7 @@ $$
     (
         note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
         message text NOT NULL,
-        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
         deleted_at timestamp without time zone NULL DEFAULT NULL,
         tag text NULL,
diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql b/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql
new file mode 100644
index 000000000..30be2368d
--- /dev/null
+++ b/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql
@@ -0,0 +1,13 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.8.3'
+$$ LANGUAGE sql IMMUTABLE;
+
+ALTER TABLE IF EXISTS public.webhooks
+    ALTER COLUMN type SET DEFAULT 'webhook';
+
+ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
+
+COMMIT;
\ No newline at end of file
diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 645696eeb..52b50f14e 100644
--- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -9,7 +9,6 @@ $$
 SELECT 'v1.8.1'
 $$ LANGUAGE sql IMMUTABLE;
 
--- --- accounts.sql ---
 CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS
 $$
@@ -29,7 +28,6 @@ begin
 end;
 $$ LANGUAGE plpgsql;
 
--- --- events.sql ---
 CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS
 $$
@@ -54,7 +52,6 @@ BEGIN
 END;
 $$ LANGUAGE plpgsql IMMUTABLE;
 
--- --- integrations.sql ---
 CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
 $$
@@ -70,7 +67,6 @@ BEGIN
 END;
 $$ LANGUAGE plpgsql;
 
--- --- alerts.sql ---
 CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS
 $$
@@ -87,7 +83,6 @@ BEGIN
 END ;
 $$ LANGUAGE plpgsql;
 
--- --- projects.sql ---
 CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS
 $$
@@ -110,11 +105,9 @@ $$
     ELSE
         raise notice 'Creating DB';
-        -- --- public.sql ---
         CREATE EXTENSION IF NOT EXISTS pg_trgm;
         CREATE EXTENSION IF NOT EXISTS pgcrypto;
 
--- --- accounts.sql ---
         CREATE TABLE tenants
         (
@@ -141,9 +134,9 @@ $$
         email text NOT NULL UNIQUE,
         role user_role NOT NULL DEFAULT 'member',
         name text NOT NULL,
-        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         deleted_at timestamp without time zone NULL DEFAULT NULL,
-        api_key text UNIQUE default generate_api_key(20) not null,
+        api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
         jwt_iat timestamp without time zone NULL DEFAULT NULL,
         data jsonb NOT NULL DEFAULT '{}'::jsonb,
         weekly_report boolean NOT NULL DEFAULT TRUE
@@ -171,7 +164,6 @@ $$
     );
     CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
 
--- --- projects.sql ---
     CREATE TABLE projects
     (
@@ -214,25 +206,22 @@ $$
         EXECUTE PROCEDURE notify_project();
 
--- --- webhooks.sql ---
+    CREATE TYPE webhook_type AS ENUM ('webhook', 'slack', 'email', 'msteams');
-    create type webhook_type as enum ('webhook', 'slack', 'email');
-
-    create table webhooks
+    CREATE TABLE webhooks
     (
-        webhook_id integer generated by default as identity
+        webhook_id integer generated by DEFAULT as identity
            constraint webhooks_pkey
                primary key,
-        endpoint text not null,
-        created_at timestamp default timezone('utc'::text, now()) not null,
+        endpoint text NOT NULL,
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
         deleted_at timestamp,
         auth_header text,
-        type webhook_type not null,
-        index integer default 0 not null,
+        type webhook_type NOT NULL DEFAULT 'webhook',
+        index integer DEFAULT 0 NOT NULL,
         name varchar(100)
     );
 
--- --- notifications.sql ---
     CREATE TABLE notifications
     (
@@ -258,16 +247,15 @@ $$
            constraint user_viewed_notifications_pkey
                primary key (user_id, notification_id)
     );
 
--- --- funnels.sql ---
     CREATE TABLE funnels
     (
        funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
        user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
-       name text not null,
-       filter jsonb not null,
-       created_at timestamp default timezone('utc'::text, now()) not null,
+       name text NOT NULL,
+       filter jsonb NOT NULL,
+       created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
        deleted_at timestamp,
        is_public boolean NOT NULL DEFAULT False
     );
@@ -275,25 +263,23 @@ $$
     CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
     CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
 
--- --- announcements.sql ---
-    create type announcement_type as enum ('notification', 'alert');
+    CREATE TYPE announcement_type AS ENUM ('notification', 'alert');
 
-    create table announcements
+    CREATE TABLE announcements
     (
-        announcement_id serial not null
+        announcement_id serial NOT NULL
            constraint announcements_pk
                primary key,
-        title text not null,
-        description text not null,
+        title text NOT NULL,
+        description text NOT NULL,
         button_text varchar(30),
         button_url text,
         image_url text,
-        created_at timestamp default timezone('utc'::text, now()) not null,
-        type announcement_type default 'notification'::announcement_type not null
+        created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+        type announcement_type DEFAULT 'notification'::announcement_type NOT NULL
     );
 
--- --- integrations.sql ---
     CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github');
     CREATE TABLE integrations
@@ -312,20 +298,19 @@ $$
         EXECUTE PROCEDURE notify_integration();
 
-    create table jira_cloud
+    CREATE TABLE jira_cloud
     (
-        user_id integer not null
+        user_id integer NOT NULL
            constraint jira_cloud_pk
                primary key
            constraint jira_cloud_users_fkey
                references users
                    on delete cascade,
-        username text not null,
-        token text not null,
+        username text NOT NULL,
+        token text NOT NULL,
         url text
     );
 
--- --- issues.sql ---
     CREATE TYPE issue_type AS ENUM (
         'click_rage',
@@ -361,7 +346,6 @@ $$
     CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
     CREATE INDEX issues_project_id_idx ON issues (project_id);
 
--- --- errors.sql ---
     CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch');
     CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored');
@@ -409,7 +393,7 @@ $$
     (
         key text NOT NULL,
         value text NOT NULL,
-        created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
         error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
         session_id bigint NOT NULL,
         message_id bigint NOT NULL,
@@ -420,7 +404,6 @@ $$
     CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id);
     CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id);
 
--- --- sessions.sql ---
     CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
     CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR');
     CREATE TYPE platform AS ENUM ('web','ios','android');
@@ -546,21 +529,18 @@ $$
     );
     CREATE INDEX user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id);
 
--- --- assignments.sql ---
-    create table assigned_sessions
+    CREATE TABLE assigned_sessions
     (
        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
        issue_id text NOT NULL,
        provider oauth_provider NOT NULL,
        created_by integer NOT NULL,
-       created_at timestamp default timezone('utc'::text, now()) NOT NULL,
-       provider_data jsonb default '{}'::jsonb NOT NULL
+       created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+       provider_data jsonb DEFAULT '{}'::jsonb NOT NULL
     );
     CREATE INDEX assigned_sessions_session_id_idx ON assigned_sessions (session_id);
 
--- --- events_common.sql ---
-
     CREATE TYPE events_common.custom_level AS ENUM ('info','error');
@@ -632,7 +612,6 @@ $$
     CREATE INDEX requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
     CREATE INDEX requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
 
--- --- events.sql ---
     CREATE TABLE events.pages
     (
@@ -853,8 +832,6 @@ $$
     CREATE INDEX performance_avg_used_js_heap_size_gt0_idx ON events.performance (avg_used_js_heap_size) WHERE avg_used_js_heap_size > 0;
 
--- --- autocomplete.sql ---
-
     CREATE TABLE autocomplete
     (
        value text NOT NULL,
@@ -893,8 +870,8 @@ $$
        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
        action job_action NOT NULL,
        reference_id text NOT NULL,
-       created_at timestamp default timezone('utc'::text, now()) NOT NULL,
-       updated_at timestamp default timezone('utc'::text, now()) NULL,
+       created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+       updated_at timestamp DEFAULT timezone('utc'::text, now()) NULL,
        start_at timestamp NOT NULL,
        errors text NULL
     );
@@ -977,9 +954,9 @@ $$
        search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
        user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
-       name text not null,
-       filter jsonb not null,
-       created_at timestamp default timezone('utc'::text, now()) not null,
+       name text NOT NULL,
+       filter jsonb NOT NULL,
+       created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
        deleted_at timestamp,
        is_public boolean NOT NULL DEFAULT False
     );
@@ -1019,7 +996,7 @@ $$
     (
        note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        message text NOT NULL,
-       created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+       created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
       user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
       deleted_at timestamp without time zone NULL DEFAULT NULL,
       tag text NULL,