diff --git a/api/chalicelib/core/feature_flags.py b/api/chalicelib/core/feature_flags.py
new file mode 100644
index 000000000..d61b88063
--- /dev/null
+++ b/api/chalicelib/core/feature_flags.py
@@ -0,0 +1,347 @@
+import schemas
+from chalicelib.utils import helper
+from chalicelib.utils import pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+from typing import Any, List, Dict, Optional
+from fastapi import HTTPException, status
+import json
+
+feature_flag_columns = (
+    "feature_flag_id",
+    "name",
+    "flag_key",
+    "description",
+    "flag_type",
+    "is_persist",
+    "is_active",
+    "created_at",
+    "updated_at",
+    "created_by",
+    "updated_by",
+)
+
+
+def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]:
+    """
+    Get all feature flags and their total count.
+    """
+    constraints = [
+        "feature_flags.project_id = %(project_id)s",
+        "feature_flags.deleted_at IS NULL",
+        "feature_flags.is_active = %(is_active)s"
+    ]
+
+    params = {
+        "project_id": project_id,
+        "user_id": user_id,
+        "limit": data.limit,
+        "is_active": data.is_active,
+        "offset": (data.page - 1) * data.limit,
+    }
+
+    if data.user_id is not None:
+        constraints.append("feature_flags.created_by=%(user_id)s")
+
+    if data.query is not None and len(data.query) > 0:
+        constraints.append("name ILIKE %(query)s")
+        params["query"] = helper.values_for_operator(value=data.query,
+                                                     op=schemas.SearchEventOperator._contains)
+
+    sql = f"""
+        SELECT COUNT(1) OVER () AS count, {", ".join(feature_flag_columns)}
+        FROM feature_flags
+        WHERE {" AND ".join(constraints)}
+        ORDER BY created_at {data.order}
+        LIMIT %(limit)s OFFSET %(offset)s;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, params)
+        cur.execute(query)
+        rows = cur.fetchall()
+
+    if len(rows) == 0:
+        return {"data": {"total": 0, "list": []}}
+
+    results = {"total": rows[0]["count"]}
+
+    rows = helper.list_to_camel_case(rows)
+    for row in rows:
+        row.pop("count")
+        row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+        row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
+
+    results["list"] = rows
+    return {"data": results}
+
+
+def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
+    insert_columns = (
+        'project_id',
+        'name',
+        'flag_key',
+        'description',
+        'flag_type',
+        'is_persist',
+        'is_active',
+        'created_by'
+    )
+
+    _data = {}
+    for i, s in enumerate(feature_flag_data.conditions):
+        for k in s.dict().keys():
+            _data[f"{k}_{i}"] = s.__getattribute__(k)
+        _data[f"name_{i}"] = s.name
+        _data[f"rollout_percentage_{i}"] = s.rollout_percentage
+        _data[f"filters_{i}"] = json.dumps(s.filters)
+
+    params = {
+        "project_id": project_id,
+        "created_by": user_id,
+        **feature_flag_data.dict(),
+        **_data
+    }
+
+    conditions_len = len(feature_flag_data.conditions)
+
+    flag_sql = f"""
+        INSERT INTO feature_flags ({", ".join(insert_columns)})
+        VALUES ({", ".join(["%(" + col + ")s" for col in insert_columns])})
+        RETURNING feature_flag_id
+    """
+
+    query = f"""
+        WITH inserted_flag AS ({flag_sql})
+        INSERT INTO feature_flags_conditions(feature_flag_id, name, rollout_percentage, filters)
+        VALUES {",".join([f"((SELECT feature_flag_id FROM inserted_flag), %(name_{i})s, %(rollout_percentage_{i})s, %(filters_{i})s::jsonb)"
+                          for i in range(conditions_len)])}
+        RETURNING feature_flag_id;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(query, params)
+        cur.execute(query)
+        row = cur.fetchone()
+
+    if row is None:
+        return None
+
+    return get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"])
+
+
+def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]:
+    conditions_query = """
+        SELECT COALESCE(jsonb_agg(ffc ORDER BY condition_id), '[]'::jsonb) AS conditions
+        FROM feature_flags_conditions AS ffc
+        WHERE ffc.feature_flag_id = %(feature_flag_id)s
+    """
+
+    sql = f"""
+        SELECT {", ".join(["ff." + col for col in feature_flag_columns])},
+               ({conditions_query}) AS conditions
+        FROM feature_flags AS ff
+        WHERE ff.feature_flag_id = %(feature_flag_id)s
+          AND ff.project_id = %(project_id)s
+          AND ff.deleted_at IS NULL;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "project_id": project_id})
+        cur.execute(query)
+        row = cur.fetchone()
+
+    if row is None:
+        return {"errors": ["Feature flag not found"]}
+
+    row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+    row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
+
+    return {"data": helper.dict_to_camel_case(row)}
+
+
+def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> List[Dict[str, Any]]:
+    """
+    Create new feature flag conditions and return their data.
+    """
+    rows = []
+
+    # insert all conditions rows with single sql query
+    if len(conditions) > 0:
+        columns = (
+            "feature_flag_id",
+            "name",
+            "rollout_percentage",
+            "filters",
+        )
+
+        sql = f"""
+            INSERT INTO feature_flags_conditions
+            (feature_flag_id, name, rollout_percentage, filters)
+            VALUES {", ".join(["%s"] * len(conditions))}
+            RETURNING condition_id, {", ".join(columns)}
+        """
+
+        with pg_client.PostgresClient() as cur:
+            params = [(feature_flag_id, c.name, c.rollout_percentage, json.dumps(c.filters)) for c in conditions]
+            query = cur.mogrify(sql, params)
+            cur.execute(query)
+            rows = cur.fetchall()
+
+    return rows
+
+
+def update_feature_flag(project_id: int, feature_flag_id: int,
+                        feature_flag: schemas.FeatureFlagSchema, user_id: int):
+    """
+    Update an existing feature flag and return its updated data.
+    """
+    columns = (
+        "name",
+        "flag_key",
+        "description",
+        "flag_type",
+        "is_persist",
+        "is_active",
+        "updated_by",
+    )
+
+    params = {
+        "updated_by": user_id,
+        "feature_flag_id": feature_flag_id,
+        "project_id": project_id,
+        **feature_flag.dict(),
+    }
+
+    sql = f"""
+        UPDATE feature_flags
+        SET {", ".join(f"{column} = %({column})s" for column in columns)},
+            updated_at = timezone('utc'::text, now())
+        WHERE feature_flag_id = %(feature_flag_id)s AND project_id = %(project_id)s
+        RETURNING feature_flag_id, {", ".join(columns)}, created_at, updated_at
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, params)
+        cur.execute(query)
+        row = cur.fetchone()
+
+    if row is None:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Something went wrong.")
+
+    row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+    row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
+    row['conditions'] = check_conditions(feature_flag_id, feature_flag.conditions)
+
+    return {"data": helper.dict_to_camel_case(row)}
+
+
+def get_conditions(feature_flag_id: int):
+    """
+    Get all conditions for a feature flag.
+    """
+    sql = """
+        SELECT
+            condition_id,
+            feature_flag_id,
+            name,
+            rollout_percentage,
+            filters
+        FROM feature_flags_conditions
+        WHERE feature_flag_id = %(feature_flag_id)s
+        ORDER BY condition_id;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
+        cur.execute(query)
+        rows = cur.fetchall()
+
+    return rows
+
+
+def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
+    existing_ids = [ec.get("condition_id") for ec in get_conditions(feature_flag_id)]
+    to_be_deleted = []
+    to_be_updated = []
+    to_be_created = []
+
+    for cid in existing_ids:
+        if cid not in [c.condition_id for c in conditions]:
+            to_be_deleted.append(cid)
+
+    for condition in conditions:
+        if condition.condition_id is None:
+            to_be_created.append(condition)
+        else:
+            to_be_updated.append(condition)
+
+    if len(to_be_created) > 0:
+        create_conditions(feature_flag_id=feature_flag_id, conditions=to_be_created)
+
+    if len(to_be_updated) > 0:
+        update_conditions(feature_flag_id=feature_flag_id, conditions=to_be_updated)
+
+    if len(to_be_deleted) > 0:
+        delete_conditions(feature_flag_id=feature_flag_id, ids=to_be_deleted)
+
+    return get_conditions(feature_flag_id)
+
+
+def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
+    """
+    Update existing feature flag conditions and return their updated data.
+    """
+    values = []
+    params = {
+        "feature_flag_id": feature_flag_id,
+    }
+    for i in range(len(conditions)):
+        values.append(f"(%(condition_id_{i})s, %(name_{i})s, %(rollout_percentage_{i})s, %(filters_{i})s::jsonb)")
+        params[f"condition_id_{i}"] = conditions[i].condition_id
+        params[f"name_{i}"] = conditions[i].name
+        params[f"rollout_percentage_{i}"] = conditions[i].rollout_percentage
+        params[f"filters_{i}"] = json.dumps(conditions[i].filters)
+
+    sql = f"""
+        UPDATE feature_flags_conditions
+        SET name = c.name, rollout_percentage = c.rollout_percentage, filters = c.filters
+        FROM (VALUES {','.join(values)}) AS c(condition_id, name, rollout_percentage, filters)
+        WHERE c.condition_id = feature_flags_conditions.condition_id AND feature_flag_id = %(feature_flag_id)s;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, params)
+        cur.execute(query)
+
+
+def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
+    """
+    Delete feature flag conditions.
+    """
+    sql = """
+        DELETE FROM feature_flags_conditions
+        WHERE condition_id IN %(ids)s
+          AND feature_flag_id= %(feature_flag_id)s;
+    """
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
+        cur.execute(query)
+
+
+def delete_feature_flag(project_id: int, feature_flag_id: int):
+    """
+    Delete a feature flag.
+    """
+    conditions = [
+        "project_id=%(project_id)s",
+        "feature_flags.feature_flag_id=%(feature_flag_id)s"
+    ]
+    params = {"project_id": project_id, "feature_flag_id": feature_flag_id}
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""UPDATE feature_flags
+                                SET deleted_at= (now() at time zone 'utc')
+                                WHERE {" AND ".join(conditions)};""", params)
+        cur.execute(query)
+
+    return {"state": "success"}
diff --git a/api/routers/core.py b/api/routers/core.py
index 446a277fc..d28c1d4d2 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -11,7 +11,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
     log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \
     log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
     assist, mobile, tenants, boarding, notifications, webhook, users, \
-    custom_metrics, saved_search, integrations_global
+    custom_metrics, saved_search, integrations_global, feature_flags
 from chalicelib.core.collaboration_msteams import MSTeams
 from chalicelib.core.collaboration_slack import Slack
 from chalicelib.utils import helper, captcha, s3
diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py
index 0847b83b3..9fe664d95 100644
--- a/api/routers/core_dynamic.py
+++ b/api/routers/core_dynamic.py
@@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse
 
 import schemas
 from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
-    sessions_favorite, assist, sessions_notes, click_maps, sessions_replay, signup
+    sessions_favorite, assist, sessions_notes, click_maps, sessions_replay, signup, feature_flags
 from chalicelib.core import sessions_viewed
 from chalicelib.core import tenants, users, projects, license
 from chalicelib.core import webhook
@@ -451,3 +451,34 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(..
 async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
+
+
+@app.post('/{project_id}/feature-flags/search', tags=["feature flags"])
+async def search_feature_flags(project_id: int,
+                               data: schemas.SearchFlagsSchema = Body(...),
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.search_feature_flags(project_id=project_id, user_id=context.user_id, data=data)
+
+
+@app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
+async def get_feature_flag(project_id: int, feature_flag_id: int):
+    return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
+
+
+@app.post('/{project_id}/feature-flags', tags=["feature flags"])
+async def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...),
+                           context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data)
+
+
+@app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
+async def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...),
+                              context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id,
+                                             user_id=context.user_id,
+                                             feature_flag=data)
+
+
+@app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
+async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
+    return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)}
diff --git a/api/schemas.py b/api/schemas.py
index 81089e2b2..d261e1697 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -1018,11 +1018,11 @@ class CreateCardSchema(CardChartSchema):
     name: Optional[str] = Field(...)
     is_public: bool = Field(default=True)
     view_type: Union[MetricTimeseriesViewType, \
-        MetricTableViewType, MetricOtherViewType] = Field(...)
+                     MetricTableViewType, MetricOtherViewType] = Field(...)
     metric_type: MetricType = Field(...)
     metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \
-        MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
-        MetricOfClickMap] = Field(MetricOfTable.user_id)
+                     MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
+                     MetricOfClickMap] = Field(MetricOfTable.user_id)
     metric_value: List[IssueType] = Field(default=[])
     metric_format: Optional[MetricFormatType] = Field(default=None)
     default_config: CardConfigSchema = Field(..., alias="config")
@@ -1194,7 +1194,7 @@ class LiveSessionSearchFilterSchema(BaseModel):
     type: LiveFilterType = Field(...)
    source: Optional[str] = Field(default=None)
     operator: Literal[SearchEventOperator._is, \
-        SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)
+                      SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)
 
     transform = root_validator(pre=True, allow_reuse=True)(transform_old_FilterType)
 
@@ -1363,3 +1363,42 @@ class GetHeatmapPayloadSchema(BaseModel):
 
     class Config:
         alias_generator = attribute_to_camel_case
+
+
+class FeatureFlagCondition(BaseModel):
+    condition_id: Optional[int] = Field(default=None)
+    name: str = Field(...)
+    rollout_percentage: Optional[int] = Field(default=0)
+    filters: List[dict] = Field(default=[])
+
+    class Config:
+        alias_generator = attribute_to_camel_case
+
+
+class SearchFlagsSchema(_PaginatedSchema):
+    limit: int = Field(default=15, gt=0, le=200)
+    user_id: Optional[int] = Field(default=None)
+    order: SortOrderType = Field(default=SortOrderType.desc)
+    query: Optional[str] = Field(default=None)
+    is_active: bool = Field(default=True)
+
+    class Config:
+        alias_generator = attribute_to_camel_case
+
+
+class FeatureFlagType(str, Enum):
+    single_variant = "single"
+    multi_variant = "multi"
+
+
+class FeatureFlagSchema(BaseModel):
+    name: str = Field(...)
+    flag_key: str = Field(...)
+    description: Optional[str] = Field(None)
+    flag_type: FeatureFlagType = Field(default=FeatureFlagType.single_variant)
+    is_persist: Optional[bool] = Field(default=False)
+    is_active: Optional[bool] = Field(default=True)
+    conditions: List[FeatureFlagCondition] = Field(default=[])
+
+    class Config:
+        alias_generator = attribute_to_camel_case
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index ab9b50029..93746e0d2 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -223,6 +223,7 @@ Pipfile.lock
 /chalicelib/core/socket_ios.py
 /chalicelib/core/sourcemaps.py
 /chalicelib/core/sourcemaps_parser.py
+/chalicelib/core/feature_flags.py
 /chalicelib/saml
 /chalicelib/utils/html/
 /chalicelib/utils/__init__.py
diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh
index 8a1f6a263..1b5eed8c5 100755
--- a/ee/api/clean-dev.sh
+++ b/ee/api/clean-dev.sh
@@ -9,6 +9,7 @@ rm -rf ./chalicelib/core/collaboration_base.py
 rm -rf ./chalicelib/core/collaboration_msteams.py
 rm -rf ./chalicelib/core/collaboration_slack.py
 rm -rf ./chalicelib/core/countries.py
+rm -rf ./chalicelib/core/feature_flags.py #exp
 rm -rf ./chalicelib/core/errors.py
 rm -rf ./chalicelib/core/errors_favorite.py #exp
 rm -rf ./chalicelib/core/events.py
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index 5bec97431..ed9d14212 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse
 import schemas
 import schemas_ee
 from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
-    errors_favorite, sessions_notes, click_maps, sessions_replay, signup
+    errors_favorite, sessions_notes, click_maps, sessions_replay, signup, feature_flags
 from chalicelib.core import sessions_viewed
 from chalicelib.core import tenants, users, projects, license
 from chalicelib.core import webhook
@@ -478,3 +478,38 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(..
 async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
+
+
+@app.post('/{project_id}/feature-flags/search', tags=["feature flags"],
+          dependencies=[OR_scope(Permissions.feature_flags)])
+async def search_feature_flags(project_id: int,
+                               data: schemas.SearchFlagsSchema = Body(...),
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.search_feature_flags(project_id=project_id, user_id=context.user_id, data=data)
+
+
+@app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
+         dependencies=[OR_scope(Permissions.feature_flags)])
+async def get_feature_flag(project_id: int, feature_flag_id: int):
+    return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
+
+
+@app.post('/{project_id}/feature-flags', tags=["feature flags"], dependencies=[OR_scope(Permissions.feature_flags)])
+async def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...),
+                           context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data)
+
+
+@app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
+         dependencies=[OR_scope(Permissions.feature_flags)])
+async def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...),
+                              context: schemas.CurrentContext = Depends(OR_context)):
+    return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id,
+                                             user_id=context.user_id,
+                                             feature_flag=data)
+
+
+@app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
+            dependencies=[OR_scope(Permissions.feature_flags)])
+async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
+    return feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py
index b9d31bfed..416d9e860 100644
--- a/ee/api/schemas_ee.py
+++ b/ee/api/schemas_ee.py
@@ -15,6 +15,7 @@ class Permissions(str, Enum):
     metrics = "METRICS"
     assist_live = "ASSIST_LIVE"
     assist_call = "ASSIST_CALL"
+    feature_flags = "FEATURE_FLAGS"
 
 
 class CurrentContext(schemas.CurrentContext):
diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql
new file mode 100644
index 000000000..7e69f9847
--- /dev/null
+++ b/ee/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql
@@ -0,0 +1,57 @@
+DO
+$$
+    DECLARE
+        previous_version CONSTANT text := 'v1.12.0-ee';
+        next_version     CONSTANT text := 'v1.13.0-ee';
+    BEGIN
+        IF (SELECT openreplay_version()) = previous_version THEN
+            raise notice 'valid previous DB version';
+        ELSEIF (SELECT openreplay_version()) = next_version THEN
+            raise notice 'new version detected, nothing to do';
+        ELSE
+            RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version());
+        END IF;
+    END ;
+$$
+LANGUAGE plpgsql;
+
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.13.0-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+CREATE TABLE IF NOT EXISTS public.feature_flags
+(
+    feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    project_id      integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    name            text    NOT NULL,
+    flag_key        text    NOT NULL,
+    description     text    NOT NULL,
+    flag_type       text    NOT NULL,
+    is_persist      boolean NOT NULL DEFAULT FALSE,
+    is_active       boolean NOT NULL DEFAULT FALSE,
+    created_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+    updated_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+    created_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+    updated_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+    deleted_at      timestamp without time zone NULL DEFAULT NULL
+);
+
+CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);
+
+CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
+(
+    condition_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    feature_flag_id    integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
+    name               text    NOT NULL,
+    rollout_percentage integer NOT NULL,
+    filters            jsonb   NOT NULL DEFAULT '[]'::jsonb
+);
+
+UPDATE public.roles
+SET permissions = (SELECT array_agg(distinct e) FROM unnest(permissions || '{FEATURE_FLAGS}') AS e)
+where not permissions @> '{FEATURE_FLAGS}';
+
+COMMIT;
\ No newline at end of file
diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql
index 505b989fd..7d3e028bd 100644
--- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql
@@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.12.0-ee'
+SELECT 'v1.13.0-ee'
 $$ LANGUAGE sql IMMUTABLE;
 
 
@@ -889,6 +889,34 @@ $$
 
     CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
 
+    CREATE TABLE IF NOT EXISTS public.feature_flags
+    (
+        feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        project_id      integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+        name            text    NOT NULL,
+        flag_key        text    NOT NULL,
+        description     text    NOT NULL,
+        flag_type       text    NOT NULL,
+        is_persist      boolean NOT NULL DEFAULT FALSE,
+        is_active       boolean NOT NULL DEFAULT FALSE,
+        created_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+        updated_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+        created_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+        updated_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+        deleted_at      timestamp without time zone NULL DEFAULT NULL
+    );
+
+    CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);
+
+    CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
+    (
+        condition_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        feature_flag_id    integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
+        name               text    NOT NULL,
+        rollout_percentage integer NOT NULL,
+        filters            jsonb   NOT NULL DEFAULT '[]'::jsonb
+    );
+
     RAISE NOTICE 'Created missing public schema tables';
 END IF;
 END;
diff --git a/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql b/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql
new file mode 100644
index 000000000..5be391046
--- /dev/null
+++ b/scripts/schema/db/init_dbs/postgresql/1.13.0/1.13.0.sql
@@ -0,0 +1,53 @@
+DO
+$$
+    DECLARE
+        previous_version CONSTANT text := 'v1.12.0';
+        next_version     CONSTANT text := 'v1.13.0';
+    BEGIN
+        IF (SELECT openreplay_version()) = previous_version THEN
+            raise notice 'valid previous DB version';
+        ELSEIF (SELECT openreplay_version()) = next_version THEN
+            raise notice 'new version detected, nothing to do';
+        ELSE
+            RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version());
+        END IF;
+    END ;
+$$
+LANGUAGE plpgsql;
+
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.13.0'
+$$ LANGUAGE sql IMMUTABLE;
+
+CREATE TABLE IF NOT EXISTS public.feature_flags
+(
+    feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    project_id      integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    name            text    NOT NULL,
+    flag_key        text    NOT NULL,
+    description     text    NOT NULL,
+    flag_type       text    NOT NULL,
+    is_persist      boolean NOT NULL DEFAULT FALSE,
+    is_active       boolean NOT NULL DEFAULT FALSE,
+    created_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+    updated_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+    created_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+    updated_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+    deleted_at      timestamp without time zone NULL DEFAULT NULL
+);
+
+CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);
+
+CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
+(
+    condition_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    feature_flag_id    integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
+    name               text    NOT NULL,
+    rollout_percentage integer NOT NULL,
+    filters            jsonb   NOT NULL DEFAULT '[]'::jsonb
+);
+
+COMMIT;
\ No newline at end of file
diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql
index cd5892d67..f684dfc4f 100644
--- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql
+++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql
@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.12.0'
+SELECT 'v1.13.0'
 $$ LANGUAGE sql IMMUTABLE;
 
 
@@ -972,7 +972,35 @@ $$
                                        primary key (project_id, created_at)
     );
 
-    CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
+    CREATE INDEX projects_stats_project_id_idx ON public.projects_stats (project_id);
+
+    CREATE TABLE public.feature_flags
+    (
+        feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        project_id      integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+        name            text    NOT NULL,
+        flag_key        text    NOT NULL,
+        description     text    NOT NULL,
+        flag_type       text    NOT NULL,
+        is_persist      boolean NOT NULL DEFAULT FALSE,
+        is_active       boolean NOT NULL DEFAULT FALSE,
+        created_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+        updated_by      integer REFERENCES users (user_id) ON DELETE SET NULL,
+        created_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+        updated_at      timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
+        deleted_at      timestamp without time zone NULL DEFAULT NULL
+    );
+
+    CREATE INDEX idx_feature_flags_project_id ON public.feature_flags (project_id);
+
+    CREATE TABLE public.feature_flags_conditions
+    (
+        condition_id       integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        feature_flag_id    integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
+        name               text    NOT NULL,
+        rollout_percentage integer NOT NULL,
+        filters            jsonb   NOT NULL DEFAULT '[]'::jsonb
+    );
 
     raise notice 'DB created';
 END IF;