feat(api) - feature flags multi variant (#1353)
* feat(api): feature flags with multi variants
* feat(api): feature flags with multi variants - description optional
* feat(api): feature flags with multi variants - check for sum of rollout percentage
* feat(api): feature flags with multi variants - refactor
* feat(api): feature flags with multi variants - fix payload format
* feat(api): feature flags - test cases
* fix(ui): multi variants payload validation
* feat(api): feature flags - removed name
parent d9ff686c78, commit 7e96189690
5 changed files with 475 additions and 58 deletions
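For orientation, here is a minimal sketch of what a multi-variant flag looks like at the schema level and the rollout rule the new validate_multi_variant_flag check enforces. This is illustrative only: it assumes the schemas module is importable the way the test file below imports it, mirrors the camelCase aliases used there (flagKey, rolloutPercentage), and follows the field requirements exercised by the tests added in this commit; the flag name "new_checkout" and condition/variant values are made up.

    import schemas

    # Hypothetical multi-variant flag; field names follow the FeatureFlagSchema,
    # FeatureFlagCondition and FeatureFlagVariant definitions in this diff.
    flag = schemas.FeatureFlagSchema(
        flagKey="new_checkout",
        flagType=schemas.FeatureFlagType.multi_variant,
        conditions=[
            schemas.FeatureFlagCondition(name="All users", rolloutPercentage=100),
        ],
        variants=[
            schemas.FeatureFlagVariant(value="control", rolloutPercentage=50),
            schemas.FeatureFlagVariant(value="treatment", rolloutPercentage=50),
        ],
    )

    # create_feature_flag() rejects a multi-variant flag whose variant rollout
    # percentages sum to more than 100 (see validate_multi_variant_flag below).
    assert sum(v.rollout_percentage for v in flag.variants) <= 100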
@@ -8,7 +8,7 @@ import json
feature_flag_columns = (
    "feature_flag_id",
    "name",
    "payload",
    "flag_key",
    "description",
    "flag_type",
@@ -21,33 +21,25 @@ feature_flag_columns = (
)


def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) -> bool:
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
                                FROM public.feature_flags
                                WHERE deleted_at IS NULL
                                  AND flag_key ILIKE %(flag_key)s AND project_id=%(project_id)s
                                  {"AND feature_flag_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
                            {"flag_key": flag_key, "exclude_id": exclude_id, "project_id": project_id})

        cur.execute(query=query)
        row = cur.fetchone()
    return row["exists"]


def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]:
    """
    Get all feature flags and their total count.
    """
    constraints = [
        "feature_flags.project_id = %(project_id)s",
        "feature_flags.deleted_at IS NULL",
    ]

    params = {
        "project_id": project_id,
        "user_id": user_id,
        "limit": data.limit,
        "offset": (data.page - 1) * data.limit,
    }

    if data.is_active is not None:
        constraints.append("feature_flags.is_active=%(is_active)s")
        params["is_active"] = data.is_active

    if data.user_id is not None:
        constraints.append("feature_flags.created_by=%(user_id)s")

    if data.query is not None and len(data.query) > 0:
        constraints.append("name ILIKE %(query)s")
        params["query"] = helper.values_for_operator(value=data.query,
                                                     op=schemas.SearchEventOperator._contains)
    constraints, params = prepare_constraints_params_to_search(data, project_id, user_id)

    sql = f"""
        SELECT COUNT(1) OVER () AS count, {", ".join(feature_flag_columns)}
@@ -77,34 +69,47 @@ def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlag
    return {"data": results}


def prepare_constraints_params_to_search(data, project_id, user_id):
    constraints = [
        "feature_flags.project_id = %(project_id)s",
        "feature_flags.deleted_at IS NULL",
    ]
    params = {
        "project_id": project_id,
        "user_id": user_id,
        "limit": data.limit,
        "offset": (data.page - 1) * data.limit,
    }
    if data.is_active is not None:
        constraints.append("feature_flags.is_active=%(is_active)s")
        params["is_active"] = data.is_active
    if data.user_id is not None:
        constraints.append("feature_flags.created_by=%(user_id)s")
    if data.query is not None and len(data.query) > 0:
        constraints.append("flag_key ILIKE %(query)s")
        params["query"] = helper.values_for_operator(value=data.query,
                                                     op=schemas.SearchEventOperator._contains)
    return constraints, params


def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
    validate_unique_flag_key(feature_flag_data, project_id)
    validate_multi_variant_flag(feature_flag_data)

    insert_columns = (
        'project_id',
        'name',
        'flag_key',
        'description',
        'flag_type',
        'payload',
        'is_persist',
        'is_active',
        'created_by'
    )

    _data = {}
    for i, s in enumerate(feature_flag_data.conditions):
        for k in s.dict().keys():
            _data[f"{k}_{i}"] = s.__getattribute__(k)
        _data[f"name_{i}"] = s.name
        _data[f"rollout_percentage_{i}"] = s.rollout_percentage
        _data[f"filters_{i}"] = json.dumps(s.filters)

    params = {
        "project_id": project_id,
        "created_by": user_id,
        **feature_flag_data.dict(),
        **_data
    }

    params = prepare_params_to_create_flag(feature_flag_data, project_id, user_id)
    conditions_len = len(feature_flag_data.conditions)
    variants_len = len(feature_flag_data.variants)

    flag_sql = f"""
        INSERT INTO feature_flags ({", ".join(insert_columns)})
@@ -113,12 +118,29 @@ def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schema
    """

    query = f"""
        WITH inserted_flag AS ({flag_sql})
        INSERT INTO feature_flags_conditions(feature_flag_id, name, rollout_percentage, filters)
        VALUES {",".join([f"((SELECT feature_flag_id FROM inserted_flag), %(name_{i})s, %(rollout_percentage_{i})s, %(filters_{i})s::jsonb)"
                          for i in range(conditions_len)])}
        RETURNING feature_flag_id;
    """
        WITH inserted_flag AS ({flag_sql}),
             inserted_conditions AS (
                 INSERT INTO feature_flags_conditions(feature_flag_id, name, rollout_percentage, filters)
                     VALUES {",".join([f"(("
                                       f"SELECT feature_flag_id FROM inserted_flag),"
                                       f"%(name_{i})s,"
                                       f"%(rollout_percentage_{i})s,"
                                       f"%(filters_{i})s::jsonb)"
                                       for i in range(conditions_len)])}
                     RETURNING feature_flag_id
             ),
             inserted_variants AS (
                 INSERT INTO feature_flags_variants(feature_flag_id, value, description, rollout_percentage, payload)
                     VALUES {",".join([f"((SELECT feature_flag_id FROM inserted_flag),"
                                       f"%(v_value_{i})s,"
                                       f"%(v_description_{i})s,"
                                       f"%(v_rollout_percentage_{i})s,"
                                       f"%(v_payload_{i})s::jsonb)"
                                       for i in range(variants_len)])}
                     RETURNING feature_flag_id
             )
        SELECT feature_flag_id FROM inserted_flag;
    """

    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(query, params)
@@ -131,6 +153,56 @@ def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schema
    return get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"])


def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None):
    if exists_by_name(project_id=project_id, flag_key=feature_flag_data.flag_key, exclude_id=exclude_id):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag with this key already exists.")


def validate_multi_variant_flag(feature_flag_data):
    if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant:
        if sum([v.rollout_percentage for v in feature_flag_data.variants]) > 100:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail="Sum of rollout percentages for variants cannot be greater than 100.")


def prepare_params_to_create_flag(feature_flag_data, project_id, user_id):
    conditions_data = prepare_conditions_values(feature_flag_data)
    variants_data = prepare_variants_values(feature_flag_data)

    params = {
        "project_id": project_id,
        "created_by": user_id,
        **feature_flag_data.dict(),
        **conditions_data,
        **variants_data,
        "payload": json.dumps(feature_flag_data.payload)
    }
    return params


def prepare_variants_values(feature_flag_data):
    variants_data = {}
    for i, v in enumerate(feature_flag_data.variants):
        # for k in v.dict().keys():
        #     variants_data[f"{k}_{i}"] = v.__getattribute__(k)
        variants_data[f"v_value_{i}"] = v.value
        variants_data[f"v_description_{i}"] = v.description
        variants_data[f"v_payload_{i}"] = json.dumps(v.payload)
        variants_data[f"v_rollout_percentage_{i}"] = v.rollout_percentage
    return variants_data


def prepare_conditions_values(feature_flag_data):
    conditions_data = {}
    for i, s in enumerate(feature_flag_data.conditions):
        for k in s.dict().keys():
            conditions_data[f"{k}_{i}"] = s.__getattribute__(k)
        conditions_data[f"name_{i}"] = s.name
        conditions_data[f"rollout_percentage_{i}"] = s.rollout_percentage
        conditions_data[f"filters_{i}"] = json.dumps(s.filters)
    return conditions_data


def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]:
    conditions_query = """
        SELECT COALESCE(jsonb_agg(ffc ORDER BY condition_id), '[]'::jsonb) AS conditions
@@ -138,9 +210,16 @@ def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str
        WHERE ffc.feature_flag_id = %(feature_flag_id)s
    """

    variants_query = """
        SELECT COALESCE(jsonb_agg(ffv ORDER BY variant_id), '[]'::jsonb) AS variants
        FROM feature_flags_variants AS ffv
        WHERE ffv.feature_flag_id = %(feature_flag_id)s
    """

    sql = f"""
        SELECT {", ".join(["ff." + col for col in feature_flag_columns])},
               ({conditions_query}) AS conditions
               ({conditions_query}) AS conditions,
               ({variants_query}) AS variants
        FROM feature_flags AS ff
        WHERE ff.feature_flag_id = %(feature_flag_id)s
          AND ff.project_id = %(project_id)s
@@ -197,8 +276,10 @@ def update_feature_flag(project_id: int, feature_flag_id: int,
    """
    Update an existing feature flag and return its updated data.
    """
    validate_unique_flag_key(feature_flag_data=feature_flag, project_id=project_id, exclude_id=feature_flag_id)
    validate_multi_variant_flag(feature_flag_data=feature_flag)

    columns = (
        "name",
        "flag_key",
        "description",
        "flag_type",
@@ -228,11 +309,12 @@ def update_feature_flag(project_id: int, feature_flag_id: int,
        row = cur.fetchone()

        if row is None:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Something went wrong.")
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag not found")

        row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
        row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
        row['conditions'] = check_conditions(feature_flag_id, feature_flag.conditions)
        row['variants'] = check_variants(feature_flag_id, feature_flag.variants)

    return {"data": helper.dict_to_camel_case(row)}
@@ -261,6 +343,129 @@ def get_conditions(feature_flag_id: int):
    return rows


def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
    existing_ids = [ev.get("variant_id") for ev in get_variants(feature_flag_id)]
    to_be_deleted = []
    to_be_updated = []
    to_be_created = []

    for vid in existing_ids:
        if vid not in [v.variant_id for v in variants]:
            to_be_deleted.append(vid)

    for variant in variants:
        if variant.variant_id is None:
            to_be_created.append(variant)
        else:
            to_be_updated.append(variant)

    if len(to_be_created) > 0:
        create_variants(feature_flag_id=feature_flag_id, variants=to_be_created)

    if len(to_be_updated) > 0:
        update_variants(feature_flag_id=feature_flag_id, variants=to_be_updated)

    if len(to_be_deleted) > 0:
        delete_variants(feature_flag_id=feature_flag_id, ids=to_be_deleted)

    return get_variants(feature_flag_id)


def get_variants(feature_flag_id: int):
    sql = """
        SELECT
            variant_id,
            feature_flag_id,
            value,
            payload,
            rollout_percentage
        FROM feature_flags_variants
        WHERE feature_flag_id = %(feature_flag_id)s
        ORDER BY variant_id;
    """

    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
        cur.execute(query)
        rows = cur.fetchall()

    return rows


def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> List[Dict[str, Any]]:
    """
    Create new feature flag variants and return their data.
    """
    rows = []

    # insert all variant rows with a single sql query
    if len(variants) > 0:
        columns = (
            "feature_flag_id",
            "value",
            "description",
            "payload",
            "rollout_percentage",
        )

        sql = f"""
            INSERT INTO feature_flags_variants
            (feature_flag_id, value, description, payload, rollout_percentage)
            VALUES {", ".join(["%s"] * len(variants))}
            RETURNING variant_id, {", ".join(columns)}
        """

        with pg_client.PostgresClient() as cur:
            params = [(feature_flag_id, v.value, v.description, v.payload, v.rollout_percentage) for v in variants]
            query = cur.mogrify(sql, params)
            cur.execute(query)
            rows = cur.fetchall()

    return rows


def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
    """
    Update existing feature flag variants and return their updated data.
    """
    values = []
    params = {
        "feature_flag_id": feature_flag_id,
    }
    for i in range(len(variants)):
        values.append(f"(%(variant_id_{i})s, %(value_{i})s, %(rollout_percentage_{i})s, %(payload_{i})s::jsonb)")
        params[f"variant_id_{i}"] = variants[i].variant_id
        params[f"value_{i}"] = variants[i].value
        params[f"rollout_percentage_{i}"] = variants[i].rollout_percentage
        params[f"payload_{i}"] = json.dumps(variants[i].payload)

    sql = f"""
        UPDATE feature_flags_variants
        SET value = c.value, rollout_percentage = c.rollout_percentage, payload = c.payload
        FROM (VALUES {','.join(values)}) AS c(variant_id, value, rollout_percentage, payload)
        WHERE c.variant_id = feature_flags_variants.variant_id AND feature_flag_id = %(feature_flag_id)s;
    """

    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(sql, params)
        cur.execute(query)


def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
    """
    Delete existing feature flag variants.
    """
    sql = """
        DELETE FROM feature_flags_variants
        WHERE variant_id IN %(ids)s
          AND feature_flag_id = %(feature_flag_id)s;
    """

    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
        cur.execute(query)


def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
    existing_ids = [ec.get("condition_id") for ec in get_conditions(feature_flag_id)]
    to_be_deleted = []
@@ -342,7 +547,7 @@ def delete_feature_flag(project_id: int, feature_flag_id: int):
    params = {"project_id": project_id, "feature_flag_id": feature_flag_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""UPDATE feature_flags
                                SET deleted_at= (now() at time zone 'utc')
                                SET deleted_at= (now() at time zone 'utc'), is_active=false
                                WHERE {" AND ".join(conditions)};""", params)
        cur.execute(query)
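For readability, the combined CTE assembled in create_feature_flag above expands to roughly the following for one condition and two variants. This is an illustration reconstructed from the f-strings in the diff, not captured output; the body of flag_sql falls outside the displayed hunk, so the inserted_flag CTE is abbreviated and its RETURNING clause is inferred from the final SELECT.

    # Approximate shape of the query built by create_feature_flag for
    # conditions_len == 1 and variants_len == 2 (illustrative only).
    EXAMPLE_CREATE_FLAG_QUERY = """
    WITH inserted_flag AS (
             INSERT INTO feature_flags (project_id, name, flag_key, description, flag_type,
                                        payload, is_persist, is_active, created_by)
             VALUES (...)  -- parameters from prepare_params_to_create_flag; body elided in the hunk
             RETURNING feature_flag_id
         ),
         inserted_conditions AS (
             INSERT INTO feature_flags_conditions(feature_flag_id, name, rollout_percentage, filters)
                 VALUES ((SELECT feature_flag_id FROM inserted_flag), %(name_0)s, %(rollout_percentage_0)s, %(filters_0)s::jsonb)
                 RETURNING feature_flag_id
         ),
         inserted_variants AS (
             INSERT INTO feature_flags_variants(feature_flag_id, value, description, rollout_percentage, payload)
                 VALUES ((SELECT feature_flag_id FROM inserted_flag), %(v_value_0)s, %(v_description_0)s, %(v_rollout_percentage_0)s, %(v_payload_0)s::jsonb),
                        ((SELECT feature_flag_id FROM inserted_flag), %(v_value_1)s, %(v_description_1)s, %(v_rollout_percentage_1)s, %(v_payload_1)s::jsonb)
                 RETURNING feature_flag_id
         )
    SELECT feature_flag_id FROM inserted_flag;
    """

Building everything in one statement keeps the flag, its conditions, and its variants in a single round trip to Postgres.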
@@ -1,7 +1,8 @@
from enum import Enum
from typing import Optional, List, Union, Literal
from typing import Optional, List, Union, Literal, Any

from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator, validator
from pydantic.types import Json

from chalicelib.utils.TimeUTC import TimeUTC
@@ -1034,11 +1035,11 @@ class __CardSchema(BaseModel):

class CardSchema(__CardSchema, CardChartSchema):
    view_type: Union[MetricTimeseriesViewType, \
        MetricTableViewType, MetricOtherViewType] = Field(...)
        MetricTableViewType, MetricOtherViewType] = Field(...)
    metric_type: MetricType = Field(...)
    metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \
        MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
        MetricOfClickMap] = Field(default=MetricOfTable.user_id)
        MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
        MetricOfClickMap] = Field(default=MetricOfTable.user_id)
    metric_value: List[IssueType] = Field(default=[])
    is_template: bool = Field(default=False)
@@ -1210,7 +1211,7 @@ class LiveSessionSearchFilterSchema(BaseModel):
    type: LiveFilterType = Field(...)
    source: Optional[str] = Field(default=None)
    operator: Literal[SearchEventOperator._is, \
        SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)
        SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)

    transform = root_validator(pre=True, allow_reuse=True)(transform_old_FilterType)
@@ -1381,6 +1382,17 @@ class GetHeatmapPayloadSchema(BaseModel):
        alias_generator = attribute_to_camel_case


class FeatureFlagVariant(BaseModel):
    variant_id: Optional[int] = Field(default=None)
    value: str = Field(...)
    description: Optional[str] = Field(default=None)
    payload: Optional[str] = Field(default=None)
    rollout_percentage: Optional[int] = Field(default=0, ge=0, le=100)

    class Config:
        alias_generator = attribute_to_camel_case


class FeatureFlagCondition(BaseModel):
    condition_id: Optional[int] = Field(default=None)
    name: str = Field(...)
@@ -1408,13 +1420,14 @@ class FeatureFlagType(str, Enum):


class FeatureFlagSchema(BaseModel):
    name: str = Field(...)
    payload: Optional[str] = Field(default=None)
    flag_key: str = Field(...)
    description: Optional[str] = Field(None)
    flag_type: FeatureFlagType = Field(default=FeatureFlagType.single_variant)
    is_persist: Optional[bool] = Field(default=False)
    is_active: Optional[bool] = Field(default=True)
    conditions: List[FeatureFlagCondition] = Field(default=[], min_items=1)
    variants: List[FeatureFlagVariant] = Field(default=[])

    class Config:
        alias_generator = attribute_to_camel_case
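Because these models set alias_generator = attribute_to_camel_case, API payloads use camelCase keys while the Python attributes stay snake_case. A minimal sketch, assuming pydantic v1 (which the error types asserted in the test file below suggest) and application code that imports the schemas module:

    import schemas

    # Hypothetical round trip: camelCase keys in, snake_case attributes out.
    variant = schemas.FeatureFlagVariant.parse_obj({"value": "control", "rolloutPercentage": 25})
    assert variant.rollout_percentage == 25
    assert variant.variant_id is None  # defaults apply when a key is absent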
0   api/test/__init__.py   Normal file
186 api/test/test_feature_flag.py   Normal file
@@ -0,0 +1,186 @@
import json

from pydantic.error_wrappers import ValidationError

import schemas
from chalicelib.core.feature_flags import prepare_conditions_values, prepare_variants_values


class TestFeatureFlag:
    def test_prepare_conditions_values(self):
        feature_flag_data = schemas.FeatureFlagSchema(
            flagKey="flag_2",
            conditions=[
                schemas.FeatureFlagCondition(
                    name="Condition 2",
                    rolloutPercentage=75,
                    filters=[{"key": "value1"}]
                ),
                schemas.FeatureFlagCondition(
                    name="Condition 3",
                    rolloutPercentage=25,
                    filters=[{"key": "value2"}]
                )
            ]
        )
        expected_output = {
            'condition_id_0': None,
            "name_0": "Condition 2",
            "rollout_percentage_0": 75,
            "filters_0": json.dumps([{"key": "value1"}]),
            'condition_id_1': None,
            "name_1": "Condition 3",
            "rollout_percentage_1": 25,
            "filters_1": json.dumps([{"key": "value2"}])
        }
        assert prepare_conditions_values(feature_flag_data) == expected_output

    def test_feature_flag_schema_validation(self):
        try:
            schemas.FeatureFlagSchema(
                flagKey="valid_flag",
                conditions=[
                    schemas.FeatureFlagCondition(name="Condition 1", rollout_percentage=50),
                    schemas.FeatureFlagCondition(name="Condition 2", rollout_percentage=25)
                ],
                variants=[
                    schemas.FeatureFlagVariant(value="Variant 1", rollout_percentage=50),
                    schemas.FeatureFlagVariant(value="Variant 2", rollout_percentage=50)
                ]
            )
        except ValidationError:
            assert False, "Valid data should not raise ValidationError"

        try:
            schemas.FeatureFlagSchema()
        except ValidationError as e:
            assert len(e.errors()) == 1
            for error in e.errors():
                assert error["type"] == "value_error.missing"
                assert error["loc"] in [("flagKey",)]
        else:
            assert False, "Invalid data should raise ValidationError"

    def test_feature_flag_variant_schema_validation(self):
        try:
            schemas.FeatureFlagVariant(
                value="Variant Value",
                description="Variant Description",
                # payload={"key": "value"},
                rolloutPercentage=50
            )
        except ValidationError:
            assert False, "Valid data should not raise ValidationError"

        try:
            schemas.FeatureFlagVariant()
        except ValidationError as e:
            assert len(e.errors()) == 1
            error = e.errors()[0]
            assert error["type"] == "value_error.missing"
            assert error["loc"] == ("value",)
        else:
            assert False, "Invalid data should raise ValidationError"

    def test_feature_flag_condition_schema_validation(self):
        try:
            schemas.FeatureFlagCondition(
                name="Condition Name",
                rolloutPercentage=50,
                filters=[{"key": "value"}]
            )
        except ValidationError:
            assert False, "Valid data should not raise ValidationError"

        try:
            schemas.FeatureFlagCondition()
        except ValidationError as e:
            assert len(e.errors()) == 1
            error = e.errors()[0]
            assert error["type"] == "value_error.missing"
            assert error["loc"] == ("name",)
        else:
            assert False, "Invalid data should raise ValidationError"

    def test_search_flags_schema_validation(self):
        try:
            schemas.SearchFlagsSchema(
                limit=15,
                user_id=123,
                order=schemas.SortOrderType.desc,
                query="search term",
                is_active=True
            )
        except ValidationError:
            assert False, "Valid data should not raise ValidationError"

        try:
            schemas.SearchFlagsSchema(
                limit=500,
                user_id=-1,
                order="invalid",
                query="a" * 201,
                isActive=None
            )
        except ValidationError as e:
            assert len(e.errors()) == 2
            assert e.errors()[0]["ctx"] == {'limit_value': 200}
            assert e.errors()[0]["type"] == "value_error.number.not_le"

            assert e.errors()[1]["msg"] == "value is not a valid enumeration member; permitted: 'ASC', 'DESC'"
            assert e.errors()[1]["type"] == "type_error.enum"
        else:
            assert False, "Invalid data should raise ValidationError"

    def test_prepare_variants_values_single_variant(self):
        feature_flag_data = schemas.FeatureFlagSchema(
            flagKey="flag_1",
            variants=[
                schemas.FeatureFlagVariant(
                    value="Variant 1",
                    description="Description 1",
                    # payload="{'key': 'value1'}",
                    rolloutPercentage=50
                )
            ]
        )
        expected_output = {
            "v_value_0": "Variant 1",
            "v_description_0": "Description 1",
            # "payload_0": json.dumps({"key": "value1"}),
            'v_payload_0': 'null',
            "v_rollout_percentage_0": 50
        }
        assert prepare_variants_values(feature_flag_data) == expected_output

    def test_prepare_variants_values_multiple_variants(self):
        feature_flag_data = schemas.FeatureFlagSchema(
            flagKey="flag_2",
            variants=[
                schemas.FeatureFlagVariant(
                    value="Variant 1",
                    description="Description 1",
                    # payload="{'key': 'value1'}",
                    rolloutPercentage=50
                ),
                schemas.FeatureFlagVariant(
                    value="Variant 2",
                    description="Description 2",
                    # payload="{'key': 'value1'}",
                    rolloutPercentage=50
                )
            ]
        )
        expected_output = {
            "v_value_0": "Variant 1",
            "v_description_0": "Description 1",
            # "payload_0": json.dumps({"key": "value1"}),
            'v_payload_0': 'null',
            "v_rollout_percentage_0": 50,
            "v_value_1": "Variant 2",
            "v_description_1": "Description 2",
            # "payload_1": json.dumps({"key": "value2"}),
            'v_payload_1': 'null',
            "v_rollout_percentage_1": 50
        }
        assert prepare_variants_values(feature_flag_data) == expected_output
@@ -23,9 +23,9 @@ CREATE TABLE IF NOT EXISTS public.feature_flags
(
    feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    name text NOT NULL,
    flag_key text NOT NULL,
    description text NOT NULL,
    description text DEFAULT NULL::text,
    payload text DEFAULT NULL::text,
    flag_type text NOT NULL,
    is_persist boolean NOT NULL DEFAULT FALSE,
    is_active boolean NOT NULL DEFAULT FALSE,
@@ -38,6 +38,9 @@ CREATE TABLE IF NOT EXISTS public.feature_flags

CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);

ALTER TABLE feature_flags
    ADD CONSTRAINT unique_project_flag_deleted UNIQUE (project_id, flag_key, deleted_at);

CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
(
    condition_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
@@ -47,6 +50,16 @@ CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
    filters jsonb NOT NULL DEFAULT '[]'::jsonb
);

CREATE TABLE IF NOT EXISTS public.feature_flags_variants
(
    variant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    feature_flag_id integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
    value text NOT NULL,
    description text DEFAULT NULL::text,
    payload jsonb DEFAULT NULL,
    rollout_percentage integer DEFAULT 0
);

ALTER TABLE IF EXISTS public.sessions
    ADD COLUMN IF NOT EXISTS user_city text,
    ADD COLUMN IF NOT EXISTS user_state text;