refactor(chalice): changed product analytics search payload

parent 522a985ef3
commit 7f0bc100f5

3 changed files with 62 additions and 64 deletions
@@ -8,17 +8,23 @@ from chalicelib.utils.ch_client import ClickHouseClient
 logger = logging.getLogger(__name__)
 
 
-def get_events(project_id: int):
+def get_events(project_id: int, page: schemas.PaginatedSchema):
     with ClickHouseClient() as ch_client:
         r = ch_client.format(
-            """SELECT event_name, display_name
+            """SELECT COUNT(1) OVER () AS total,
+                      event_name, display_name
               FROM product_analytics.all_events
               WHERE project_id=%(project_id)s
-              ORDER BY display_name;""",
-            parameters={"project_id": project_id})
-        x = ch_client.execute(r)
-
-        return helper.list_to_camel_case(x)
+              ORDER BY display_name
+              LIMIT %(limit)s OFFSET %(offset)s;""",
+            parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
+        rows = ch_client.execute(r)
+        if len(rows) == 0:
+            return {"total": 0, "list": []}
+        total = rows[0]["total"]
+        for row in rows:
+            row.pop("total")
+        return {"total": total, "list": rows}
 
 
 def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
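Note on the get_events change above: COUNT(1) OVER () is a window function evaluated over the full result set before LIMIT/OFFSET is applied, so every returned row carries the same grand total. A minimal sketch of the unpacking pattern used in the new code, with made-up row values:

# Rows as ClickHouse would return them for a page of size 2 (values illustrative):
rows = [{"total": 42, "event_name": "click", "display_name": "Click"},
        {"total": 42, "event_name": "scroll", "display_name": "Scroll"}]
total = rows[0]["total"] if len(rows) > 0 else 0  # same value on every row
for row in rows:
    row.pop("total")  # strip the helper column before returning the page
result = {"total": total, "list": rows}  # -> {"total": 42, "list": [...]}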
@@ -29,52 +35,47 @@ def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
     constraints = ["project_id = %(projectId)s",
                    "created_at >= toDateTime(%(startDate)s/1000)",
                    "created_at <= toDateTime(%(endDate)s/1000)"]
+    ev_constraints = []
     for i, f in enumerate(data.filters):
-        f.value = helper.values_for_operator(value=f.value, op=f.operator)
-        f_k = f"f_value{i}"
-        full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
-        op = sh.get_sql_operator(f.operator)
-        is_any = sh.isAny_opreator(f.operator)
-        is_undefined = sh.isUndefined_operator(f.operator)
-        full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
-        if f.is_predefined:
-            column = f.name
-        else:
-            column = f"properties.{f.name}"
-
-        if is_any:
-            condition = f"isNotNull({column})"
-        elif is_undefined:
-            condition = f"isNull({column})"
-        else:
-            condition = sh.multi_conditions(f"{column} {op} %({f_k})s", f.value, value_key=f_k)
-        constraints.append(condition)
-
-    ev_constraints = []
-    for i, e in enumerate(data.events):
-        e_k = f"e_value{i}"
-        full_args = {**full_args, e_k: e.event_name}
-        condition = f"`$event_name` = %({e_k})s"
-        sub_conditions = []
-        if len(e.properties.filters) > 0:
-            for j, f in enumerate(e.properties.filters):
-                p_k = f"e_{i}_p_{j}"
-                full_args = {**full_args, **sh.multi_values(f.value, value_key=p_k)}
-                if f.is_predefined:
-                    sub_condition = f"{f.name} {op} %({p_k})s"
-                else:
-                    sub_condition = f"properties.{f.name} {op} %({p_k})s"
-                sub_conditions.append(sh.multi_conditions(sub_condition, f.value, value_key=p_k))
-        if len(sub_conditions) > 0:
-            condition += " AND ("
-            for j, c in enumerate(sub_conditions):
-                if j > 0:
-                    condition += " " + e.properties.operators[j - 1] + " " + c
-                else:
-                    condition += c
-            condition += ")"
-        ev_constraints.append(condition)
+        if not f.is_event:
+            f.value = helper.values_for_operator(value=f.value, op=f.operator)
+            f_k = f"f_value{i}"
+            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
+            op = sh.get_sql_operator(f.operator)
+            is_any = sh.isAny_opreator(f.operator)
+            is_undefined = sh.isUndefined_operator(f.operator)
+            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
+            if f.is_predefined:
+                column = f.name
+            else:
+                column = f"properties.{f.name}"
+
+            if is_any:
+                condition = f"isNotNull({column})"
+            elif is_undefined:
+                condition = f"isNull({column})"
+            else:
+                condition = sh.multi_conditions(f"{column} {op} %({f_k})s", f.value, value_key=f_k)
+            constraints.append(condition)
+        else:
+            e_k = f"e_value{i}"
+            full_args = {**full_args, e_k: f.name}
+            condition = f"`$event_name` = %({e_k})s"
+            sub_conditions = []
+            for j, ef in enumerate(f.properties.filters):
+                p_k = f"e_{i}_p_{j}"
+                full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k)}
+                op = sh.get_sql_operator(ef.operator)
+                if ef.is_predefined:
+                    sub_condition = f"{ef.name} {op} %({p_k})s"
+                else:
+                    sub_condition = f"properties.{ef.name} {op} %({p_k})s"
+                sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
+            if len(sub_conditions) > 0:
+                condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"
+
+            ev_constraints.append(condition)
 
     constraints.append("(" + " OR ".join(ev_constraints) + ")")
     query = ch_client.format(
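Note on the rewritten search_events loop above: events and plain property filters now arrive in one filters list and are told apart by the is_event tag; event constraints are collected in ev_constraints and OR-ed together, while property constraints stay AND-ed in constraints. A rough sketch of the string composition for one event with two sub-filters (placeholder keys and values, not the real payload):

condition = "`$event_name` = %(e_value0)s"
sub_conditions = ["properties.page = %(e_0_p_0)s",
                  "properties.device = %(e_0_p_1)s"]
group_operator = "and"  # corresponds to f.properties.operator in the diff
condition += " AND (" + (" " + group_operator + " ").join(sub_conditions) + ")"
# -> `$event_name` = %(e_value0)s AND (properties.page = %(e_0_p_0)s and properties.device = %(e_0_p_1)s)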
@@ -1,5 +1,4 @@
-from typing import Optional, List
+from typing import Optional, List, Literal, Union, Annotated
 from pydantic import Field
 
 from .overrides import BaseModel
@@ -8,12 +7,16 @@ from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
 
 
 class EventSearchSchema(BaseModel):
-    event_name: str = Field(...)
+    is_event: Literal[True] = True
+    name: str = Field(...)
     properties: Optional[EventPropertiesSchema] = Field(default=None)
 
 
+ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
+                                          Field(discriminator='is_event')]
+
+
 class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
-    events: List[EventSearchSchema] = Field(default_factory=list, description="operator between events is OR")
-    filters: List[PropertyFilterSchema] = Field(default_factory=list, description="operator between filters is AND")
+    filters: List[ProductAnalyticsGroupedFilter] = Field(...)
     sort: str = Field(default="startTs")
     order: SortOrderType = Field(default=SortOrderType.DESC)
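Note on ProductAnalyticsGroupedFilter above: Field(discriminator='is_event') tells pydantic to dispatch on the literal is_event tag instead of trying each union member in turn. A minimal sketch of how a mixed payload would validate, assuming pydantic v2 (the TypeAdapter usage and the field values are illustrative):

from typing import List
from pydantic import TypeAdapter

adapter = TypeAdapter(List[ProductAnalyticsGroupedFilter])
parsed = adapter.validate_python([
    {"is_event": True, "name": "page_view"},      # routed to EventSearchSchema
    {"is_event": False, "name": "user_country",   # routed to PropertyFilterSchema
     "operator": "is", "value": ["FR"]},
])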
@@ -580,6 +580,7 @@ class EventPredefinedPropertyType(str, Enum):
 
 
 class PropertyFilterSchema(BaseModel):
+    is_event: Literal[False] = False
     name: Union[EventPredefinedPropertyType, str] = Field(...)
     operator: Union[SearchEventOperator, MathOperator] = Field(...)
     value: List[Union[int, str]] = Field(...)
@@ -598,16 +599,9 @@ class PropertyFilterSchema(BaseModel):
 
 
 class EventPropertiesSchema(BaseModel):
-    operators: List[Literal["and", "or"]] = Field(...)
+    operator: Literal["and", "or"] = Field(...)
     filters: List[PropertyFilterSchema] = Field(...)
-
-    @model_validator(mode="after")
-    def event_filter_validator(self):
-        assert len(self.filters) == 0 \
-               or len(self.operators) == len(self.filters) - 1, \
-            "Number of operators must match the number of filter-1"
-        return self
 
 
 class SessionSearchEventSchema2(BaseModel):
     is_event: Literal[True] = True
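Note on the EventPropertiesSchema change above: a single operator now joins every sub-filter in the group, so the per-gap operators list and the validator that kept its length at len(filters) - 1 are no longer needed. A sketch of the simplification, with placeholder condition strings:

# Old shape: one operator per gap, e.g. ["and", "or"] for three filters.
# New shape: one operator for the whole group:
operator = "and"
conditions = ["c1", "c2", "c3"]
joined = "(" + (" " + operator + " ").join(conditions) + ")"  # "(c1 and c2 and c3)"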