feat(metrics): add metrics search functionality

Implement new search_metrics function in custom_metrics.py to allow
filtering and sorting of metrics. Add corresponding endpoint in the
metrics router and supporting schema classes in schemas.py. The new
implementation provides pagination, filtering, and sorting capabilities
for metrics.
This commit is contained in:
Shekar Siri 2025-03-10 14:57:56 +01:00
parent 687ab05f22
commit cef251db6a
3 changed files with 658 additions and 234 deletions

View file

@ -352,6 +352,108 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def search_metrics(project_id, user_id, data: schemas.MetricSearchSchema, include_series=False):
    """Paginated search over a project's metrics (cards).

    Supports ownership/visibility filters (``mine_only`` / ``shared_only``),
    an optional metric-type filter, a free-text filter over metric name and
    owner name, and whitelist-validated sorting.

    :param project_id: project to search in
    :param user_id: requesting user; scopes private metrics and dashboards
    :param data: pagination / filter / sort payload
    :param include_series: when True, each row also carries its series with
        the stored filter payloads flattened to the new format
    :return: ``{"total": <matching-row count>, "list": <camelCased rows>}``
    """
    constraints = ["metrics.project_id = %(project_id)s", "metrics.deleted_at ISNULL"]
    params = {
        "project_id": project_id,
        "user_id": user_id,
        "offset": (data.page - 1) * data.limit,
        "limit": data.limit,
    }
    if data.mine_only:
        constraints.append("user_id = %(user_id)s")
    else:
        constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
    if data.shared_only:
        constraints.append("is_public")
    if data.filter is not None:
        if data.filter.type:
            constraints.append("metrics.metric_type = %(filter_type)s")
            params["filter_type"] = data.filter.type
        if data.filter.query and len(data.filter.query) > 0:
            constraints.append(
                "(metrics.name ILIKE %(filter_query)s OR owner.owner_name ILIKE %(filter_query)s)")
            params["filter_query"] = helper.values_for_operator(
                value=data.filter.query, op=schemas.SearchEventOperator.CONTAINS)

    # SECURITY: the sort column and direction are interpolated into the SQL
    # text below (identifiers/keywords cannot be bound as parameters), so both
    # MUST be validated against whitelists — never used verbatim from the
    # client payload. Unknown values fall back to safe defaults.
    allowed_sort_columns = {"created_at", "edited_at", "name", "metric_type"}
    sort_column = data.sort.field if data.sort.field in allowed_sort_columns else "created_at"
    raw_order = data.sort.order.value if hasattr(data.sort.order, "value") else data.sort.order
    # the UI sends "ascend"/"descend"; normalize to SQL keywords
    sort_order = {"ascend": "ASC", "asc": "ASC"}.get(str(raw_order).lower(), "DESC")

    with pg_client.PostgresClient() as cur:
        count_query = cur.mogrify(
            f"""SELECT COUNT(*)
                FROM metrics
                         LEFT JOIN LATERAL (SELECT email AS owner_email, name AS owner_name
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id) AS owner ON (TRUE)
                WHERE {" AND ".join(constraints)};""",
            params)
        cur.execute(count_query)
        total = cur.fetchone()["count"]

        sub_join = ""
        if include_series:
            sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                             FROM metric_series
                                             WHERE metric_series.metric_id = metrics.metric_id
                                               AND metric_series.deleted_at ISNULL) AS metric_series ON (TRUE)"""
        query = cur.mogrify(
            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
                       metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
                       dashboards, owner_email, owner_name, default_config AS config, thumbnail
                FROM metrics
                         {sub_join}
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public, name),'[]'::jsonb) AS dashboards
                                            FROM (SELECT DISTINCT dashboard_id, name, is_public
                                                  FROM dashboards
                                                           INNER JOIN dashboard_widgets USING (dashboard_id)
                                                  WHERE deleted_at ISNULL
                                                    AND dashboard_widgets.metric_id = metrics.metric_id
                                                    AND project_id = %(project_id)s
                                                    AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
                                            ) AS connected_dashboards ON (TRUE)
                         LEFT JOIN LATERAL (SELECT email AS owner_email, name AS owner_name
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id) AS owner ON (TRUE)
                WHERE {" AND ".join(constraints)}
                ORDER BY {sort_column} {sort_order}
                LIMIT %(limit)s OFFSET %(offset)s;""",
            params)
        cur.execute(query)
        rows = cur.fetchall()
        if include_series:
            # upgrade each stored series filter to the flat payload format
            for r in rows:
                for s in r.get("series", []):
                    s["filter"] = helper.old_search_payload_to_flat(s["filter"])
        else:
            # NOTE(review): timestamps are converted only when series are NOT
            # included — mirrors the original behavior; confirm intentional.
            for r in rows:
                r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
                r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
        rows = helper.list_to_camel_case(rows)
    return {"total": total, "list": rows}
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"]

View file

@ -9,172 +9,330 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.post("/{projectId}/dashboards", tags=["dashboard"])
def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    """Create a new dashboard in the project, owned by the current user."""
    return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
@app.get("/{projectId}/dashboards", tags=["dashboard"])
def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    """List all dashboards of the project visible to the current user."""
    result = dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)
    return {"data": result}
@app.get("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def get_dashboard(projectId: int, dashboardId: int,
                  context: schemas.CurrentContext = Depends(OR_context)):
    """Fetch one dashboard; returns an error payload when it does not exist."""
    dashboard = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id,
                                         dashboard_id=dashboardId)
    if dashboard is None:
        return {"errors": ["dashboard not found"]}
    return {"data": dashboard}
@app.put("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def update_dashboard(projectId: int, dashboardId: int,
                     data: schemas.EditDashboardSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    """Update a dashboard's attributes (name, visibility, ...)."""
    updated = dashboards.update_dashboard(project_id=projectId, user_id=context.user_id,
                                          dashboard_id=dashboardId, data=data)
    return {"data": updated}
@app.delete("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def delete_dashboard(projectId: int, dashboardId: int, _=Body(None),
                     context: schemas.CurrentContext = Depends(OR_context)):
    """Delete a dashboard; the unused body param keeps older clients working."""
    return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id,
                                       dashboard_id=dashboardId)
@app.get("/{projectId}/dashboards/{dashboardId}/pin", tags=["dashboard"])
def pin_dashboard(projectId: int, dashboardId: int,
                  context: schemas.CurrentContext = Depends(OR_context)):
    """Pin a dashboard for the current user."""
    pinned = dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id,
                                      dashboard_id=dashboardId)
    return {"data": pinned}
@app.post("/{projectId}/dashboards/{dashboardId}/cards", tags=["cards"])
def add_card_to_dashboard(projectId: int, dashboardId: int,
                          data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
    """Attach an existing card to a dashboard as a widget."""
    widget = dashboards.add_widget(project_id=projectId, user_id=context.user_id,
                                   dashboard_id=dashboardId, data=data)
    return {"data": widget}
@app.post("/{projectId}/dashboards/{dashboardId}/metrics", tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
                                       data: schemas.CardSchema = Body(...),
                                       context: schemas.CurrentContext = Depends(OR_context)):
    """Create a new metric (card) and attach it to the given dashboard in one call."""
    widget = dashboards.create_metric_add_widget(project=context.project, user_id=context.user_id,
                                                 dashboard_id=dashboardId, data=data)
    return {"data": widget}
@app.put("/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}", tags=["dashboard"])
def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
                               data: schemas.UpdateWidgetPayloadSchema = Body(...),
                               context: schemas.CurrentContext = Depends(OR_context)):
    """Update a widget's configuration/position inside a dashboard."""
    return dashboards.update_widget(project_id=projectId, user_id=context.user_id,
                                    dashboard_id=dashboardId, widget_id=widgetId, data=data)
@app.delete("/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}", tags=["dashboard"])
def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None),
                                 context: schemas.CurrentContext = Depends(OR_context)):
    """Detach a widget from a dashboard (the card itself is not deleted)."""
    return dashboards.remove_widget(project_id=projectId, user_id=context.user_id,
                                    dashboard_id=dashboardId, widget_id=widgetId)
@app.post("/{projectId}/cards/try", tags=["cards"])
def try_card(projectId: int, data: schemas.CardSchema = Body(...),
             context: schemas.CurrentContext = Depends(OR_context)):
    """Preview a card's chart without persisting the card."""
    chart = custom_metrics.get_chart(project=context.project, data=data, user_id=context.user_id)
    return {"data": chart}
@app.post("/{projectId}/cards/try/sessions", tags=["cards"])
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    """Preview the sessions a card's series would match, without persisting it."""
    sessions = custom_metrics.get_sessions(project=context.project, user_id=context.user_id,
                                           data=data)
    return {"data": sessions}
@app.post("/{projectId}/cards/try/issues", tags=["cards"])
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    """Preview the issues a card would surface, without persisting it."""
    issues = custom_metrics.get_issues(project=context.project, user_id=context.user_id, data=data)
    return {"data": issues}
@app.get("/{projectId}/cards", tags=["cards"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    """List all cards of the project visible to the current user."""
    cards = custom_metrics.get_all(project_id=projectId, user_id=context.user_id)
    return {"data": cards}
@app.post("/{projectId}/cards", tags=["cards"])
def create_card(projectId: int, data: schemas.CardSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    """Create and persist a new card for the current user."""
    return custom_metrics.create_card(project=context.project, user_id=context.user_id, data=data)
@app.post("/{projectId}/cards/search", tags=["cards"])
def search_cards(projectId: int, data: schemas.MetricSearchSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    """Search the project's cards with pagination, filtering and sorting."""
    found = custom_metrics.search_metrics(project_id=projectId, user_id=context.user_id, data=data)
    return {"data": found}
@app.get("/{projectId}/cards/{metric_id}", tags=["cards"])
def get_card(projectId: int, metric_id: Union[int, str],
             context: schemas.CurrentContext = Depends(OR_context)):
    """Fetch one card by id; rejects non-numeric ids with an error payload.

    ``metric_id`` is declared ``Union[int, str]``, so FastAPI coerces numeric
    path values to ``int`` before this handler runs. The original code called
    ``metric_id.isnumeric()`` unconditionally, which raises ``AttributeError``
    on an ``int`` — guard on the runtime type first.
    """
    if isinstance(metric_id, str):
        if metric_id.isnumeric():
            metric_id = int(metric_id)
        else:
            return {"errors": ["invalid card_id"]}
    data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id,
                                   metric_id=metric_id)
    if data is None:
        return {"errors": ["card not found"]}
    return {"data": data}
@app.post("/{projectId}/cards/{metric_id}/sessions", tags=["cards"])
def get_card_sessions(projectId: int, metric_id: int,
                      data: schemas.CardSessionsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    """List the sessions matched by a stored card; errors when the card is missing."""
    sessions = custom_metrics.get_sessions_by_card_id(project=context.project,
                                                      user_id=context.user_id,
                                                      metric_id=metric_id, data=data)
    if sessions is None:
        return {"errors": ["custom metric not found"]}
    return {"data": sessions}
@app.post("/{projectId}/cards/{metric_id}/issues/{issueId}/sessions", tags=["dashboard"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
                                     data: schemas.CardSessionsSchema = Body(...),
                                     context: schemas.CurrentContext = Depends(OR_context)):
    """List the sessions behind one funnel issue of a card; errors when the card is missing."""
    result = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId,
                                                         user_id=context.user_id,
                                                         metric_id=metric_id,
                                                         issue_id=issueId, data=data)
    if result is None:
        return {"errors": ["custom metric not found"]}
    return {"data": result}
@app.post("/{projectId}/cards/{metric_id}/chart", tags=["card"])
def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...),
                   context: schemas.CurrentContext = Depends(OR_context)):
    """Compute chart data for a stored card over the requested time range."""
    chart = custom_metrics.make_chart_from_card(project=context.project, user_id=context.user_id,
                                                metric_id=metric_id, data=data)
    return {"data": chart}
@app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"])
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    """Update an existing card; errors when it does not exist."""
    updated = custom_metrics.update_card(project_id=projectId, user_id=context.user_id,
                                         metric_id=metric_id, data=data)
    if updated is None:
        return {"errors": ["custom metric not found"]}
    return {"data": updated}
@app.post("/{projectId}/cards/{metric_id}/status", tags=["dashboard"])
def update_card_state(projectId: int, metric_id: int,
                      data: schemas.UpdateCardStatusSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    """Toggle a card's active status."""
    state = custom_metrics.change_state(project_id=projectId, user_id=context.user_id,
                                        metric_id=metric_id, status=data.active)
    return {"data": state}
@app.delete("/{projectId}/cards/{metric_id}", tags=["dashboard"])
def delete_card(projectId: int, metric_id: int, _=Body(None),
                context: schemas.CurrentContext = Depends(OR_context)):
    """Delete a card; the unused body param keeps older clients working."""
    deleted = custom_metrics.delete_card(project_id=projectId, user_id=context.user_id,
                                         metric_id=metric_id)
    return {"data": deleted}

View file

@ -7,29 +7,43 @@ from pydantic.functional_validators import BeforeValidator
from chalicelib.utils.TimeUTC import TimeUTC
from .overrides import BaseModel, Enum, ORUnion
from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
from .transformers_validators import (
transform_email,
remove_whitespace,
remove_duplicate_values,
single_to_list,
force_is_event,
NAME_PATTERN,
int_to_string,
check_alphanumeric,
)
class _GRecaptcha(BaseModel):
    """Mixin for payloads that may carry a Google reCAPTCHA response token."""
    # alias matches the dashed form key posted by the browser widget
    g_recaptcha_response: Optional[str] = Field(default=None, alias="g-recaptcha-response")
class UserLoginSchema(_GRecaptcha):
    """Credentials payload for user login."""
    email: EmailStr = Field(...)
    password: SecretStr = Field(...)

    # normalize the e-mail before field validation
    _transform_email = field_validator("email", mode="before")(transform_email)
class UserSignupSchema(UserLoginSchema):
    """Signup payload: login credentials plus user and organization names."""
    fullname: str = Field(..., min_length=1)
    organizationName: str = Field(..., min_length=1)

    # strip stray whitespace, then enforce alphanumeric content on both names
    _transform_fullname = field_validator("fullname", mode="before")(remove_whitespace)
    _transform_organizationName = field_validator("organizationName", mode="before")(remove_whitespace)
    _check_alphanumeric = field_validator("fullname", "organizationName")(check_alphanumeric)
class EditAccountSchema(BaseModel):
@ -37,15 +51,17 @@ class EditAccountSchema(BaseModel):
tenantName: Optional[str] = Field(default=None)
opt_out: Optional[bool] = Field(default=None)
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_transform_tenantName = field_validator('tenantName', mode='before')(remove_whitespace)
_check_alphanumeric = field_validator('name', 'tenantName')(check_alphanumeric)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
_transform_tenantName = field_validator("tenantName", mode="before")(
remove_whitespace
)
_check_alphanumeric = field_validator("name", "tenantName")(check_alphanumeric)
class ForgetPasswordPayloadSchema(_GRecaptcha):
    """Password-reset request payload."""
    email: EmailStr = Field(...)

    # normalize the e-mail before field validation
    _transform_email = field_validator("email", mode="before")(transform_email)
class EditUserPasswordSchema(BaseModel):
@ -57,7 +73,7 @@ class CreateProjectSchema(BaseModel):
name: str = Field(default="my first project", pattern=NAME_PATTERN)
platform: Literal["web", "ios"] = Field(default="web")
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
class ProjectContext(BaseModel):
@ -77,7 +93,7 @@ class CurrentContext(CurrentAPIContext):
email: EmailStr = Field(...)
role: str = Field(...)
_transform_email = field_validator('email', mode='before')(transform_email)
_transform_email = field_validator("email", mode="before")(transform_email)
@computed_field
@property
@ -99,8 +115,8 @@ class AddCollaborationSchema(BaseModel):
name: str = Field(..., pattern=NAME_PATTERN)
url: HttpUrl = Field(...)
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_transform_url = field_validator('url', mode='before')(remove_whitespace)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
_transform_url = field_validator("url", mode="before")(remove_whitespace)
class EditCollaborationSchema(AddCollaborationSchema):
@ -123,12 +139,15 @@ class _TimedSchema(BaseModel):
@model_validator(mode="after")
def __time_validator(self):
if self.startTimestamp is not None:
assert 0 <= self.startTimestamp, "startTimestamp must be greater or equal to 0"
assert (
0 <= self.startTimestamp
), "startTimestamp must be greater or equal to 0"
if self.endTimestamp is not None:
assert 0 <= self.endTimestamp, "endTimestamp must be greater or equal to 0"
if self.startTimestamp is not None and self.endTimestamp is not None:
assert self.startTimestamp <= self.endTimestamp, \
"endTimestamp must be greater or equal to startTimestamp"
assert (
self.startTimestamp <= self.endTimestamp
), "endTimestamp must be greater or equal to startTimestamp"
return self
@ -150,7 +169,7 @@ class IssueTrackingJiraSchema(IssueTrackingIntegration):
username: str = Field(...)
url: HttpUrl = Field(...)
@field_validator('url')
@field_validator("url")
@classmethod
def transform_url(cls, v: HttpUrl):
return HttpUrl.build(scheme=v.scheme.lower(), host=v.host.lower())
@ -163,7 +182,7 @@ class WebhookSchema(BaseModel):
auth_header: Optional[str] = Field(default=None)
name: str = Field(default="", max_length=100, pattern=NAME_PATTERN)
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
class CreateMemberSchema(BaseModel):
@ -172,8 +191,8 @@ class CreateMemberSchema(BaseModel):
email: EmailStr = Field(...)
admin: Optional[bool] = Field(default=False)
_transform_email = field_validator('email', mode='before')(transform_email)
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_transform_email = field_validator("email", mode="before")(transform_email)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
class EditMemberSchema(BaseModel):
@ -181,9 +200,9 @@ class EditMemberSchema(BaseModel):
email: EmailStr = Field(...)
admin: bool = Field(default=False)
_transform_email = field_validator('email', mode='before')(transform_email)
_transform_name = field_validator('name', mode='before')(remove_whitespace)
_check_alphanumeric = field_validator('name')(check_alphanumeric)
_transform_email = field_validator("email", mode="before")(transform_email)
_transform_name = field_validator("name", mode="before")(remove_whitespace)
_check_alphanumeric = field_validator("name")(check_alphanumeric)
class EditPasswordByInvitationSchema(BaseModel):
@ -198,7 +217,7 @@ class AssignmentSchema(BaseModel):
title: str = Field(...)
issue_type: str = Field(...)
_transform_title = field_validator('title', mode='before')(remove_whitespace)
_transform_title = field_validator("title", mode="before")(remove_whitespace)
class CommentAssignmentSchema(BaseModel):
@ -294,14 +313,14 @@ class MetadataSchema(BaseModel):
index: Optional[int] = Field(default=None)
key: str = Field(...)
_transform_key = field_validator('key', mode='before')(remove_whitespace)
_transform_key = field_validator("key", mode="before")(remove_whitespace)
class _AlertMessageSchema(BaseModel):
type: str = Field(...)
value: str = Field(...)
_transform_value = field_validator('value', mode='before')(int_to_string)
_transform_value = field_validator("value", mode="before")(int_to_string)
class AlertDetectionType(str, Enum):
@ -319,7 +338,9 @@ class _AlertOptionSchema(BaseModel):
class AlertColumn(str, Enum):
PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE = "performance.dom_content_loaded.average"
PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE = "performance.first_meaningful_paint.average"
PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE = (
"performance.first_meaningful_paint.average"
)
PERFORMANCE__PAGE_LOAD_TIME__AVERAGE = "performance.page_load_time.average"
PERFORMANCE__DOM_BUILD_TIME__AVERAGE = "performance.dom_build_time.average"
PERFORMANCE__SPEED_INDEX__AVERAGE = "performance.speed_index.average"
@ -490,30 +511,30 @@ class SearchEventOrder(str, Enum):
class IssueType(str, Enum):
    """Closed set of issue-type identifiers (web and iOS trackers)."""
    # web issues
    CLICK_RAGE = "click_rage"
    DEAD_CLICK = "dead_click"
    EXCESSIVE_SCROLLING = "excessive_scrolling"
    BAD_REQUEST = "bad_request"
    MISSING_RESOURCE = "missing_resource"
    MEMORY = "memory"
    CPU = "cpu"
    SLOW_RESOURCE = "slow_resource"
    SLOW_PAGE_LOAD = "slow_page_load"
    CRASH = "crash"
    CUSTOM = "custom"
    JS_EXCEPTION = "js_exception"
    MOUSE_THRASHING = "mouse_thrashing"
    # iOS-specific
    TAP_RAGE = "tap_rage"
class MetricFormatType(str, Enum):
    """Value-format options for a basic metric."""
    SESSION_COUNT = "sessionCount"
class MetricExtendedFormatType(str, Enum):
    """Value-format options for metrics that can also count distinct users."""
    SESSION_COUNT = "sessionCount"
    USER_COUNT = "userCount"
class FetchFilterType(str, Enum):
@ -540,8 +561,13 @@ class RequestGraphqlFilterSchema(BaseModel):
@model_validator(mode="before")
@classmethod
def _transform_data(cls, values):
if values.get("type") in [FetchFilterType.FETCH_DURATION, FetchFilterType.FETCH_STATUS_CODE]:
values["value"] = [int(v) for v in values["value"] if v is not None and str(v).isnumeric()]
if values.get("type") in [
FetchFilterType.FETCH_DURATION,
FetchFilterType.FETCH_STATUS_CODE,
]:
values["value"] = [
int(v) for v in values["value"] if v is not None and str(v).isnumeric()
]
return values
@ -554,8 +580,10 @@ class SessionSearchEventSchema2(BaseModel):
sourceOperator: Optional[MathOperator] = Field(default=None)
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
_remove_duplicate_values = field_validator("value", mode="before")(
remove_duplicate_values
)
_single_to_list_values = field_validator("value", mode="before")(single_to_list)
@model_validator(mode="after")
def event_validator(self):
@ -563,24 +591,32 @@ class SessionSearchEventSchema2(BaseModel):
if self.type == PerformanceEventType.FETCH_FAILED:
return self
assert self.sourceOperator is not None, \
"sourceOperator should not be null for PerformanceEventType"
assert (
self.sourceOperator is not None
), "sourceOperator should not be null for PerformanceEventType"
assert self.source is not None, f"source is required for {self.type}"
assert isinstance(self.source, list), f"source of type list is required for {self.type}"
assert isinstance(
self.source, list
), f"source of type list is required for {self.type}"
for c in self.source:
assert isinstance(c, int), f"source value should be of type int for {self.type}"
assert isinstance(
c, int
), f"source value should be of type int for {self.type}"
elif self.type == EventType.ERROR and self.source is None:
self.source = [ErrorSource.JS_EXCEPTION]
elif self.type == EventType.REQUEST_DETAILS:
assert isinstance(self.filters, List) and len(self.filters) > 0, \
f"filters should be defined for {EventType.REQUEST_DETAILS}"
assert (
isinstance(self.filters, List) and len(self.filters) > 0
), f"filters should be defined for {EventType.REQUEST_DETAILS}"
elif self.type == EventType.GRAPHQL:
assert isinstance(self.filters, List) and len(self.filters) > 0, \
f"filters should be defined for {EventType.GRAPHQL}"
assert (
isinstance(self.filters, List) and len(self.filters) > 0
), f"filters should be defined for {EventType.GRAPHQL}"
if isinstance(self.operator, ClickEventExtraOperator):
assert self.type == EventType.CLICK, \
f"operator:{self.operator} is only available for event-type: {EventType.CLICK}"
assert (
self.type == EventType.CLICK
), f"operator:{self.operator} is only available for event-type: {EventType.CLICK}"
return self
@ -591,8 +627,10 @@ class SessionSearchFilterSchema(BaseModel):
operator: Union[SearchEventOperator, MathOperator] = Field(...)
source: Optional[Union[ErrorSource, str]] = Field(default=None)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
_remove_duplicate_values = field_validator("value", mode="before")(
remove_duplicate_values
)
_single_to_list_values = field_validator("value", mode="before")(single_to_list)
@model_validator(mode="before")
@classmethod
@ -610,33 +648,44 @@ class SessionSearchFilterSchema(BaseModel):
@model_validator(mode="after")
def filter_validator(self):
    """Coerce `value`/`operator` into their enum types based on the filter type.

    Raises:
        ValueError: when a value or operator does not belong to the enum
            expected for the given filter type.
    """
    if self.type == FilterType.METADATA:
        # metadata filters are meaningless without the metadata key (source)
        assert (
            self.source is not None and len(self.source) > 0
        ), "must specify a valid 'source' for metadata filter"
    elif self.type == FilterType.ISSUE:
        for i, v in enumerate(self.value):
            if IssueType.has_value(v):
                self.value[i] = IssueType(v)
            else:
                raise ValueError(
                    f"value should be of type IssueType for {self.type} filter"
                )
    elif self.type == FilterType.PLATFORM:
        for i, v in enumerate(self.value):
            if PlatformType.has_value(v):
                self.value[i] = PlatformType(v)
            else:
                raise ValueError(
                    f"value should be of type PlatformType for {self.type} filter"
                )
    elif self.type == FilterType.EVENTS_COUNT:
        if MathOperator.has_value(self.operator):
            self.operator = MathOperator(self.operator)
        else:
            raise ValueError(
                f"operator should be of type MathOperator for {self.type} filter"
            )
        # events-count is numeric: every value must be an int
        for v in self.value:
            assert isinstance(
                v, int
            ), f"value should be of type int for {self.type} filter"
    else:
        # all remaining filter types use the generic search operators
        if SearchEventOperator.has_value(self.operator):
            self.operator = SearchEventOperator(self.operator)
        else:
            raise ValueError(
                f"operator should be of type SearchEventOperator for {self.type} filter"
            )
    return self
@ -653,19 +702,26 @@ class SortOrderType(str, Enum):
def add_missing_is_event(values: dict) -> dict:
    """Default the `isEvent` flag from the filter's type when the client omitted it.

    A filter is considered an event when its type belongs to any of the
    event-type enums; the result is written back into `values["isEvent"]`.
    """
    if values.get("isEvent") is None:
        values["isEvent"] = (
            EventType.has_value(values["type"])
            or PerformanceEventType.has_value(values["type"])
            or ProductAnalyticsSelectedEventType.has_value(values["type"])
        )
    return values
# this type is created to allow mixing events&filters and specifying a discriminator
# (`is_event` picks the concrete schema; add_missing_is_event fills it in first)
GroupedFilterType = Annotated[
    Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
    Field(discriminator="is_event"),
    BeforeValidator(add_missing_is_event),
]
class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
events: List[SessionSearchEventSchema2] = Field(
default_factory=list, doc_hidden=True
)
filters: List[GroupedFilterType] = Field(default_factory=list)
sort: str = Field(default="startTs")
order: SortOrderType = Field(default=SortOrderType.DESC)
@ -709,8 +765,9 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
for v in f.get("value", []):
if f.get("type", "") == FilterType.DURATION.value and v is None:
v = 0
if v is not None and (f.get("type", "") != FilterType.DURATION.value
or str(v).isnumeric()):
if v is not None and (
f.get("type", "") != FilterType.DURATION.value or str(v).isnumeric()
):
vals.append(v)
f["value"] = vals
return values
@ -741,9 +798,14 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
continue
j = i + 1
while j < len(values):
if values[i].type == values[j].type \
and values[i].operator == values[j].operator \
and (values[i].type != FilterType.METADATA or values[i].source == values[j].source):
if (
values[i].type == values[j].type
and values[i].operator == values[j].operator
and (
values[i].type != FilterType.METADATA
or values[i].source == values[j].source
)
):
values[i].value += values[j].value
del values[j]
else:
@ -755,16 +817,16 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
class ErrorStatus(str, Enum):
    """Resolution status used when listing/filtering errors."""

    ALL = "all"
    UNRESOLVED = "unresolved"
    RESOLVED = "resolved"
    IGNORED = "ignored"
class ErrorSort(str, Enum):
    """Sort key for error listings."""

    OCCURRENCE = "occurrence"
    USERS_COUNT = "users"
    SESSIONS_COUNT = "sessions"
class SearchErrorsSchema(SessionsSearchPayloadSchema):
@ -787,7 +849,9 @@ class PathAnalysisSubFilterSchema(BaseModel):
type: ProductAnalyticsSelectedEventType = Field(...)
operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_remove_duplicate_values = field_validator("value", mode="before")(
remove_duplicate_values
)
@model_validator(mode="before")
@classmethod
@ -799,26 +863,36 @@ class PathAnalysisSubFilterSchema(BaseModel):
class _ProductAnalyticsFilter(BaseModel):
    """Non-event filter for product-analytics (PathAnalysis) series."""

    is_event: Literal[False] = False
    type: FilterType
    operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(
        ...
    )
    value: List[Union[IssueType, PlatformType, int, str]] = Field(...)
    source: Optional[str] = Field(default=None)

    _remove_duplicate_values = field_validator("value", mode="before")(
        remove_duplicate_values
    )
class _ProductAnalyticsEventFilter(BaseModel):
    """Event filter for product-analytics (PathAnalysis) series."""

    is_event: Literal[True] = True
    type: ProductAnalyticsSelectedEventType
    operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(
        ...
    )
    # TODO: support session metadata filters
    value: List[Union[IssueType, PlatformType, int, str]] = Field(...)

    _remove_duplicate_values = field_validator("value", mode="before")(
        remove_duplicate_values
    )
# this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
ProductAnalyticsFilter = Annotated[
    Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter],
    Field(discriminator="is_event"),
]
class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
@ -826,8 +900,9 @@ class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
filters: List[ProductAnalyticsFilter] = Field(default_factory=list)
type: Optional[str] = Field(default=None)
_transform_filters = field_validator('filters', mode='before') \
(force_is_event(events_enum=[ProductAnalyticsSelectedEventType]))
_transform_filters = field_validator("filters", mode="before")(
force_is_event(events_enum=[ProductAnalyticsSelectedEventType])
)
class MobileSignPayloadSchema(BaseModel):
@ -924,8 +999,9 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
# Used mainly for PathAnalysis, and could be used by other cards
hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")
_transform_filters = field_validator('filters', mode='before') \
(force_is_event(events_enum=[EventType, PerformanceEventType]))
_transform_filters = field_validator("filters", mode="before")(
force_is_event(events_enum=[EventType, PerformanceEventType])
)
@model_validator(mode="before")
@classmethod
@ -1051,12 +1127,17 @@ class CardTable(__CardSchema):
metric_type: Literal[MetricType.TABLE]
metric_of: MetricOfTable = Field(default=MetricOfTable.USER_ID)
view_type: MetricTableViewType = Field(...)
metric_format: MetricExtendedFormatType = Field(default=MetricExtendedFormatType.SESSION_COUNT)
metric_format: MetricExtendedFormatType = Field(
default=MetricExtendedFormatType.SESSION_COUNT
)
@model_validator(mode="before")
@classmethod
def __enforce_default(cls, values):
    """Clear metricValue unless the card is an ISSUES table (values only apply there)."""
    if (
        values.get("metricOf") is not None
        and values.get("metricOf") != MetricOfTable.ISSUES
    ):
        values["metricValue"] = []
    return values
@ -1067,12 +1148,18 @@ class CardTable(__CardSchema):
@model_validator(mode="after")
def __validator(self):
    """Restrict USER_COUNT format to the table kinds that support it."""
    # only these metricOf kinds support a non-session-count format
    if self.metric_of not in (
        MetricOfTable.ISSUES,
        MetricOfTable.USER_BROWSER,
        MetricOfTable.USER_DEVICE,
        MetricOfTable.USER_COUNTRY,
        MetricOfTable.VISITED_URL,
        MetricOfTable.REFERRER,
        MetricOfTable.FETCH,
    ):
        assert (
            self.metric_format == MetricExtendedFormatType.SESSION_COUNT
        ), f"metricFormat:{MetricExtendedFormatType.USER_COUNT.value} is not supported for this metricOf"
    return self
@ -1080,7 +1167,9 @@ class CardFunnel(__CardSchema):
metric_type: Literal[MetricType.FUNNEL]
metric_of: MetricOfFunnels = Field(default=MetricOfFunnels.SESSION_COUNT)
view_type: MetricOtherViewType = Field(...)
metric_format: MetricExtendedFormatType = Field(default=MetricExtendedFormatType.SESSION_COUNT)
metric_format: MetricExtendedFormatType = Field(
default=MetricExtendedFormatType.SESSION_COUNT
)
@model_validator(mode="before")
@classmethod
@ -1122,10 +1211,16 @@ class CardPathAnalysisSeriesSchema(CardSeriesSchema):
@model_validator(mode="before")
@classmethod
def __enforce_default(cls, values):
    """Build a default PathAnalysis filter from the series' time range when absent."""
    if (
        values.get("filter") is None
        and values.get("startTimestamp")
        and values.get("endTimestamp")
    ):
        values["filter"] = PathAnalysisSchema(
            startTimestamp=values["startTimestamp"],
            endTimestamp=values["endTimestamp"],
            density=values.get("density", 4),
        )
    return values
@ -1175,20 +1270,23 @@ class CardPathAnalysis(__CardSchema):
for f in self.excludes:
exclude_values[f.type] = exclude_values.get(f.type, []) + f.value
assert len(
self.start_point) <= 1, \
f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
assert (
len(self.start_point) <= 1
), f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
for t in exclude_values:
for v in t:
assert v not in s_e_values.get(t, []), f"startPoint and endPoint cannot be excluded, value: {v}"
assert v not in s_e_values.get(
t, []
), f"startPoint and endPoint cannot be excluded, value: {v}"
return self
# Union of cards-schemas that doesn't change between FOSS and EE
__cards_union_base = Union[
    CardTimeSeries, CardTable, CardFunnel, CardHeatMap, CardPathAnalysis
]
# discriminated union: metric_type selects the concrete card schema
CardSchema = ORUnion(__cards_union_base, discriminator="metric_type")
class UpdateCardStatusSchema(BaseModel):
@ -1216,7 +1314,7 @@ class ProjectSettings(BaseModel):
class CreateDashboardSchema(BaseModel):
    """Payload for creating a dashboard."""

    name: str = Field(..., min_length=1)
    description: Optional[str] = Field(default="")
    is_public: bool = Field(default=False)
    is_pinned: bool = Field(default=False)
    # ids of cards to attach to the new dashboard
    metrics: Optional[List[int]] = Field(default_factory=list)
@ -1269,13 +1367,16 @@ class LiveSessionSearchFilterSchema(BaseModel):
value: Union[List[str], str] = Field(...)
type: LiveFilterType = Field(...)
source: Optional[str] = Field(default=None)
operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] \
= Field(default=SearchEventOperator.CONTAINS)
operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] = Field(
default=SearchEventOperator.CONTAINS
)
@model_validator(mode="after")
def __validator(self):
    """Require a non-empty metadata key (source) for METADATA live filters."""
    if self.type is not None and self.type == LiveFilterType.METADATA:
        assert (
            self.source is not None
        ), "source should not be null for METADATA type"
        assert len(self.source) > 0, "source should not be empty for METADATA type"
    return self
@ -1293,7 +1394,10 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
if values.get("filters") is not None:
i = 0
while i < len(values["filters"]):
if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0:
if (
values["filters"][i]["value"] is None
or len(values["filters"][i]["value"]) == 0
):
del values["filters"][i]
else:
i += 1
@ -1349,8 +1453,11 @@ class SessionUpdateNoteSchema(SessionNoteSchema):
@model_validator(mode="after")
def __validator(self):
    """Reject empty updates: at least one mutable attribute must be set."""
    assert (
        self.message is not None
        or self.timestamp is not None
        or self.is_public is not None
    ), "at least 1 attribute should be provided for update"
    return self
@ -1368,13 +1475,56 @@ class SearchCardsSchema(_PaginatedSchema):
query: Optional[str] = Field(default=None)
class MetricSortColumnType(str, Enum):
    """Columns a metrics search can be sorted by."""

    NAME = "name"
    METRIC_TYPE = "metric_type"
    METRIC_OF = "metric_of"
    IS_PUBLIC = "is_public"
    CREATED_AT = "created_at"
    EDITED_AT = "edited_at"
class MetricFilterColumnType(str, Enum):
    """Columns a metrics search can be filtered on (superset of sort columns, adds USER_ID)."""

    NAME = "name"
    METRIC_TYPE = "metric_type"
    METRIC_OF = "metric_of"
    IS_PUBLIC = "is_public"
    USER_ID = "user_id"
    CREATED_AT = "created_at"
    EDITED_AT = "edited_at"
class MetricListSort(BaseModel):
    """Sort specification (column + direction) for a metrics search."""

    # NOTE(review): free-form field name; presumably validated against allowed
    # sort columns by the query builder — confirm against search_metrics
    field: Optional[str] = Field(default=None)
    # SortOrderType is a str-enum, so the enum default satisfies the str type
    order: Optional[str] = Field(default=SortOrderType.DESC)
class MetricFilter(BaseModel):
    """Optional filter criteria for a metrics search."""

    # metric_type to match; None disables type filtering
    type: Optional[str] = Field(default=None)
    # free-text query matched (ILIKE) against metric name and owner name
    query: Optional[str] = Field(default=None)
class MetricSearchSchema(_PaginatedSchema):
    """Paginated payload for searching saved metrics (filter + sort + scope flags)."""

    filter: Optional[MetricFilter] = Field(default=None)
    # default_factory avoids sharing one MetricListSort instance as the
    # schema-level default across every parsed payload
    sort: Optional[MetricListSort] = Field(default_factory=MetricListSort)
    # restrict results to public metrics only
    shared_only: bool = Field(default=False)
    # restrict results to the requesting user's own metrics
    mine_only: bool = Field(default=False)
class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
    """Session-search event narrowed to LOCATION events for heat-map search."""

    type: Literal[EventType.LOCATION] = Field(...)
class HeatMapSessionsSearch(SessionsSearchPayloadSchema):
events: Optional[List[_HeatMapSearchEventRaw]] = Field(default_factory=list)
filters: List[Union[SessionSearchFilterSchema, _HeatMapSearchEventRaw]] = Field(default_factory=list)
filters: List[Union[SessionSearchFilterSchema, _HeatMapSearchEventRaw]] = Field(
default_factory=list
)
@model_validator(mode="before")
@classmethod
@ -1383,13 +1533,21 @@ class HeatMapSessionsSearch(SessionsSearchPayloadSchema):
if f.get("type") == FilterType.DURATION:
return values
values["filters"] = values.get("filters", [])
values["filters"].append({"value": [5000], "type": FilterType.DURATION,
"operator": SearchEventOperator.IS, "filters": []})
values["filters"].append(
{
"value": [5000],
"type": FilterType.DURATION,
"operator": SearchEventOperator.IS,
"filters": [],
}
)
return values
class HeatMapFilterSchema(BaseModel):
    """Issue filter accepted by heat-map endpoints (click-rage / dead-click only)."""

    value: List[Literal[IssueType.CLICK_RAGE, IssueType.DEAD_CLICK]] = Field(
        default_factory=list
    )
    type: Literal[FilterType.ISSUE] = Field(...)
    operator: Literal[SearchEventOperator.IS, MathOperator.EQUAL] = Field(...)
@ -1398,8 +1556,12 @@ class GetHeatMapPayloadSchema(_TimedSchema):
url: Optional[str] = Field(default=None)
filters: List[HeatMapFilterSchema] = Field(default_factory=list)
click_rage: bool = Field(default=False)
operator: Literal[SearchEventOperator.IS, SearchEventOperator.STARTS_WITH,
SearchEventOperator.CONTAINS, SearchEventOperator.ENDS_WITH] = Field(default=SearchEventOperator.STARTS_WITH)
operator: Literal[
SearchEventOperator.IS,
SearchEventOperator.STARTS_WITH,
SearchEventOperator.CONTAINS,
SearchEventOperator.ENDS_WITH,
] = Field(default=SearchEventOperator.STARTS_WITH)
class GetClickMapPayloadSchema(GetHeatMapPayloadSchema):
@ -1420,7 +1582,9 @@ class FeatureFlagConditionFilterSchema(BaseModel):
value: List[str] = Field(default_factory=list, min_length=1)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
source: Optional[str] = Field(default=None)
sourceOperator: Optional[Union[SearchEventOperator, MathOperator]] = Field(default=None)
sourceOperator: Optional[Union[SearchEventOperator, MathOperator]] = Field(
default=None
)
@model_validator(mode="before")
@classmethod
@ -1455,7 +1619,7 @@ class FeatureFlagStatus(BaseModel):
class FeatureFlagSchema(BaseModel):
    """Feature-flag create/update payload."""

    payload: Optional[str] = Field(default=None)
    # flag keys are restricted to alphanumerics and dashes
    flag_key: str = Field(..., pattern=r"^[a-zA-Z0-9\-]+$")
    description: Optional[str] = Field(default=None)
    flag_type: FeatureFlagType = Field(default=FeatureFlagType.SINGLE_VARIANT)
    is_persist: Optional[bool] = Field(default=False)
@ -1482,7 +1646,7 @@ class ModuleStatus(BaseModel):
class TagUpdate(BaseModel):
    """Payload for renaming a tag."""

    # allows letters, digits, double quotes, spaces and dashes
    name: str = Field(..., min_length=1, max_length=100, pattern='^[a-zA-Z0-9" -]*$')
class TagCreate(TagUpdate):