diff --git a/api/app.py b/api/app.py
index 7b16c308b..2f368d5b9 100644
--- a/api/app.py
+++ b/api/app.py
@@ -1,4 +1,5 @@
 import logging
+import time
 from contextlib import asynccontextmanager
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -10,8 +11,8 @@ from starlette.responses import StreamingResponse
 
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
-from routers import core, core_dynamic
 from crons import core_crons, core_dynamic_crons
+from routers import core, core_dynamic
 from routers.subs import insights, metrics, v1_api, health
 
 loglevel = config("LOGLEVEL", default=logging.WARNING)
@@ -54,13 +55,18 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)
 
 
 @app.middleware('http')
 async def or_middleware(request: Request, call_next):
     if helper.TRACK_TIME:
-        import time
-        now = int(time.time() * 1000)
-    response: StreamingResponse = await call_next(request)
+        now = time.time()
+    try:
+        response: StreamingResponse = await call_next(request)
+    except:
+        logging.error(f"{request.method}: {request.url.path} FAILED!")
+        raise
+    if response.status_code // 100 != 2:
+        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
     if helper.TRACK_TIME:
-        now = int(time.time() * 1000) - now
-        if now > 1500:
-            logging.warning(f"Execution time: {now} ms for {request.method}:{request.url.path}")
+        now = time.time() - now
+        if now > 2:
+            logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
     return response
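A note on the reworked or_middleware (the hunk in ee/api/app.py below is identical, so the same applies there): time.time() is a wall clock, so NTP adjustments can skew the measured duration, and the bare except: also catches asyncio.CancelledError, so a client disconnect gets logged as a FAILED request before being re-raised. A possible variant, sketched assuming the same app, helper.TRACK_TIME flag, and 2-second threshold as above — not a drop-in from this PR:

```python
import logging
import time

from fastapi import Request
from starlette.responses import StreamingResponse

@app.middleware('http')
async def or_middleware(request: Request, call_next):
    if helper.TRACK_TIME:
        start = time.perf_counter()  # monotonic: safe for measuring elapsed time
    try:
        response: StreamingResponse = await call_next(request)
    except Exception:
        # `except Exception` lets CancelledError/KeyboardInterrupt propagate unlogged
        logging.error(f"{request.method}: {request.url.path} FAILED!")
        raise
    if response.status_code // 100 != 2:
        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
    if helper.TRACK_TIME:
        elapsed = time.perf_counter() - start
        if elapsed > 2:
            logging.warning(f"Execution time: {elapsed:.2f} s for {request.method}: {request.url.path}")
    return response
```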
diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py
index bdfa973d2..cd41d6caf 100644
--- a/api/chalicelib/core/custom_metrics.py
+++ b/api/chalicelib/core/custom_metrics.py
@@ -1,5 +1,6 @@
 import json
 import logging
+from typing import List
 
 from decouple import config
 from fastapi import HTTPException, status
@@ -290,25 +291,32 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
 
 
 def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
-    if len(data.series) == 0:
+    if len(data.filters) > 0 or len(data.series) > 0:
+        filters = [f.model_dump(by_alias=True) for f in data.filters] \
+                  + [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
+    else:
         return {"data": {}}
+
     search_data = schemas.SessionsSearchPayloadSchema(
         startTimestamp=data.startTimestamp,
         endTimestamp=data.endTimestamp,
         limit=data.limit,
         page=data.page,
-        filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
+        filters=filters
     )
-    for s in data.start_point:
-        if data.start_type == "end":
-            search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
-                                                                         operator=s.operator,
-                                                                         value=s.value))
-        else:
-            search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
-                                                                            operator=s.operator,
-                                                                            value=s.value))
+    if len(search_data.events) == 0:
+        return {"data": {}}
+
+    # for s in data.start_point:
+    #     if data.start_type == "end":
+    #         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
+    #                                                                      operator=s.operator,
+    #                                                                      value=s.value))
+    #     else:
+    #         search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
+    #                                                                         operator=s.operator,
+    #                                                                         value=s.value))
     for s in data.excludes:
         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                      operator=schemas.SearchEventOperator._not_on,
diff --git a/api/schemas/schemas.py b/api/schemas/schemas.py
index a5f31cf5d..32a13bc02 100644
--- a/api/schemas/schemas.py
+++ b/api/schemas/schemas.py
@@ -728,12 +728,10 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
         if len(values.get("events", [])) > 0:
             for v in values["events"]:
                 v["isEvent"] = True
-            for v in values.get("filters", []):
+
+        for v in values.get("filters", []):
+            if v.get("isEvent") is None:
                 v["isEvent"] = False
-        else:
-            for v in values.get("filters", []):
-                if v.get("isEvent") is None:
-                    v["isEvent"] = False
         return values
 
     @model_validator(mode="before")
@@ -746,13 +744,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
                 f["value"] = vals
         return values
 
-    @model_validator(mode="before")
-    def __force_is_event(cls, values):
-        for v in values.get("filters", []):
-            if v.get("isEvent") is None:
-                v["isEvent"] = EventType.has_value(v["type"]) or PerformanceEventType.has_value(v["type"])
-        return values
-
     @model_validator(mode="after")
     def split_filters_events(cls, values):
         n_filters = []
@@ -983,15 +974,36 @@ class MetricOfPathAnalysis(str, Enum):
     session_count = MetricOfTimeseries.session_count.value
 
 
-class CardSessionsSchema(SessionsSearchPayloadSchema):
+# class CardSessionsSchema(SessionsSearchPayloadSchema):
+class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
     startTimestamp: int = Field(default=TimeUTC.now(-7))
     endTimestamp: int = Field(default=TimeUTC.now())
     density: int = Field(default=7, ge=1, le=200)
     series: List[CardSeriesSchema] = Field(default=[])
+    # events: List[SessionSearchEventSchema2] = Field(default=[], doc_hidden=True)
+    filters: List[GroupedFilterType] = Field(default=[])
+
     # Used mainly for PathAnalysis, and could be used by other cards
     hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")
 
+    @model_validator(mode="before")
+    def __force_is_event(cls, values):
+        for v in values.get("filters", []):
+            if v.get("isEvent") is None:
+                v["isEvent"] = ProductAnalyticsSelectedEventType.has_value(v["type"])
+        return values
+
+    @model_validator(mode="before")
+    def remove_wrong_filter_values(cls, values):
+        for f in values.get("filters", []):
+            vals = []
+            for v in f.get("value", []):
+                if v is not None:
+                    vals.append(v)
+            f["value"] = vals
+        return values
+
     @model_validator(mode="before")
     def __enforce_default(cls, values):
         if values.get("startTimestamp") is None:
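The two new mode="before" validators on CardSessionsSchema run against the raw request payload, before any field parsing. A minimal, self-contained sketch of that behavior — the Card model below is a simplified stand-in, not OpenReplay code. It also shows why the values.get("filters", []) default matters: a plain values.get("filters") returns None when the payload omits the key, and iterating it would raise a TypeError.

```python
from typing import List
from pydantic import BaseModel, Field, model_validator

class Card(BaseModel):
    filters: List[dict] = Field(default=[])

    @model_validator(mode="before")
    @classmethod
    def remove_wrong_filter_values(cls, values):
        # `values` is the raw payload dict; drop None entries from each filter's value list
        for f in values.get("filters", []):
            f["value"] = [v for v in f.get("value", []) if v is not None]
        return values

print(Card(filters=[{"type": "location", "value": ["/checkout", None]}]).filters)
# [{'type': 'location', 'value': ['/checkout']}]
```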
diff --git a/ee/api/app.py b/ee/api/app.py
index a1041f75d..ec2a5282b 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -1,5 +1,6 @@
 import logging
 import queue
+import time
 from contextlib import asynccontextmanager
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -71,13 +72,18 @@ async def or_middleware(request: Request, call_next):
         return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN)
     if helper.TRACK_TIME:
-        import time
-        now = int(time.time() * 1000)
-    response: StreamingResponse = await call_next(request)
+        now = time.time()
+    try:
+        response: StreamingResponse = await call_next(request)
+    except:
+        logging.error(f"{request.method}: {request.url.path} FAILED!")
+        raise
+    if response.status_code // 100 != 2:
+        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
     if helper.TRACK_TIME:
-        now = int(time.time() * 1000) - now
-        if now > 1500:
-            logging.warning(f"Execution time: {now} ms for {request.method}:{request.url.path}")
+        now = time.time() - now
+        if now > 2:
+            logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
     return response
diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py
index 2a1741ff2..d0ad72eda 100644
--- a/ee/api/chalicelib/core/custom_metrics.py
+++ b/ee/api/chalicelib/core/custom_metrics.py
@@ -310,25 +310,32 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
 
 
 def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
-    if len(data.series) == 0:
+    if len(data.filters) > 0 or len(data.series) > 0:
+        filters = [f.model_dump(by_alias=True) for f in data.filters] \
+                  + [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
+    else:
         return {"data": {}}
+
     search_data = schemas.SessionsSearchPayloadSchema(
         startTimestamp=data.startTimestamp,
         endTimestamp=data.endTimestamp,
         limit=data.limit,
         page=data.page,
-        filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
+        filters=filters
     )
-    for s in data.start_point:
-        if data.start_type == "end":
-            search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
-                                                                         operator=s.operator,
-                                                                         value=s.value))
-        else:
-            search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
-                                                                            operator=s.operator,
-                                                                            value=s.value))
+    if len(search_data.events) == 0:
+        return {"data": {}}
+
+    # for s in data.start_point:
+    #     if data.start_type == "end":
+    #         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
+    #                                                                      operator=s.operator,
+    #                                                                      value=s.value))
+    #     else:
+    #         search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
+    #                                                                         operator=s.operator,
+    #                                                                         value=s.value))
     for s in data.excludes:
         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                      operator=schemas.SearchEventOperator._not_on,
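Both copies of __get_path_analysis_issues share one edge case worth flagging: the new guard lets a payload through when data.filters is non-empty but data.series is empty, and the next line then indexes data.series[0], which raises an IndexError. A defensive shape, sketched with a hypothetical helper (_path_analysis_filters is illustrative, not part of this PR); since ee/api mirrors api/ line for line, any fix would need to land in both files:

```python
from typing import List

import schemas  # the module referenced throughout this PR

def _path_analysis_filters(data: schemas.CardPathAnalysis) -> List[dict]:
    # Card-level filters always apply; the first series' filters only when a series exists.
    filters = [f.model_dump(by_alias=True) for f in data.filters]
    if len(data.series) > 0:
        filters += [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
    return filters

# Inside __get_path_analysis_issues, the guard could then read:
#     if len(data.filters) == 0 and len(data.series) == 0:
#         return {"data": {}}
#     filters = _path_analysis_filters(data)
```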