API v1.15.0 (#1541)
* feat(chalice): upgraded dependencies
* feat(chalice): changed path analysis schema
* feat(DB): click coordinate support
* feat(chalice): changed path analysis issues schema
* feat(chalice): upgraded dependencies
* fix(chalice): fixed pydantic issue
* refactor(chalice): refresh token validator
* feat(chalice): role restrictions
* feat(chalice): EE path analysis changes
* refactor(DB): changed creation queries
* refactor(DB): changed delete queries
* feat(DB): support new path analysis payload
* feat(chalice): save path analysis card
* feat(chalice): restrict access
* feat(chalice): EE save new path analysis card
* refactor(chalice): path analysis
* feat(chalice): path analysis new query
* fix(chalice): configurable CH config
* fix(chalice): assist autocomplete
* refactor(chalice): refactored permissions
* refactor(chalice): changed log level
* refactor(chalice): upgraded dependencies
* refactor(chalice): changed path analysis query
* refactor(chalice): upgraded dependencies
* refactor(alerts): upgraded dependencies
* refactor(crons): upgraded dependencies
* feat(chalice): path analysis ignore start point
* feat(chalice): path analysis in progress
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis new query
* refactor(chalice): authorizers
* refactor(chalice): refactored authorizer
* fix(chalice): fixed create card of PathAnalysis
* refactor(chalice): compute link-percentage for Path Analysis (see the sketch after this list)
* refactor(chalice): remove null starting point from Path Analysis
* feat(chalice): path analysis CH query
* refactor(chalice): changed Path Analysis links-value
* fix(chalice): fixed search notes for EE
* feat(chalice): path analysis enhanced query results
* feat(chalice): include timezone in search sessions response
* refactor(chalice): refactored logs
* feat(chalice): get path analysis issues
* fix(chalice): fixed path analysis issues pagination
* fix(chalice): sessions-search handle null values
* feat(chalice): PathAnalysis start event support middle-event matching
* feat(chalice): PathAnalysis support mixed events with start-point
* fix(chalice): PathAnalysis fixed eventType value when metricValue is missing
* fix(chalice): PathAnalysis fixed wrong super-class model for update card
* fix(chalice): PathAnalysis fixed search issues
* refactor(chalice): upgraded dependencies
* fix(chalice): enforce isEvent if missing
* refactor(chalice): refactored custom-metrics
* refactor(chalice): small changes
* feat(chalice): path analysis EE new query
* fix(chalice): fixed hide-excess state for Path Analysis
* fix(chalice): fixed update start point and excludes for Path Analysis
* fix(chalice): fix payload validation
* fix(chalice): fix update widget endpoint
* fix(chalice): fix add member
* refactor(chalice): upgraded dependencies
* refactor!(chalice): upgraded SAML dependencies
* feat(chalice): ios-project support 1/5
* refactor(chalice): changed logs handling
* fix(chalice): fix path analysis issues list
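For the "compute link-percentage for Path Analysis" item, one way to read it is that each transition's session count is expressed as a share of all transitions leaving the same source node. A rough illustrative sketch of that computation (the link structure and field names here are assumptions, not the actual query output):

from collections import defaultdict
from typing import Dict, List

def add_link_percentages(links: List[Dict]) -> List[Dict]:
    # links: e.g. [{"source": "A", "target": "B", "sessionsCount": 40}, ...]  (hypothetical shape)
    totals: Dict[str, int] = defaultdict(int)
    for link in links:
        totals[link["source"]] += link["sessionsCount"]
    for link in links:
        total = totals[link["source"]]
        # Share of sessions that took this edge out of its source node.
        link["percentage"] = round(100 * link["sessionsCount"] / total, 2) if total else 0
    return links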
parent f2594f06ae
commit 9f75be6a4f

5 changed files with 87 additions and 48 deletions
api/app.py (20 changed lines)
@@ -1,4 +1,5 @@
import logging
import time
from contextlib import asynccontextmanager

from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -10,8 +11,8 @@ from starlette.responses import StreamingResponse
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health

loglevel = config("LOGLEVEL", default=logging.WARNING)
@@ -54,13 +55,18 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)
@app.middleware('http')
async def or_middleware(request: Request, call_next):
    if helper.TRACK_TIME:
        import time
        now = int(time.time() * 1000)
    response: StreamingResponse = await call_next(request)
        now = time.time()
    try:
        response: StreamingResponse = await call_next(request)
    except:
        logging.error(f"{request.method}: {request.url.path} FAILED!")
        raise
    if response.status_code // 100 != 2:
        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
    if helper.TRACK_TIME:
        now = int(time.time() * 1000) - now
        if now > 1500:
            logging.warning(f"Execution time: {now} ms for {request.method}:{request.url.path}")
        now = time.time() - now
        if now > 2:
            logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
    return response
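The hunk above reworks the request middleware: timing moves from a millisecond counter to seconds, the downstream call is wrapped so failures are logged before re-raising, and non-2xx responses are logged as warnings. A minimal standalone sketch of the same pattern (the app, TRACK_TIME constant, threshold, and logger setup here are illustrative stand-ins, not the project's actual configuration):

import logging
import time

from fastapi import FastAPI, Request

TRACK_TIME = True       # stand-in for helper.TRACK_TIME
SLOW_THRESHOLD_S = 2    # warn when a request takes longer than 2 seconds

app = FastAPI()

@app.middleware("http")
async def timing_middleware(request: Request, call_next):
    start = time.time() if TRACK_TIME else None
    try:
        response = await call_next(request)
    except Exception:
        # Log the failing route before re-raising so the error stays traceable.
        logging.error(f"{request.method}: {request.url.path} FAILED!")
        raise
    if response.status_code // 100 != 2:
        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
    if TRACK_TIME:
        elapsed = time.time() - start
        if elapsed > SLOW_THRESHOLD_S:
            logging.warning(f"Execution time: {elapsed} s for {request.method}: {request.url.path}")
    return response

Measuring in seconds with time.time() keeps the middleware free of unit conversions; the try/except only adds logging and never swallows the original exception.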
@@ -1,5 +1,6 @@
import json
import logging
from typing import List

from decouple import config
from fastapi import HTTPException, status
@@ -290,25 +291,32 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
    if len(data.series) == 0:
    if len(data.filters) > 0 or len(data.series) > 0:
        filters = [f.model_dump(by_alias=True) for f in data.filters] \
                  + [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
    else:
        return {"data": {}}

    search_data = schemas.SessionsSearchPayloadSchema(
        startTimestamp=data.startTimestamp,
        endTimestamp=data.endTimestamp,
        limit=data.limit,
        page=data.page,
        filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
        filters=filters
    )

    for s in data.start_point:
        if data.start_type == "end":
            search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                         operator=s.operator,
                                                                         value=s.value))
        else:
            search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
                                                                            operator=s.operator,
                                                                            value=s.value))
    if len(search_data.events) == 0:
        return {"data": {}}

    # for s in data.start_point:
    #     if data.start_type == "end":
    #         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
    #                                                                      operator=s.operator,
    #                                                                      value=s.value))
    #     else:
    #         search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
    #                                                                         operator=s.operator,
    #                                                                         value=s.value))
    for s in data.excludes:
        search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                     operator=schemas.SearchEventOperator._not_on,
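For context, the reworked issue lookup above builds one flat filter list from the card-level filters plus the first series' filters, places the start-point event at the beginning or end of that list depending on start_type, and appends excluded steps as negated event filters. A simplified sketch of that merge logic using plain dictionaries (the field names mirror the schemas above, but the helper itself and the "notOn" operator string are illustrative assumptions):

from typing import Dict, List

def build_issue_filters(card_filters: List[Dict], series_filters: List[Dict],
                        start_points: List[Dict], start_type: str,
                        excludes: List[Dict]) -> List[Dict]:
    # Merge card-level filters with the first series' filters into one list.
    filters = list(card_filters) + list(series_filters)
    for s in start_points:
        event = {"type": s["type"], "operator": s["operator"], "value": s["value"], "isEvent": True}
        if start_type == "end":
            filters.append(event)     # the path ends at the start point
        else:
            filters.insert(0, event)  # the path begins at the start point
    for e in excludes:
        # Excluded steps become negated event filters ("notOn" is a placeholder operator).
        filters.append({"type": e["type"], "operator": "notOn", "value": e["value"], "isEvent": True})
    return filters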
@@ -728,12 +728,10 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
        if len(values.get("events", [])) > 0:
            for v in values["events"]:
                v["isEvent"] = True
            for v in values.get("filters", []):

            for v in values.get("filters", []):
                if v.get("isEvent") is None:
                    v["isEvent"] = False
        else:
            for v in values.get("filters", []):
                if v.get("isEvent") is None:
                    v["isEvent"] = False
        return values

    @model_validator(mode="before")
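The validator above now only defaults isEvent on filter entries when the client did not send it, both when explicit events are present and when they are not. A minimal sketch of that kind of mode="before" normalisation in Pydantic v2 (the schema and the event-type set are simplified stand-ins for the real EventType/PerformanceEventType lookup):

from typing import List
from pydantic import BaseModel, Field, model_validator  # Pydantic v2

EVENT_TYPES = {"click", "location", "input"}  # illustrative subset

class SearchPayload(BaseModel):
    events: List[dict] = Field(default=[])
    filters: List[dict] = Field(default=[])

    @model_validator(mode="before")
    def enforce_is_event(cls, values):
        # Explicit events are always flagged as events.
        for v in values.get("events", []):
            v["isEvent"] = True
        # Filters only get a flag when the client did not send one.
        for v in values.get("filters", []):
            if v.get("isEvent") is None:
                v["isEvent"] = v.get("type") in EVENT_TYPES
        return values

payload = SearchPayload(filters=[{"type": "click", "value": ["#buy"]}])
assert payload.filters[0]["isEvent"] is True

Running the check in a before-mode validator means the raw request payload is normalised before any field typing or downstream query code sees it.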
@@ -746,13 +744,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
            f["value"] = vals
        return values

    @model_validator(mode="before")
    def __force_is_event(cls, values):
        for v in values.get("filters", []):
            if v.get("isEvent") is None:
                v["isEvent"] = EventType.has_value(v["type"]) or PerformanceEventType.has_value(v["type"])
        return values

    @model_validator(mode="after")
    def split_filters_events(cls, values):
        n_filters = []
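Both the __force_is_event validator shown here and the one added to CardSessionsSchema below rely on a has_value lookup on the event-type enums. A minimal sketch of such an enum helper (the member values are hypothetical, only the classmethod pattern is the point):

from enum import Enum

class EventTypeExample(str, Enum):
    click = "click"
    location = "location"
    custom = "custom"

    @classmethod
    def has_value(cls, value) -> bool:
        # True when the raw string matches one of the enum's values.
        return value in {member.value for member in cls}

assert EventTypeExample.has_value("click") is True
assert EventTypeExample.has_value("swipe") is False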
@@ -983,15 +974,36 @@ class MetricOfPathAnalysis(str, Enum):
    session_count = MetricOfTimeseries.session_count.value


class CardSessionsSchema(SessionsSearchPayloadSchema):
# class CardSessionsSchema(SessionsSearchPayloadSchema):
class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
    startTimestamp: int = Field(default=TimeUTC.now(-7))
    endTimestamp: int = Field(default=TimeUTC.now())
    density: int = Field(default=7, ge=1, le=200)
    series: List[CardSeriesSchema] = Field(default=[])

    # events: List[SessionSearchEventSchema2] = Field(default=[], doc_hidden=True)
    filters: List[GroupedFilterType] = Field(default=[])

    # Used mainly for PathAnalysis, and could be used by other cards
    hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")

    @model_validator(mode="before")
    def __force_is_event(cls, values):
        for v in values.get("filters"):
            if v.get("isEvent") is None:
                v["isEvent"] = ProductAnalyticsSelectedEventType.has_value(v["type"])
        return values

    @model_validator(mode="before")
    def remove_wrong_filter_values(cls, values):
        for f in values.get("filters", []):
            vals = []
            for v in f.get("value", []):
                if v is not None:
                    vals.append(v)
            f["value"] = vals
        return values

    @model_validator(mode="before")
    def __enforce_default(cls, values):
        if values.get("startTimestamp") is None:
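CardSessionsSchema now sits directly on the _TimedSchema/_PaginatedSchema bases and layers several mode="before" validators on top. The hunk is cut off inside __enforce_default, so the following is a generic, self-contained sketch of the same pattern rather than the actual body: defaulting a missing time range and dropping null filter values before field validation runs (class and field names are illustrative, and the 7-day window is an assumption based on the startTimestamp default above):

import time
from typing import List, Optional

from pydantic import BaseModel, Field, model_validator  # Pydantic v2

def now_ms(delta_days: int = 0) -> int:
    # Millisecond timestamps offset by a number of days (stand-in for TimeUTC.now).
    return int((time.time() + delta_days * 86400) * 1000)

class CardPayloadExample(BaseModel):
    startTimestamp: Optional[int] = None
    endTimestamp: Optional[int] = None
    filters: List[dict] = Field(default=[])

    @model_validator(mode="before")
    def enforce_default_range(cls, values):
        # Fill a missing time range with the last 7 days at validation time,
        # so the default is computed per request rather than once at import time.
        if values.get("startTimestamp") is None:
            values["startTimestamp"] = now_ms(-7)
        if values.get("endTimestamp") is None:
            values["endTimestamp"] = now_ms()
        return values

    @model_validator(mode="before")
    def remove_null_filter_values(cls, values):
        # Drop None entries from each filter's value list.
        for f in values.get("filters", []):
            f["value"] = [v for v in f.get("value", []) if v is not None]
        return values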
@@ -1,5 +1,6 @@
import logging
import queue
import time
from contextlib import asynccontextmanager

from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -71,13 +72,18 @@ async def or_middleware(request: Request, call_next):
        return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN)

    if helper.TRACK_TIME:
        import time
        now = int(time.time() * 1000)
    response: StreamingResponse = await call_next(request)
        now = time.time()
    try:
        response: StreamingResponse = await call_next(request)
    except:
        logging.error(f"{request.method}: {request.url.path} FAILED!")
        raise
    if response.status_code // 100 != 2:
        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
    if helper.TRACK_TIME:
        now = int(time.time() * 1000) - now
        if now > 1500:
            logging.warning(f"Execution time: {now} ms for {request.method}:{request.url.path}")
        now = time.time() - now
        if now > 2:
            logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
    return response
@@ -310,25 +310,32 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
    if len(data.series) == 0:
    if len(data.filters) > 0 or len(data.series) > 0:
        filters = [f.model_dump(by_alias=True) for f in data.filters] \
                  + [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
    else:
        return {"data": {}}

    search_data = schemas.SessionsSearchPayloadSchema(
        startTimestamp=data.startTimestamp,
        endTimestamp=data.endTimestamp,
        limit=data.limit,
        page=data.page,
        filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
        filters=filters
    )

    for s in data.start_point:
        if data.start_type == "end":
            search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                         operator=s.operator,
                                                                         value=s.value))
        else:
            search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
                                                                            operator=s.operator,
                                                                            value=s.value))
    if len(search_data.events) == 0:
        return {"data": {}}

    # for s in data.start_point:
    #     if data.start_type == "end":
    #         search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
    #                                                                      operator=s.operator,
    #                                                                      value=s.value))
    #     else:
    #         search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
    #                                                                         operator=s.operator,
    #                                                                         value=s.value))
    for s in data.excludes:
        search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                     operator=schemas.SearchEventOperator._not_on,