Merge remote-tracking branch 'origin/api-bookmarked-pagination' into api-v1.5.4
commit 76b00444a6

5 changed files with 42 additions and 41 deletions
@@ -2,7 +2,7 @@ import json
 import schemas
 from chalicelib.core import sourcemaps, sessions
-from chalicelib.utils import pg_client, helper, dev
+from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.metrics_helper import __get_step_size
@@ -399,7 +399,10 @@ def get_details_chart(project_id, error_id, user_id, **data):
 def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                             endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                             project_key="project_id"):
-    ch_sub_query = [f"{project_key} =%(project_id)s"]
+    if project_key is None:
+        ch_sub_query = []
+    else:
+        ch_sub_query = [f"{project_key} =%(project_id)s"]
     if time_constraint:
         ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
                          f"timestamp < %({endTime_arg_name})s"]
@@ -421,15 +424,12 @@ def __get_sort_key(key):
     }.get(key, 'max_datetime')


-@dev.timed
-def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
     empty_response = {"data": {
         'total': 0,
         'errors': []
     }}
-    status = status.upper()
-    if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
-        return {"errors": ["invalid error status"]}
+
     platform = None
     for f in data.filters:
         if f.type == schemas.FilterType.platform and len(f.value) > 0:
@@ -437,8 +437,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
     pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
     pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                      "pe.project_id=%(project_id)s"]
-    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True)
-    pg_sub_query_chart.append("source ='js_exception'")
+    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
+    # pg_sub_query_chart.append("source ='js_exception'")
     pg_sub_query_chart.append("errors.error_id =details.error_id")
     statuses = []
     error_ids = None
@@ -446,13 +446,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
         data.startDate = TimeUTC.now(-30)
     if data.endDate is None:
         data.endDate = TimeUTC.now(1)
-    if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
+    if len(data.events) > 0 or len(data.filters) > 0:
         print("-- searching for sessions before errors")
+        # if favorite_only=True search for sessions associated with favorite_error
         statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
-                                       error_status=status)
+                                       error_status=data.status)
         if len(statuses) == 0:
             return empty_response
-        error_ids = [e["error_id"] for e in statuses]
+        error_ids = [e["errorId"] for e in statuses]
     with pg_client.PostgresClient() as cur:
         if data.startDate is None:
             data.startDate = TimeUTC.now(-7)
@@ -473,6 +474,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
               "project_id": project_id,
               "userId": user_id,
               "step_size": step_size}
+    if data.status != schemas.ErrorStatus.all:
+        pg_sub_query.append("status = %(error_status)s")
+        params["error_status"] = data.status
     if data.limit is not None and data.page is not None:
         params["errors_offset"] = (data.page - 1) * data.limit
         params["errors_limit"] = data.limit
@@ -483,7 +487,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
     if error_ids is not None:
         params["error_ids"] = tuple(error_ids)
         pg_sub_query.append("error_id IN %(error_ids)s")
-    if favorite_only:
+    if data.bookmarked:
         pg_sub_query.append("ufe.user_id = %(userId)s")
         extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
     main_pg_query = f"""\
@@ -522,7 +526,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
                            COUNT(session_id) AS count
                     FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                          LEFT JOIN LATERAL (SELECT DISTINCT session_id
-                                            FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
+                                            FROM events.errors
                                             WHERE {" AND ".join(pg_sub_query_chart)}
                          ) AS sessions ON (TRUE)
                     GROUP BY timestamp
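Note: this is the usual gap-free timeseries pattern: `generate_series` fabricates one row per time bucket, and the LEFT JOIN LATERAL lets buckets with no erroring sessions still appear with a count of 0 instead of vanishing. With the join to `public.errors AS m_errors` dropped, the lateral subquery reads only `events.errors`, so `pg_sub_query_chart` must carry the remaining conditions, including the `errors.error_id =details.error_id` correlation appended above. A trimmed sketch of the shape (the per-bucket timestamp bounds are assumed to come from `chart=True`; they are not shown in this diff):

    chart_sql = """
        SELECT generated_timestamp AS timestamp,
               COUNT(session_id)   AS count
        FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
             LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                FROM events.errors
                                WHERE errors.error_id = details.error_id
                                -- plus generated_timestamp bounds, assumed added via chart=True
                               ) AS sessions ON (TRUE)
        GROUP BY timestamp
    """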

@@ -235,24 +235,16 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
         # print("--------------------")

         cur.execute(main_query)
+        if errors_only:
+            return helper.list_to_camel_case(cur.fetchall())
+
         sessions = cur.fetchone()
         if count_only:
             return helper.dict_to_camel_case(sessions)

         total = sessions["count"]
         sessions = sessions["sessions"]
-        # sessions = []
-        # total = cur.rowcount
-        # row = cur.fetchone()
-        # limit = 200
-        # while row is not None and len(sessions) < limit:
-        #     if row.get("favorite"):
-        #         limit += 1
-        #     sessions.append(row)
-        #     row = cur.fetchone()
-
-        if errors_only:
-            return sessions
     if data.group_by_user:
         for i, s in enumerate(sessions):
             sessions[i] = {**s.pop("last_session")[0], **s}
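Note: with the early return, `errors_only` callers now get camelCased rows straight from the cursor, which is why `errors.search` switched its lookups from `e["error_id"]` to `e["errorId"]`. A minimal stand-in for what `helper.list_to_camel_case` is assumed to do:

    def _to_camel(snake: str) -> str:
        first, *rest = snake.split("_")
        return first + "".join(p.capitalize() for p in rest)  # error_id -> errorId

    def list_to_camel_case(rows):
        return [{_to_camel(k): v for k, v in row.items()} for row in rows]

    assert list_to_camel_case([{"error_id": "e1", "status": "unresolved"}]) == \
           [{"errorId": "e1", "status": "unresolved"}]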
@@ -969,9 +961,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
     if errors_only:
         extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
         extra_constraints.append("ser.source = 'js_exception'")
+        extra_constraints.append("ser.project_id = %(project_id)s")
         if error_status != "ALL":
             extra_constraints.append("ser.status = %(error_status)s")
-            full_args["status"] = error_status.lower()
+            full_args["error_status"] = error_status.lower()
     if favorite_only:
         extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
         extra_constraints.append("ufe.user_id = %(userId)s")

@@ -884,12 +884,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),


 @app.post('/{projectId}/errors/search', tags=['errors'])
-def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False,
-                  data: schemas.SearchErrorsSchema = Body(...),
+def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
                   context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(favorite, str):
-        favorite = True if len(favorite) == 0 else False
-    return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite)
+    return errors.search(data, projectId, user_id=context.user_id)


 @app.get('/{projectId}/errors/stats', tags=['errors'])
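Note: the route no longer takes `status`/`favorite` query parameters; both now travel in the request body. `status` is declared on `SearchErrorsSchema` in the schemas hunk below, while `bookmarked` is read as `data.bookmarked` in `errors.search` and is assumed to be declared on a parent schema, since it does not appear in this diff. A hypothetical client call against the new shape (host, token, and payload values are illustrative):

    import requests

    resp = requests.post(
        "https://api.example.com/1/errors/search",  # 1 = projectId
        headers={"Authorization": "Bearer <token>"},
        json={
            "events": [],
            "filters": [],
            "status": "unresolved",  # was ?status=...
            "bookmarked": True,      # was ?favorite=...
            "page": 2,
            "limit": 50,
        },
    )
    print(resp.json()["data"]["total"])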

@@ -690,8 +690,16 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
     rangeValue: Optional[str] = Field(None)


+class ErrorStatus(str, Enum):
+    all = 'all'
+    unresolved = 'unresolved'
+    resolved = 'resolved'
+    ignored = 'ignored'
+
+
 class SearchErrorsSchema(SessionsSearchPayloadSchema):
     density: Optional[int] = Field(7)
+    status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)


 class MetricPayloadSchema(BaseModel):
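Note: because `ErrorStatus` subclasses `str`, pydantic coerces incoming strings and rejects anything outside the four allowed values, replacing the hand-rolled `status.lower() not in [...]` check that the old `errors.search` performed. A standalone sketch of the same pattern (`Payload` is a stand-in for `SearchErrorsSchema`):

    from enum import Enum
    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError

    class ErrorStatus(str, Enum):
        all = 'all'
        unresolved = 'unresolved'
        resolved = 'resolved'
        ignored = 'ignored'

    class Payload(BaseModel):
        status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)

    print(Payload().status)                   # ErrorStatus.all (the default)
    print(Payload(status="resolved").status)  # coerced to ErrorStatus.resolved
    try:
        Payload(status="fixed")
    except ValidationError:
        print("rejected: not a valid ErrorStatus")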

@@ -444,14 +444,11 @@ def __get_sort_key(key):
     }.get(key, 'max_datetime')


-def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
     empty_response = {"data": {
         'total': 0,
         'errors': []
     }}
-    status = status.upper()
-    if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
-        return {"errors": ["invalid error status"]}
     platform = None
     for f in data.filters:
         if f.type == schemas.FilterType.platform and len(f.value) > 0:
@@ -460,17 +457,19 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
     ch_sub_query.append("source ='js_exception'")
     statuses = []
     error_ids = None
-    if data.startDate is None:
+    # Clickhouse keeps data for the past month only, so no need to search beyond that
+    if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
         data.startDate = TimeUTC.now(-30)
     if data.endDate is None:
         data.endDate = TimeUTC.now(1)
-    if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
+    if len(data.events) > 0 or len(data.filters) > 0 or data.status != "ALL":
         print("-- searching for sessions before errors")
+        # if favorite_only=True search for sessions associated with favorite_error
         statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
-                                       error_status=status, favorite_only=favorite_only)
+                                       error_status=data.status)
         if len(statuses) == 0:
             return empty_response
-        error_ids = [e["error_id"] for e in statuses]
+        error_ids = [e["errorId"] for e in statuses]
     with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
         if data.startDate is None:
             data.startDate = TimeUTC.now(-7)
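Note: the ClickHouse variant now clamps `startDate` instead of only defaulting it when missing, since data older than the retention window cannot match anyway. A rough equivalent in plain epoch milliseconds (`TimeUTC.now(delta_days)` is assumed to return the current time shifted by that many days, in ms):

    import time

    DAY_MS = 24 * 60 * 60 * 1000

    def now_ms(delta_days: int = 0) -> int:
        # stand-in for TimeUTC.now(delta_days)
        return int(time.time() * 1000) + delta_days * DAY_MS

    start_date = None  # or an epoch-ms value from the client
    if start_date is None or start_date < now_ms(-31):
        start_date = now_ms(-30)  # oldest bucket ClickHouse still holds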
@@ -495,7 +494,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
         else:
             params["errors_offset"] = 0
             params["errors_limit"] = 200
-        if favorite_only:
+        if data.bookmarked:
             cur.execute(cur.mogrify(f"""SELECT error_id
                                         FROM public.user_favorite_errors
                                         WHERE user_id = %(userId)s
@@ -573,7 +572,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
         cur.execute(query=query)
         statuses = cur.fetchall()
         statuses = {
-            s["error_id"]: s for s in statuses
+            s["errorId"]: s for s in statuses
         }

         for r in rows:
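Note: indexing the Postgres statuses by `errorId` sets up the merge into the ClickHouse rows as one dict lookup per row rather than a scan. A sketch of the merge this prepares (field names other than `errorId` are illustrative):

    statuses = {s["errorId"]: s for s in [
        {"errorId": "e1", "status": "resolved"},
    ]}
    rows = [{"errorId": "e1"}, {"errorId": "e2"}]
    for r in rows:
        # enrich each ClickHouse row with its Postgres status, if present
        r["status"] = statuses.get(r["errorId"], {}).get("status")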