feat(chalice): fixed sessions search

Taha Yassine Kraiem 2022-08-30 12:16:15 +01:00
parent 460aa9dbb7
commit db0c33a6ec


@@ -226,119 +226,9 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
    return op in [schemas.SearchEventOperator._is_undefined]


# This function executes the query and returns the result
def search_sessions_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
                       error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
    full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
                                               favorite_only=data.bookmarked, issue=issue, project_id=project_id,
                                               user_id=user_id)
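    # Row-number window used below for pagination: rows with rn > sessions_limit_s and rn <= sessions_limit_e are kept.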
    if data.limit is not None and data.page is not None:
        full_args["sessions_limit_s"] = (data.page - 1) * data.limit
        full_args["sessions_limit_e"] = data.page * data.limit
    else:
        full_args["sessions_limit_s"] = 1
        full_args["sessions_limit_e"] = 200

    meta_keys = []
    with pg_client.PostgresClient() as cur:
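        # Three query shapes: a plain count, sessions grouped by user, or a flat paginated session list.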
        if count_only:
            main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions,
                                                COUNT(DISTINCT s.user_uuid) AS count_users
                                         {query_part};""", full_args)
        elif data.group_by_user:
            g_sort = "count(full_sessions)"
            if data.order is None:
                data.order = schemas.SortOrderType.desc
            else:
                data.order = data.order.upper()
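            # Sorting by a session column aggregates it per user (MIN for DESC, MAX for ASC);
            # otherwise user groups are ordered by their session count.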
            if data.sort is not None and data.sort != 'sessionsCount':
                sort = helper.key_to_snake_case(data.sort)
                g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
            else:
                sort = 'start_ts'
            meta_keys = metadata.get(project_id=project_id)
            main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
                                                COALESCE(JSONB_AGG(users_sessions)
                                                             FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
                                         FROM (SELECT user_id,
                                                      count(full_sessions) AS user_sessions_count,
                                                      jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
                                                      MIN(full_sessions.start_ts) AS first_session_ts,
                                                      ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn
                                               FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn
                                                     FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
                                                           {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
                                                           {query_part}
                                                          ) AS filtred_sessions
                                                    ) AS full_sessions
                                               GROUP BY user_id
                                              ) AS users_sessions;""",
                                     full_args)
        else:
            if data.order is None:
                data.order = schemas.SortOrderType.desc
            sort = 'session_id'
            if data.sort is not None and data.sort != "session_id":
                # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
                sort = helper.key_to_snake_case(data.sort)
            meta_keys = metadata.get(project_id=project_id)
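            # Each metadata key defined for the project adds a metadata_<index> column to the projection below.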
            main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
                                                COALESCE(JSONB_AGG(full_sessions)
                                                             FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
                                         FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn
                                               FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
                                                     {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
                                                     {query_part}
                                                     ORDER BY s.session_id desc) AS filtred_sessions
                                               ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
                                     full_args)
        print("--------------------")
        print(main_query)
        print("--------------------")
        try:
            cur.execute(main_query)
        except Exception as err:
            print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
            print(main_query.decode('UTF-8'))
            print("--------- PAYLOAD -----------")
            print(data.json())
            print("--------------------")
            raise err
        if errors_only:
            return helper.list_to_camel_case(cur.fetchall())
        sessions = cur.fetchone()
        if count_only:
            return helper.dict_to_camel_case(sessions)
        total = sessions["count"]
        sessions = sessions["sessions"]
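    # Fold the dynamic metadata_<index> columns of each row into a single 'metadata' dict.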
    if data.group_by_user:
        for i, s in enumerate(sessions):
            sessions[i] = {**s.pop("last_session")[0], **s}
            sessions[i].pop("rn")
            sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys
                                       if sessions[i][f'metadata_{k["index"]}'] is not None}
    else:
        for i, s in enumerate(sessions):
            sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys
                                       if sessions[i][f'metadata_{k["index"]}'] is not None}
    # if not data.group_by_user and data.sort is not None and data.sort != "session_id":
    #     sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)],
    #                       reverse=data.order.upper() == "DESC")
    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions)
    }


# This function executes the query and returns the result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
                    error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
    print("------ search2_ch")
    full_args, query_part = search_query_parts_ch(data=data, error_status=error_status, errors_only=errors_only,
                                                  favorite_only=data.bookmarked, issue=issue, project_id=project_id,
                                                  user_id=user_id)
@@ -417,13 +307,13 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
                                 -- ORDER BY {sort} {data.order}
                                 ) AS raw
                            WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s;""", full_args)
        print("--------------------")
        print(main_query)
        print("--------------------")
        # print("--------------------")
        # print(main_query)
        # print("--------------------")
        try:
            sessions = cur.execute(main_query)
        except Exception as err:
            print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
            print("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
            print(main_query)
            print("--------- PAYLOAD -----------")
            print(data.json())
@@ -1651,7 +1541,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        # TODO: isNot for ERROR
        elif event_type == events.event_type.ERROR.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
            events_extra_join = "SELECT * FROM final.errors AS main1 WHERE main1.project_id=%(project_id)s"
            events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            event.source = tuple(event.source)
@@ -2001,7 +1891,6 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
                                     GROUP BY session_id
                                     {having}"""
        else:
            print(">>>>> OR EVENTS")
            type_conditions = []
            sequence_conditions = []
            has_values = False