refactor(chalice): refactored sessions-search for CH-PG
fix(chalice): fixed usability-tests' sessions
parent ad3f72a10b
commit 447a2490ef
5 changed files with 22 additions and 75 deletions
@@ -56,14 +56,16 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
'viewed', toString(viewed_sessions.session_id > 0)
"""


def __parse_metadata(metadata_map):
    return json.loads(metadata_map.replace("'", '"').replace("NULL", 'null'))


# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
                    user_id, errors_only=False,
                    error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
                    platform="web"):
                    user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
                    count_only=False, issue=None, ids_only=False):
    platform = project.platform
    if data.bookmarked:
        data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
        if data.startTimestamp is None:
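The hunk above stops accepting platform="web" from callers; search_sessions now derives the platform from the project context it already receives. A minimal sketch of that convention, with a stand-in dataclass for schemas.ProjectContext and a placeholder return value:

# Hedged sketch, not the actual chalice module: ProjectContext stands in for
# schemas.ProjectContext and the return value is only illustrative.
from dataclasses import dataclass


@dataclass
class ProjectContext:
    project_id: int
    platform: str = "web"


def search_sessions(data: dict, project: ProjectContext, user_id: int,
                    errors_only: bool = False, count_only: bool = False,
                    issue=None, ids_only: bool = False) -> dict:
    # platform is no longer a keyword argument; it always mirrors the project record
    platform = project.platform
    return {"platform": platform, "userId": user_id, "filters": data}


print(search_sessions({"events": []}, ProjectContext(project_id=1, platform="ios"), user_id=42))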
@@ -219,69 +221,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.


def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
    if project_id is None:
        all_projects = projects.get_projects(tenant_id=tenant_id)
    else:
        all_projects = [
            projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,
                                 include_gdpr=False)]

    all_projects = {int(p["projectId"]): p["name"] for p in all_projects}
    project_ids = list(all_projects.keys())

    available_keys = metadata.get_keys_by_projects(project_ids)
    for i in available_keys:
        available_keys[i]["user_id"] = schemas.FilterType.USER_ID
        available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID
    results = {}
    for i in project_ids:
        if m_key not in available_keys[i].values():
            available_keys.pop(i)
            results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
    project_ids = list(available_keys.keys())
    if len(project_ids) > 0:
        with pg_client.PostgresClient() as cur:
            sub_queries = []
            for i in project_ids:
                col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
                sub_queries.append(cur.mogrify(
                    f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
                    {"id": i, "value": m_value}).decode('UTF-8'))
            query = f"""SELECT {", ".join(sub_queries)};"""
            cur.execute(query=query)

            rows = cur.fetchone()

            sub_queries = []
            for i in rows.keys():
                results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]}
                if rows[i] > 0:
                    col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)]
                    sub_queries.append(
                        cur.mogrify(
                            f"""(
                            SELECT *
                            FROM (
                                SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS_CH}
                                FROM public.sessions AS s LEFT JOIN (SELECT session_id
                                                                     FROM public.user_favorite_sessions
                                                                     WHERE user_favorite_sessions.user_id = %(userId)s
                                                                     ) AS favorite_sessions USING (session_id)
                                WHERE s.project_id = %(id)s AND isNotNull(s.duration) AND s.{col_name} = %(value)s
                            ) AS full_sessions
                            ORDER BY favorite DESC, issue_score DESC
                            LIMIT 10
                            )""",
                            {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
            if len(sub_queries) > 0:
                cur.execute("\nUNION\n".join(sub_queries))
                rows = cur.fetchall()
                for i in rows:
                    results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
    return results
    return sessions_search_legacy.search_by_metadata(tenant_id, user_id, m_key, m_value, project_id)


# TODO: rewrite this function to use ClickHouse
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
                           ascending: bool = False) -> dict:
    return sessions_search_legacy.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)
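This hunk removes the PostgreSQL-style implementation from the ClickHouse module and forwards both metadata search and id-based search to the legacy PG code. A rough sketch of the delegation, with sessions_search_legacy replaced by a local stub (the real module runs the queries shown in the later hunks):

# Hedged sketch: _legacy is a stand-in for sessions_search_legacy.
class _legacy:
    @staticmethod
    def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
        return {"key": m_key, "value": m_value, "projectId": project_id, "src": 1}

    @staticmethod
    def search_sessions_by_ids(project_id, session_ids, sort_by="session_id", ascending=False):
        return {"total": len(session_ids or []), "sessions": [], "src": 1}


def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
    # no ClickHouse-side logic yet; keep the public signature and delegate
    return _legacy.search_by_metadata(tenant_id, user_id, m_key, m_value, project_id)


# TODO: rewrite this function to use ClickHouse (as noted in the commit)
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = "session_id",
                           ascending: bool = False) -> dict:
    return _legacy.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)


print(search_by_metadata(1, 7, "plan", "pro", project_id=5))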
@@ -40,7 +40,8 @@ COALESCE((SELECT TRUE
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
                    user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
                    count_only=False, issue=None, ids_only=False, platform="web"):
                    count_only=False, issue=None, ids_only=False):
    platform = project.platform
    if data.bookmarked:
        data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
        if data.startTimestamp is None:
@@ -239,6 +240,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
                cur.execute("\nUNION\n".join(sub_queries))
                rows = cur.fetchall()
                for i in rows:
                    i["src"] = 1
                    results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
    return results
@@ -246,7 +248,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
                           ascending: bool = False) -> dict:
    if session_ids is None or len(session_ids) == 0:
        return {"total": 0, "sessions": []}
        return {"total": 0, "sessions": [], "src": 1}
    with pg_client.PostgresClient() as cur:
        meta_keys = metadata.get(project_id=project_id)
        params = {"project_id": project_id, "session_ids": tuple(session_ids)}
@@ -265,4 +267,4 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
            s["metadata"] = {}
            for m in meta_keys:
                s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "src": 1}
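The three hunks above tag everything returned by the PG path with src = 1 (on each metadata-search row and on both returns of search_sessions_by_ids), presumably so callers can tell which backend produced the data. A toy sketch of the row tagging, with helper.dict_to_camel_case approximated by a local function:

# Hedged sketch; dict_to_camel_case below is a simplified stand-in for helper.dict_to_camel_case.
def dict_to_camel_case(row: dict) -> dict:
    def camel(key: str) -> str:
        head, *rest = key.split("_")
        return head + "".join(part.title() for part in rest)
    return {camel(k): v for k, v in row.items()}


rows = [{"project_id": 3, "session_id": 900, "user_id": "u-1"}]
results = {"3": {"total": len(rows), "sessions": []}}
for row in rows:
    row["src"] = 1  # mark the row as coming from the PostgreSQL search path
    results[str(row["project_id"])]["sessions"].append(dict_to_camel_case(row))
print(results)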
@@ -259,8 +259,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
        Depends(contextual_validators.validate_contextual_payload),
                    context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id,
                                           platform=context.project.platform)
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id)
    return {'data': data}
@@ -268,8 +267,7 @@ def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema =
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
        Depends(contextual_validators.validate_contextual_payload),
                       context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True,
                                           platform=context.project.platform)
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True)
    return {'data': data}
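Both router hunks drop the explicit platform=context.project.platform argument, since search_sessions now reads the platform from the project it already receives. A stripped-down sketch of the new call sites, with sessions_search and CurrentContext approximated locally:

# Hedged sketch: search_sessions and the context object below are stand-ins
# for sessions_search.search_sessions and schemas.CurrentContext.
from types import SimpleNamespace


def search_sessions(data, project, user_id, ids_only=False):
    # platform comes from the project record, not from the router
    return {"projectId": project.project_id, "platform": project.platform, "idsOnly": ids_only}


context = SimpleNamespace(user_id=11,
                          project=SimpleNamespace(project_id=5, platform="web"))

# before: search_sessions(..., platform=context.project.platform)
# after:  the platform keyword disappears from the call site entirely
print(search_sessions(data={"filters": []}, project=context.project, user_id=context.user_id))
print(search_sessions(data={"filters": []}, project=context.project, user_id=context.user_id, ids_only=True))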
@@ -79,8 +79,7 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate


@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                       live: bool = False, user_id: str = None):
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.UsabilityTestQuery, Query()]):
    """
    Get sessions related to a specific UT test.
@@ -88,10 +87,11 @@ async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[sch
    - **test_id**: The unique identifier of the UT test.
    """

    if live:
    if filter_query.live:
        return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
    else:
        return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit, user_id, live)
        return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit,
                                         filter_query.user_id, filter_query.live)


@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
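The usability-tests handler now takes live and user_id from filter_query instead of loose query parameters. A hedged sketch of the resulting dispatch, with the service functions replaced by stubs that only echo their arguments:

# Hedged sketch: ut_tests_sessions_live / ut_tests_sessions stand in for the
# real service functions; only the dispatch shape mirrors the hunk above.
from types import SimpleNamespace


def ut_tests_sessions_live(project_id, test_id, page, limit):
    return {"live": True, "page": page, "limit": limit}


def ut_tests_sessions(project_id, test_id, page, limit, user_id, live):
    return {"live": live, "userId": user_id, "page": page, "limit": limit}


def get_sessions(project_id, test_id, filter_query):
    if filter_query.live:
        return ut_tests_sessions_live(project_id, test_id, filter_query.page, filter_query.limit)
    return ut_tests_sessions(project_id, test_id, filter_query.page, filter_query.limit,
                             filter_query.user_id, filter_query.live)


print(get_sessions(1, 2, SimpleNamespace(live=False, page=1, limit=10, user_id=None)))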
@@ -1608,3 +1608,8 @@ class SessionModel(BaseModel):
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)


class UsabilityTestQuery(_PaginatedSchema):
    live: bool = Field(default=False)
    user_id: Optional[str] = Field(default=None)
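The new UsabilityTestQuery bundles pagination with the live and user_id flags so FastAPI can validate them as a single query object via Annotated[..., Query()] (query-parameter models need a reasonably recent FastAPI). A minimal sketch; the page/limit fields and their bounds on _PaginatedSchema are assumptions:

# Hedged sketch of the schema added above; _PaginatedSchema's exact fields are assumed.
from typing import Annotated, Optional

from fastapi import FastAPI, Query
from pydantic import BaseModel, Field


class _PaginatedSchema(BaseModel):
    page: int = Field(default=1, ge=1)
    limit: int = Field(default=10, ge=1)


class UsabilityTestQuery(_PaginatedSchema):
    live: bool = Field(default=False)
    user_id: Optional[str] = Field(default=None)


app = FastAPI()


@app.get("/{projectId}/usability-tests/{test_id}/sessions")
async def get_sessions(projectId: int, test_id: int,
                       filter_query: Annotated[UsabilityTestQuery, Query()]):
    # ?live=true&user_id=abc&page=2 arrives as one validated object
    return {"testId": test_id, "query": filter_query.model_dump()}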