From 68dd0a7f146eb85f665dc8b8b7a4fb9755ae13a3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 13:25:14 +0100 Subject: [PATCH] feat(chalice): split replay --- api/chalicelib/core/sessions.py | 88 +-------- api/chalicelib/core/sessions_favorite.py | 19 +- api/chalicelib/core/sessions_replay.py | 23 +-- api/routers/core_dynamic.py | 4 +- ee/api/chalicelib/core/sessions.py | 91 +--------- ee/api/chalicelib/core/sessions_favorite.py | 27 +-- ee/api/chalicelib/core/sessions_replay.py | 189 ++++++++++++++++++++ ee/api/routers/core_dynamic.py | 50 +++++- 8 files changed, 273 insertions(+), 218 deletions(-) create mode 100644 ee/api/chalicelib/core/sessions_replay.py diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index c95bed903..8f98aac83 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,10 +1,7 @@ from typing import List import schemas -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -33,89 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - 
session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 00228b31f..d3bf5e9b4 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -1,5 +1,4 @@ import schemas -from chalicelib.core import sessions from chalicelib.utils import pg_client @@ -8,11 +7,14 @@ def add_favorite_session(context: schemas.CurrentContext, project_id, session_id cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(session_id)s);""", + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): @@ -21,11 +23,14 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(session_id)s;""", + AND session_id = %(session_id)s + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return 
{"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas.CurrentContext, project_id, session_id): diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py index 84c6703eb..94e3cc504 100644 --- a/api/chalicelib/core/sessions_replay.py +++ b/api/chalicelib/core/sessions_replay.py @@ -1,12 +1,8 @@ -from typing import List - import schemas from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes from chalicelib.utils import errors_helper -from chalicelib.utils import pg_client, helper, metrics_helper -from chalicelib.utils import sql_helper as sh +from chalicelib.utils import pg_client, helper def __group_metadata(session, project_metadata): @@ -148,8 +144,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat def get_events(project_id, session_id): with pg_client.PostgresClient() as cur: query = cur.mogrify( - f"""SELECT s.*, - s.session_id::text AS session_id + f"""SELECT session_id, platform, start_ts, duration FROM public.sessions AS s WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", @@ -159,11 +154,11 @@ def get_events(project_id, session_id): # print(query) cur.execute(query=query) - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - - if data["platform"] == 'ios': + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) for e in data['events']: if e["type"].endswith("_IOS"): @@ -183,7 +178,7 @@ def get_events(project_id, session_id): data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, session_id=session_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) + start_ts=s_data["startTs"], duration=s_data["duration"]) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) return data diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index cdd57a327..c6f71e88f 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -271,8 +271,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 6d92c3954..8f98aac83 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -1,11 +1,7 @@ from typing import List import schemas -import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, 
issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -34,91 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, - include_fav_viewed=False, group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - 
context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index d8ae4e1f7..85e308756 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -10,13 +10,15 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(sessionId)s);""", - {"userId": context.user_id, "sessionId": session_id}) + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): @@ -25,12 +27,15 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(sessionId)s;""", - {"userId": context.user_id, "sessionId": session_id}) + AND session_id = %(session_id)s + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..798029aee --- /dev/null +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,189 @@ +import schemas +import schemas_ee +from chalicelib.core import 
events, metadata, events_ios, \ + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, + include_fav_viewed=False, group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, + encode(file_key,'hex') AS file_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = 
sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT session_id, platform, start_ts, duration + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = 
events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=s_data["startTs"], duration=s_data["duration"]) + + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 8c8aa55b6..209fdbd6d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas import schemas_ee from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ - errors_favorite, sessions_notes, click_maps + errors_favorite, sessions_notes, click_maps, sessions_replay from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -59,7 +59,8 @@ async def edit_account(data: schemas_ee.EditUserSchema = Body(...), @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): +async def add_slack_integration(data: schemas.AddCollaborationSchema, + context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -155,13 +156,15 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True, user_id=context.user_id)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) +# for backward compatibility +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, context=context) + data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -172,6 +175,39 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas } +@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"], + 
dependencies=[OR_scope(Permissions.session_replay)])
+async def get_session_replay(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
+                             context: schemas.CurrentContext = Depends(OR_context)):
+    if isinstance(sessionId, str):
+        return {"errors": ["session not found"]}
+    data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True,
+                                      include_fav_viewed=True, group_metadata=True, context=context)
+    if data is None:
+        return {"errors": ["session not found"]}
+    if data.get("inDB"):
+        background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id,
+                                  session_id=sessionId)
+    return {
+        'data': data
+    }
+
+
+@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"],
+         dependencies=[OR_scope(Permissions.session_replay)])
+async def get_session_events(projectId: int, sessionId: Union[int, str],
+                             context: schemas.CurrentContext = Depends(OR_context)):
+    if isinstance(sessionId, str):
+        return {"errors": ["session not found"]}
+    data = sessions_replay.get_events(project_id=projectId, session_id=sessionId)
+    if data is None:
+        return {"errors": ["session not found"]}
+
+    return {
+        'data': data
+    }
+
+
 @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"],
          dependencies=[OR_scope(Permissions.dev_tools)])
 async def get_error_trace(projectId: int, sessionId: int, errorId: str,
@@ -250,8 +286,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac
                            context: schemas_ee.CurrentContext = Depends(OR_context)):
     data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
     if data is None:
-        data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId,
-                                      full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
+        data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId,
+                                          full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
     if data is None:
         return {"errors": ["session not found"]}
     if data.get("inDB"):
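
For illustration, a minimal sketch of how a client might consume the two endpoints this patch introduces: GET /{projectId}/sessions/{sessionId}/replay returns the lightweight session record (replay file URLs, metadata, favorite/viewed flags, live status), while GET /{projectId}/sessions/{sessionId}/events returns the heavier payload (events, stackEvents, errors, userEvents, resources, issues). The base URL, bearer-token header, and IDs below are hypothetical placeholders, not part of this patch; the actual auth scheme depends on the deployment.

    import requests

    BASE_URL = "https://openreplay.example.com/api"   # hypothetical deployment URL
    HEADERS = {"Authorization": "Bearer REPLACE_ME"}  # hypothetical auth header
    PROJECT_ID, SESSION_ID = 1, 123456789             # hypothetical identifiers

    # Light call first: enough to boot the player (mob/devtools URLs, metadata, live flag).
    replay = requests.get(f"{BASE_URL}/{PROJECT_ID}/sessions/{SESSION_ID}/replay",
                          headers=HEADERS).json()

    # Heavier call afterwards: events, errors, resources and issues for the timeline.
    events = requests.get(f"{BASE_URL}/{PROJECT_ID}/sessions/{SESSION_ID}/events",
                          headers=HEADERS).json()

    # Both endpoints answer with {"data": ...} on success or {"errors": [...]} otherwise.
    if "errors" in replay or "errors" in events:
        print(replay.get("errors") or events.get("errors"))
    else:
        print("live:", replay["data"].get("live"),
              "| events:", len(events["data"].get("events", [])))

Splitting the old single sessions/{sessionId} response into these two calls keeps the initial replay request small and defers the event/resource payload, which is presumably the motivation behind "split replay".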