From d325c03bcbda0478389f6449d2a3d42554d1a6d2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 21 Sep 2022 13:57:30 +0100 Subject: [PATCH 1/7] feat(chalice): check sessions existance --- api/chalicelib/core/sessions.py | 12 ++++++++++++ api/routers/core.py | 2 ++ ee/api/chalicelib/core/sessions_exp.py | 11 +++++++++++ 3 files changed, 25 insertions(+) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 6c846eb42..054e7fcb2 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1237,3 +1237,15 @@ def count_all(): with pg_client.PostgresClient(unlimited_query=True) as cur: row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") return row.get("count", 0) + + +def session_exists(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify("""SELECT 1 + FROM public.sessions + WHERE session_id=%(session_id)s + AND project_id=%(project_id)s""", + {"project_id": project_id, "session_id": session_id}) + cur.execute(query) + row = cur.fetchone(query) + return row is not None diff --git a/api/routers/core.py b/api/routers/core.py index b3252e34a..92161f960 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -913,6 +913,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun @app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"]) def get_live_session_replay_file(projectId: int, sessionId: str, context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + return {"errors": ["Replay file not found"]} path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) if path is None: return {"errors": ["Replay file not found"]} diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index add1a790d..81953bcc5 100644 --- 
a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -1542,3 +1542,14 @@ def count_all(): with pg_client.PostgresClient(unlimited_query=True) as cur: row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") return row.get("count", 0) + + +def session_exists(project_id, session_id): + with ch_client.ClickHouseClient() as cur: + query = cur.format("""SELECT 1 + FROM public.sessions + WHERE session_id=%(session_id)s + AND project_id=%(project_id)s""", + {"project_id": project_id, "session_id": session_id}) + row = cur.execute(query) + return row is not None From a10488345b39d0b42a4c291e7b31eb2ae3427ec2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 21 Sep 2022 14:57:52 +0100 Subject: [PATCH 2/7] feat(chalice): check session existance id-casting --- api/routers/core.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/api/routers/core.py b/api/routers/core.py index 92161f960..5c4b311f5 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -911,9 +911,14 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun @app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"]) @app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"]) -def get_live_session_replay_file(projectId: int, sessionId: str, +def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + if isinstance(sessionId, str): + print(f"{sessionId} not a valid number.") + else: + print(f"{projectId}/{sessionId} not found in DB.") + return {"errors": ["Replay file not found"]} path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) if path is None: From 2a760fe876e07495a49486c8a47cc0df15601e25 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 21 Sep 
2022 15:05:36 +0100 Subject: [PATCH 3/7] feat(chalice): check session existance fetchone --- api/chalicelib/core/sessions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 054e7fcb2..4b6835a1d 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1247,5 +1247,5 @@ def session_exists(project_id, session_id): AND project_id=%(project_id)s""", {"project_id": project_id, "session_id": session_id}) cur.execute(query) - row = cur.fetchone(query) + row = cur.fetchone() return row is not None From 28b6c58a497459bbd7a08c90a3264ad6e6b739fe Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 21 Sep 2022 17:31:39 +0100 Subject: [PATCH 4/7] feat(chalice): router level permissions for sessions feat(chalice): router level permissions for errors --- api/auth/auth_jwt.py | 10 +- api/chalicelib/core/errors.py | 30 ----- api/routers/core.py | 188 -------------------------- api/routers/core_dynamic.py | 182 ++++++++++++++++++++++++++ ee/api/.gitignore | 1 - ee/api/auth/auth_jwt.py | 60 +++++++++ ee/api/auth/router_security.py | 15 +++ ee/api/chalicelib/core/errors.py | 30 ----- ee/api/clean.sh | 1 - ee/api/or_dependencies.py | 3 +- ee/api/routers/core_dynamic.py | 218 ++++++++++++++++++++++++++++++- ee/api/schemas_ee.py | 16 ++- 12 files changed, 495 insertions(+), 259 deletions(-) create mode 100644 ee/api/auth/auth_jwt.py create mode 100644 ee/api/auth/router_security.py diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index 4eff80789..e8824b6b9 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -6,14 +6,14 @@ from starlette import status from starlette.exceptions import HTTPException from chalicelib.core import authorizers, users -from schemas import CurrentContext +import schemas class JWTAuth(HTTPBearer): def __init__(self, auto_error: bool = True): super(JWTAuth, self).__init__(auto_error=auto_error) - async def __call__(self, 
request: Request) -> Optional[CurrentContext]: + async def __call__(self, request: Request) -> Optional[schemas.CurrentContext]: credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request) if credentials: if not credentials.scheme == "Bearer": @@ -49,9 +49,9 @@ class JWTAuth(HTTPBearer): jwt_payload["authorizer_identity"] = "jwt" print(jwt_payload) request.state.authorizer_identity = "jwt" - request.state.currentContext = CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), - user_id=jwt_payload.get("userId", -1), - email=user["email"]) + request.state.currentContext = schemas.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), + user_id=jwt_payload.get("userId", -1), + email=user["email"]) return request.state.currentContext else: diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 1c7ed94c9..b20853646 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -709,36 +709,6 @@ def __status_rank(status): }.get(status) -def merge(error_ids): - error_ids = list(set(error_ids)) - errors = get_batch(error_ids) - if len(error_ids) <= 1 or len(error_ids) > len(errors): - return {"errors": ["invalid list of ids"]} - error_ids = [e["errorId"] for e in errors] - parent_error_id = error_ids[0] - status = "unresolved" - for e in errors: - if __status_rank(status) < __status_rank(e["status"]): - status = e["status"] - if __status_rank(status) == MAX_RANK: - break - params = { - "error_ids": tuple(error_ids), - "parent_error_id": parent_error_id, - "status": status - } - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """UPDATE public.errors - SET parent_error_id = %(parent_error_id)s, status = %(status)s - WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""", - params) - cur.execute(query=query) - # row = cur.fetchone() - - return {"data": "success"} - - def format_first_stack_frame(error): error["stack"] = 
sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True) for s in error["stack"]: diff --git a/api/routers/core.py b/api/routers/core.py index 5c4b311f5..3741fdec6 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -59,87 +59,6 @@ def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchem return {'data': data} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) -@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) -def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): - return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, - include_fav_viewed=True, group_metadata=True) - if data is None: - return {"errors": ["session not found"]} - if data.get("inDB"): - background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, - session_id=sessionId) - return { - 'data': data - } - - -@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) -@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"]) -def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId)} - - -@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) -@app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"]) -def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, - tenant_id=context.tenant_id, - user_id=context.user_id) - if "errors" in data: - return data - return { - 'data': 
data - } - - -@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) -@app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) -def get_error_trace(projectId: int, sessionId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_trace(project_id=projectId, error_id=errorId) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) -@app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) -def assign_session(projectId: int, sessionId: int, issueId: str, - context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, - tenant_id=context.tenant_id, user_id=context.user_id) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, - session_id=sessionId, assignment_id=issueId, - user_id=context.user_id, message=data.message) - if "errors" in data.keys(): - return data - return { - 'data': data - } - - @app.get('/{projectId}/events/search', tags=["events"]) def 
events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType, @@ -664,13 +583,6 @@ def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)) return {"data": announcements.view(user_id=context.user_id)} -@app.post('/{projectId}/errors/merge', tags=["errors"]) -def errors_merge(projectId: int, data: schemas.ErrorIdsPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.merge(error_ids=data.errors) - return data - - @app.get('/show_banner', tags=["banner"]) def errors_merge(context: schemas.CurrentContext = Depends(OR_context)): return {"data": False} @@ -894,45 +806,6 @@ def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema return {'data': data} -@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) -def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) - if data is None: - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) - if data is None: - return {"errors": ["session not found"]} - if data.get("inDB"): - background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, - user_id=context.user_id, session_id=sessionId) - return {'data': data} - - -@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"]) -@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"]) -def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): - if isinstance(sessionId, str): - print(f"{sessionId} not a valid number.") - else: - 
print(f"{projectId}/{sessionId} not found in DB.") - - return {"errors": ["Replay file not found"]} - path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) - if path is None: - return {"errors": ["Replay file not found"]} - - return FileResponse(path=path, media_type="application/octet-stream") - - -@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) -def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} - - @app.post('/{projectId}/mobile/{sessionId}/urls', tags=['mobile']) def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -978,67 +851,6 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data) -@app.post('/{projectId}/errors/search', tags=['errors']) -def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": errors.search(data, projectId, user_id=context.user_id)} - - -@app.get('/{projectId}/errors/stats', tags=['errors']) -def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): - return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) - - -@app.get('/{projectId}/errors/{errorId}', tags=['errors']) -def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, - **{"density24": density24, "density30": density30}) - if 
data.get("data") is not None: - background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id, - error_id=errorId) - return data - - -@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) -def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, - **{"startDate": startDate, "endDate": endDate, "density": density}) - return data - - -@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) -def errors_get_details_sourcemaps(projectId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_trace(project_id=projectId, error_id=errorId) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) -def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): - if action == "favorite": - return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) - elif action == "sessions": - start_date = startDate - end_date = endDate - return { - "data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId, - start_date=start_date, end_date=end_date)} - elif action in list(errors.ACTION_STATE.keys()): - return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action) - else: - return {"errors": ["undefined action"]} - - @app.get('/notifications', tags=['notifications']) def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.get_all(tenant_id=context.tenant_id, 
user_id=context.user_id)} diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index d37a56728..0b3952dd2 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -11,6 +11,7 @@ from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import helper +from chalicelib.utils.TimeUTC import TimeUTC from or_dependencies import OR_context from routers.base import get_routers @@ -165,3 +166,184 @@ def get_general_stats(): def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True)} + + +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) +@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) +def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, + include_fav_viewed=True, group_metadata=True) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) +@app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) +def get_error_trace(projectId: int, sessionId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: 
+ return data + return { + 'data': data + } + + +@app.post('/{projectId}/errors/search', tags=['errors']) +def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": errors.search(data, projectId, user_id=context.user_id)} + + +@app.get('/{projectId}/errors/stats', tags=['errors']) +def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, + context: schemas.CurrentContext = Depends(OR_context)): + return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) + + +@app.get('/{projectId}/errors/{errorId}', tags=['errors']) +def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, + density30: int = 30, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"density24": density24, "density30": density30}) + if data.get("data") is not None: + background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id, + error_id=errorId) + return data + + +@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) +def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"startDate": startDate, "endDate": endDate, "density": density}) + return data + + +@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) +def errors_get_details_sourcemaps(projectId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: + return data + return { + 'data': data + } + + 
+@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) +def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + if action == "favorite": + return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) + elif action == "sessions": + start_date = startDate + end_date = endDate + return { + "data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId, + start_date=start_date, end_date=end_date)} + elif action in list(errors.ACTION_STATE.keys()): + return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action) + else: + return {"errors": ["undefined action"]} + + +@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) +def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) + if data is None: + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, + user_id=context.user_id, session_id=sessionId) + return {'data': data} + + +@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"]) +@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"]) +def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + if isinstance(sessionId, str): + 
print(f"{sessionId} not a valid number.") + else: + print(f"{projectId}/{sessionId} not found in DB.") + + return {"errors": ["Replay file not found"]} + path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) + if path is None: + return {"errors": ["Replay file not found"]} + + return FileResponse(path=path, media_type="application/octet-stream") + + +@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) +def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} + + +@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) +@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"]) +def add_remove_favorite_session2(projectId: int, sessionId: int, + context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, + session_id=sessionId)} + + +@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) +@app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"]) +def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, + tenant_id=context.tenant_id, + user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) +@app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) +def assign_session(projectId: int, sessionId: int, issueId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, + tenant_id=context.tenant_id, 
user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, assignment_id=issueId, + user_id=context.user_id, message=data.message) + if "errors" in data.keys(): + return data + return { + 'data': data + } diff --git a/ee/api/.gitignore b/ee/api/.gitignore index d25a4474d..e0bc9b436 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -243,7 +243,6 @@ Pipfile /routers/__init__.py /chalicelib/core/assist.py /auth/auth_apikey.py -/auth/auth_jwt.py /build.sh /routers/base.py /routers/core.py diff --git a/ee/api/auth/auth_jwt.py b/ee/api/auth/auth_jwt.py new file mode 100644 index 000000000..477beba3d --- /dev/null +++ b/ee/api/auth/auth_jwt.py @@ -0,0 +1,60 @@ +from typing import Optional + +from fastapi import Request +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from starlette import status +from starlette.exceptions import HTTPException + +from chalicelib.core import authorizers, users +import schemas_ee + + +class JWTAuth(HTTPBearer): + def __init__(self, auto_error: bool = True): + super(JWTAuth, self).__init__(auto_error=auto_error) + + async def __call__(self, request: Request) -> Optional[schemas_ee.CurrentContext]: + credentials: HTTPAuthorizationCredentials = await 
super(JWTAuth, self).__call__(request) + if credentials: + if not credentials.scheme == "Bearer": + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") + jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + auth_exists = jwt_payload is not None \ + and users.auth_exists(user_id=jwt_payload.get("userId", -1), + tenant_id=jwt_payload.get("tenantId", -1), + jwt_iat=jwt_payload.get("iat", 100), + jwt_aud=jwt_payload.get("aud", "")) + if jwt_payload is None \ + or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ + or not auth_exists: + print("JWTAuth: Token issue") + if jwt_payload is not None: + print(jwt_payload) + print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}") + if jwt_payload is None: + print("JWTAuth: jwt_payload is None") + print(credentials.scheme + " " + credentials.credentials) + if jwt_payload is not None and jwt_payload.get("iat") is None: + print("JWTAuth: iat is None") + if jwt_payload is not None and jwt_payload.get("aud") is None: + print("JWTAuth: aud is None") + if jwt_payload is not None and not auth_exists: + print("JWTAuth: not users.auth_exists") + + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") + user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) + if user is None: + print("JWTAuth: User not found.") + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") + jwt_payload["authorizer_identity"] = "jwt" + print(jwt_payload) + request.state.authorizer_identity = "jwt" + request.state.currentContext = schemas_ee.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), + user_id=jwt_payload.get("userId", -1), + email=user["email"], + permissions=user["permissions"]) + return request.state.currentContext + + else: + print("JWTAuth: Invalid authorization code.") + raise 
HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.") diff --git a/ee/api/auth/router_security.py b/ee/api/auth/router_security.py new file mode 100644 index 000000000..1b0c98980 --- /dev/null +++ b/ee/api/auth/router_security.py @@ -0,0 +1,15 @@ +from fastapi import HTTPException, Depends +from fastapi.security import SecurityScopes + +import schemas_ee +from or_dependencies import OR_context + + +def check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): + for scope in security_scopes.scopes: + if scope not in context.permissions: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not enough permissions", + ) + \ No newline at end of file diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index d50c6b54a..1db619b15 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -716,36 +716,6 @@ def __status_rank(status): }.get(status) -def merge(error_ids): - error_ids = list(set(error_ids)) - errors = get_batch(error_ids) - if len(error_ids) <= 1 or len(error_ids) > len(errors): - return {"errors": ["invalid list of ids"]} - error_ids = [e["errorId"] for e in errors] - parent_error_id = error_ids[0] - status = "unresolved" - for e in errors: - if __status_rank(status) < __status_rank(e["status"]): - status = e["status"] - if __status_rank(status) == MAX_RANK: - break - params = { - "error_ids": tuple(error_ids), - "parent_error_id": parent_error_id, - "status": status - } - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """UPDATE public.errors - SET parent_error_id = %(parent_error_id)s, status = %(status)s - WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""", - params) - cur.execute(query=query) - # row = cur.fetchone() - - return {"data": "success"} - - def format_first_stack_frame(error): error["stack"] = sourcemaps.format_payload(error.pop("payload"), 
truncate_to_first=True) for s in error["stack"]: diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 9aa916080..395bd21af 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -65,7 +65,6 @@ rm -rf ./routers/subs/__init__.py rm -rf ./routers/__init__.py rm -rf ./chalicelib/core/assist.py rm -rf ./auth/auth_apikey.py -rm -rf ./auth/auth_jwt.py rm -rf ./build.sh rm -rf ./routers/core.py rm -rf ./routers/crons/core_crons.py diff --git a/ee/api/or_dependencies.py b/ee/api/or_dependencies.py index ec0eb5d51..4ca35476d 100644 --- a/ee/api/or_dependencies.py +++ b/ee/api/or_dependencies.py @@ -8,10 +8,11 @@ from starlette.requests import Request from starlette.responses import Response, JSONResponse import schemas +import schemas_ee from chalicelib.core import traces -async def OR_context(request: Request) -> schemas.CurrentContext: +async def OR_context(request: Request) -> schemas_ee.CurrentContext: if hasattr(request.state, "currentContext"): return request.state.currentContext else: diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index a414aed05..75806aeca 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -1,17 +1,23 @@ -from typing import Optional +from typing import Optional, Union from decouple import config -from fastapi import Body, Depends, BackgroundTasks +from fastapi import Body, Depends, BackgroundTasks, Security, HTTPException +from fastapi.security import SecurityScopes +from starlette import status from starlette.responses import RedirectResponse import schemas import schemas_ee +from schemas_ee import Permissions +from auth import router_security from chalicelib.core import sessions from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook +from chalicelib.core import sessions_viewed from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import SAML2_helper from chalicelib.utils import helper +from chalicelib.utils.TimeUTC import 
TimeUTC from or_dependencies import OR_context from routers.base import get_routers @@ -171,3 +177,211 @@ def get_general_stats(): def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, stack_integrations=True, user_id=context.user_id)} + + +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, + include_fav_viewed=True, group_metadata=True) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], + dependencies=[Security(router_security.check, + scopes=[Permissions.session_replay, Permissions.errors])]) +@app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], + dependencies=[Security(router_security.check, + scopes=[Permissions.session_replay, Permissions.errors])]) +def get_error_trace(projectId: int, sessionId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: + return data + return { + 
'data': data + } + + +@app.post('/{projectId}/errors/search', tags=['errors'], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": errors.search(data, projectId, user_id=context.user_id)} + + +@app.get('/{projectId}/errors/stats', tags=['errors'], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, + context: schemas.CurrentContext = Depends(OR_context)): + return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) + + +@app.get('/{projectId}/errors/{errorId}', tags=['errors'], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, + density30: int = 30, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"density24": density24, "density30": density30}) + if data.get("data") is not None: + background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id, + error_id=errorId) + return data + + +@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"startDate": startDate, "endDate": endDate, "density": density}) + return data + + 
+@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def errors_get_details_sourcemaps(projectId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], + dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + if action == "favorite": + return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) + elif action == "sessions": + start_date = startDate + end_date = endDate + return { + "data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId, + start_date=start_date, end_date=end_date)} + elif action in list(errors.ACTION_STATE.keys()): + return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action) + else: + return {"errors": ["undefined action"]} + + +@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], + dependencies=[Security(router_security.check, scopes=[Permissions.assist_live])]) +def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) + if data is None: + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + 
background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, + user_id=context.user_id, session_id=sessionId) + return {'data': data} + + +@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"], dependencies=[Security(router_security.check, scopes=[ + Permissions.assist_live, Permissions.session_replay])]) +@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"], dependencies=[ + Security(router_security.check, + scopes=[Permissions.assist_live, Permissions.session_replay])]) +def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + if isinstance(sessionId, str): + print(f"{sessionId} not a valid number.") + else: + print(f"{projectId}/{sessionId} not found in DB.") + + return {"errors": ["Replay file not found"]} + path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) + if path is None: + return {"errors": ["Replay file not found"]} + + return FileResponse(path=path, media_type="application/octet-stream") + + +@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} + + +@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def add_remove_favorite_session2(projectId: int, sessionId: int, + context: schemas.CurrentContext = 
Depends(OR_context)): + return { + "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, + session_id=sessionId)} + + +@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, + tenant_id=context.tenant_id, + user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def assign_session(projectId: int, sessionId: int, issueId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, + tenant_id=context.tenant_id, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', 
tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], + dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, assignment_id=issueId, + user_id=context.user_id, message=data.message) + if "errors" in data.keys(): + return data + return { + 'data': data + } diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 458bdc052..9a91cb944 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -4,12 +4,26 @@ from pydantic import BaseModel, Field, EmailStr import schemas from chalicelib.utils.TimeUTC import TimeUTC +from enum import Enum + + +class Permissions(str, Enum): + session_replay = "SESSION_REPLAY" + dev_tools = "DEV_TOOLS" + errors = "ERRORS" + metrics = "METRICS" + assist_live = "ASSIST_LIVE" + assist_call = "ASSIST_CALL" + + +class CurrentContext(schemas.CurrentContext): + permissions: List[Optional[Permissions]] = Field(...) class RolePayloadSchema(BaseModel): name: str = Field(...) description: Optional[str] = Field(None) - permissions: List[str] = Field(...) + permissions: List[Permissions] = Field(...) 
all_projects: bool = Field(True) projects: List[int] = Field([]) From 49952103719d6f5b1bb96458f69c0366a8f82cf4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 21 Sep 2022 18:52:39 +0100 Subject: [PATCH 5/7] feat(chalice): refactored router level permissions for sessions feat(chalice): refactored router level permissions for errors --- ee/api/auth/router_security.py | 15 ------- ee/api/or_dependencies.py | 15 ++++++- ee/api/routers/core_dynamic.py | 82 +++++++++++++--------------------- 3 files changed, 46 insertions(+), 66 deletions(-) delete mode 100644 ee/api/auth/router_security.py diff --git a/ee/api/auth/router_security.py b/ee/api/auth/router_security.py deleted file mode 100644 index 1b0c98980..000000000 --- a/ee/api/auth/router_security.py +++ /dev/null @@ -1,15 +0,0 @@ -from fastapi import HTTPException, Depends -from fastapi.security import SecurityScopes - -import schemas_ee -from or_dependencies import OR_context - - -def check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): - for scope in security_scopes.scopes: - if scope not in context.permissions: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Not enough permissions", - ) - \ No newline at end of file diff --git a/ee/api/or_dependencies.py b/ee/api/or_dependencies.py index 4ca35476d..fed974c0d 100644 --- a/ee/api/or_dependencies.py +++ b/ee/api/or_dependencies.py @@ -1,13 +1,15 @@ import json from typing import Callable +from fastapi import HTTPException, Depends +from fastapi import Security from fastapi.routing import APIRoute +from fastapi.security import SecurityScopes from starlette import status from starlette.exceptions import HTTPException from starlette.requests import Request from starlette.responses import Response, JSONResponse -import schemas import schemas_ee from chalicelib.core import traces @@ -44,3 +46,14 @@ class ORRoute(APIRoute): return response return custom_route_handler + + +def 
check_permissions(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): + for scope in security_scopes.scopes: + if scope not in context.permissions: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not enough permissions") + + +def OR_scope(*scopes): + return Security(check_permissions, scopes=list(scopes)) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 75806aeca..a2a09c92d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -1,25 +1,22 @@ from typing import Optional, Union from decouple import config -from fastapi import Body, Depends, BackgroundTasks, Security, HTTPException -from fastapi.security import SecurityScopes -from starlette import status +from fastapi import Body, Depends, BackgroundTasks from starlette.responses import RedirectResponse import schemas import schemas_ee -from schemas_ee import Permissions -from auth import router_security from chalicelib.core import sessions +from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook -from chalicelib.core import sessions_viewed from chalicelib.core.collaboration_slack import Slack from chalicelib.utils import SAML2_helper from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC -from or_dependencies import OR_context +from or_dependencies import OR_context, OR_scope from routers.base import get_routers +from schemas_ee import Permissions public_app, app, app_apikey = get_routers() @@ -179,10 +176,8 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True, user_id=context.user_id)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) -@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"], - 
dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) +@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): @@ -200,11 +195,9 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], - dependencies=[Security(router_security.check, - scopes=[Permissions.session_replay, Permissions.errors])]) + dependencies=[OR_scope(Permissions.session_replay, Permissions.errors)]) @app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], - dependencies=[Security(router_security.check, - scopes=[Permissions.session_replay, Permissions.errors])]) + dependencies=[OR_scope(Permissions.session_replay, Permissions.errors)]) def get_error_trace(projectId: int, sessionId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) @@ -215,25 +208,21 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, } -@app.post('/{projectId}/errors/search', tags=['errors'], - dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.errors)]) def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": errors.search(data, projectId, user_id=context.user_id)} -@app.get('/{projectId}/errors/stats', tags=['errors'], - 
dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.errors)]) def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, context: schemas.CurrentContext = Depends(OR_context)): return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) -@app.get('/{projectId}/errors/{errorId}', tags=['errors'], - dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.errors)]) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, - context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -242,8 +231,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun return data -@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], - dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.errors)]) def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), endDate: int = TimeUTC.now(), density: int = 7, context: schemas.CurrentContext = Depends(OR_context)): @@ -252,8 +240,7 @@ def errors_get_details_right_column(projectId: int, errorId: str, startDate: int return data -@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], - dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.get('/{projectId}/errors/{errorId}/sourcemaps', 
tags=['errors'], dependencies=[OR_scope(Permissions.errors)]) def errors_get_details_sourcemaps(projectId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) @@ -264,8 +251,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str, } -@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], - dependencies=[Security(router_security.check, scopes=[Permissions.errors])]) +@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.errors)]) def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": @@ -282,8 +268,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa return {"errors": ["undefined action"]} -@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], - dependencies=[Security(router_security.check, scopes=[Permissions.assist_live])]) +@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)]) def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) @@ -298,11 +283,10 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun return {'data': data} -@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"], dependencies=[Security(router_security.check, scopes=[ - Permissions.assist_live, Permissions.session_replay])]) -@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"], dependencies=[ - Security(router_security.check, - scopes=[Permissions.assist_live, Permissions.session_replay])]) 
+@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"], + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) +@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"], + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): @@ -319,28 +303,26 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) +@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) @app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) -def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas.CurrentContext = Depends(OR_context)): + dependencies=[OR_scope(Permissions.session_replay)]) +def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, session_id=sessionId)} 
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) @app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, tenant_id=context.tenant_id, @@ -353,9 +335,9 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) @app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) def assign_session(projectId: int, sessionId: int, issueId: str, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, @@ -368,13 +350,13 @@ def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) @app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + 
dependencies=[OR_scope(Permissions.session_replay)]) @app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) @app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[Security(router_security.check, scopes=[Permissions.session_replay])]) + dependencies=[OR_scope(Permissions.session_replay)]) def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, From 9b207d3a80ebe1fef65c86518faa2ba449cd1023 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 22 Sep 2022 13:23:49 +0100 Subject: [PATCH 6/7] feat(chalice): router level permissions for metrics feat(chalice): router level permissions for dashboards feat(chalice): router level permissions for insights --- api/routers/base.py | 10 +- ee/api/.gitignore | 3 - ee/api/clean.sh | 4 +- ee/api/routers/subs/dashboard.py | 400 +++++++++++++++++++++++++++++++ ee/api/routers/subs/insights.py | 110 +++++++++ ee/api/routers/subs/metrics.py | 238 ++++++++++++++++++ 6 files changed, 755 insertions(+), 10 deletions(-) create mode 100644 ee/api/routers/subs/dashboard.py create mode 100644 ee/api/routers/subs/insights.py create mode 100644 ee/api/routers/subs/metrics.py diff --git a/api/routers/base.py b/api/routers/base.py index 5c665b2d1..09821e93e 100644 --- a/api/routers/base.py +++ b/api/routers/base.py @@ -6,9 +6,11 @@ from auth.auth_project import ProjectAuthorizer from or_dependencies import ORRoute -def get_routers() -> (APIRouter, APIRouter, APIRouter): +def get_routers(extra_dependencies=[]) -> (APIRouter, APIRouter, APIRouter): public_app = 
APIRouter(route_class=ORRoute) - app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))], route_class=ORRoute) - app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth()), Depends(ProjectAuthorizer("projectKey"))], - route_class=ORRoute) + app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))] + extra_dependencies, + route_class=ORRoute) + app_apikey = APIRouter( + dependencies=[Depends(APIKeyAuth()), Depends(ProjectAuthorizer("projectKey"))] + extra_dependencies, + route_class=ORRoute) return public_app, app, app_apikey diff --git a/ee/api/.gitignore b/ee/api/.gitignore index e0bc9b436..54c615e37 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -247,18 +247,15 @@ Pipfile /routers/base.py /routers/core.py /routers/crons/core_crons.py -/routers/subs/dashboard.py /db_changes.sql /Dockerfile.bundle /entrypoint.bundle.sh /chalicelib/core/heatmaps.py -/routers/subs/insights.py /schemas.py #exp /chalicelib/core/custom_metrics.py /chalicelib/core/performance_event.py /chalicelib/core/saved_search.py /app_alerts.py /build_alerts.sh -/routers/subs/metrics.py /routers/subs/v1_api.py #exp /chalicelib/core/dashboards.py diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 395bd21af..23c9aac3b 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -66,17 +66,15 @@ rm -rf ./routers/__init__.py rm -rf ./chalicelib/core/assist.py rm -rf ./auth/auth_apikey.py rm -rf ./build.sh +rm -rf ./routers/base.py rm -rf ./routers/core.py rm -rf ./routers/crons/core_crons.py -rm -rf ./routers/subs/dashboard.py rm -rf ./db_changes.sql rm -rf ./Dockerfile.bundle rm -rf ./entrypoint.bundle.sh rm -rf ./chalicelib/core/heatmaps.py -rm -rf ./routers/subs/insights.py rm -rf ./schemas.py rm -rf ./routers/subs/v1_api.py -rm -rf ./routers/subs/metrics.py #exp rm -rf ./chalicelib/core/custom_metrics.py rm -rf ./chalicelib/core/performance_event.py rm -rf ./chalicelib/core/saved_search.py diff --git 
a/ee/api/routers/subs/dashboard.py b/ee/api/routers/subs/dashboard.py new file mode 100644 index 000000000..206a234ca --- /dev/null +++ b/ee/api/routers/subs/dashboard.py @@ -0,0 +1,400 @@ +from fastapi import Body + +import schemas +from chalicelib.core import metadata +from chalicelib.core import metrics +from chalicelib.utils import helper +from or_dependencies import OR_scope +from routers.base import get_routers +from schemas_ee import Permissions + +public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) + + +@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"]) +def get_metadata_map(projectId: int): + metamap = [] + for m in metadata.get(project_id=projectId): + metamap.append({"name": m["key"], "key": f"metadata{m['index']}"}) + return {"data": metamap} + + +@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"]) +def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"]) +def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_errors(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"]) +def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_errors_trend(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"]) 
+def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_application_activity(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"]) +def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_page_metrics(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"]) +def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_user_activity(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"]) +def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_performance(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"]) +def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_slowest_images(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"]) +def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_missing_resources_trend(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/dashboard/network', tags=["dashboard", 
"metrics"]) +@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"]) +def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_network(project_id=projectId, **data.dict())} + + +@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"]) +def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None, + key: str = ""): + if q is None or len(q) == 0: + return {"data": []} + q = '^' + q + + if widget in ['performance']: + data = metrics.search(q, type, project_id=projectId, + platform=platform, performance=True) + elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', + 'impacted_sessions_by_slow_pages', 'pages_response_time']: + data = metrics.search(q, type, project_id=projectId, + platform=platform, pages_only=True) + elif widget in ['resources_loading_time']: + data = metrics.search(q, type, project_id=projectId, + platform=platform, performance=False) + elif widget in ['time_between_events', 'events']: + data = metrics.search(q, type, project_id=projectId, + platform=platform, performance=False, events_only=True) + elif widget in ['metadata']: + data = metrics.search(q, None, project_id=projectId, + platform=platform, metadata=True, key=key) + else: + return {"errors": [f"unsupported widget: {widget}"]} + return {'data': data} + + +# 1 +@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"]) +def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_slowest_resources(project_id=projectId, **data.dict())} + + +# 2 +@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"]) +def 
get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_resources_loading_time(project_id=projectId, **data.dict())} + + +# 3 +@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"]) +def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())} + + +# 4 +@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"]) +def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_busiest_time_of_day(project_id=projectId, **data.dict())} + + +# 5 +@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"]) +def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_sessions_location(project_id=projectId, **data.dict())} + + +# 6 +@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"]) +def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_speed_index_location(project_id=projectId, **data.dict())} + + +# 7 +@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"]) +def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": 
metrics.get_pages_response_time(project_id=projectId, **data.dict())} + + +# 8 +@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"]) +def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_pages_response_time_distribution(project_id=projectId, **data.dict())} + + +# 9 +@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"]) +def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_top_metrics(project_id=projectId, **data.dict())} + + +# 10 +@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"]) +def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_time_to_render(project_id=projectId, **data.dict())} + + +# 11 +@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"]) +def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())} + + +# 12 +@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"]) +def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())} + + +# 
12.1 +@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"]) +def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_avg_fps(project_id=projectId, **data.dict())} + + +# 12.2 +@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) +def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())} + + +# 13 +@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) +def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_crashes(project_id=projectId, **data.dict())} + + +# 14 +@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_domains_errors(project_id=projectId, **data.dict())} + + +# 14.1 +@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_domains_errors_4xx(project_id=projectId, **data.dict())} + + +# 14.2 +@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = 
Body(...)): + return {"data": metrics.get_domains_errors_5xx(project_id=projectId, **data.dict())} + + +# 15 +@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"]) +def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_slowest_domains(project_id=projectId, **data.dict())} + + +# 16 +@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_errors_per_domains(project_id=projectId, **data.dict())} + + +# 17 +@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"]) +def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_sessions_per_browser(project_id=projectId, **data.dict())} + + +# 18 +@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_calls_errors(project_id=projectId, **data.dict())} + + +# 18.1 +@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_calls_errors_4xx(project_id=projectId, **data.dict())} + + +# 18.2 +@app.post('/{projectId}/dashboard/calls_errors_5xx', 
tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_calls_errors_5xx(project_id=projectId, **data.dict())} + + +# 19 +@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_errors_per_type(project_id=projectId, **data.dict())} + + +# 20 +@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"]) +def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_resources_by_party(project_id=projectId, **data.dict())} + + +# 21 +@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.resource_type_vs_response_end(project_id=projectId, **data.dict())} + + +# 22 +@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"]) +def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_resources_vs_visually_complete(project_id=projectId, **data.dict())} + + +# 23 +@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"]) 
+@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"]) +def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())} + + +# 24 +@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"]) +def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": metrics.get_resources_count_by_type(project_id=projectId, **data.dict())} + + +# # 25 +# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"]) +# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"]) +# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): +# return {"errors": ["please choose 2 events"]} + + +@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"]) +def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + results = [ + {"key": "count_sessions", + "data": metrics.get_processed_sessions(project_id=projectId, **data.dict())}, + *helper.explode_widget(data={**metrics.get_application_activity(project_id=projectId, **data.dict()), + "chart": metrics.get_performance(project_id=projectId, **data.dict()) + .get("chart", [])}), + *helper.explode_widget(data=metrics.get_page_metrics(project_id=projectId, **data.dict())), + *helper.explode_widget(data=metrics.get_user_activity(project_id=projectId, **data.dict())), + {"key": "avg_pages_dom_buildtime", + "data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())}, + {"key": "avg_pages_response_time", + "data": 
metrics.get_pages_response_time(project_id=projectId, **data.dict()) + }, + *helper.explode_widget(metrics.get_top_metrics(project_id=projectId, **data.dict())), + {"key": "avg_time_to_render", "data": metrics.get_time_to_render(project_id=projectId, **data.dict())}, + {"key": "avg_used_js_heap_size", "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())}, + {"key": "avg_cpu", "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_fps, + "data": metrics.get_avg_fps(project_id=projectId, **data.dict())} + ] + results = sorted(results, key=lambda r: r["key"]) + return {"data": results} + + +@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"]) +def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + results = [ + {"key": schemas.TemplatePredefinedKeys.count_sessions, + "data": metrics.get_processed_sessions(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_image_load_time, + "data": metrics.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_page_load_time, + "data": metrics.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_request_load_time, + "data": metrics.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start, + "data": metrics.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel, + "data": metrics.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_visited_pages, + "data": 
metrics.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_session_duration, + "data": metrics.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime, + "data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_pages_response_time, + "data": metrics.get_pages_response_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_response_time, + "data": metrics.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_first_paint, + "data": metrics.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded, + "data": metrics.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_till_first_bit, + "data": metrics.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive, + "data": metrics.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.count_requests, + "data": metrics.get_top_metrics_count_requests(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_time_to_render, + "data": metrics.get_time_to_render(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size, + "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_cpu, + "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())}, + {"key": schemas.TemplatePredefinedKeys.avg_fps, + "data": metrics.get_avg_fps(project_id=projectId, **data.dict())} + ] + results = 
sorted(results, key=lambda r: r["key"]) + return {"data": results} diff --git a/ee/api/routers/subs/insights.py b/ee/api/routers/subs/insights.py new file mode 100644 index 000000000..aa3ca2674 --- /dev/null +++ b/ee/api/routers/subs/insights.py @@ -0,0 +1,110 @@ +from fastapi import Body + +import schemas +from chalicelib.core import insights +from or_dependencies import OR_scope +from routers.base import get_routers +from schemas_ee import Permissions + +public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) + + +@app.post('/{projectId}/insights/journey', tags=["insights"]) +@app.get('/{projectId}/insights/journey', tags=["insights"]) +def get_insights_journey(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.journey(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_acquisition', tags=["insights"]) +@app.get('/{projectId}/insights/users_acquisition', tags=["insights"]) +def get_users_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_acquisition(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_retention', tags=["insights"]) +@app.get('/{projectId}/insights/users_retention', tags=["insights"]) +def get_users_retention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_retention(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_retention', tags=["insights"]) +@app.get('/{projectId}/insights/feature_retention', tags=["insights"]) +def get_feature_rentention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_retention(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_acquisition', tags=["insights"]) +@app.get('/{projectId}/insights/feature_acquisition', tags=["insights"]) +def get_feature_acquisition(projectId: int, data: 
schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_acquisition(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_popularity_frequency', tags=["insights"]) +@app.get('/{projectId}/insights/feature_popularity_frequency', tags=["insights"]) +def get_feature_popularity_frequency(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_popularity_frequency(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_intensity', tags=["insights"]) +@app.get('/{projectId}/insights/feature_intensity', tags=["insights"]) +def get_feature_intensity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_intensity(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_adoption', tags=["insights"]) +@app.get('/{projectId}/insights/feature_adoption', tags=["insights"]) +def get_feature_adoption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_adoption(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_adoption_top_users', tags=["insights"]) +@app.get('/{projectId}/insights/feature_adoption_top_users', tags=["insights"]) +def get_feature_adoption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_adoption_top_users(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_active', tags=["insights"]) +@app.get('/{projectId}/insights/users_active', tags=["insights"]) +def get_users_active(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_active(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_power', tags=["insights"]) +@app.get('/{projectId}/insights/users_power', tags=["insights"]) +def get_users_power(projectId: int, data: schemas.MetricPayloadSchema = 
Body(...)): + return {"data": insights.users_power(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_slipping', tags=["insights"]) +@app.get('/{projectId}/insights/users_slipping', tags=["insights"]) +def get_users_slipping(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_slipping(project_id=projectId, **data.dict())} + +# +# +# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) +# def get_dashboard_autocomplete(projectId:int, widget): +# params = app.current_request.query_params +# if params is None or params.get('q') is None or len(params.get('q')) == 0: +# return {"data": []} +# params['q'] = '^' + params['q'] +# +# if widget in ['performance']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=True) +# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', +# 'impacted_sessions_by_slow_pages', 'pages_response_time']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), pages_only=True) +# elif widget in ['resources_loading_time']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=False) +# elif widget in ['time_between_events', 'events']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=False, events_only=True) +# elif widget in ['metadata']: +# data = dashboard.search(params.get('q', ''), None, project_id=projectId, +# platform=params.get('platform', None), metadata=True, key=params.get("key")) +# else: +# return {"errors": [f"unsupported widget: {widget}"]} +# return {'data': data} diff --git a/ee/api/routers/subs/metrics.py b/ee/api/routers/subs/metrics.py new file 
mode 100644 index 000000000..2d296251b --- /dev/null +++ b/ee/api/routers/subs/metrics.py @@ -0,0 +1,238 @@ +from fastapi import Body, Depends + +import schemas +from chalicelib.core import dashboards, custom_metrics, funnels +from or_dependencies import OR_context, OR_scope +from routers.base import get_routers +from schemas_ee import Permissions + +public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) + + +@app.post('/{projectId}/dashboards', tags=["dashboard"]) +@app.put('/{projectId}/dashboards', tags=["dashboard"]) +def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data) + + +@app.get('/{projectId}/dashboards', tags=["dashboard"]) +def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)} + + +@app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) + if data is None: + return {"errors": ["dashboard not found"]} + return {"data": data} + + +@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +@app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id, + dashboard_id=dashboardId, data=data)} + + +@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +def delete_dashboard(projectId: int, dashboardId: int, context: 
schemas.CurrentContext = Depends(OR_context)): + return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) + + +@app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"]) +def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)} + + +@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +def add_widget_to_dashboard(projectId: int, dashboardId: int, + data: schemas.AddWidgetToDashboardPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, + data=data)} + + +@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) +@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) +def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, + data: schemas.CreateCustomMetricsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id, + dashboard_id=dashboardId, data=data)} + + +@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) +@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) +def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, + data: schemas.UpdateWidgetPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, + widget_id=widgetId, data=data) + + 
+@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) +def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, + context: schemas.CurrentContext = Depends(OR_context)): + return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, + widget_id=widgetId) + + +@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"]) +def get_widget_chart(projectId: int, dashboardId: int, widgetId: int, + data: schemas.CustomMetricChartPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, + widget_id=widgetId, data=data) + if data is None: + return {"errors": ["widget not found"]} + return {"data": data} + + +@app.get('/{projectId}/metrics/templates', tags=["dashboard"]) +def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": dashboards.get_templates(project_id=projectId, user_id=context.user_id)} + + +@app.post('/{projectId}/metrics/try', tags=["dashboard"]) +@app.put('/{projectId}/metrics/try', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)} + + +@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) +def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = 
custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) + return {"data": data} + + +@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) +def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + if len(data.series) == 0: + return {"data": []} + data.series[0].filter.startDate = data.startTimestamp + data.series[0].filter.endDate = data.endTimestamp + data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter) + return {"data": data} + + +@app.post('/{projectId}/metrics', tags=["dashboard"]) +@app.put('/{projectId}/metrics', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) +@app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) +def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data) + + +@app.get('/{projectId}/metrics', tags=["dashboard"]) +@app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) +def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} + + +@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def get_custom_metric(projectId: int, metric_id: str, context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + 
+@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) +def get_custom_metric_sessions(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) +def get_custom_metric_funnel_issues(projectId: int, metric_id: int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) +def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id, + metric_id=metric_id, issue_id=issueId, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) +def get_custom_metric_errors_list(projectId: int, metric_id: 
int, + data: schemas.CustomMetricSessionsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) +def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = dashboards.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +def update_custom_metric_state(projectId: int, metric_id: int, + data: schemas.UpdateCustomMetricsStatusSchema = Body(...), + 
context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + status=data.active)} + + +@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} From 3f6a90cd432b7e94fae8a89fe77d112cc7812301 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 22 Sep 2022 17:32:47 +0100 Subject: [PATCH 7/7] feat(chalice): devtools permission feat(chalice): devtools as a mob file feat(chalice): unprocessed devtools endpoint --- api/chalicelib/core/assist.py | 38 +- api/chalicelib/core/jobs.py | 2 +- api/chalicelib/core/sessions.py | 15 +- api/chalicelib/core/sessions_devtool.py | 24 + api/chalicelib/core/sessions_favorite.py | 12 +- api/chalicelib/core/sessions_mobs.py | 47 +- api/chalicelib/core/sessions_viewed.py | 4 +- api/chalicelib/core/socket_ios.py | 2 +- api/env.default | 8 +- api/routers/core_dynamic.py | 27 +- ee/api/.gitignore | 1 - ee/api/chalicelib/core/permissions.py | 10 + ee/api/chalicelib/core/sessions.py | 1254 +++++++++++++++++++ ee/api/chalicelib/core/sessions_devtool.py | 31 + ee/api/chalicelib/core/sessions_favorite.py | 13 +- ee/api/clean.sh | 1 - ee/api/env.default | 8 +- ee/api/or_dependencies.py | 5 +- ee/api/routers/core_dynamic.py | 37 +- 19 files changed, 1472 insertions(+), 67 deletions(-) create mode 100644 api/chalicelib/core/sessions_devtool.py create mode 100644 ee/api/chalicelib/core/permissions.py create mode 100644 ee/api/chalicelib/core/sessions.py create mode 100644 ee/api/chalicelib/core/sessions_devtool.py diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 8c4053a9b..8b908b004 100644 
--- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,10 +1,12 @@ +from os import access, R_OK +from os.path import exists + import requests from decouple import config -from os.path import exists +from starlette.exceptions import HTTPException + import schemas from chalicelib.core import projects -from starlette.exceptions import HTTPException -from os import access, R_OK ASSIST_KEY = config("ASSIST_KEY") ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY @@ -165,16 +167,24 @@ def get_ice_servers(): and len(config("iceServers")) > 0 else None -def get_raw_mob_by_id(project_id, session_id): +def __get_efs_path(): efs_path = config("FS_DIR") if not exists(efs_path): raise HTTPException(400, f"EFS not found in path: {efs_path}") if not access(efs_path, R_OK): raise HTTPException(400, f"EFS found under: {efs_path}; but it is not readable, please check permissions") + return efs_path - path_to_file = efs_path + "/" + str(session_id) +def __get_mob_path(project_id, session_id): + params = {"projectId": project_id, "sessionId": session_id} + return config("EFS_SESSION_MOB_PATTERN", default="%(sessionId)s") % params + + +def get_raw_mob_by_id(project_id, session_id): + efs_path = __get_efs_path() + path_to_file = efs_path + "/" + __get_mob_path(project_id=project_id, session_id=session_id) if exists(path_to_file): if not access(path_to_file, R_OK): raise HTTPException(400, f"Replay file found under: {efs_path};" @@ -183,3 +193,21 @@ def get_raw_mob_by_id(project_id, session_id): return path_to_file return None + + +def __get_devtools_path(project_id, session_id): + params = {"projectId": project_id, "sessionId": session_id} + return config("EFS_DEVTOOLS_MOB_PATTERN", default="%(sessionId)s") % params + + +def get_raw_devtools_by_id(project_id, session_id): + efs_path = __get_efs_path() + path_to_file = efs_path + "/" + __get_devtools_path(project_id=project_id, session_id=session_id) + if exists(path_to_file): + if not access(path_to_file, R_OK): + 
raise HTTPException(400, f"Devtools file found under: {efs_path};" + f" but it is not readable, please check permissions") + + return path_to_file + + return None diff --git a/api/chalicelib/core/jobs.py b/api/chalicelib/core/jobs.py index 4b7ba85ee..2d244e366 100644 --- a/api/chalicelib/core/jobs.py +++ b/api/chalicelib/core/jobs.py @@ -144,7 +144,7 @@ def execute_jobs(): ) sessions.delete_sessions_by_session_ids(session_ids) - sessions_mobs.delete_mobs(session_ids) + sessions_mobs.delete_mobs(session_ids=session_ids, project_id=job["projectId"]) else: raise Exception(f"The action {job['action']} not supported.") diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 4b6835a1d..4a27d0b13 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -2,7 +2,8 @@ from typing import List import schemas from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite + sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ + sessions_devtool from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, @@ -81,7 +82,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) else: data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, group_clickrage=True) @@ -89,14 +90,14 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] # to 
keep only the first stack data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if - e['source'] == "js_exception"][ - :500] # limit the number of errors to reduce the response-body size + # limit the number of errors to reduce the response-body size + e['source'] == "js_exception"][:500] data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], - duration=data["duration"]) + start_ts=data["startTs"], duration=data["duration"]) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) diff --git a/api/chalicelib/core/sessions_devtool.py b/api/chalicelib/core/sessions_devtool.py new file mode 100644 index 000000000..eef7b8e6b --- /dev/null +++ b/api/chalicelib/core/sessions_devtool.py @@ -0,0 +1,24 @@ +from decouple import config + +from chalicelib.utils.s3 import client + + +def __get_devtools_keys(project_id, session_id): + params = { + "sessionId": session_id, + "projectId": project_id + } + return [ + config("DEVTOOLS_MOB_PATTERN", default="%(sessionId)sdevtools") % params + ] + + +def get_urls(session_id, project_id): + results = [] + for k in __get_devtools_keys(project_id=project_id, session_id=session_id): + results.append(client.generate_presigned_url( + 'get_object', + Params={'Bucket': config("sessions_bucket"), 'Key': k}, + ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) + )) + return results diff --git a/api/chalicelib/core/sessions_favorite.py 
b/api/chalicelib/core/sessions_favorite.py index 98d7f18ce..691e5ec3e 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -7,8 +7,8 @@ def add_favorite_session(project_id, user_id, session_id): cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(sessionId)s);""", - {"userId": user_id, "sessionId": session_id}) + VALUES (%(userId)s,%(session_id)s);""", + {"userId": user_id, "session_id": session_id}) ) return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, include_fav_viewed=True) @@ -20,8 +20,8 @@ def remove_favorite_session(project_id, user_id, session_id): cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(sessionId)s;""", - {"userId": user_id, "sessionId": session_id}) + AND session_id = %(session_id)s;""", + {"userId": user_id, "session_id": session_id}) ) return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, include_fav_viewed=True) @@ -42,8 +42,8 @@ def favorite_session_exists(user_id, session_id): FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(sessionId)s""", - {"userId": user_id, "sessionId": session_id}) + AND session_id = %(session_id)s""", + {"userId": user_id, "session_id": session_id}) ) r = cur.fetchone() return r is not None diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index ccbda20bb..1107ee6d4 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -4,37 +4,40 @@ from chalicelib.utils import s3 from chalicelib.utils.s3 import client -def get_web(sessionId): +def __get_mob_keys(project_id, session_id): + params = { + "sessionId": session_id, + "projectId": project_id + } return [ - client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': 
config("sessions_bucket"), - 'Key': str(sessionId) - }, - ExpiresIn=100000 - ), - client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': config("sessions_bucket"), - 'Key': str(sessionId) + "e" - }, - ExpiresIn=100000 - )] + config("SESSION_MOB_PATTERN_S", default="%(sessionId)s") % params, + config("SESSION_MOB_PATTERN_E", default="%(sessionId)se") % params + ] -def get_ios(sessionId): +def get_urls(project_id, session_id): + results = [] + for k in __get_mob_keys(project_id=project_id, session_id=session_id): + results.append(client.generate_presigned_url( + 'get_object', + Params={'Bucket': config("sessions_bucket"), 'Key': k}, + ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) + )) + return results + + +def get_ios(session_id): return client.generate_presigned_url( 'get_object', Params={ 'Bucket': config("ios_bucket"), - 'Key': str(sessionId) + 'Key': str(session_id) }, - ExpiresIn=100000 + ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) ) -def delete_mobs(session_ids): +def delete_mobs(project_id, session_ids): for session_id in session_ids: - s3.schedule_for_deletion(config("sessions_bucket"), session_id) + for k in __get_mob_keys(project_id=project_id, session_id=session_id): + s3.schedule_for_deletion(config("sessions_bucket"), k) diff --git a/api/chalicelib/core/sessions_viewed.py b/api/chalicelib/core/sessions_viewed.py index c9b2c9b46..d84483bf2 100644 --- a/api/chalicelib/core/sessions_viewed.py +++ b/api/chalicelib/core/sessions_viewed.py @@ -5,7 +5,7 @@ def view_session(project_id, user_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify("""INSERT INTO public.user_viewed_sessions(user_id, session_id) - VALUES (%(userId)s,%(sessionId)s) + VALUES (%(userId)s,%(session_id)s) ON CONFLICT DO NOTHING;""", - {"userId": user_id, "sessionId": session_id}) + {"userId": user_id, "session_id": session_id}) ) diff --git a/api/chalicelib/core/socket_ios.py 
b/api/chalicelib/core/socket_ios.py index 50e4d025c..15e16ec0b 100644 --- a/api/chalicelib/core/socket_ios.py +++ b/api/chalicelib/core/socket_ios.py @@ -7,7 +7,7 @@ def start_replay(project_id, session_id, device, os_version, mob_url): r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={ "projectId": project_id, "projectKey": projects.get_project_key(project_id), - "sessionId": session_id, + "session_id": session_id, "device": device, "osVersion": os_version, "mobUrl": mob_url diff --git a/api/env.default b/api/env.default index c4388c7d5..d09e9b57c 100644 --- a/api/env.default +++ b/api/env.default @@ -48,4 +48,10 @@ sourcemaps_bucket=sourcemaps sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps stage=default-foss version_number=1.4.0 -FS_DIR=/mnt/efs \ No newline at end of file +FS_DIR=/mnt/efs +EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob +EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs +SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +PRESIGNED_URL_EXPIRATION=3600 \ No newline at end of file diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 0b3952dd2..a4e051249 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -1,12 +1,13 @@ -from typing import Optional +from typing import Optional, Union from decouple import config from fastapi import Body, Depends, BackgroundTasks -from starlette.responses import RedirectResponse +from starlette.responses import RedirectResponse, FileResponse import schemas -from chalicelib.core import integrations_manager -from chalicelib.core import sessions +from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ + sessions_favorite, assist +from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook from 
chalicelib.core.collaboration_slack import Slack @@ -292,6 +293,24 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") +@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"]) +@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"]) +def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + if isinstance(sessionId, str): + print(f"{sessionId} not a valid number.") + else: + print(f"{projectId}/{sessionId} not found in DB.") + + return {"errors": ["Devtools file not found"]} + path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId) + if path is None: + return {"errors": ["Devtools file not found"]} + + return FileResponse(path=path, media_type="application/octet-stream") + + @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 54c615e37..811b00301 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -210,7 +210,6 @@ Pipfile /chalicelib/core/log_tool_sumologic.py /chalicelib/core/metadata.py /chalicelib/core/mobile.py -/chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py diff --git a/ee/api/chalicelib/core/permissions.py b/ee/api/chalicelib/core/permissions.py new file mode 100644 index 000000000..bcc30b891 --- /dev/null +++ b/ee/api/chalicelib/core/permissions.py @@ -0,0 +1,10 @@ +from fastapi.security import SecurityScopes + +import schemas_ee + + +def check(security_scopes: SecurityScopes, context: 
schemas_ee.CurrentContext): + for scope in security_scopes.scopes: + if scope not in context.permissions: + return False + return True diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py new file mode 100644 index 000000000..92c6e8f74 --- /dev/null +++ b/ee/api/chalicelib/core/sessions.py @@ -0,0 +1,1254 @@ +from typing import List + +import schemas +import schemas_ee +from chalicelib.core import events, metadata, events_ios, \ + sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ + sessions_devtool +from chalicelib.utils import pg_client, helper, metrics_helper + +SESSION_PROJECTION_COLS = """s.project_id, +s.session_id::text AS session_id, +s.user_uuid, +s.user_id, +s.user_os, +s.user_browser, +s.user_device, +s.user_device_type, +s.user_country, +s.start_ts, +s.duration, +s.events_count, +s.pages_count, +s.errors_count, +s.user_anonymous_id, +s.platform, +s.issue_score, +to_jsonb(s.issue_types) AS issue_types, +favorite_sessions.session_id NOTNULL AS favorite, +COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentContext, full_data=False, + include_fav_viewed=False, group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions 
AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if + # limit the number of errors to reduce the response-body size + e['source'] == "js_exception"][:500] + data['userEvents'] = 
events.get_customs_by_sessionId2_pg(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, + session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def __get_sql_operator(op: schemas.SearchEventOperator): + return { + schemas.SearchEventOperator._is: "=", + schemas.SearchEventOperator._is_any: "IN", + schemas.SearchEventOperator._on: "=", + schemas.SearchEventOperator._on_any: "IN", + schemas.SearchEventOperator._is_not: "!=", + schemas.SearchEventOperator._not_on: "!=", + schemas.SearchEventOperator._contains: "ILIKE", + schemas.SearchEventOperator._not_contains: "NOT ILIKE", + schemas.SearchEventOperator._starts_with: "ILIKE", + schemas.SearchEventOperator._ends_with: "ILIKE", + }.get(op, "=") + + +def __is_negation_operator(op: schemas.SearchEventOperator): + return op in [schemas.SearchEventOperator._is_not, + schemas.SearchEventOperator._not_on, + schemas.SearchEventOperator._not_contains] + + +def __reverse_sql_operator(op): + return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE" + + +def __get_sql_operator_multiple(op: schemas.SearchEventOperator): + return " IN " if op not in [schemas.SearchEventOperator._is_not, schemas.SearchEventOperator._not_on, + 
schemas.SearchEventOperator._not_contains] else " NOT IN " + + +def __get_sql_value_multiple(values): + if isinstance(values, tuple): + return values + return tuple(values) if isinstance(values, list) else (values,) + + +def _multiple_conditions(condition, values, value_key="value", is_not=False): + query = [] + for i in range(len(values)): + k = f"{value_key}_{i}" + query.append(condition.replace(value_key, k)) + return "(" + (" AND " if is_not else " OR ").join(query) + ")" + + +def _multiple_values(values, value_key="value"): + query_values = {} + if values is not None and isinstance(values, list): + for i in range(len(values)): + k = f"{value_key}_{i}" + query_values[k] = values[i] + return query_values + + +def _isAny_opreator(op: schemas.SearchEventOperator): + return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any] + + +def _isUndefined_operator(op: schemas.SearchEventOperator): + return op in [schemas.SearchEventOperator._is_undefined] + + +# This function executes the query and return result +def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, + error_status=schemas.ErrorStatus.all, count_only=False, issue=None): + if data.bookmarked: + data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) + + full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, + favorite_only=data.bookmarked, issue=issue, project_id=project_id, + user_id=user_id) + if data.limit is not None and data.page is not None: + full_args["sessions_limit_s"] = (data.page - 1) * data.limit + full_args["sessions_limit_e"] = data.page * data.limit + else: + full_args["sessions_limit_s"] = 1 + full_args["sessions_limit_e"] = 200 + + meta_keys = [] + with pg_client.PostgresClient() as cur: + if errors_only: + main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + 
WHERE er.error_id = ve.error_id + AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed + {query_part};""", full_args) + + elif count_only: + main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions, + COUNT(DISTINCT s.user_uuid) AS count_users + {query_part};""", full_args) + elif data.group_by_user: + g_sort = "count(full_sessions)" + if data.order is None: + data.order = schemas.SortOrderType.desc + else: + data.order = data.order.upper() + if data.sort is not None and data.sort != 'sessionsCount': + sort = helper.key_to_snake_case(data.sort) + g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" + else: + sort = 'start_ts' + + meta_keys = metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, + COALESCE(JSONB_AGG(users_sessions) + FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT user_id, + count(full_sessions) AS user_sessions_count, + jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, + MIN(full_sessions.start_ts) AS first_session_ts, + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ) AS filtred_sessions + ) AS full_sessions + GROUP BY user_id + ) AS users_sessions;""", + full_args) + else: + if data.order is None: + data.order = schemas.SortOrderType.desc + sort = 'session_id' + if data.sort is not None and data.sort != "session_id": + # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) + sort = helper.key_to_snake_case(data.sort) + + meta_keys = metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, + COALESCE(JSONB_AGG(full_sessions) + FILTER 
(WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + full_args) + # print("--------------------") + # print(main_query) + # print("--------------------") + try: + cur.execute(main_query) + except Exception as err: + print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") + print(main_query.decode('UTF-8')) + print("--------- PAYLOAD -----------") + print(data.json()) + print("--------------------") + raise err + if errors_only: + return helper.list_to_camel_case(cur.fetchall()) + + sessions = cur.fetchone() + if count_only: + return helper.dict_to_camel_case(sessions) + + total = sessions["count"] + sessions = sessions["sessions"] + + if data.group_by_user: + for i, s in enumerate(sessions): + sessions[i] = {**s.pop("last_session")[0], **s} + sessions[i].pop("rn") + sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ + if sessions[i][f'metadata_{k["index"]}'] is not None} + else: + for i, s in enumerate(sessions): + sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ + if sessions[i][f'metadata_{k["index"]}'] is not None} + # if not data.group_by_user and data.sort is not None and data.sort != "session_id": + # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], + # reverse=data.order.upper() == "DESC") + return { + 'total': total, + 'sessions': helper.list_to_camel_case(sessions) + } + + +def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, + view_type: 
schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, + metric_of: schemas.TableMetricOfType, metric_value: List): + step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, + density=density, factor=1, decimal=True)) + extra_event = None + if metric_of == schemas.TableMetricOfType.visited_url: + extra_event = "events.pages" + elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0: + data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, + operator=schemas.SearchEventOperator._is)) + full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, + favorite_only=False, issue=None, project_id=project_id, + user_id=None, extra_event=extra_event) + full_args["step_size"] = step_size + sessions = [] + with pg_client.PostgresClient() as cur: + if metric_type == schemas.MetricType.timeseries: + if view_type == schemas.MetricTimeseriesViewType.line_chart: + main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts + {query_part}) + SELECT generated_timestamp AS timestamp, + COUNT(s) AS count + FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT 1 AS s + FROM full_sessions + WHERE start_ts >= generated_timestamp + AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""", full_args) + else: + main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count + {query_part};""", full_args) + + # print("--------------------") + # print(main_query) + # print("--------------------") + cur.execute(main_query) + if view_type == schemas.MetricTimeseriesViewType.line_chart: + sessions = cur.fetchall() + else: + sessions = cur.fetchone()["count"] + elif metric_type == schemas.MetricType.table: + if isinstance(metric_of, 
schemas.TableMetricOfType): + main_col = "user_id" + extra_col = "" + extra_where = "" + pre_query = "" + if metric_of == schemas.TableMetricOfType.user_country: + main_col = "user_country" + elif metric_of == schemas.TableMetricOfType.user_device: + main_col = "user_device" + elif metric_of == schemas.TableMetricOfType.user_browser: + main_col = "user_browser" + elif metric_of == schemas.TableMetricOfType.issues: + main_col = "issue" + extra_col = f", UNNEST(s.issue_types) AS {main_col}" + if len(metric_value) > 0: + extra_where = [] + for i in range(len(metric_value)): + arg_name = f"selected_issue_{i}" + extra_where.append(f"{main_col} = %({arg_name})s") + full_args[arg_name] = metric_value[i] + extra_where = f"WHERE ({' OR '.join(extra_where)})" + elif metric_of == schemas.TableMetricOfType.visited_url: + main_col = "path" + extra_col = ", path" + main_query = cur.mogrify(f"""{pre_query} + SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values + FROM (SELECT {main_col} AS name, + count(full_sessions) AS session_count, + ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn + FROM (SELECT * + FROM (SELECT DISTINCT ON(s.session_id) s.session_id, s.user_uuid, + s.user_id, s.user_os, + s.user_browser, s.user_device, + s.user_device_type, s.user_country, s.issue_types{extra_col} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ) AS full_sessions + {extra_where} + GROUP BY {main_col} + ORDER BY session_count DESC) AS users_sessions;""", + full_args) + # print("--------------------") + # print(main_query) + # print("--------------------") + cur.execute(main_query) + sessions = cur.fetchone() + for s in sessions["values"]: + s.pop("rn") + sessions["values"] = helper.list_to_camel_case(sessions["values"]) + + return sessions + + +def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): + return not (not is_any and len(event.value) == 0 and event.type not in 
[schemas.EventType.request_details, + schemas.EventType.graphql] \ + or event.type in [schemas.PerformanceEventType.location_dom_complete, + schemas.PerformanceEventType.location_largest_contentful_paint_time, + schemas.PerformanceEventType.location_ttfb, + schemas.PerformanceEventType.location_avg_cpu_load, + schemas.PerformanceEventType.location_avg_memory_usage + ] and (event.source is None or len(event.source) == 0) \ + or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( + event.filters is None or len(event.filters) == 0)) + + +# this function generates the query and return the generated-query with the dict of query arguments +def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None): + ss_constraints = [] + full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, + "projectId": project_id, "userId": user_id} + extra_constraints = [ + "s.project_id = %(project_id)s", + "s.duration IS NOT NULL" + ] + extra_from = "" + events_query_part = "" + if len(data.filters) > 0: + meta_keys = None + for i, f in enumerate(data.filters): + if not isinstance(f.value, list): + f.value = [f.value] + filter_type = f.type + f.value = helper.values_for_operator(value=f.value, op=f.operator) + f_k = f"f_value{i}" + full_args = {**full_args, **_multiple_values(f.value, value_key=f_k)} + op = __get_sql_operator(f.operator) \ + if filter_type not in [schemas.FilterType.events_count] else f.operator + is_any = _isAny_opreator(f.operator) + is_undefined = _isUndefined_operator(f.operator) + if not is_any and not is_undefined and len(f.value) == 0: + continue + is_not = False + if __is_negation_operator(f.operator): + is_not = True + if filter_type == schemas.FilterType.user_browser: + if is_any: + extra_constraints.append('s.user_browser IS NOT NULL') + ss_constraints.append('ms.user_browser IS NOT NULL') + else: + extra_constraints.append( + 
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]: + if is_any: + extra_constraints.append('s.user_os IS NOT NULL') + ss_constraints.append('ms.user_os IS NOT NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]: + if is_any: + extra_constraints.append('s.user_device IS NOT NULL') + ss_constraints.append('ms.user_device IS NOT NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]: + if is_any: + extra_constraints.append('s.user_country IS NOT NULL') + ss_constraints.append('ms.user_country IS NOT NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif filter_type in [schemas.FilterType.utm_source]: + if is_any: + extra_constraints.append('s.utm_source IS NOT NULL') + ss_constraints.append('ms.utm_source IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.utm_source IS NULL') + ss_constraints.append('ms.utm_source IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.utm_source {op} 
%({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + elif filter_type in [schemas.FilterType.utm_medium]: + if is_any: + extra_constraints.append('s.utm_medium IS NOT NULL') + ss_constraints.append('ms.utm_medium IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.utm_medium IS NULL') + ss_constraints.append('ms.utm_medium IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + elif filter_type in [schemas.FilterType.utm_campaign]: + if is_any: + extra_constraints.append('s.utm_campaign IS NOT NULL') + ss_constraints.append('ms.utm_campaign IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.utm_campaign IS NULL') + ss_constraints.append('ms.utm_campaign IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif filter_type == schemas.FilterType.duration: + if len(f.value) > 0 and f.value[0] is not None: + extra_constraints.append("s.duration >= %(minDuration)s") + ss_constraints.append("ms.duration >= %(minDuration)s") + full_args["minDuration"] = f.value[0] + if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: + extra_constraints.append("s.duration <= %(maxDuration)s") + ss_constraints.append("ms.duration <= %(maxDuration)s") + full_args["maxDuration"] = f.value[1] + elif filter_type == schemas.FilterType.referrer: + # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + if is_any: + 
extra_constraints.append('s.base_referrer IS NOT NULL') + else: + extra_constraints.append( + _multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) + elif filter_type == events.event_type.METADATA.ui_type: + # get metadata list only if you need it + if meta_keys is None: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if f.source in meta_keys.keys(): + if is_any: + extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") + ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") + elif is_undefined: + extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") + ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") + else: + extra_constraints.append( + _multiple_conditions( + f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", + f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions( + f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", + f.value, is_not=is_not, value_key=f_k)) + elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: + if is_any: + extra_constraints.append('s.user_id IS NOT NULL') + ss_constraints.append('ms.user_id IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.user_id IS NULL') + ss_constraints.append('ms.user_id IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + elif filter_type in [schemas.FilterType.user_anonymous_id, + schemas.FilterType.user_anonymous_id_ios]: + if is_any: + extra_constraints.append('s.user_anonymous_id IS NOT NULL') + 
ss_constraints.append('ms.user_anonymous_id IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.user_anonymous_id IS NULL') + ss_constraints.append('ms.user_anonymous_id IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: + if is_any: + extra_constraints.append('s.rev_id IS NOT NULL') + ss_constraints.append('ms.rev_id IS NOT NULL') + elif is_undefined: + extra_constraints.append('s.rev_id IS NULL') + ss_constraints.append('ms.rev_id IS NULL') + else: + extra_constraints.append( + _multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + elif filter_type == schemas.FilterType.platform: + # op = __get_sql_operator(f.operator) + extra_constraints.append( + _multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + elif filter_type == schemas.FilterType.issue: + if is_any: + extra_constraints.append("array_length(s.issue_types, 1) > 0") + ss_constraints.append("array_length(ms.issue_types, 1) > 0") + else: + extra_constraints.append( + _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not, + value_key=f_k)) + elif filter_type == schemas.FilterType.events_count: + extra_constraints.append( + _multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, 
is_not=is_not, + value_key=f_k)) + ss_constraints.append( + _multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + # --------------------------------------------------------------------------- + if len(data.events) > 0: + valid_events_count = 0 + for event in data.events: + is_any = _isAny_opreator(event.operator) + if not isinstance(event.value, list): + event.value = [event.value] + if __is_valid_event(is_any=is_any, event=event): + valid_events_count += 1 + events_query_from = [] + event_index = 0 + or_events = data.events_order == schemas.SearchEventOrder._or + # events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL " + events_joiner = " UNION " if or_events else " INNER JOIN LATERAL " + for i, event in enumerate(data.events): + event_type = event.type + is_any = _isAny_opreator(event.operator) + if not isinstance(event.value, list): + event.value = [event.value] + if not __is_valid_event(is_any=is_any, event=event): + continue + op = __get_sql_operator(event.operator) + is_not = False + if __is_negation_operator(event.operator): + is_not = True + op = __reverse_sql_operator(op) + if event_index == 0 or or_events: + event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)" + event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", + "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", + "ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"] + if favorite_only and not errors_only: + event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)" + event_where.append("fs.user_id = %(userId)s") + else: + event_from = "%s" + event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", + "main.session_id=event_0.session_id"] + if data.events_order == schemas.SearchEventOrder._then: + event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp") + e_k = f"e_value{i}" + s_k = e_k + "_source" + if event.type 
!= schemas.PerformanceEventType.time_between_events: + event.value = helper.values_for_operator(value=event.value, op=event.operator) + full_args = {**full_args, + **_multiple_values(event.value, value_key=e_k), + **_multiple_values(event.source, value_key=s_k)} + + if event_type == events.event_type.CLICK.ui_type: + event_from = event_from % f"{events.event_type.CLICK.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value, + value_key=e_k)) + + elif event_type == events.event_type.INPUT.ui_type: + event_from = event_from % f"{events.event_type.INPUT.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value, + value_key=e_k)) + if event.source is not None and len(event.source) > 0: + event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source, + value_key=f"custom{i}")) + full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")} + + elif event_type == events.event_type.LOCATION.ui_type: + event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + event.value, value_key=e_k)) + elif event_type == events.event_type.CUSTOM.ui_type: + event_from = event_from % f"{events.event_type.CUSTOM.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value, + value_key=e_k)) + elif event_type == events.event_type.REQUEST.ui_type: + event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value, + value_key=e_k)) + # elif event_type == events.event_type.GRAPHQL.ui_type: + # event_from 
= event_from % f"{events.event_type.GRAPHQL.table} AS main "
+        #     if not is_any:
+        #         event_where.append(
+        #             _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
+        #                                  value_key=e_k))
+        elif event_type == events.event_type.STATEACTION.ui_type:
+            event_from = event_from % f"{events.event_type.STATEACTION.table} AS main "
+            if not is_any:
+                event_where.append(
+                    _multiple_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s",
+                                         event.value, value_key=e_k))
+        elif event_type == events.event_type.ERROR.ui_type:
+            event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
+            event.source = list(set(event.source))
+            if not is_any and event.value not in [None, "*", ""]:
+                event_where.append(
+                    _multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
+                                         event.value, value_key=e_k))
+            if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
+                event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
+
+
+        # ----- IOS
+        elif event_type == events.event_type.CLICK_IOS.ui_type:
+            event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main "
+            if not is_any:
+                event_where.append(
+                    _multiple_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s",
+                                         event.value, value_key=e_k))
+
+        elif event_type == events.event_type.INPUT_IOS.ui_type:
+            event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main "
+            if not is_any:
+                event_where.append(
+                    _multiple_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s",
+                                         event.value, value_key=e_k))
+            if event.source is not None and len(event.source) > 0:
+                event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
+                                                        value_key=f"custom{i}"))
+            full_args = {**full_args, **_multiple_values(event.source, f"custom{i}")}
+        elif event_type ==
events.event_type.VIEW_IOS.ui_type: + event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s", + event.value, value_key=e_k)) + elif event_type == events.event_type.CUSTOM_IOS.ui_type: + event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s", + event.value, value_key=e_k)) + elif event_type == events.event_type.REQUEST_IOS.ui_type: + event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s", + event.value, value_key=e_k)) + elif event_type == events.event_type.ERROR_IOS.ui_type: + event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" + if not is_any and event.value not in [None, "*", ""]: + event_where.append( + _multiple_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", + event.value, value_key=e_k)) + elif event_type == schemas.PerformanceEventType.fetch_failed: + event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", + event.value, value_key=e_k)) + col = performance_event.get_col(event_type) + colname = col["column"] + event_where.append(f"main.{colname} = FALSE") + # elif event_type == schemas.PerformanceEventType.fetch_duration: + # event_from = event_from % f"{events.event_type.REQUEST.table} AS main " + # if not is_any: + # event_where.append( + # _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", + # event.value, value_key=e_k)) + # col = performance_event.get_col(event_type) + # 
colname = col["column"] + # tname = "main" + # e_k += "_custom" + # full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)} + # event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + + # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + # event.source, value_key=e_k)) + elif event_type in [schemas.PerformanceEventType.location_dom_complete, + schemas.PerformanceEventType.location_largest_contentful_paint_time, + schemas.PerformanceEventType.location_ttfb, + schemas.PerformanceEventType.location_avg_cpu_load, + schemas.PerformanceEventType.location_avg_memory_usage + ]: + event_from = event_from % f"{events.event_type.LOCATION.table} AS main " + col = performance_event.get_col(event_type) + colname = col["column"] + tname = "main" + if col.get("extraJoin") is not None: + tname = "ej" + event_from += f" INNER JOIN {col['extraJoin']} AS {tname} USING(session_id)" + event_where += [f"{tname}.timestamp >= main.timestamp", f"{tname}.timestamp >= %(startDate)s", + f"{tname}.timestamp <= %(endDate)s"] + if not is_any: + event_where.append( + _multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s", + event.value, value_key=e_k)) + e_k += "_custom" + full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)} + + event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + + _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + event.source, value_key=e_k)) + elif event_type == schemas.PerformanceEventType.time_between_events: + event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) " + if not isinstance(event.value[0].value, list): + event.value[0].value = [event.value[0].value] + if not isinstance(event.value[1].value, list): + event.value[1].value = [event.value[1].value] + 
event.value[0].value = helper.values_for_operator(value=event.value[0].value,
+                                                                  op=event.value[0].operator)
+                event.value[1].value = helper.values_for_operator(value=event.value[1].value,
+                                                                  op=event.value[1].operator)
+                e_k1 = e_k + "_e1"
+                e_k2 = e_k + "_e2"
+                full_args = {**full_args,
+                             **_multiple_values(event.value[0].value, value_key=e_k1),
+                             **_multiple_values(event.value[1].value, value_key=e_k2)}
+                s_op = __get_sql_operator(event.value[0].operator)
+                event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"]
+                if event_index > 0 and not or_events:
+                    event_where.append("main2.session_id=event_0.session_id")
+                is_any = _isAny_opreator(event.value[0].operator)
+                if not is_any:
+                    event_where.append(
+                        _multiple_conditions(
+                            f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s",
+                            event.value[0].value, value_key=e_k1))
+                s_op = __get_sql_operator(event.value[1].operator)
+                is_any = _isAny_opreator(event.value[1].operator)
+                if not is_any:
+                    event_where.append(
+                        _multiple_conditions(
+                            f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s",
+                            event.value[1].value, value_key=e_k2))
+
+                e_k += "_custom"
+                full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
+                event_where.append(
+                    _multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
+                                         event.source, value_key=e_k))
+
+        elif event_type == schemas.EventType.request_details:
+            event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
+            apply = False
+            for j, f in enumerate(event.filters):
+                is_any = _isAny_opreator(f.operator)
+                if is_any or len(f.value) == 0:
+                    continue
+                f.value = helper.values_for_operator(value=f.value, op=f.operator)
+                op = __get_sql_operator(f.operator)
+                e_k_f = e_k + f"_fetch{j}"
+                full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
+                if f.type == schemas.FetchFilterType._url:
+                    event_where.append(
+
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text", + f.value, value_key=e_k_f)) + apply = True + elif f.type == schemas.FetchFilterType._status_code: + event_where.append( + _multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value, + value_key=e_k_f)) + apply = True + elif f.type == schemas.FetchFilterType._method: + event_where.append( + _multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) + apply = True + elif f.type == schemas.FetchFilterType._duration: + event_where.append( + _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value, + value_key=e_k_f)) + apply = True + elif f.type == schemas.FetchFilterType._request_body: + event_where.append( + _multiple_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) + apply = True + elif f.type == schemas.FetchFilterType._response_body: + event_where.append( + _multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, + value_key=e_k_f)) + apply = True + else: + print(f"undefined FETCH filter: {f.type}") + if not apply: + continue + elif event_type == schemas.EventType.graphql: + event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " + for j, f in enumerate(event.filters): + is_any = _isAny_opreator(f.operator) + if is_any or len(f.value) == 0: + continue + f.value = helper.values_for_operator(value=f.value, op=f.operator) + op = __get_sql_operator(f.operator) + e_k_f = e_k + f"_graphql{j}" + full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)} + if f.type == schemas.GraphqlFilterType._name: + event_where.append( + _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value, + value_key=e_k_f)) + elif f.type == schemas.GraphqlFilterType._method: + event_where.append( + _multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) + elif f.type == 
schemas.GraphqlFilterType._request_body: + event_where.append( + _multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) + elif f.type == schemas.GraphqlFilterType._response_body: + event_where.append( + _multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) + else: + print(f"undefined GRAPHQL filter: {f.type}") + else: + continue + if event_index == 0 or or_events: + event_where += ss_constraints + if is_not: + if event_index == 0 or or_events: + events_query_from.append(f"""\ + (SELECT + session_id, + 0 AS timestamp + FROM sessions + WHERE EXISTS(SELECT session_id + FROM {event_from} + WHERE {" AND ".join(event_where)} + AND sessions.session_id=ms.session_id) IS FALSE + AND project_id = %(projectId)s + AND start_ts >= %(startDate)s + AND start_ts <= %(endDate)s + AND duration IS NOT NULL + ) {"" if or_events else (f"AS event_{event_index}" + ("ON(TRUE)" if event_index > 0 else ""))}\ + """) + else: + events_query_from.append(f"""\ + (SELECT + event_0.session_id, + event_{event_index - 1}.timestamp AS timestamp + WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE + ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ + """) + else: + events_query_from.append(f"""\ + (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp + FROM {event_from} + WHERE {" AND ".join(event_where)} + GROUP BY 1 + ) {"" if or_events else (f"AS event_{event_index} " + ("ON(TRUE)" if event_index > 0 else ""))}\ + """) + event_index += 1 + if event_index > 0: + if or_events: + events_query_part = f"""SELECT + session_id, + MIN(timestamp) AS first_event_ts, + MAX(timestamp) AS last_event_ts + FROM ({events_joiner.join(events_query_from)}) AS u + GROUP BY 1""" + else: + events_query_part = f"""SELECT + event_0.session_id, + MIN(event_0.timestamp) AS first_event_ts, + MAX(event_{event_index - 1}.timestamp) AS 
last_event_ts + FROM {events_joiner.join(events_query_from)} + GROUP BY 1""" + else: + data.events = [] + # --------------------------------------------------------------------------- + if data.startDate is not None: + extra_constraints.append("s.start_ts >= %(startDate)s") + if data.endDate is not None: + extra_constraints.append("s.start_ts <= %(endDate)s") + # if data.platform is not None: + # if data.platform == schemas.PlatformType.mobile: + # extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')") + # elif data.platform == schemas.PlatformType.desktop: + # extra_constraints.append( + # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") + + if errors_only: + extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" + extra_constraints.append("ser.source = 'js_exception'") + extra_constraints.append("ser.project_id = %(project_id)s") + # if error_status != schemas.ErrorStatus.all: + # extra_constraints.append("ser.status = %(error_status)s") + # full_args["error_status"] = error_status + # if favorite_only: + # extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + # extra_constraints.append("ufe.user_id = %(userId)s") + + if favorite_only and not errors_only and user_id is not None: + extra_from += """INNER JOIN (SELECT user_id, session_id + FROM public.user_favorite_sessions + WHERE user_id = %(userId)s) AS favorite_sessions + USING (session_id)""" + elif not favorite_only and not errors_only and user_id is not None: + extra_from += """LEFT JOIN (SELECT user_id, session_id + FROM public.user_favorite_sessions + WHERE user_id = %(userId)s) AS favorite_sessions + USING (session_id)""" + extra_join = "" + if issue is not None: + extra_join = """ + INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id) + WHERE 
issues.session_id=f.session_id + AND p_issues.type=%(issue_type)s + AND p_issues.context_string=%(issue_contextString)s + AND timestamp >= f.first_event_ts + AND timestamp <= f.last_event_ts) AS issues ON(TRUE) + """ + full_args["issue_contextString"] = issue["contextString"] + full_args["issue_type"] = issue["type"] + if extra_event: + extra_join += f"""INNER JOIN {extra_event} AS ev USING(session_id)""" + extra_constraints.append("ev.timestamp>=%(startDate)s") + extra_constraints.append("ev.timestamp<=%(endDate)s") + query_part = f"""\ + FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"} + {extra_join} + {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""} + {extra_from} + WHERE + {" AND ".join(extra_constraints)}""" + return full_args, query_part + + +def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): + if project_id is None: + all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) + else: + all_projects = [ + projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, + include_gdpr=False)] + + all_projects = {int(p["projectId"]): p["name"] for p in all_projects} + project_ids = list(all_projects.keys()) + + available_keys = metadata.get_keys_by_projects(project_ids) + for i in available_keys: + available_keys[i]["user_id"] = schemas.FilterType.user_id + available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id + results = {} + for i in project_ids: + if m_key not in available_keys[i].values(): + available_keys.pop(i) + results[i] = {"total": 0, "sessions": [], "missingMetadata": True} + project_ids = list(available_keys.keys()) + if len(project_ids) > 0: + with pg_client.PostgresClient() as cur: + sub_queries = [] + for i in project_ids: + col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] + sub_queries.append(cur.mogrify( + 
f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", + {"id": i, "value": m_value}).decode('UTF-8')) + query = f"""SELECT {", ".join(sub_queries)};""" + cur.execute(query=query) + + rows = cur.fetchone() + + sub_queries = [] + for i in rows.keys(): + results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]} + if rows[i] > 0: + col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)] + sub_queries.append( + cur.mogrify( + f"""( + SELECT * + FROM ( + SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} + FROM public.sessions AS s LEFT JOIN (SELECT session_id + FROM public.user_favorite_sessions + WHERE user_favorite_sessions.user_id = %(userId)s + ) AS favorite_sessions USING (session_id) + WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s + ) AS full_sessions + ORDER BY favorite DESC, issue_score DESC + LIMIT 10 + )""", + {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) + if len(sub_queries) > 0: + cur.execute("\nUNION\n".join(sub_queries)) + rows = cur.fetchall() + for i in rows: + results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) + return results + + +def search_by_issue(user_id, issue, project_id, start_date, end_date): + constraints = ["s.project_id = %(projectId)s", + "p_issues.context_string = %(issueContextString)s", + "p_issues.type = %(issueType)s"] + if start_date is not None: + constraints.append("start_ts >= %(startDate)s") + if end_date is not None: + constraints.append("start_ts <= %(endDate)s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} + FROM public.sessions AS s + INNER JOIN events_common.issues USING (session_id) + INNER JOIN public.issues AS 
p_issues USING (issue_id) + LEFT JOIN (SELECT user_id, session_id + FROM public.user_favorite_sessions + WHERE user_id = %(userId)s) AS favorite_sessions + USING (session_id) + WHERE {" AND ".join(constraints)} + ORDER BY s.session_id DESC;""", + { + "issueContextString": issue["contextString"], + "issueType": issue["type"], "userId": user_id, + "projectId": project_id, + "startDate": start_date, + "endDate": end_date + })) + + rows = cur.fetchall() + return helper.list_to_camel_case(rows) + + +def get_user_sessions(project_id, user_id, start_date, end_date): + with pg_client.PostgresClient() as cur: + constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] + if start_date is not None: + constraints.append("s.start_ts >= %(startDate)s") + if end_date is not None: + constraints.append("s.start_ts <= %(endDate)s") + + query_part = f"""\ + FROM public.sessions AS s + WHERE {" AND ".join(constraints)}""" + + cur.execute(cur.mogrify(f"""\ + SELECT s.project_id, + s.session_id::text AS session_id, + s.user_uuid, + s.user_id, + s.user_os, + s.user_browser, + s.user_device, + s.user_country, + s.start_ts, + s.duration, + s.events_count, + s.pages_count, + s.errors_count + {query_part} + ORDER BY s.session_id + LIMIT 50;""", { + "projectId": project_id, + "userId": user_id, + "startDate": start_date, + "endDate": end_date + })) + + sessions = cur.fetchall() + return helper.list_to_camel_case(sessions) + + +def get_session_user(project_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + """\ + SELECT + user_id, + count(*) as session_count, + max(start_ts) as last_seen, + min(start_ts) as first_seen + FROM + "public".sessions + WHERE + project_id = %(project_id)s + AND user_id = %(userId)s + AND duration is not null + GROUP BY user_id; + """, + {"project_id": project_id, "userId": user_id} + ) + cur.execute(query=query) + data = cur.fetchone() + return helper.dict_to_camel_case(data) + + +def 
get_session_ids_by_user_ids(project_id, user_ids): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + """\ + SELECT session_id FROM public.sessions + WHERE + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} + ) + ids = cur.execute(query=query) + return ids + + +def delete_sessions_by_session_ids(session_ids): + with pg_client.PostgresClient(unlimited_query=True) as cur: + query = cur.mogrify( + """\ + DELETE FROM public.sessions + WHERE + session_id IN %(session_ids)s;""", + {"session_ids": tuple(session_ids)} + ) + cur.execute(query=query) + + return True + + +def delete_sessions_by_user_ids(project_id, user_ids): + with pg_client.PostgresClient(unlimited_query=True) as cur: + query = cur.mogrify( + """\ + DELETE FROM public.sessions + WHERE + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} + ) + cur.execute(query=query) + + return True + + +def count_all(): + with pg_client.PostgresClient(unlimited_query=True) as cur: + row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") + return row.get("count", 0) + + +def session_exists(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify("""SELECT 1 + FROM public.sessions + WHERE session_id=%(session_id)s + AND project_id=%(project_id)s""", + {"project_id": project_id, "session_id": session_id}) + cur.execute(query) + row = cur.fetchone() + return row is not None diff --git a/ee/api/chalicelib/core/sessions_devtool.py b/ee/api/chalicelib/core/sessions_devtool.py new file mode 100644 index 000000000..ed6ecf694 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_devtool.py @@ -0,0 +1,31 @@ +from decouple import config +from fastapi.security import SecurityScopes + +import schemas_ee +from chalicelib.core import permissions +from chalicelib.utils.s3 import client + +SCOPES = 
SecurityScopes([schemas_ee.Permissions.dev_tools]) + + +def __get_devtools_keys(project_id, session_id): + params = { + "sessionId": session_id, + "projectId": project_id + } + return [ + config("DEVTOOLS_MOB_PATTERN", default="%(sessionId)sdevtools") % params + ] + + +def get_urls(session_id, project_id, context: schemas_ee.CurrentContext): + if not permissions.check(security_scopes=SCOPES, context=context): + return [] + results = [] + for k in __get_devtools_keys(project_id=project_id, session_id=session_id): + results.append(client.generate_presigned_url( + 'get_object', + Params={'Bucket': config("sessions_bucket"), 'Key': k}, + ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) + )) + return results diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index bcb79cfb7..c3128cd03 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -1,10 +1,11 @@ from decouple import config +import schemas_ee from chalicelib.core import sessions, sessions_favorite_exp from chalicelib.utils import pg_client, s3_extra -def add_favorite_session(project_id, user_id, session_id): +def add_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -15,10 +16,10 @@ def add_favorite_session(project_id, user_id, session_id): sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + include_fav_viewed=True, context=context) -def remove_favorite_session(project_id, user_id, session_id): +def remove_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -29,10 +30,10 @@ def 
remove_favorite_session(project_id, user_id, session_id): ) sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + include_fav_viewed=True, context=context) -def favorite_session(project_id, user_id, session_id): +def favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): if favorite_session_exists(user_id=user_id, session_id=session_id): key = str(session_id) try: @@ -59,7 +60,7 @@ def favorite_session(project_id, user_id, session_id): except Exception as e: print(f"!!!Error while tagging: {key} to vault") print(str(e)) - return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, context=context) def favorite_session_exists(user_id, session_id): diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 23c9aac3b..ce58fe45e 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -32,7 +32,6 @@ rm -rf ./chalicelib/core/log_tool_stackdriver.py rm -rf ./chalicelib/core/log_tool_sumologic.py rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py -rm -rf ./chalicelib/core/sessions.py rm -rf ./chalicelib/core/sessions_assignments.py rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py diff --git a/ee/api/env.default b/ee/api/env.default index 9d6fe66d7..3fc6add36 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -68,4 +68,10 @@ EXP_7D_MV=false EXP_ALERTS=false EXP_FUNNELS=false EXP_RESOURCES=true -TRACE_PERIOD=300 \ No newline at end of file +TRACE_PERIOD=300 +EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob +EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs +SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe 
+DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +PRESIGNED_URL_EXPIRATION=3600 \ No newline at end of file diff --git a/ee/api/or_dependencies.py b/ee/api/or_dependencies.py index fed974c0d..ea5d45ef2 100644 --- a/ee/api/or_dependencies.py +++ b/ee/api/or_dependencies.py @@ -12,6 +12,7 @@ from starlette.responses import Response, JSONResponse import schemas_ee from chalicelib.core import traces +from chalicelib.core import permissions async def OR_context(request: Request) -> schemas_ee.CurrentContext: @@ -48,7 +49,7 @@ class ORRoute(APIRoute): return custom_route_handler -def check_permissions(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): +def __check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): for scope in security_scopes.scopes: if scope not in context.permissions: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, @@ -56,4 +57,4 @@ def check_permissions(security_scopes: SecurityScopes, context: schemas_ee.Curre def OR_scope(*scopes): - return Security(check_permissions, scopes=list(scopes)) + return Security(__check, scopes=list(scopes)) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index a2a09c92d..fa5528e6e 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -2,11 +2,12 @@ from typing import Optional, Union from decouple import config from fastapi import Body, Depends, BackgroundTasks -from starlette.responses import RedirectResponse +from starlette.responses import RedirectResponse, FileResponse import schemas import schemas_ee -from chalicelib.core import sessions +from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ + errors_favorite from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -183,7 +184,7 @@ def get_session(projectId: 
int, sessionId: Union[int, str], background_tasks: Ba if isinstance(sessionId, str): return {"errors": ["session not found"]} data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, - include_fav_viewed=True, group_metadata=True) + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -270,11 +271,12 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)]) def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) + user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False, + context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -303,6 +305,26 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") +@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"], + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) +@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"], + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) +def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if 
isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId): + if isinstance(sessionId, str): + print(f"{sessionId} not a valid number.") + else: + print(f"{projectId}/{sessionId} not found in DB.") + + return {"errors": ["Devtools file not found"]} + path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId) + if path is None: + return {"errors": ["Devtools file not found"]} + + return FileResponse(path=path, media_type="application/octet-stream") + + @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -313,10 +335,11 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = dependencies=[OR_scope(Permissions.session_replay)]) @app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) -def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): +def add_remove_favorite_session2(projectId: int, sessionId: int, + context: schemas_ee.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId)} + session_id=sessionId, context=context)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"],