From 014a51602a51967102e68f67209c19637a9177be Mon Sep 17 00:00:00 2001 From: Amirouche Date: Tue, 30 Jan 2024 17:50:50 +0100 Subject: [PATCH] wip --- api/app_alerts.py | 2 +- api/auth/auth_apikey.py | 2 +- api/auth/auth_jwt.py | 6 +- api/auth/auth_project.py | 4 +- api/chalicelib/core/alerts.py | 29 +- api/chalicelib/core/assist.py | 26 +- api/chalicelib/core/authorizers.py | 12 +- api/chalicelib/core/boarding.py | 38 +-- api/chalicelib/core/canvas.py | 2 +- api/chalicelib/core/click_maps.py | 6 +- api/chalicelib/core/collaboration_msteams.py | 34 +-- api/chalicelib/core/collaboration_slack.py | 20 +- api/chalicelib/core/custom_metrics.py | 10 +- .../core/custom_metrics_predefined.py | 4 +- api/chalicelib/core/dashboards.py | 2 +- api/chalicelib/core/errors.py | 8 +- api/chalicelib/core/events.py | 26 +- api/chalicelib/core/funnels.py | 8 +- api/chalicelib/core/health.py | 2 +- api/chalicelib/core/integration_base.py | 2 +- api/chalicelib/core/integration_github.py | 8 +- .../core/integration_github_issue.py | 44 +-- api/chalicelib/core/integration_jira_cloud.py | 7 +- .../core/integration_jira_cloud_issue.py | 34 +-- api/chalicelib/core/metrics.py | 14 +- api/chalicelib/core/mobile.py | 2 +- api/chalicelib/core/projects.py | 16 +- api/chalicelib/core/reset_password.py | 4 +- api/chalicelib/core/sessions.py | 14 +- api/chalicelib/core/sessions_assignments.py | 34 +-- api/chalicelib/core/sessions_devtool.py | 8 +- api/chalicelib/core/sessions_metas.py | 4 +- api/chalicelib/core/sessions_mobs.py | 22 +- api/chalicelib/core/sessions_notes.py | 6 +- api/chalicelib/core/sessions_replay.py | 72 ++--- api/chalicelib/core/significance.py | 8 +- api/chalicelib/core/signup.py | 8 +- api/chalicelib/core/socket_ios.py | 4 +- api/chalicelib/core/sourcemaps.py | 16 +- api/chalicelib/core/sourcemaps_parser.py | 2 +- api/chalicelib/utils/email_helper.py | 4 +- api/chalicelib/utils/jira_client.py | 1 + api/chalicelib/utils/storage/s3.py | 6 +- api/routers/core.py | 250 
+++++++++--------- api/routers/core_dynamic.py | 88 +++--- api/routers/subs/health.py | 2 +- api/routers/subs/metrics.py | 50 ++-- api/routers/subs/v1_api.py | 20 +- 48 files changed, 497 insertions(+), 494 deletions(-) diff --git a/api/app_alerts.py b/api/app_alerts.py index 9587048dd..2b0a99629 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -63,4 +63,4 @@ if config("LOCAL_DEV", default=False, cast=bool): @app.get('/trigger', tags=["private"]) async def trigger_main_cron(): logging.info("Triggering main cron") - alerts_processor.process() + await alerts_processor.process() diff --git a/api/auth/auth_apikey.py b/api/auth/auth_apikey.py index 0171da436..6e2af052a 100644 --- a/api/auth/auth_apikey.py +++ b/api/auth/auth_apikey.py @@ -18,7 +18,7 @@ class APIKeyAuth(APIKeyHeader): async def __call__(self, request: Request) -> Optional[CurrentAPIContext]: api_key: Optional[str] = await super(APIKeyAuth, self).__call__(request) - r = authorizers.api_key_authorizer(api_key) + r = await authorizers.api_key_authorizer(api_key) if r is None: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index c3639e92e..24029b84c 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -13,8 +13,8 @@ from chalicelib.core import authorizers, users logger = logging.getLogger(__name__) -def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext: - user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) +async def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext: + user = await users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) if user is None: logger.warning("User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") @@ -36,7 +36,7 @@ class JWTAuth(HTTPBearer): jwt_payload = 
authorizers.jwt_refresh_authorizer(scheme="Bearer", token=refresh_token) if jwt_payload is None or jwt_payload.get("jti") is None: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") - auth_exists = users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1), + auth_exists = await users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1), jwt_jti=jwt_payload["jti"]) if not auth_exists: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") diff --git a/api/auth/auth_project.py b/api/auth/auth_project.py index a3ccc04e5..c9b970bdd 100644 --- a/api/auth/auth_project.py +++ b/api/auth/auth_project.py @@ -23,9 +23,9 @@ class ProjectAuthorizer: current_project = None if self.project_identifier == "projectId" \ and (isinstance(value, int) or isinstance(value, str) and value.isnumeric()): - current_project = projects.get_project(project_id=value, tenant_id=current_user.tenant_id) + current_project = await projects.get_project(project_id=value, tenant_id=current_user.tenant_id) elif self.project_identifier == "projectKey": - current_project = projects.get_by_project_key(project_key=value) + current_project = await projects.get_by_project_key(project_key=value) if current_project is None: logger.debug(f"unauthorized project {self.project_identifier}:{value}") diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index c898c700e..2f21482bd 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -95,7 +95,7 @@ async def update(id, data: schemas.AlertSchema): return {"data": helper.custom_alert_to_front(__process_circular(a))} -def process_notifications(data): +async def process_notifications(data): full = {} for n in data: if "message" in n["options"]: @@ -112,7 +112,7 @@ def process_notifications(data): }) elif c["type"] in ["webhook"]: full[c["type"]].append({"data": webhook_data, "destination": c["value"]}) - 
notifications.create(data) + await notifications.create(data) BATCH_SIZE = 200 for t in full.keys(): for i in range(0, len(full[t]), BATCH_SIZE): @@ -122,52 +122,51 @@ def process_notifications(data): if t == "slack": try: - send_to_slack_batch(notifications_list=notifications_list) + await send_to_slack_batch(notifications_list=notifications_list) except Exception as e: logging.error("!!!Error while sending slack notifications batch") logging.error(str(e)) elif t == "msteams": try: - send_to_msteams_batch(notifications_list=notifications_list) + await send_to_msteams_batch(notifications_list=notifications_list) except Exception as e: logging.error("!!!Error while sending msteams notifications batch") logging.error(str(e)) elif t == "email": try: - send_by_email_batch(notifications_list=notifications_list) + await send_by_email_batch(notifications_list=notifications_list) except Exception as e: logging.error("!!!Error while sending email notifications batch") logging.error(str(e)) elif t == "webhook": try: - webhook.trigger_batch(data_list=notifications_list) + await webhook.trigger_batch(data_list=notifications_list) except Exception as e: logging.error("!!!Error while sending webhook notifications batch") logging.error(str(e)) -def send_by_email(notification, destination): +async def send_by_email(notification, destination): if notification is None: return - email_helper.alert_email(recipients=destination, + await email_helper.alert_email(recipients=destination, subject=f'"{notification["title"]}" has been triggered', data={ "message": f'"{notification["title"]}" {notification["description"]}', "project_id": notification["options"]["projectId"]}) -def send_by_email_batch(notifications_list): +async def send_by_email_batch(notifications_list): if not smtp.has_smtp(): logging.info("no SMTP configuration for email notifications") if notifications_list is None or len(notifications_list) == 0: logging.info("no email notifications") return for n in notifications_list: 
- send_by_email(notification=n.get("notification"), destination=n.get("destination")) - time.sleep(1) + await send_by_email(notification=n.get("notification"), destination=n.get("destination")) -def send_to_slack_batch(notifications_list): +async def send_to_slack_batch(notifications_list): webhookId_map = {} for n in notifications_list: if n.get("destination") not in webhookId_map: @@ -178,11 +177,11 @@ def send_to_slack_batch(notifications_list): "title_link": n["notification"]["buttonUrl"], "ts": datetime.now().timestamp()}) for batch in webhookId_map.keys(): - Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, + await Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, attachments=webhookId_map[batch]["batch"]) -def send_to_msteams_batch(notifications_list): +async def send_to_msteams_batch(notifications_list): webhookId_map = {} for n in notifications_list: if n.get("destination") not in webhookId_map: @@ -207,7 +206,7 @@ def send_to_msteams_batch(notifications_list): } ) for batch in webhookId_map.keys(): - MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, + await MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, attachments=webhookId_map[batch]["batch"]) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 36d9fa0b9..85ae87a21 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -28,24 +28,24 @@ SESSION_PROJECTION_COLS = """s.project_id, """ -def get_live_sessions_ws_user_id(project_id, user_id): +async def get_live_sessions_ws_user_id(project_id, user_id): data = { "filter": {"userId": user_id} if user_id else {} } - return __get_live_sessions_ws(project_id=project_id, data=data) + return await __get_live_sessions_ws(project_id=project_id, data=data) -def get_live_sessions_ws_test_id(project_id, test_id): +async def get_live_sessions_ws_test_id(project_id, test_id): data = { "filter": { 
'uxtId': test_id, 'operator': 'is' } } - return __get_live_sessions_ws(project_id=project_id, data=data) + return await __get_live_sessions_ws(project_id=project_id, data=data) -def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema): +async def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema): data = { "filter": {}, "pagination": {"limit": body.limit, "page": body.page}, @@ -57,10 +57,10 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche else: data["filter"][f.type] = {"values": f.value, "operator": f.operator} - return __get_live_sessions_ws(project_id=project_id, data=data) + return await __get_live_sessions_ws(project_id=project_id, data=data) -def __get_live_sessions_ws(project_id, data): +async def __get_live_sessions_ws(project_id, data): project_key = projects.get_project_key(project_id) try: async with httpx.AsyncClient() as client: @@ -108,8 +108,8 @@ def __get_agent_token(project_id, project_key, session_id): ) -def get_live_session_by_id(project_id, session_id): - project_key = projects.get_project_key(project_id) +async def get_live_session_by_id(project_id, session_id): + project_key = await projects.get_project_key(project_id) try: async with httpx.AsyncClient() as client: results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}", @@ -135,9 +135,9 @@ def get_live_session_by_id(project_id, session_id): return results -def is_live(project_id, session_id, project_key=None): +async def is_live(project_id, session_id, project_key=None): if project_key is None: - project_key = projects.get_project_key(project_id) + project_key = await projects.get_project_key(project_id) try: async with httpx.AsyncClient() as client: results = await client.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}", @@ -244,8 +244,8 @@ def get_raw_devtools_by_id(project_id, session_id): return None -def session_exists(project_id, 
session_id): - project_key = projects.get_project_key(project_id) +async def session_exists(project_id, session_id): + project_key = await projects.get_project_key(project_id) try: async with httpx.AsyncClient() as client: results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}", diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index a8eb3e771..c1702fc89 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -25,7 +25,7 @@ def jwt_authorizer(scheme: str, token: str, leeway=0): except jwt.ExpiredSignatureError: logger.debug("! JWT Expired signature") return None - except BaseException as e: + except Exception as e: logger.warning("! JWT Base Exception") logger.debug(e) return None @@ -45,15 +45,15 @@ def jwt_refresh_authorizer(scheme: str, token: str): except jwt.ExpiredSignatureError: logger.debug("! JWT-refresh Expired signature") return None - except BaseException as e: + except Exception as e: logger.warning("! 
JWT-refresh Base Exception") logger.debug(e) return None return payload -def jwt_context(context): - user = users.get(user_id=context["userId"], tenant_id=context["tenantId"]) +async def jwt_context(context): + user = await users.get(user_id=context["userId"], tenant_id=context["tenantId"]) if user is None: return None return { @@ -96,8 +96,8 @@ def generate_jwt_refresh(user_id, tenant_id, iat, aud, jwt_jti): return token -def api_key_authorizer(token): - t = tenants.get_by_api_key(token) +async def api_key_authorizer(token): + t = await tenants.get_by_api_key(token) if t is not None: t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"]) return t diff --git a/api/chalicelib/core/boarding.py b/api/chalicelib/core/boarding.py index 51d3d925b..dd0860376 100644 --- a/api/chalicelib/core/boarding.py +++ b/api/chalicelib/core/boarding.py @@ -5,7 +5,7 @@ from chalicelib.core import users async def get_state(tenant_id): - pids = projects.get_projects_ids(tenant_id=tenant_id) + pids = await projects.get_projects_ids(tenant_id=tenant_id) async with pg_client.cursor() as cur: recorded = False meta = False @@ -49,35 +49,35 @@ async def get_state(tenant_id): "done": len(await users.get_members(tenant_id=tenant_id)) > 1, "URL": "https://app.openreplay.com/client/manage-users"}, {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, + "done": len(await log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ + or len(await log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ + or len(await log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, "URL": "https://docs.openreplay.com/integrations"} ] -def get_state_installing(tenant_id): - pids = projects.get_projects_ids(tenant_id=tenant_id) - with pg_client.cursor() as cur: +async def get_state_installing(tenant_id): + pids = await 
projects.get_projects_ids(tenant_id=tenant_id) + async with pg_client.cursor() as cur: recorded = False if len(pids) > 0: - cur.execute( + await cur.execute( cur.mogrify("""SELECT EXISTS(( SELECT 1 FROM public.sessions AS s WHERE s.project_id IN %(ids)s)) AS exists;""", {"ids": tuple(pids)}) ) - recorded = cur.fetchone()["exists"] + recorded = await cur.fetchone()["exists"] return {"task": "Install OpenReplay", "done": recorded, "URL": "https://docs.openreplay.com/getting-started/quick-start"} -def get_state_identify_users(tenant_id): - with pg_client.cursor() as cur: - cur.execute("""SELECT EXISTS((SELECT 1 +async def get_state_identify_users(tenant_id): + async with pg_client.cursor() as cur: + await cur.execute("""SELECT EXISTS((SELECT 1 FROM public.projects AS p LEFT JOIN LATERAL ( SELECT 1 FROM public.sessions @@ -93,22 +93,22 @@ def get_state_identify_users(tenant_id): OR p.metadata_10 IS NOT NULL ) )) AS exists;""") - meta = cur.fetchone()["exists"] + meta = await cur.fetchone()["exists"] return {"task": "Identify Users", "done": meta, "URL": "https://docs.openreplay.com/data-privacy-security/metadata"} -def get_state_manage_users(tenant_id): +async def get_state_manage_users(tenant_id): return {"task": "Invite Team Members", - "done": len(users.get_members(tenant_id=tenant_id)) > 1, + "done": len(await users.get_members(tenant_id=tenant_id)) > 1, "URL": "https://app.openreplay.com/client/manage-users"} -def get_state_integrations(tenant_id): +async def get_state_integrations(tenant_id): return {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, + "done": len(await log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ + or len(await log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ + or len(await log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, "URL": 
"https://docs.openreplay.com/integrations"} diff --git a/api/chalicelib/core/canvas.py b/api/chalicelib/core/canvas.py index bc09b8704..958668418 100644 --- a/api/chalicelib/core/canvas.py +++ b/api/chalicelib/core/canvas.py @@ -21,7 +21,7 @@ async def get_canvas_presigned_urls(session_id, project_id): "recordingId": rows[i]["recording_id"] } key = config("CANVAS_PATTERN", default="%(sessionId)s/%(recordingId)s.mp4") % params - rows[i] = StorageClient.get_presigned_url_for_sharing( + rows[i] = await StorageClient.get_presigned_url_for_sharing( bucket=config("CANVAS_BUCKET", default=config("sessions_bucket")), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), key=key diff --git a/api/chalicelib/core/click_maps.py b/api/chalicelib/core/click_maps.py index 4b1a67777..0bd152eba 100644 --- a/api/chalicelib/core/click_maps.py +++ b/api/chalicelib/core/click_maps.py @@ -69,9 +69,9 @@ async def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, session = await cur.fetchone() if session: if include_mobs: - session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) - session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) - session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"], + session['domURL'] = await sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) + session['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session["session_id"]) + session['events'] = await events.get_by_session_id(project_id=project_id, session_id=session["session_id"], event_type=schemas.EventType.location) return helper.dict_to_camel_case(session) diff --git a/api/chalicelib/core/collaboration_msteams.py b/api/chalicelib/core/collaboration_msteams.py index 5c8c22096..18eb3b66c 100644 --- a/api/chalicelib/core/collaboration_msteams.py +++ b/api/chalicelib/core/collaboration_msteams.py @@ -13,19 
+13,19 @@ logger = logging.getLogger(__name__) class MSTeams(BaseCollaboration): @classmethod - def add(cls, tenant_id, data: schemas.AddCollaborationSchema): - if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None, + async def add(cls, tenant_id, data: schemas.AddCollaborationSchema): + if await webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None, webhook_type=schemas.WebhookType.msteams): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") - if cls.say_hello(data.url): - return webhook.add(tenant_id=tenant_id, + if await cls.say_hello(data.url): + return await webhook.add(tenant_id=tenant_id, endpoint=data.url.unicode_string(), webhook_type=schemas.WebhookType.msteams, name=data.name) return None @classmethod - def say_hello(cls, url): + async def say_hello(cls, url): async with httpx.AsyncClient() as client: r = await client.post( url=url, @@ -42,8 +42,8 @@ class MSTeams(BaseCollaboration): return True @classmethod - def send_raw(cls, tenant_id, webhook_id, body): - integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) + async def send_raw(cls, tenant_id, webhook_id, body): + integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) if integration is None: return {"errors": ["msteams integration not found"]} try: @@ -63,8 +63,8 @@ class MSTeams(BaseCollaboration): return {"data": r.text} @classmethod - def send_batch(cls, tenant_id, webhook_id, attachments): - integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) + async def send_batch(cls, tenant_id, webhook_id, attachments): + integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) if integration is None: return {"errors": ["msteams integration not found"]} logging.debug(f"====> sending msteams batch notification: {len(attachments)}") @@ -89,7 +89,7 @@ class MSTeams(BaseCollaboration): async def 
__share(cls, tenant_id, integration_id, attachement, extra=None): if extra is None: extra = {} - integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) + integration = await cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) if integration is None: return {"errors": ["Microsoft Teams integration not found"]} async with httpx.AsyncClient() as client: @@ -105,7 +105,7 @@ class MSTeams(BaseCollaboration): return r.text @classmethod - def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): + async def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): title = f"*{user}* has shared the below session!" link = f"{config('SITE_URL')}/{project_id}/session/{session_id}" args = { @@ -124,13 +124,13 @@ class MSTeams(BaseCollaboration): "name": "Comment:", "value": comment }) - data = cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) + data = await cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) if "errors" in data: return data return {"data": data} @classmethod - def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): + async def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): title = f"*{user}* has shared the below error!" 
link = f"{config('SITE_URL')}/{project_id}/errors/{error_id}" args = { @@ -149,18 +149,18 @@ class MSTeams(BaseCollaboration): "name": "Comment:", "value": comment }) - data = cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) + data = await cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) if "errors" in data: return data return {"data": data} @classmethod - def get_integration(cls, tenant_id, integration_id=None): + async def get_integration(cls, tenant_id, integration_id=None): if integration_id is not None: - return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id, + return await webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id, webhook_type=schemas.WebhookType.msteams) - integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams) + integrations = await webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams) if integrations is None or len(integrations) == 0: return None return integrations[0] diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py index 651dcd37f..3c96d42df 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaboration_slack.py @@ -11,7 +11,7 @@ from chalicelib.core.collaboration_base import BaseCollaboration class Slack(BaseCollaboration): @classmethod - def add(cls, tenant_id, data: schemas.AddCollaborationSchema): + async def add(cls, tenant_id, data: schemas.AddCollaborationSchema): if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None, webhook_type=schemas.WebhookType.slack): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") @@ -43,7 +43,7 @@ class Slack(BaseCollaboration): @classmethod async def send_raw(cls, tenant_id, webhook_id, body): - integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) + 
integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) if integration is None: return {"errors": ["slack integration not found"]} try: @@ -64,7 +64,7 @@ class Slack(BaseCollaboration): @classmethod async def send_batch(cls, tenant_id, webhook_id, attachments): - integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) + integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) if integration is None: return {"errors": ["slack integration not found"]} print(f"====> sending slack batch notification: {len(attachments)}") @@ -83,7 +83,7 @@ class Slack(BaseCollaboration): async def __share(cls, tenant_id, integration_id, attachement, extra=None): if extra is None: extra = {} - integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) + integration = await cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) if integration is None: return {"errors": ["slack integration not found"]} attachement["ts"] = datetime.now().timestamp() @@ -92,36 +92,36 @@ class Slack(BaseCollaboration): return r.text @classmethod - def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): + async def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): args = {"fallback": f"{user} has shared the below session!", "pretext": f"{user} has shared the below session!", "title": f"{config('SITE_URL')}/{project_id}/session/{session_id}", "title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}", "text": comment} - data = cls.__share(tenant_id, integration_id, attachement=args) + data = await cls.__share(tenant_id, integration_id, attachement=args) if "errors" in data: return data return {"data": data} @classmethod - def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): + async def 
share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): args = {"fallback": f"{user} has shared the below error!", "pretext": f"{user} has shared the below error!", "title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", "title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", "text": comment} - data = cls.__share(tenant_id, integration_id, attachement=args) + data = await cls.__share(tenant_id, integration_id, attachement=args) if "errors" in data: return data return {"data": data} @classmethod - def get_integration(cls, tenant_id, integration_id=None): + async def get_integration(cls, tenant_id, integration_id=None): if integration_id is not None: return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id, webhook_type=schemas.WebhookType.slack) - integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack) + integrations = await webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack) if integrations is None or len(integrations) == 0: return None return integrations[0] diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 6c25f56b8..ae9a3b695 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -19,10 +19,10 @@ PIE_CHART_GROUP = 5 # timeseries / # table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs # remove "table of" calls from this function -def __try_live(project_id, data: schemas.CardSchema): +async def __try_live(project_id, data: schemas.CardSchema): results = [] for i, s in enumerate(data.series): - results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, + results.append(await sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, 
metric_of=data.metric_of, metric_value=data.metric_value)) if data.view_type == schemas.MetricTimeseriesViewType.progress: @@ -30,7 +30,7 @@ def __try_live(project_id, data: schemas.CardSchema): diff = s.filter.endTimestamp - s.filter.startTimestamp s.filter.endTimestamp = s.filter.startTimestamp s.filter.startTimestamp = s.filter.endTimestamp - diff - r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, + r["previousCount"] = await sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value) r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"]) @@ -48,10 +48,10 @@ def __try_live(project_id, data: schemas.CardSchema): return results -def __get_table_of_series(project_id, data: schemas.CardSchema): +async def __get_table_of_series(project_id, data: schemas.CardSchema): results = [] for i, s in enumerate(data.series): - results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, + results.append(await sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, metric_of=data.metric_of, metric_value=data.metric_value)) return results diff --git a/api/chalicelib/core/custom_metrics_predefined.py b/api/chalicelib/core/custom_metrics_predefined.py index 71eb9d7d3..462535c03 100644 --- a/api/chalicelib/core/custom_metrics_predefined.py +++ b/api/chalicelib/core/custom_metrics_predefined.py @@ -10,7 +10,7 @@ from chalicelib.core import metrics logger = logging.getLogger(__name__) -def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ +async def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict): supported = {schemas.MetricOfWebVitals.count_sessions: 
metrics.get_processed_sessions, schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, @@ -58,4 +58,4 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } - return supported.get(key, lambda *args: None)(project_id=project_id, **data) + return await supported.get(key, lambda *args: None)(project_id=project_id, **data) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 46e30e3ad..88c351cda 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -29,7 +29,7 @@ async def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSch row = await cur.fetchone() if row is None: return {"errors": ["something went wrong while creating the dashboard"]} - return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])} + return {"data": await get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])} async def get_dashboards(project_id, user_id): diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 044f7022b..c83af3417 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -460,7 +460,7 @@ async def search(data: schemas.SearchErrorsSchema, project_id, user_id): data.endTimestamp = TimeUTC.now(1) if len(data.events) > 0 or len(data.filters) > 0: print("-- searching for sessions before errors") - statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, + statuses = await sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, error_status=data.status) if len(statuses) == 0: return empty_response @@ -594,8 +594,8 @@ async def 
__save_stacktrace(error_id, data): await cur.execute(query=query) -def get_trace(project_id, error_id): - error = get(error_id=error_id, family=False) +async def get_trace(project_id, error_id): + error = await get(error_id=error_id, family=False) if error is None: return {"errors": ["error not found"]} if error.get("source", "") != "js_exception": @@ -606,7 +606,7 @@ def get_trace(project_id, error_id): return {"sourcemapUploaded": True, "trace": error.get("stacktrace"), "preparsed": True} - trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"]) + trace, all_exists = await sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"]) if all_exists: __save_stacktrace(error_id=error_id, data=trace) return {"sourcemapUploaded": all_exists, diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 1f86bada0..a6c45e9e4 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -31,8 +31,8 @@ def __merge_cells(rows, start, count, replacement): return rows -def __get_grouped_clickrage(rows, session_id, project_id): - click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) +async def __get_grouped_clickrage(rows, session_id, project_id): + click_rage_issues = await issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) if len(click_rage_issues) == 0: return rows @@ -56,7 +56,7 @@ async def get_by_session_id(session_id, project_id, group_clickrage=False, event async with pg_client.cursor() as cur: rows = [] if event_type is None or event_type == schemas.EventType.click: - cur.execute(cur.mogrify("""\ + await cur.execute(cur.mogrify("""\ SELECT c.*, 'CLICK' AS type @@ -66,11 +66,11 @@ async def get_by_session_id(session_id, project_id, group_clickrage=False, event ORDER BY c.timestamp;""", {"project_id": project_id, "session_id": session_id}) ) - rows += cur.fetchall() + 
rows += await cur.fetchall() if group_clickrage: - rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) + rows = await __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) if event_type is None or event_type == schemas.EventType.input: - cur.execute(cur.mogrify(""" + await cur.execute(cur.mogrify(""" SELECT i.*, 'INPUT' AS type @@ -80,9 +80,9 @@ async def get_by_session_id(session_id, project_id, group_clickrage=False, event ORDER BY i.timestamp;""", {"project_id": project_id, "session_id": session_id}) ) - rows += cur.fetchall() + rows += await cur.fetchall() if event_type is None or event_type == schemas.EventType.location: - cur.execute(cur.mogrify("""\ + await cur.execute(cur.mogrify("""\ SELECT l.*, l.path AS value, @@ -92,7 +92,7 @@ async def get_by_session_id(session_id, project_id, group_clickrage=False, event WHERE l.session_id = %(session_id)s ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) - rows += cur.fetchall() + rows += await cur.fetchall() rows = helper.list_to_camel_case(rows) rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"])) return rows @@ -200,17 +200,17 @@ def search(text, event_type, project_id, source, key): return {"data": autocomplete.__get_autocomplete_table(text, project_id)} if event_type in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) + rows = await SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) # for IOS events autocomplete # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) + rows = await SUPPORTED_TYPES[event_type + 
"_IOS"].get(project_id=project_id, value=text, key=key, source=source) elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): - return sessions_metas.search(text, event_type, project_id) + return await sessions_metas.search(text, event_type, project_id) elif event_type.endswith("_IOS") \ and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys(): - return sessions_metas.search(text, event_type, project_id) + return await sessions_metas.search(text, event_type, project_id) else: return {"errors": ["unsupported event"]} diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index ba79abc70..6a6238069 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -36,12 +36,12 @@ def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]): # def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): -def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): +async def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): data.events = filter_stages(__parse_events(data.events)) data.events = __fix_stages(data.events) if len(data.events) == 0: return {"stages": [], "totalDropDueToIssues": 0} - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id) + insights, total_drop_due_to_issues = await significance.get_top_insights(filter_d=data, project_id=project_id) insights = helper.list_to_camel_case(insights) if len(insights) > 0: # TODO: check if this correct @@ -56,7 +56,7 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte # def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): -def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): +async def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): data.events = filter_stages(data.events) 
data.events = __fix_stages(data.events) if len(data.events) < 0: @@ -64,5 +64,5 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem return { "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1, + await significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1, last_stage=len(data.events)))} diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 38332f2a5..96b7dc39c 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -110,7 +110,7 @@ async def __check_be_service(service_name): return fn -def __check_redis(*_): +async def __check_redis(*_): fail_response = { "health": False, "details": {"errors": ["server health-check failed"]} diff --git a/api/chalicelib/core/integration_base.py b/api/chalicelib/core/integration_base.py index 2167fb6d1..cdf754f52 100644 --- a/api/chalicelib/core/integration_base.py +++ b/api/chalicelib/core/integration_base.py @@ -21,7 +21,7 @@ class BaseIntegration(ABC): @property def integration_token(self): - integration = self.get() + integration = await self.get() if integration is None: print("no token configured yet") return None diff --git a/api/chalicelib/core/integration_github.py b/api/chalicelib/core/integration_github.py index 1c52bfad3..f94b0d4cd 100644 --- a/api/chalicelib/core/integration_github.py +++ b/api/chalicelib/core/integration_github.py @@ -72,10 +72,10 @@ class GitHubIntegration(integration_base.BaseIntegration): ) return {"state": "success"} - def add_edit(self, data: schemas.IssueTrackingGithubSchema): - s = self.get() + async def add_edit(self, data: schemas.IssueTrackingGithubSchema): + s = await self.get() if s is not None: - return self.update( + return await self.update( changes={ "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ else s.token @@ -83,4 +83,4 @@ class 
GitHubIntegration(integration_base.BaseIntegration): obfuscate=True ) else: - return self.add(token=data.token, obfuscate=True) + return await self.add(token=data.token, obfuscate=True) diff --git a/api/chalicelib/core/integration_github_issue.py b/api/chalicelib/core/integration_github_issue.py index 0c2b78720..7c0bd49fa 100644 --- a/api/chalicelib/core/integration_github_issue.py +++ b/api/chalicelib/core/integration_github_issue.py @@ -8,13 +8,13 @@ class GithubIntegrationIssue(BaseIntegrationIssue): self.__client = github_client_v3.githubV3Request(integration_token) super(GithubIntegrationIssue, self).__init__("GITHUB", integration_token) - def get_current_user(self): - return formatter.user(self.__client.get("/user")) + async def get_current_user(self): + return formatter.user(await self.__client.get("/user")) - def get_meta(self, repoId): - current_user = self.get_current_user() + async def get_meta(self, repoId): + current_user = await self.get_current_user() try: - users = self.__client.get(f"/repositories/{repoId}/collaborators") + users = await self.__client.get(f"/repositories/{repoId}/collaborators") except Exception as e: users = [] users = [formatter.user(u) for u in users] @@ -23,18 +23,18 @@ class GithubIntegrationIssue(BaseIntegrationIssue): meta = { 'users': users, 'issueTypes': [formatter.label(l) for l in - self.__client.get(f"/repositories/{repoId}/labels")] + await self.__client.get(f"/repositories/{repoId}/labels")] } return meta - def create_new_assignment(self, integration_project_id, title, description, assignee, + async def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type): repoId = integration_project_id assignees = [assignee] labels = [str(issue_type)] - metas = self.get_meta(repoId) + metas = await self.get_meta(repoId) real_assignees = [] for a in assignees: for u in metas["users"]: @@ -51,38 +51,38 @@ class GithubIntegrationIssue(BaseIntegrationIssue): break if not found: 
real_labels.append(l) - issue = self.__client.post(f"/repositories/{repoId}/issues", body={"title": title, "body": description, + issue = await self.__client.post(f"/repositories/{repoId}/issues", body={"title": title, "body": description, "assignees": real_assignees, "labels": real_labels}) return formatter.issue(issue) - def get_by_ids(self, saved_issues): + async def get_by_ids(self, saved_issues): results = [] for i in saved_issues: - results.append(self.get(integration_project_id=i["integrationProjectId"], assignment_id=i["id"])) + results.append(await self.get(integration_project_id=i["integrationProjectId"], assignment_id=i["id"])) return {"issues": results} - def get(self, integration_project_id, assignment_id): + async def get(self, integration_project_id, assignment_id): repoId = integration_project_id issueNumber = assignment_id - issue = self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}") + issue = await self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}") issue = formatter.issue(issue) if issue["commentsCount"] > 0: issue["comments"] = [formatter.comment(c) for c in - self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}/comments")] + await self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}/comments")] return issue - def comment(self, integration_project_id, assignment_id, comment): + async def comment(self, integration_project_id, assignment_id, comment): repoId = integration_project_id issueNumber = assignment_id - commentCreated = self.__client.post(f"/repositories/{repoId}/issues/{issueNumber}/comments", + commentCreated = await self.__client.post(f"/repositories/{repoId}/issues/{issueNumber}/comments", body={"body": comment}) return formatter.comment(commentCreated) - def get_metas(self, integration_project_id): - current_user = self.get_current_user() + async def get_metas(self, integration_project_id): + current_user = await self.get_current_user() try: - users = 
self.__client.get(f"/repositories/{integration_project_id}/collaborators") + users = await self.__client.get(f"/repositories/{integration_project_id}/collaborators") except Exception as e: users = [] users = [formatter.user(u) for u in users] @@ -92,9 +92,9 @@ class GithubIntegrationIssue(BaseIntegrationIssue): return {"provider": self.provider.lower(), 'users': users, 'issueTypes': [formatter.label(l) for l in - self.__client.get(f"/repositories/{integration_project_id}/labels")] + await self.__client.get(f"/repositories/{integration_project_id}/labels")] } - def get_projects(self): - repos = self.__client.get("/user/repos") + async def get_projects(self): + repos = await self.__client.get("/user/repos") return [formatter.repo(r) for r in repos] diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py index 27e71168c..ca8409034 100644 --- a/api/chalicelib/core/integration_jira_cloud.py +++ b/api/chalicelib/core/integration_jira_cloud.py @@ -11,20 +11,23 @@ def obfuscate_string(string): class JIRAIntegration(integration_base.BaseIntegration): + def __init__(self, tenant_id, user_id): self.__tenant_id = tenant_id # TODO: enable super-constructor when OAuth is done # super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy) self._issue_handler = None self._user_id = user_id - self.integration = self.get() + + async def init(self): + self.integration = await self.get() if self.integration is None: return self.integration["valid"] = True if not self.integration["url"].endswith('atlassian.net'): self.integration["valid"] = False - + @property def provider(self): return PROVIDER diff --git a/api/chalicelib/core/integration_jira_cloud_issue.py b/api/chalicelib/core/integration_jira_cloud_issue.py index bb847007a..9ad2d6fe0 100644 --- a/api/chalicelib/core/integration_jira_cloud_issue.py +++ b/api/chalicelib/core/integration_jira_cloud_issue.py @@ -9,8 +9,8 @@ class
JIRACloudIntegrationIssue(BaseIntegrationIssue): self._client = jira_client.JiraManager(self.url, self.username, token, None) super(JIRACloudIntegrationIssue, self).__init__("JIRA", token) - def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type): - self._client.set_jira_project_id(integration_project_id) + async def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type): + await self._client.set_jira_project_id(integration_project_id) data = { 'summary': title, 'description': description, @@ -20,7 +20,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue): } return self._client.create_issue(data) - def get_by_ids(self, saved_issues): + async def get_by_ids(self, saved_issues): projects_map = {} for i in saved_issues: if i["integrationProjectId"] not in projects_map.keys(): @@ -29,28 +29,28 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue): results = [] for integration_project_id in projects_map: - self._client.set_jira_project_id(integration_project_id) + await self._client.set_jira_project_id(integration_project_id) jql = 'labels = OpenReplay' if len(projects_map[integration_project_id]) > 0: jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})" - issues = self._client.get_issues(jql, offset=0) + issues = await self._client.get_issues(jql, offset=0) results += issues return {"issues": results} - def get(self, integration_project_id, assignment_id): - self._client.set_jira_project_id(integration_project_id) - return self._client.get_issue_v3(assignment_id) + async def get(self, integration_project_id, assignment_id): + await self._client.set_jira_project_id(integration_project_id) + return await self._client.get_issue_v3(assignment_id) - def comment(self, integration_project_id, assignment_id, comment): - self._client.set_jira_project_id(integration_project_id) - return self._client.add_comment_v3(assignment_id, comment) + async def comment(self, 
integration_project_id, assignment_id, comment): + await self._client.set_jira_project_id(integration_project_id) + return await self._client.add_comment_v3(assignment_id, comment) - def get_metas(self, integration_project_id): + async def get_metas(self, integration_project_id): meta = {} - self._client.set_jira_project_id(integration_project_id) - meta['issueTypes'] = self._client.get_issue_types() - meta['users'] = self._client.get_assignable_users() + await self._client.set_jira_project_id(integration_project_id) + meta['issueTypes'] = await self._client.get_issue_types() + meta['users'] = await self._client.get_assignable_users() return {"provider": self.provider.lower(), **meta} - def get_projects(self): - return self._client.get_projects() + async def get_projects(self): + return await self._client.get_projects() diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index 861f00f66..54b44ba52 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -28,7 +28,7 @@ def __quantiles(a, q, interpolation='higher'): return arr[ind] -def __get_constraints(project_id, time_constraint=True, chart=False, duration=True, project=True, +async def __get_constraints(project_id, time_constraint=True, chart=False, duration=True, project=True, project_identifier="project_id", main_table="sessions", time_column="start_ts", data={}): pg_sub_query = [] @@ -43,7 +43,7 @@ def __get_constraints(project_id, time_constraint=True, chart=False, duration=Tr if chart: pg_sub_query.append(f"{main_table}{time_column} >= generated_timestamp") pg_sub_query.append(f"{main_table}{time_column} < generated_timestamp + %(step_size)s") - return pg_sub_query + __get_meta_constraint(project_id=project_id, data=data) + return pg_sub_query + await __get_meta_constraint(project_id=project_id, data=data) def __merge_charts(list1, list2, time_key="timestamp"): @@ -77,11 +77,11 @@ METADATA_FIELDS = {"userId": "user_id", "metadata10": "metadata_10"} -def 
__get_meta_constraint(project_id, data): +async def __get_meta_constraint(project_id, data): if len(data.get("filters", [])) == 0: return [] constraints = [] - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} for i, f in enumerate(data.get("filters", [])): @@ -2872,13 +2872,13 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n return helper.dict_to_camel_case(row) -def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1), +async def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), value=None, density=20, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} - pg_sub_query = __get_constraints(project_id=project_id, data=args) - pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + pg_sub_query = await __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = await __get_constraints(project_id=project_id, time_constraint=False, project=False, chart=True, data=args, main_table="pages", time_column="timestamp", duration=False) diff --git a/api/chalicelib/core/mobile.py b/api/chalicelib/core/mobile.py index 100e5afba..3b12b1625 100644 --- a/api/chalicelib/core/mobile.py +++ b/api/chalicelib/core/mobile.py @@ -7,7 +7,7 @@ async def sign_keys(project_id, session_id, keys): result = [] project_key = await projects.get_project_key(project_id) for k in keys: - result.append(StorageClient.get_presigned_url_for_sharing(bucket=config("iosBucket"), + result.append(await StorageClient.get_presigned_url_for_sharing(bucket=config("iosBucket"), key=f"{project_key}/{session_id}/{k}", expires_in=60 
* 60)) return result diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index 3ff6ae3f4..a45156cb4 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -138,7 +138,7 @@ async def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_aut if __exists_by_name(name=data.name, exclude_id=None): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") if not skip_authorization: - admin = users.get(user_id=user_id, tenant_id=tenant_id) + admin = await users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} return {"data": __create(tenant_id=tenant_id, data=data.model_dump())} @@ -147,7 +147,7 @@ async def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_aut async def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): if __exists_by_name(name=data.name, exclude_id=project_id): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") - admin = users.get(user_id=user_id, tenant_id=tenant_id) + admin = await users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} return {"data": __update(tenant_id=tenant_id, project_id=project_id, @@ -155,7 +155,7 @@ async def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema async def delete(tenant_id, user_id, project_id): - admin = users.get(user_id=user_id, tenant_id=tenant_id) + admin = await users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} @@ -396,7 +396,7 @@ def update_project_conditions(project_id, conditions): if conditions is None: return - existing = get_conditions(project_id)["conditions"] + existing = (await get_conditions(project_id))["conditions"] existing_ids = {c.condition_id for c in
existing} to_be_updated = [c for c in conditions if c.condition_id in existing_ids] @@ -404,16 +404,16 @@ def update_project_conditions(project_id, conditions): to_be_deleted = existing_ids - {c.condition_id for c in conditions} if to_be_deleted: - delete_project_condition(project_id, to_be_deleted) + await delete_project_condition(project_id, to_be_deleted) if to_be_created: - create_project_conditions(project_id, to_be_created) + await create_project_conditions(project_id, to_be_created) if to_be_updated: print(to_be_updated) - update_project_condition(project_id, to_be_updated) + await update_project_condition(project_id, to_be_updated) - return get_conditions(project_id) + return await get_conditions(project_id) async def get_projects_ids(tenant_id): diff --git a/api/chalicelib/core/reset_password.py b/api/chalicelib/core/reset_password.py index 9470dcaa2..a6eeb6f6b 100644 --- a/api/chalicelib/core/reset_password.py +++ b/api/chalicelib/core/reset_password.py @@ -12,8 +12,8 @@ async def reset(data: schemas.ForgetPasswordPayloadSchema): return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} a_users = await users.get_by_email_only(data.email) if a_users: - invitation_link = users.generate_new_invitation(user_id=a_users["userId"]) - email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) + invitation_link = await users.generate_new_invitation(user_id=a_users["userId"]) + await email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) else: print(f"!!!invalid email address [{data.email}]") return {"data": {"state": "A reset link will be sent if this email exists in our system."}} diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 8f078aeea..561957819 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -83,7 +83,7 @@ async def search_sessions(data: schemas.SessionsSearchPayloadSchema, 
project_id, else: sort = 'start_ts' - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions @@ -117,7 +117,7 @@ async def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions @@ -556,7 +556,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, elif filter_type == events.EventType.METADATA.ui_type: # get metadata list only if you need it if meta_keys is None: - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} if f.source in meta_keys.keys(): if is_any: @@ -1097,16 +1097,16 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, async def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): if project_id is None: - all_projects = projects.get_projects(tenant_id=tenant_id) + all_projects = await projects.get_projects(tenant_id=tenant_id) else: all_projects = [ - projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, + await projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, include_gdpr=False)] all_projects = {int(p["projectId"]): p["name"] for p in all_projects} project_ids = list(all_projects.keys()) - available_keys = 
metadata.get_keys_by_projects(project_ids) + available_keys = await metadata.get_keys_by_projects(project_ids) for i in available_keys: available_keys[i]["user_id"] = schemas.FilterType.user_id available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id @@ -1274,7 +1274,7 @@ async def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: st if session_ids is None or len(session_ids) == 0: return {"total": 0, "sessions": []} async with pg_client.cursor() as cur: - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) params = {"project_id": project_id, "session_ids": tuple(session_ids)} order_direction = 'ASC' if ascending else 'DESC' main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_BASE_COLS} diff --git a/api/chalicelib/core/sessions_assignments.py b/api/chalicelib/core/sessions_assignments.py index 74bef3c74..bf698eec9 100644 --- a/api/chalicelib/core/sessions_assignments.py +++ b/api/chalicelib/core/sessions_assignments.py @@ -25,7 +25,7 @@ async def __get_saved_data(project_id, session_id, issue_id, tool): async def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type, integration_project_id): - error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id) + error, integration = await integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id) if error is not None: return error @@ -36,7 +36,7 @@ async def create_new_assignment(tenant_id, project_id, session_id, creator_id, a link = config("SITE_URL") + f"/{project_id}/session/{session_id}" description += f"\n> {link}" try: - issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description, + issue = await integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description, issue_type=issue_type, 
integration_project_id=integration_project_id) except integration_base_issue.RequestException as e: @@ -59,7 +59,7 @@ async def create_new_assignment(tenant_id, project_id, session_id, creator_id, a async def get_all(project_id, user_id): - available_integrations = integrations_manager.get_available_integrations(user_id=user_id) + available_integrations = await integrations_manager.get_available_integrations(user_id=user_id) no_integration = not any(available_integrations.values()) if no_integration: return [] @@ -86,7 +86,7 @@ async def get_all(project_id, user_id): async def get_by_session(tenant_id, user_id, project_id, session_id): - available_integrations = integrations_manager.get_available_integrations(user_id=user_id) + available_integrations = await integrations_manager.get_available_integrations(user_id=user_id) if not any(available_integrations.values()): return [] extra_query = ["session_id = %(session_id)s", "provider IN %(providers)s"] @@ -110,49 +110,49 @@ async def get_by_session(tenant_id, user_id, project_id, session_id): "id": i["issue_id"]}) results = [] for tool in issues.keys(): - error, integration = integrations_manager.get_integration(tool=tool, tenant_id=tenant_id, user_id=user_id) + error, integration = await integrations_manager.get_integration(tool=tool, tenant_id=tenant_id, user_id=user_id) if error is not None: return error - i = integration.get() + i = await integration.get() if i is None: print("integration not found") continue - r = integration.issue_handler.get_by_ids(saved_issues=issues[tool]) + r = await integration.issue_handler.get_by_ids(saved_issues=issues[tool]) for i in r["issues"]: i["provider"] = tool results += r["issues"] return results -def get(tenant_id, user_id, project_id, session_id, assignment_id): - error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) +async def get(tenant_id, user_id, project_id, session_id, assignment_id): + error, integration = await 
integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) if error is not None: return error - l = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) + l = await __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) if l is None: return {"errors": ["issue not found"]} - i = integration.get() + i = await integration.get() if i is None: return {"errors": ["integration not found"]} - r = integration.issue_handler.get(integration_project_id=l["providerData"]["integrationProjectId"], + r = await integration.issue_handler.get(integration_project_id=l["providerData"]["integrationProjectId"], assignment_id=assignment_id) r["provider"] = integration.provider.lower() return r -def comment(tenant_id, user_id, project_id, session_id, assignment_id, message): - error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) +async def comment(tenant_id, user_id, project_id, session_id, assignment_id, message): + error, integration = await integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) if error is not None: return error - i = integration.get() + i = await integration.get() if i is None: return {"errors": [f"integration not found"]} - l = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) + l = await __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) - return integration.issue_handler.comment(integration_project_id=l["providerData"]["integrationProjectId"], + return await integration.issue_handler.comment(integration_project_id=l["providerData"]["integrationProjectId"], assignment_id=assignment_id, comment=message) diff --git a/api/chalicelib/core/sessions_devtool.py b/api/chalicelib/core/sessions_devtool.py index bb6f70c88..fc5d58365 100644 --- a/api/chalicelib/core/sessions_devtool.py +++ b/api/chalicelib/core/sessions_devtool.py @@ -13,12 +13,12 @@ def 
__get_devtools_keys(project_id, session_id): ] -def get_urls(session_id, project_id, check_existence: bool = True): +async def get_urls(session_id, project_id, check_existence: bool = True): results = [] for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): + if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k): continue - results.append(StorageClient.get_presigned_url_for_sharing( + results.append(await StorageClient.get_presigned_url_for_sharing( bucket=config("sessions_bucket"), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), key=k @@ -29,4 +29,4 @@ def get_urls(session_id, project_id, check_existence: bool = True): def delete_mobs(project_id, session_ids): for session_id in session_ids: for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) + await StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index 40c621a91..056da3f77 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -66,11 +66,11 @@ SUPPORTED_TYPES = { } -def search(text: str, meta_type: schemas.FilterType, project_id: int): +async def search(text: str, meta_type: schemas.FilterType, project_id: int): rows = [] if meta_type not in list(SUPPORTED_TYPES.keys()): return {"errors": ["unsupported type"]} - rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) + rows += await SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) # for IOS events autocomplete # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) diff --git a/api/chalicelib/core/sessions_mobs.py 
b/api/chalicelib/core/sessions_mobs.py index 4fe5f6a3d..d957cb5fb 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -28,12 +28,12 @@ def __get_mob_keys_deprecated(session_id): return [str(session_id), str(session_id) + "e"] -def get_urls(project_id, session_id, check_existence: bool = True): +async def get_urls(project_id, session_id, check_existence: bool = True): results = [] for k in __get_mob_keys(project_id=project_id, session_id=session_id): - if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): + if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k): continue - results.append(StorageClient.get_presigned_url_for_sharing( + results.append(await StorageClient.get_presigned_url_for_sharing( bucket=config("sessions_bucket"), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), key=k @@ -41,12 +41,12 @@ def get_urls(project_id, session_id, check_existence: bool = True): return results -def get_urls_depercated(session_id, check_existence: bool = True): +async def get_urls_depercated(session_id, check_existence: bool = True): results = [] for k in __get_mob_keys_deprecated(session_id=session_id): - if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): + if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k): continue - results.append(StorageClient.get_presigned_url_for_sharing( + results.append(await StorageClient.get_presigned_url_for_sharing( bucket=config("sessions_bucket"), expires_in=100000, key=k @@ -54,12 +54,12 @@ def get_urls_depercated(session_id, check_existence: bool = True): return results -def get_ios_videos(session_id, project_id, check_existence=False): +async def get_ios_videos(session_id, project_id, check_existence=False): results = [] for k in __get_ios_video_keys(project_id=project_id, session_id=session_id): - if check_existence 
and not StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k): + if check_existence and not await StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k): continue - results.append(StorageClient.get_presigned_url_for_sharing( + results.append(await StorageClient.get_presigned_url_for_sharing( bucket=config("IOS_VIDEO_BUCKET"), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), key=k @@ -67,8 +67,8 @@ def get_ios_videos(session_id, project_id, check_existence=False): return results -def delete_mobs(project_id, session_ids): +async def delete_mobs(project_id, session_ids): for session_id in session_ids: for k in __get_mob_keys(project_id=project_id, session_id=session_id) \ + __get_mob_keys_deprecated(session_id=session_id): - StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) + await StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 58401a5a8..b27503fb7 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -140,7 +140,7 @@ async def delete(tenant_id, user_id, project_id, note_id): async def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id): - note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) + note = await get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) if note is None: return {"errors": ["Note not found"]} session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}") @@ -172,7 +172,7 @@ async def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id): async def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id): - note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) + note = await 
get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) if note is None: return {"errors": ["Note not found"]} session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}") @@ -205,7 +205,7 @@ async def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id): "text": bottom, "size": "Small", "fontType": "Monospace"}) - return MSTeams.send_raw( + return await MSTeams.send_raw( tenant_id=tenant_id, webhook_id=webhook_id, body={"type": "message", diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py index 99dacf99c..0f0d4cac9 100644 --- a/api/chalicelib/core/sessions_replay.py +++ b/api/chalicelib/core/sessions_replay.py @@ -5,10 +5,10 @@ from chalicelib.utils import errors_helper from chalicelib.utils import pg_client, helper -def __group_metadata(session, project_metadata): +async def __group_metadata(session, project_metadata): meta = {} for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: + if project_metadata[m] is not None and session.get(m) is not None: meta[project_metadata[m]] = session[m] session.pop(m) return meta @@ -48,43 +48,43 @@ async def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, data = helper.dict_to_camel_case(data) if full_data: if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + data['events'] = await events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) for e in data['events']: if e["type"].endswith("_IOS"): e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, + data['crashes'] = await events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = await
events_ios.get_customs_by_session_id(project_id=project_id, session_id=session_id) data['mobsUrl'] = [] else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + data['events'] = await events.get_by_session_id(project_id=project_id, session_id=session_id, group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + all_errors = await events.get_errors_by_session_id(session_id=session_id, project_id=project_id) data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] # to keep only the first stack # limit the number of errors to reduce the response-body size data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + data['userEvents'] = await events.get_customs_by_session_id(project_id=project_id, session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, + data['domURL'] = await sessions_mobs.get_urls(session_id=session_id, project_id=project_id, check_existence=False) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + data['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) + data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id, check_existence=False) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + data['resources'] = await resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + data['notes'] = await 
sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + data['metadata'] = await __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = await issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and await assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) data["inDB"] = True return data elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + return await assist.get_live_session_by_id(project_id=project_id, session_id=session_id) else: return None @@ -123,29 +123,29 @@ async def get_replay(project_id, session_id, context: schemas.CurrentContext, fu if full_data: if data["platform"] == 'ios': data['mobsUrl'] = [] - data['videoURL'] = sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id, + data['videoURL'] = await sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id, check_existence=False) else: - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + data['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) + data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id, check_existence=False) - data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) - if user_testing.has_test_signals(session_id=session_id, project_id=project_id): - data['utxVideo'] = 
user_testing.get_ux_webcam_signed_url(session_id=session_id, + data['canvasURL'] = await canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) + if await user_testing.has_test_signals(session_id=session_id, project_id=project_id): + data['utxVideo'] = await user_testing.get_ux_webcam_signed_url(session_id=session_id, project_id=project_id, check_existence=False) else: data['utxVideo'] = [] - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, + data['domURL'] = await sessions_mobs.get_urls(session_id=session_id, project_id=project_id, check_existence=False) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + data['metadata'] = await __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and await assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) data["inDB"] = True return data elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + return await assist.get_live_session_by_id(project_id=project_id, session_id=session_id) else: return None @@ -166,30 +166,30 @@ async def get_events(project_id, session_id): s_data = helper.dict_to_camel_case(s_data) data = {} if s_data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + data['events'] = await events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) for e in data['events']: if e["type"].endswith("_IOS"): e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, + data['crashes'] = await events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = await
events_ios.get_customs_by_session_id(project_id=project_id, session_id=session_id) data['userTesting'] = [] else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + data['events'] = await events.get_by_session_id(project_id=project_id, session_id=session_id, group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + all_errors = await events.get_errors_by_session_id(session_id=session_id, project_id=project_id) data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] # to keep only the first stack # limit the number of errors to reduce the response-body size data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + data['userEvents'] = await events.get_customs_by_session_id(project_id=project_id, session_id=session_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + data['resources'] = await resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=s_data["startTs"], duration=s_data["duration"]) - data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id) + data['userTesting'] = await user_testing.get_test_signals(session_id=session_id, project_id=project_id) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['issues'] = await issues.get_by_session_id(session_id=session_id, project_id=project_id) data['issues'] = reduce_issues(data['issues']) return data else: diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 6717fac84..1c4bec8c6 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -96,7 +96,7 @@ async def get_stages_and_events(filter_d: 
schemas.CardSeriesFilterSchema, projec sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k)) elif filter_type == events.EventType.METADATA.ui_type: if meta_keys is None: - meta_keys = metadata.get(project_id=project_id) + meta_keys = await metadata.get(project_id=project_id) meta_keys = {m["key"]: m["index"] for m in meta_keys} # op = sessions.__get_sql_operator(f["operator"]) if f.source in meta_keys.keys(): @@ -550,7 +550,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): +async def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): output = [] stages = filter_d.events # TODO: handle 1 stage alone @@ -577,13 +577,13 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d), # project_id=project_id, user_id=None, count_only=True) # last change - counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), + counts = await sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), project_id=project_id, user_id=None, count_only=True) output[0]["sessionsCount"] = counts["countSessions"] output[0]["usersCount"] = counts["countUsers"] return output, 0 # The result of the multi-stage query - rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) + rows = await get_stages_and_events(filter_d=filter_d, project_id=project_id) if len(rows) == 0: return get_stages(stages, []), 0 # Obtain the first part of the output diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index f6e6a32ba..e459ea9d8 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -24,9 +24,9 @@ async def create_tenant(data: 
schemas.UserSignupSchema): if email is None or len(email) < 5: errors.append("Invalid email address.") else: - if users.email_exists(email): + if await users.email_exists(email): errors.append("Email address already in use.") - if users.get_deleted_user_by_email(email) is not None: + if await users.get_deleted_user_by_email(email) is not None: errors.append("Email address previously deleted.") if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): @@ -75,8 +75,8 @@ async def create_tenant(data: schemas.UserSignupSchema): with pg_client.cursor() as cur: await cur.execute(cur.mogrify(query, params)) - telemetry.new_client() - r = users.authenticate(email, password) + await telemetry.new_client() + r = await users.authenticate(email, password) r["smtp"] = smtp.has_smtp() return { diff --git a/api/chalicelib/core/socket_ios.py b/api/chalicelib/core/socket_ios.py index a452dd893..fb04efed1 100644 --- a/api/chalicelib/core/socket_ios.py +++ b/api/chalicelib/core/socket_ios.py @@ -2,11 +2,11 @@ from decouple import config from chalicelib.core import projects -def start_replay(project_id, session_id, device, os_version, mob_url): +async def start_replay(project_id, session_id, device, os_version, mob_url): async with httpx.AsyncClient() as client: r = await client.post(config("IOS_MIDDLEWARE") + "/replay", json={ "projectId": project_id, - "projectKey": projects.get_project_key(project_id), + "projectKey": await projects.get_project_key(project_id), "session_id": session_id, "device": device, "osVersion": os_version, diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index f0d521a92..255fa9704 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -10,7 +10,7 @@ from chalicelib.utils.storage import StorageClient, generators -def presign_share_urls(project_id, urls): +async def presign_share_urls(project_id, urls): results = [] for u in urls: - 
results.append(StorageClient.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120, + results.append(await StorageClient.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120, key=generators.generate_file_key_from_url(project_id, u), check_exists=True)) return results @@ -19,7 +19,7 @@ def presign_share_urls(project_id, urls): -def presign_upload_urls(project_id, urls): +async def presign_upload_urls(project_id, urls): results = [] for u in urls: - results.append(StorageClient.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'), + results.append(await StorageClient.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'), expires_in=1800, key=generators.generate_file_key_from_url(project_id, u))) return results @@ -78,7 +78,7 @@ async def url_exists(url): return False -def get_traces_group(project_id, payload): +async def get_traces_group(project_id, payload): frames = format_payload(payload) results = [{}] * len(frames) @@ -96,12 +96,12 @@ def get_traces_group(project_id, payload): payloads[key] = None if key not in payloads: - file_exists_in_bucket = len(file_url) > 0 and StorageClient.exists(config('sourcemaps_bucket'), key) + file_exists_in_bucket = len(file_url) > 0 and await StorageClient.exists(config('sourcemaps_bucket'), key) if len(file_url) > 0 and not file_exists_in_bucket: print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 looking in server") if not file_url.endswith(".map"): file_url += '.map' - file_exists_in_server = url_exists(file_url) + file_exists_in_server = await url_exists(file_url) file_exists_in_bucket = file_exists_in_server all_exists = all_exists and file_exists_in_bucket if not file_exists_in_bucket and not file_exists_in_server: @@ -119,7 +119,7 @@ def get_traces_group(project_id, payload): for key in payloads.keys(): if payloads[key] is None: continue - key_results = sourcemaps_parser.get_original_trace( + key_results = await sourcemaps_parser.get_original_trace(
key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key, positions=[o["position"] for o in payloads[key]], is_url=payloads[key][0]["isURL"]) @@ -144,7 +144,7 @@ def get_js_cache_path(fullURL): MAX_COLUMN_OFFSET = 60 -def fetch_missed_contexts(frames): +async def fetch_missed_contexts(frames): source_cache = {} for i in range(len(frames)): if frames[i] and frames[i].get("context") and len(frames[i]["context"]) > 0: @@ -154,7 +154,7 @@ def fetch_missed_contexts(frames): file = source_cache[file_abs_path] else: file_path = get_js_cache_path(file_abs_path) - file = StorageClient.get_file(config('js_cache_bucket'), file_path) + file = await StorageClient.get_file(config('js_cache_bucket'), file_path) if file is None: print(f"Missing abs_path: {file_abs_path}, file {file_path} not found in {config('js_cache_bucket')}") source_cache[file_abs_path] = file diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py index 10316570d..57d452fff 100644 --- a/api/chalicelib/core/sourcemaps_parser.py +++ b/api/chalicelib/core/sourcemaps_parser.py @@ -10,7 +10,7 @@ if '%s' in SMR_URL: SMR_URL = SMR_URL % "smr" -def get_original_trace(key, positions, is_url=False): +async def get_original_trace(key, positions, is_url=False): payload = { "key": key, "positions": positions, diff --git a/api/chalicelib/utils/email_helper.py b/api/chalicelib/utils/email_helper.py index 2c5eb02e2..201556b0d 100644 --- a/api/chalicelib/utils/email_helper.py +++ b/api/chalicelib/utils/email_helper.py @@ -27,9 +27,9 @@ def send_assign_session(recipient, message, link): send_html(BODY_HTML, SUBJECT, recipient) -def alert_email(recipients, subject, data): +async def alert_email(recipients, subject, data): BODY_HTML = __get_html_from_file("chalicelib/utils/html/alert_notification.html", formatting_variables=data) - send_html(BODY_HTML=BODY_HTML, SUBJECT=subject, recipient=recipients) + await send_html(BODY_HTML=BODY_HTML, SUBJECT=subject, recipient=recipients) 
def __get_color(idx): diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py index 7943844ed..b978e678b 100644 --- a/api/chalicelib/utils/jira_client.py +++ b/api/chalicelib/utils/jira_client.py @@ -1,5 +1,6 @@ import logging import time +import httpx from datetime import datetime from fastapi import HTTPException, status diff --git a/api/chalicelib/utils/storage/s3.py b/api/chalicelib/utils/storage/s3.py index 1e3cffdba..288bc7ba8 100644 --- a/api/chalicelib/utils/storage/s3.py +++ b/api/chalicelib/utils/storage/s3.py @@ -36,8 +36,8 @@ class AmazonS3Storage(ObjectStorage): raise return True - def get_presigned_url_for_sharing(self, bucket, expires_in, key, check_exists=False): - if check_exists and not self.exists(bucket, key): + async def get_presigned_url_for_sharing(self, bucket, expires_in, key, check_exists=False): + if check_exists and not await self.exists(bucket, key): return None return self.client.generate_presigned_url( @@ -79,7 +79,7 @@ class AmazonS3Storage(ObjectStorage): f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields']) return req.url - def get_file(self, source_bucket, source_key): + async def get_file(self, source_bucket, source_key): try: result = self.client.get_object( Bucket=source_bucket, diff --git a/api/routers/core.py b/api/routers/core.py index dd850768b..5e8bc3519 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -53,7 +53,7 @@ async def events_search(projectId: int, q: str, @app.get('/{projectId}/integrations', tags=["integrations"]) -def get_integrations_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_integrations_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): data = integrations_global.get_global_integrations_status(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId) @@ -61,7 +61,7 @@ def get_integrations_status(projectId: int, context: schemas.CurrentContext = De 
@app.post('/{projectId}/integrations/{integration}/notify/{webhookId}/{source}/{sourceId}', tags=["integrations"]) -def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str, +async def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str, data: schemas.IntegrationNotificationSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): comment = None @@ -86,144 +86,144 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source: @app.get('/integrations/sentry', tags=["integrations"]) -def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/sentry', tags=["integrations"]) -def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.get(project_id=projectId)} @app.post('/{projectId}/integrations/sentry', tags=["integrations"]) -def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...), +async def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sentry', tags=["integrations"]) -def delete_sentry(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_sentry(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/{projectId}/integrations/sentry/events/{eventId}', 
tags=["integrations"]) -def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)): +async def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)} @app.get('/integrations/datadog', tags=["integrations"]) -def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/datadog', tags=["integrations"]) -def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.get(project_id=projectId)} @app.post('/{projectId}/integrations/datadog', tags=["integrations"]) -def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...), +async def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/datadog', tags=["integrations"]) -def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/stackdriver', tags=["integrations"]) -def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
log_tool_stackdriver.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/stackdriver', tags=["integrations"]) -def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_stackdriver.get(project_id=projectId)} @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) -def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...), +async def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"]) -def delete_stackdriver(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_stackdriver(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/newrelic', tags=["integrations"]) -def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/newrelic', tags=["integrations"]) -def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.get(project_id=projectId)} @app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) -def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...), 
+async def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/newrelic', tags=["integrations"]) -def delete_newrelic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_newrelic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/rollbar', tags=["integrations"]) -def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_rollbar.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/rollbar', tags=["integrations"]) -def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_rollbar.get(project_id=projectId)} @app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) -def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...), +async def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/rollbar', tags=["integrations"]) -def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.post('/integrations/bugsnag/list_projects', tags=["integrations"]) -def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...), +async def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorization_token)} @app.get('/integrations/bugsnag', tags=["integrations"]) -def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/bugsnag', tags=["integrations"]) -def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.get(project_id=projectId)} @app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) -def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...), +async def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"]) -def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.post('/integrations/cloudwatch/list_groups', tags=["integrations"]) -def list_groups_cloudwatch(data: 
schemas.IntegrationCloudwatchBasicSchema = Body(...), +async def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, aws_secret_access_key=data.awsSecretAccessKey, @@ -231,77 +231,77 @@ def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body @app.get('/integrations/cloudwatch', tags=["integrations"]) -def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/cloudwatch', tags=["integrations"]) -def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.get(project_id=projectId)} @app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) -def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...), +async def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"]) -def delete_cloudwatch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_cloudwatch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/elasticsearch', tags=["integrations"]) -def get_all_elasticsearch(context: 
schemas.CurrentContext = Depends(OR_context)): +async def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_elasticsearch.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/elasticsearch', tags=["integrations"]) -def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_elasticsearch.get(project_id=projectId)} @app.post('/integrations/elasticsearch/test', tags=["integrations"]) -def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...), +async def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, data=data)} @app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) -def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...), +async def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"]) -def delete_elasticsearch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_elasticsearch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/sumologic', tags=["integrations"]) -def get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_sumologic(context: 
schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/sumologic', tags=["integrations"]) -def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.get(project_id=projectId)} @app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) -def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...), +async def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sumologic', tags=["integrations"]) -def delete_sumologic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_sumologic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/issues', tags=["integrations"]) -def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)): +async def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) if error is not None and integration is None: @@ -310,7 +310,7 @@ def get_integration_status(context: schemas.CurrentContext = Depends(OR_context) @app.get('/integrations/jira', tags=["integrations"]) -def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_context)): +async def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_context)): error, 
integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id, tool=integration_jira_cloud.PROVIDER) @@ -320,7 +320,7 @@ def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_con @app.get('/integrations/github', tags=["integrations"]) -def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_context)): +async def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id, tool=integration_github.PROVIDER) @@ -330,7 +330,7 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c @app.post('/integrations/jira', tags=["integrations"]) -def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...), +async def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if not str(data.url).rstrip('/').endswith('atlassian.net'): return {"errors": ["url must be a valid JIRA URL (example.atlassian.net)"]} @@ -343,7 +343,7 @@ def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...), @app.post('/integrations/github', tags=["integrations"]) -def add_edit_github(data: schemas.IssueTrackingGithubSchema = Body(...), +async def add_edit_github(data: schemas.IssueTrackingGithubSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, tenant_id=context.tenant_id, @@ -354,7 +354,7 @@ def add_edit_github(data: schemas.IssueTrackingGithubSchema = Body(...), @app.delete('/integrations/issues', tags=["integrations"]) -def delete_default_issue_tracking_tool(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_default_issue_tracking_tool(_=Body(None), context: schemas.CurrentContext = 
Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) if error is not None and integration is None: @@ -363,7 +363,7 @@ def delete_default_issue_tracking_tool(_=Body(None), context: schemas.CurrentCon @app.delete('/integrations/jira', tags=["integrations"]) -def delete_jira_cloud(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_jira_cloud(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, tenant_id=context.tenant_id, user_id=context.user_id, @@ -374,7 +374,7 @@ def delete_jira_cloud(_=Body(None), context: schemas.CurrentContext = Depends(OR @app.delete('/integrations/github', tags=["integrations"]) -def delete_github(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_github(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, tenant_id=context.tenant_id, user_id=context.user_id, @@ -385,7 +385,7 @@ def delete_github(_=Body(None), context: schemas.CurrentContext = Depends(OR_con @app.get('/integrations/issues/list_projects', tags=["integrations"]) -def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) if error is not None: @@ -397,7 +397,7 @@ def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR @app.get('/integrations/issues/{integrationProjectId}', tags=["integrations"]) -def get_integration_metadata(integrationProjectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def 
get_integration_metadata(integrationProjectId: int, context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) if error is not None: @@ -409,7 +409,7 @@ def get_integration_metadata(integrationProjectId: int, context: schemas.Current @app.get('/{projectId}/assignments', tags=["assignment"]) -def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get_all(project_id=projectId, user_id=context.user_id) return { 'data': data @@ -417,7 +417,7 @@ def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depend @app.post('/{projectId}/sessions/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) -def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId, +async def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId, data: schemas.AssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.create_new_assignment(tenant_id=context.tenant_id, project_id=projectId, @@ -434,12 +434,12 @@ def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId @app.get('/{projectId}/gdpr', tags=["projects", "gdpr"]) -def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_gdpr(project_id=projectId)} @app.post('/{projectId}/gdpr', tags=["projects", "gdpr"]) -def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), +async def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): result = projects.edit_gdpr(project_id=projectId, 
gdpr=data) if "errors" in result: @@ -448,14 +448,14 @@ def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), @public_app.post('/password/reset-link', tags=["reset password"]) -def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)): +async def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)): if len(data.email) < 5: return {"errors": ["please provide a valid email address"]} return reset_password.reset(data=data) @app.get('/{projectId}/metadata', tags=["metadata"]) -def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": metadata.get(project_id=projectId)} @@ -466,26 +466,26 @@ def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_co @app.post('/{projectId}/metadata', tags=["metadata"]) -def add_metadata(projectId: int, data: schemas.MetadataSchema = Body(...), +async def add_metadata(projectId: int, data: schemas.MetadataSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key) @app.post('/{projectId}/metadata/{index}', tags=["metadata"]) -def edit_metadata(projectId: int, index: int, data: schemas.MetadataSchema = Body(...), +async def edit_metadata(projectId: int, index: int, data: schemas.MetadataSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index, new_name=data.key) @app.delete('/{projectId}/metadata/{index}', tags=["metadata"]) -def delete_metadata(projectId: int, index: int, _=Body(None), +async def delete_metadata(projectId: int, index: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return metadata.delete(tenant_id=context.tenant_id, project_id=projectId, index=index) 
@app.get('/{projectId}/metadata/search', tags=["metadata"]) -def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentContext = Depends(OR_context)): +async def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentContext = Depends(OR_context)): if len(q) == 0 and len(key) == 0: return {"data": []} if len(q) == 0: @@ -496,136 +496,136 @@ def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentCo @app.get('/{projectId}/integration/sources', tags=["integrations"]) -def search_integrations(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def search_integrations(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return log_tools.search(project_id=projectId) @app.get('/{projectId}/sample_rate', tags=["projects"]) -def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_capture_status(project_id=projectId)} @app.post('/{projectId}/sample_rate', tags=["projects"]) -def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), +async def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.update_capture_status(project_id=projectId, changes=data)} @app.post('/{projectId}/conditions', tags=["projects"]) -def update_conditions(projectId: int, data: schemas.ProjectSettings = Body(...), +async def update_conditions(projectId: int, data: schemas.ProjectSettings = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.update_conditions(project_id=projectId, changes=data)} @app.get('/{projectId}/conditions', tags=["projects"]) -def get_conditions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def 
get_conditions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_conditions(project_id=projectId)} @app.get('/announcements', tags=["announcements"]) -def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)): return {"data": announcements.get_all(user_id=context.user_id)} @app.get('/announcements/view', tags=["announcements"]) -def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)): return {"data": announcements.view(user_id=context.user_id)} @app.get('/show_banner', tags=["banner"]) -def errors_merge(context: schemas.CurrentContext = Depends(OR_context)): +async def errors_merge(context: schemas.CurrentContext = Depends(OR_context)): return {"data": False} @app.post('/{projectId}/alerts', tags=["alerts"]) -def create_alert(projectId: int, data: schemas.AlertSchema = Body(...), +async def create_alert(projectId: int, data: schemas.AlertSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return alerts.create(project_id=projectId, data=data) @app.get('/{projectId}/alerts', tags=["alerts"]) -def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": alerts.get_all(project_id=projectId)} @app.get('/{projectId}/alerts/triggers', tags=["alerts", "customMetrics"]) -def get_alerts_triggers(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_alerts_triggers(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": alerts.get_predefined_values() \ + custom_metrics.get_series_for_alert(project_id=projectId, user_id=context.user_id)} 
@app.get('/{projectId}/alerts/{alertId}', tags=["alerts"]) -def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": alerts.get(id=alertId)} @app.post('/{projectId}/alerts/{alertId}', tags=["alerts"]) -def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...), +async def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return alerts.update(id=alertId, data=data) @app.delete('/{projectId}/alerts/{alertId}', tags=["alerts"]) -def delete_alert(projectId: int, alertId: int, _=Body(None), +async def delete_alert(projectId: int, alertId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return alerts.delete(project_id=projectId, alert_id=alertId) @app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"]) @app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"]) -def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), +async def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": sourcemaps.presign_upload_urls(project_id=context.project.project_id, urls=data.urls)} @app.get('/config/weekly_report', tags=["weekly report config"]) -def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_context)): +async def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_context)): return {"data": weekly_report.get_config(user_id=context.user_id)} @app.post('/config/weekly_report', tags=["weekly report config"]) -def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...), +async def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = 
Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)} @app.get('/{projectId}/issue_types', tags=["issues"]) -def issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": issues.get_all_types()} @app.get('/issue_types', tags=["issues"]) -def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): +async def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): return {"data": issues.get_all_types()} @app.get('/{projectId}/assist/sessions', tags=["assist"]) -def get_sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): +async def get_sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId) return {'data': data} @app.post('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...), +async def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_sessions_ws(projectId, body=data) return {'data': data} @app.post('/{projectId}/mobile/{sessionId}/urls', tags=['mobile']) -def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...), +async def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)} @app.post('/projects', tags=['projects'], dependencies=[OR_role("owner", "admin")]) -def create_project(data: 
schemas.CreateProjectSchema = Body(...), +async def create_project(data: schemas.CreateProjectSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) @app.get('/projects/{projectId}', tags=['projects']) -def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True, include_gdpr=True) if data is None: @@ -634,46 +634,46 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.put('/projects/{projectId}', tags=['projects'], dependencies=[OR_role("owner", "admin")]) -def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...), +async def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId) @app.delete('/projects/{projectId}', tags=['projects'], dependencies=[OR_role("owner", "admin")]) -def delete_project(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_project(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return projects.delete(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId) @app.get('/client/new_api_key', tags=['client']) -def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_context)): +async def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_context)): return { 'data': tenants.generate_new_api_key(context.tenant_id) } @app.post('/users/modules', tags=['users']) -def update_user_module(context: schemas.CurrentContext = 
Depends(OR_context), +async def update_user_module(context: schemas.CurrentContext = Depends(OR_context), data: schemas.ModuleStatus = Body(...)): return {"data": users.update_user_module(context.user_id, data)} @app.get('/notifications', tags=['notifications']) -def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): +async def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)} @app.get('/notifications/count', tags=['notifications']) -def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)): +async def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)} @app.get('/notifications/{notificationId}/view', tags=['notifications']) -def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)} @app.post('/notifications/view', tags=['notifications']) -def batch_view_notifications(data: schemas.NotificationsViewSchema, +async def batch_view_notifications(data: schemas.NotificationsViewSchema, context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=data.ids, startTimestamp=data.startTimestamp, @@ -683,85 +683,85 @@ def batch_view_notifications(data: schemas.NotificationsViewSchema, @app.get('/boarding', tags=['boarding']) -def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)): +async def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)): if config("LOCAL_DEV", cast=bool, default=False): return {"data": ""} return {"data": 
boarding.get_state(tenant_id=context.tenant_id)} @app.get('/boarding/installing', tags=['boarding']) -def get_boarding_state_installing(context: schemas.CurrentContext = Depends(OR_context)): +async def get_boarding_state_installing(context: schemas.CurrentContext = Depends(OR_context)): return {"data": boarding.get_state_installing(tenant_id=context.tenant_id)} @app.get('/boarding/identify-users', tags=["boarding"]) -def get_boarding_state_identify_users(context: schemas.CurrentContext = Depends(OR_context)): +async def get_boarding_state_identify_users(context: schemas.CurrentContext = Depends(OR_context)): return {"data": boarding.get_state_identify_users(tenant_id=context.tenant_id)} @app.get('/boarding/manage-users', tags=["boarding"]) -def get_boarding_state_manage_users(context: schemas.CurrentContext = Depends(OR_context)): +async def get_boarding_state_manage_users(context: schemas.CurrentContext = Depends(OR_context)): return {"data": boarding.get_state_manage_users(tenant_id=context.tenant_id)} @app.get('/boarding/integrations', tags=["boarding"]) -def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR_context)): +async def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR_context)): return {"data": boarding.get_state_integrations(tenant_id=context.tenant_id)} @app.get('/integrations/slack/channels', tags=["integrations"]) -def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)): +async def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)): return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.slack)} @app.get('/integrations/slack/{integrationId}', tags=["integrations"]) -def get_slack_webhook(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_slack_webhook(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId)} @app.delete('/integrations/slack/{integrationId}', tags=["integrations"]) -def delete_slack_integration(integrationId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_slack_integration(integrationId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return webhook.delete(tenant_id=context.tenant_id, webhook_id=integrationId) @app.put('/webhooks', tags=["webhooks"]) -def add_edit_webhook(data: schemas.WebhookSchema = Body(...), +async def add_edit_webhook(data: schemas.WebhookSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data, replace_none=True)} @app.get('/webhooks', tags=["webhooks"]) -def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)): +async def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)): return {"data": webhook.get_by_tenant(tenant_id=context.tenant_id, replace_none=True)} @app.delete('/webhooks/{webhookId}', tags=["webhooks"]) -def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId) @app.get('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")]) -def get_members(context: schemas.CurrentContext = Depends(OR_context)): +async def get_members(context: schemas.CurrentContext = Depends(OR_context)): return {"data": users.get_members(tenant_id=context.tenant_id)} @app.get('/client/members/{memberId}/reset', tags=["client"], dependencies=[OR_role("owner", "admin")]) -def reset_reinvite_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def reset_reinvite_member(memberId: int, context: 
schemas.CurrentContext = Depends(OR_context)): return users.reset_member(tenant_id=context.tenant_id, editor_id=context.user_id, user_id_to_update=memberId) @app.delete('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")]) -def delete_member(memberId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_member(memberId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return users.delete_member(tenant_id=context.tenant_id, user_id=context.user_id, id_to_delete=memberId) @app.get('/account/new_api_key', tags=["account"], dependencies=[OR_role("owner", "admin")]) -def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)): +async def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)): return {"data": users.generate_new_api_key(user_id=context.user_id)} @app.post('/account/password', tags=["account"]) -def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), +async def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.change_password(email=context.email, old_password=data.old_password.get_secret_value(), new_password=data.new_password.get_secret_value(), tenant_id=context.tenant_id, @@ -769,35 +769,35 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), @app.post('/{projectId}/saved_search', tags=["savedSearch"]) -def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...), +async def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return saved_search.create(project_id=projectId, user_id=context.user_id, data=data) @app.get('/{projectId}/saved_search', tags=["savedSearch"]) -def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async 
def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.get_all(project_id=projectId, user_id=context.user_id, details=True)} @app.get('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) -def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.get(project_id=projectId, search_id=search_id, user_id=context.user_id)} @app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) -def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...), +async def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)} @app.delete('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) -def delete_saved_search(projectId: int, search_id: int, _=Body(None), +async def delete_saved_search(projectId: int, search_id: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)} @app.get('/limits', tags=['accounts']) -def get_limits(context: schemas.CurrentContext = Depends(OR_context)): +async def get_limits(context: schemas.CurrentContext = Depends(OR_context)): return { 'data': { "teamMember": -1, @@ -807,12 +807,12 @@ def get_limits(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/integrations/msteams/channels', tags=["integrations"]) -def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)): +async def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)): return 
{"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.msteams)} @app.post('/integrations/msteams', tags=['integrations']) -def add_msteams_integration(data: schemas.AddCollaborationSchema, +async def add_msteams_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): n = MSTeams.add(tenant_id=context.tenant_id, data=data) if n is None: @@ -824,7 +824,7 @@ def add_msteams_integration(data: schemas.AddCollaborationSchema, @app.post('/integrations/msteams/{webhookId}', tags=['integrations']) -def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...), +async def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if len(data.url.unicode_string()) > 0: old = MSTeams.get_integration(tenant_id=context.tenant_id, integration_id=webhookId) @@ -841,7 +841,7 @@ def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSche @app.delete('/integrations/msteams/{webhookId}', tags=["integrations"]) -def delete_msteams_integration(webhookId: int, _=Body(None), +async def delete_msteams_integration(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId) @@ -869,30 +869,30 @@ async def check_recording_status(project_id: int): @public_app.get('/', tags=["health"]) -def health_check(): +async def health_check(): return {} # tags @app.post('/{projectId}/tags', tags=["tags"]) -def tags_create(projectId: int, data: schemas.TagCreate = Body(), context: schemas.CurrentContext = Depends(OR_context)): +async def tags_create(projectId: int, data: schemas.TagCreate = Body(), context: schemas.CurrentContext = Depends(OR_context)): data = tags.create_tag(project_id=projectId, data=data) return {'data': data} @app.put('/{projectId}/tags/{tagId}', tags=["tags"]) 
-def tags_update(projectId: int, tagId: int, data: schemas.TagUpdate = Body(), context: schemas.CurrentContext = Depends(OR_context)): +async def tags_update(projectId: int, tagId: int, data: schemas.TagUpdate = Body(), context: schemas.CurrentContext = Depends(OR_context)): data = tags.update_tag(project_id=projectId, tag_id=tagId, data=data) return {'data': data} @app.get('/{projectId}/tags', tags=["tags"]) -def tags_list(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def tags_list(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): data = tags.list_tags(project_id=projectId) return {'data': data} @app.delete('/{projectId}/tags/{tagId}', tags=["tags"]) -def tags_delete(projectId: int, tagId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def tags_delete(projectId: int, tagId: int, context: schemas.CurrentContext = Depends(OR_context)): data = tags.delete_tag(projectId, tag_id=tagId) return {'data': data} diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index dfadd05c7..3688d65c1 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -47,7 +47,7 @@ if not tenants.tenants_exists_sync(use_pool=False): @public_app.post('/login', tags=["authentication"]) -def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)): +async def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)): if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -82,14 +82,14 @@ def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...) 
@app.get('/logout', tags=["login"]) -def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)): +async def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)): users.logout(user_id=context.user_id) response.delete_cookie(key="refreshToken", path="/api/refresh") return {"data": "success"} @app.get('/refresh', tags=["login"]) -def refresh_login(context: schemas.CurrentContext = Depends(OR_context)): +async def refresh_login(context: schemas.CurrentContext = Depends(OR_context)): r = users.refresh(user_id=context.user_id) content = {"jwt": r.get("jwt")} response = JSONResponse(content=content) @@ -99,7 +99,7 @@ def refresh_login(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/account', tags=['accounts']) -def get_account(context: schemas.CurrentContext = Depends(OR_context)): +async def get_account(context: schemas.CurrentContext = Depends(OR_context)): r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) t = tenants.get_by_tenant_id(context.tenant_id) if t is not None: @@ -116,14 +116,14 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) -def edit_account(data: schemas.EditAccountSchema = Body(...), +async def edit_account(data: schemas.EditAccountSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.edit_account(tenant_id=context.tenant_id, user_id=context.user_id, changes=data) @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -def add_slack_integration(data: schemas.AddCollaborationSchema, +async def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: @@ -134,7 +134,7 @@ def add_slack_integration(data: schemas.AddCollaborationSchema, 
@app.post('/integrations/slack/{integrationId}', tags=['integrations']) -def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), +async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) @@ -151,14 +151,14 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc @app.post('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")]) -def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), +async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data, background_tasks=background_tasks) @public_app.get('/users/invitation', tags=['users']) -def process_invitation_link(token: str): +async def process_invitation_link(token: str): if token is None or len(token) < 64: return {"errors": ["please provide a valid invitation"]} user = users.get_by_invitation_token(token) @@ -175,7 +175,7 @@ def process_invitation_link(token: str): @public_app.post('/password/reset', tags=["users"]) -def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): +async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: return {"errors": ["please provide a valid invitation & pass"]} user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase) @@ -188,14 +188,14 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.put('/client/members/{memberId}', tags=["client"], 
dependencies=[OR_role("owner", "admin")]) -def edit_member(memberId: int, data: schemas.EditMemberSchema, +async def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) -def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, +async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, context: schemas.CurrentContext = Depends(OR_context)): if key is None or value is None or len(value) == 0 and len(key) == 0: return {"errors": ["please provide a key&value for search"]} @@ -209,13 +209,13 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = @app.get('/projects', tags=['projects']) -def get_projects(context: schemas.CurrentContext = Depends(OR_context)): +async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, gdpr=True, recorded=True)} # for backward compatibility @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"]) -def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, +async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric(): return {"errors": ["session not found"]} @@ -234,7 +234,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba @app.post('/{projectId}/sessions/search', tags=["sessions"]) -def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), +async def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = 
Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, platform=context.project.platform) @@ -242,7 +242,7 @@ def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = @app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) -def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), +async def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True, platform=context.project.platform) @@ -250,7 +250,7 @@ def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"]) -def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, +async def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric(): return {"errors": ["session not found"]} @@ -269,7 +269,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"]) -def get_session_events(projectId: int, sessionId: Union[int, str], +async def get_session_events(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric(): return {"errors": ["session not found"]} @@ -285,7 +285,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) -def get_error_trace(projectId: int, sessionId: int, errorId: str, +async def get_error_trace(projectId: int, sessionId: int, 
errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: @@ -296,7 +296,7 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}', tags=['errors']) -def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, +async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) @@ -307,7 +307,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) -def errors_get_details_sourcemaps(projectId: int, errorId: str, +async def errors_get_details_sourcemaps(projectId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: @@ -318,7 +318,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) -def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), +async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": @@ -336,7 +336,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) -def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, +async def get_live_session(projectId: int, sessionId: str, 
background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: @@ -351,7 +351,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"]) -def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], +async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} if not sessionId.isnumeric(): @@ -372,7 +372,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"]) -def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], +async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} if not sessionId.isnumeric(): @@ -393,19 +393,19 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) -def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), +async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) -def add_remove_favorite_session2(projectId: int, sessionId: int, +async def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId) 
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) -def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): +async def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, tenant_id=context.tenant_id, user_id=context.user_id) @@ -417,7 +417,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) -def assign_session(projectId: int, sessionId: int, issueId: str, +async def assign_session(projectId: int, sessionId: int, issueId: str, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, tenant_id=context.tenant_id, user_id=context.user_id) @@ -429,7 +429,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -def comment_assignment(projectId: int, sessionId: int, issueId: str, +async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, @@ -443,7 +443,7 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) -def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), +async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId): 
return {"errors": ["Session not found"]} @@ -457,7 +457,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema @app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) -def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, user_id=context.user_id) if "errors" in data: @@ -468,7 +468,7 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) -def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), +async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, data=data) @@ -480,28 +480,28 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema @app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) -def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId) return data @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) -def share_note_to_slack(projectId: int, noteId: int, webhookId: int, +async def share_note_to_slack(projectId: int, noteId: int, webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): return 
sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) -def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, +async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.post('/{projectId}/notes', tags=["sessions", "notes"]) -def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), +async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data=data) @@ -511,43 +511,43 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), @app.post('/{projectId}/click_maps/search', tags=["click maps"]) -def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...), +async def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} @app.post('/{project_id}/feature-flags/search', tags=["feature flags"]) -def search_feature_flags(project_id: int, +async def search_feature_flags(project_id: int, data: schemas.SearchFlagsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return feature_flags.search_feature_flags(project_id=project_id, user_id=context.user_id, data=data) @app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) -def get_feature_flag(project_id: 
int, feature_flag_id: int): +async def get_feature_flag(project_id: int, feature_flag_id: int): return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id) @app.post('/{project_id}/feature-flags', tags=["feature flags"]) -def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...), +async def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data) @app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) -def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...), +async def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id, user_id=context.user_id, feature_flag=data) @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) -def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): +async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)} @app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"]) -def update_feature_flag_status(project_id: int, feature_flag_id: int, +async def update_feature_flag_status(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagStatus = Body(...)): return {"data": feature_flags.update_feature_flag_status(project_id=project_id, feature_flag_id=feature_flag_id, is_active=data.is_active)} diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py index 245f039c7..862c60e37 100644 --- 
a/api/routers/subs/health.py +++ b/api/routers/subs/health.py @@ -8,7 +8,7 @@ public_app, app, app_apikey = get_routers() @app.get('/healthz', tags=["health-check"]) -def get_global_health_status(): +async def get_global_health_status(): if config("LOCAL_DEV", cast=bool, default=False): return {"data": ""} return {"data": health.get_health()} diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index 11314fc8b..2ad24eeec 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -11,18 +11,18 @@ public_app, app, app_apikey = get_routers() @app.post('/{projectId}/dashboards', tags=["dashboard"]) -def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), +async def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data) @app.get('/{projectId}/dashboards', tags=["dashboard"]) -def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)} @app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) -def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) if data is None: return {"errors": ["dashboard not found"]} @@ -30,25 +30,25 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) -def update_dashboard(projectId: int, dashboardId: int, data: 
schemas.EditDashboardSchema = Body(...), +async def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, data=data)} @app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) -def delete_dashboard(projectId: int, dashboardId: int, _=Body(None), +async def delete_dashboard(projectId: int, dashboardId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) @app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"]) -def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)} @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"]) -def add_card_to_dashboard(projectId: int, dashboardId: int, +async def add_card_to_dashboard(projectId: int, dashboardId: int, data: schemas.AddWidgetToDashboardPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, @@ -57,7 +57,7 @@ def add_card_to_dashboard(projectId: int, dashboardId: int, @app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) # @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) -def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, +async def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, data: schemas.CardSchema = Body(...), context: 
schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id, @@ -65,7 +65,7 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) -def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, +async def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, data: schemas.UpdateWidgetPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, @@ -73,50 +73,50 @@ def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, @app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) -def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None), +async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, widget_id=widgetId) @app.post('/{projectId}/cards/try', tags=["cards"]) -def try_card(projectId: int, data: schemas.CardSchema = Body(...), +async def try_card(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.get_chart(project_id=projectId, data=data, user_id=context.user_id)} @app.post('/{projectId}/cards/try/sessions', tags=["cards"]) -def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...), +async def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.get_sessions(project_id=projectId, 
user_id=context.user_id, data=data) return {"data": data} @app.post('/{projectId}/cards/try/issues', tags=["cards"]) -def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...), +async def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)} @app.get('/{projectId}/cards', tags=["cards"]) -def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} @app.post('/{projectId}/cards', tags=["cards"]) -def create_card(projectId: int, data: schemas.CardSchema = Body(...), +async def create_card(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data) @app.post('/{projectId}/cards/search', tags=["cards"]) -def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...), +async def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)} @app.get('/{projectId}/cards/{metric_id}', tags=["cards"]) -def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): +async def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if metric_id.isnumeric(): metric_id = int(metric_id) else: @@ -128,7 +128,7 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"]) -def 
get_card_sessions(projectId: int, metric_id: int, +async def get_card_sessions(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.get_sessions_by_card_id(project_id=projectId, user_id=context.user_id, metric_id=metric_id, @@ -139,7 +139,7 @@ def get_card_sessions(projectId: int, metric_id: int, @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"]) -def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], +async def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if metric_id.isnumeric(): @@ -155,7 +155,7 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) -def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, +async def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id, @@ -166,7 +166,7 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"]) -def get_card_errors_list(projectId: int, metric_id: int, +async def get_card_errors_list(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, @@ -177,7 +177,7 @@ def get_card_errors_list(projectId: int, metric_id: int, @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"]) -def get_card_chart(projectId: int, metric_id: int, request: Request, data: 
schemas.CardSessionsSchema = Body(...), +async def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) @@ -185,7 +185,7 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), +async def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) if data is None: @@ -194,7 +194,7 @@ def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body( @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"]) -def update_card_state(projectId: int, metric_id: int, +async def update_card_state(projectId: int, metric_id: int, data: schemas.UpdateCardStatusSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { @@ -203,6 +203,6 @@ def update_card_state(projectId: int, metric_id: int, @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -def delete_card(projectId: int, metric_id: int, _=Body(None), +async def delete_card(projectId: int, metric_id: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} diff --git a/api/routers/subs/v1_api.py b/api/routers/subs/v1_api.py index b4e9c2aaa..5ea729f40 100644 --- a/api/routers/subs/v1_api.py +++ b/api/routers/subs/v1_api.py @@ -9,7 +9,7 @@ public_app, app, app_apikey = get_routers() 
@app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"]) -def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None, +async def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None, context: schemas.CurrentContext = Depends(OR_context)): return { "data": sessions.get_user_sessions( @@ -22,7 +22,7 @@ def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_ @app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"]) -def get_session_events(projectKey: str, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_session_events(projectKey: str, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return { "data": events.get_by_session_id( project_id=context.project.project_id, @@ -32,7 +32,7 @@ def get_session_events(projectKey: str, sessionId: int, context: schemas.Current @app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"]) -def get_user_details(projectKey: str, userId: str, context: schemas.CurrentContext = Depends(OR_context)): +async def get_user_details(projectKey: str, userId: str, context: schemas.CurrentContext = Depends(OR_context)): return { "data": sessions.get_session_user( project_id=context.project.project_id, @@ -42,24 +42,24 @@ def get_user_details(projectKey: str, userId: str, context: schemas.CurrentConte @app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"]) -def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None), +async def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): record = jobs.create(project_id=context.project.project_id, user_id=userId) return {"data": record} @app_apikey.get('/v1/{projectKey}/jobs', tags=["api"]) -def get_jobs(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): +async def get_jobs(projectKey: 
str, context: schemas.CurrentContext = Depends(OR_context)): return {"data": jobs.get_all(project_id=context.project.project_id)} @app_apikey.get('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) -def get_job(projectKey: str, jobId: int, context: schemas.CurrentContext = Depends(OR_context)): +async def get_job(projectKey: str, jobId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": jobs.get(job_id=jobId, project_id=context.project.project_id)} @app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) -def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): +async def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): job = jobs.get(job_id=jobId, project_id=context.project.project_id) job_not_found = len(job.keys()) == 0 @@ -73,7 +73,7 @@ def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.Curre @app_apikey.get('/v1/projects', tags=["api"]) -def get_projects(context: schemas.CurrentContext = Depends(OR_context)): +async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): records = projects.get_projects(tenant_id=context.tenant_id) for record in records: del record['projectId'] @@ -82,14 +82,14 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app_apikey.get('/v1/projects/{projectKey}', tags=["api"]) -def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): +async def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): return { "data": projects.get_by_project_key(project_key=projectKey) } @app_apikey.post('/v1/projects', tags=["api"]) -def create_project(data: schemas.CreateProjectSchema = Body(...), +async def create_project(data: schemas.CreateProjectSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): record = projects.create( tenant_id=context.tenant_id,