Compare commits

...
Sign in to create a new pull request.

17 commits

Author SHA1 Message Date
Amirouche
bcb920bc32 wip 2024-02-06 14:56:42 +01:00
Amirouche
4b09c00e4d requirements: add httpx 2024-02-06 14:27:31 +01:00
Amirouche
6a1567fa78 wip 2024-02-06 14:18:50 +01:00
Amirouche
7e1a9fcf75 wip 2024-02-06 14:18:50 +01:00
Amirouche
f5237c1273 wip 2024-02-06 14:18:50 +01:00
Amirouche
a1c602e2f3 small fixes 2024-02-06 14:18:50 +01:00
Amirouche
1bfe2e153a wip 2024-02-06 14:18:50 +01:00
Amirouche
0366a2593a wip 2024-02-06 14:18:50 +01:00
Amirouche
ba8a875394 wip 2024-02-06 14:18:50 +01:00
Amirouche
09b391ee02 wip 2024-02-06 14:18:50 +01:00
Amirouche
66040ea9f5 wip 2024-02-06 14:18:50 +01:00
Amirouche
cb1533ca6e move notes to development.md 2024-02-06 14:18:50 +01:00
Amirouche
e488da4ada typofix 2024-02-06 14:18:50 +01:00
Amirouche
3ca2c1f546 TODO: fix cron. 2024-02-06 14:18:50 +01:00
Amirouche
7967915b91 fix jira integration. 2024-02-06 14:18:50 +01:00
Amirouche
014a51602a wip 2024-02-06 14:18:50 +01:00
Amirouche
0e2ae898c5 Make async postgresql calls, and http calls. 2024-02-06 14:18:50 +01:00
99 changed files with 2627 additions and 2607 deletions

View file

@ -1,43 +0,0 @@
#### autogenerated api frontend
API can autogenerate a frontend that documents, and allows to play
with, in a limited way, its interface. Make sure you have the
following variables inside the current `.env`:
```
docs_url=/docs
root_path=''
```
If the `.env` that is in-use is based on `env.default` then it is
already the case. Start, or restart the http server, then go to
`https://127.0.0.1:8000/docs`. That is autogenerated documentation
based on pydantic schema, fastapi routes, and docstrings :wink:.
Happy experiments, and then documentation!
#### psycopg3 API
I mis-remember the psycopg v2 vs. v3 API.
For the record, the expected psycopg3's async api looks like the
following pseudo code:
```python
async with app.state.postgresql.connection() as cnx:
async with cnx.transaction():
row = await cnx.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
row = await row.fetchone()
return row["exists"]
```
Mind the following:
- Where `app.state.postgresql` is the postgresql connection pooler.
- Wrap explicit transaction with `async with cnx.transaction():
foobar()`
- Most of the time the transaction object is not used;
- Do execute await operation against `cnx`;
- `await cnx.execute` returns a cursor object;
- Do the `await cursor.fetchqux...` calls against the object return by
a call to execute.

View file

@ -37,7 +37,7 @@ async def lifespan(app: FastAPI):
ap_logger.setLevel(loglevel) ap_logger.setLevel(loglevel)
app.schedule = AsyncIOScheduler() app.schedule = AsyncIOScheduler()
await pg_client.init() pg_client.init()
app.schedule.start() app.schedule.start()
for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:

View file

@ -63,4 +63,4 @@ if config("LOCAL_DEV", default=False, cast=bool):
@app.get('/trigger', tags=["private"]) @app.get('/trigger', tags=["private"])
async def trigger_main_cron(): async def trigger_main_cron():
logging.info("Triggering main cron") logging.info("Triggering main cron")
alerts_processor.process() await alerts_processor.process()

View file

@ -18,7 +18,7 @@ class APIKeyAuth(APIKeyHeader):
async def __call__(self, request: Request) -> Optional[CurrentAPIContext]: async def __call__(self, request: Request) -> Optional[CurrentAPIContext]:
api_key: Optional[str] = await super(APIKeyAuth, self).__call__(request) api_key: Optional[str] = await super(APIKeyAuth, self).__call__(request)
r = authorizers.api_key_authorizer(api_key) r = await authorizers.api_key_authorizer(api_key)
if r is None: if r is None:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, status_code=status.HTTP_401_UNAUTHORIZED,

View file

@ -13,8 +13,8 @@ from chalicelib.core import authorizers, users
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext: async def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext:
user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1)) user = await users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
if user is None: if user is None:
logger.warning("User not found.") logger.warning("User not found.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
@ -36,7 +36,7 @@ class JWTAuth(HTTPBearer):
jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=refresh_token) jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=refresh_token)
if jwt_payload is None or jwt_payload.get("jti") is None: if jwt_payload is None or jwt_payload.get("jti") is None:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
auth_exists = users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1), auth_exists = await users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1),
jwt_jti=jwt_payload["jti"]) jwt_jti=jwt_payload["jti"])
if not auth_exists: if not auth_exists:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
@ -53,7 +53,7 @@ class JWTAuth(HTTPBearer):
or old_jwt_payload.get("userId") != jwt_payload.get("userId"): or old_jwt_payload.get("userId") != jwt_payload.get("userId"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
return _get_current_auth_context(request=request, jwt_payload=jwt_payload) return await _get_current_auth_context(request=request, jwt_payload=jwt_payload)
else: else:
credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request) credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
@ -63,7 +63,7 @@ class JWTAuth(HTTPBearer):
detail="Invalid authentication scheme.") detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials) jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
auth_exists = jwt_payload is not None \ auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1), and await users.auth_exists(user_id=jwt_payload.get("userId", -1),
jwt_iat=jwt_payload.get("iat", 100)) jwt_iat=jwt_payload.get("iat", 100))
if jwt_payload is None \ if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
@ -79,7 +79,7 @@ class JWTAuth(HTTPBearer):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
return _get_current_auth_context(request=request, jwt_payload=jwt_payload) return await _get_current_auth_context(request=request, jwt_payload=jwt_payload)
logger.warning("Invalid authorization code.") logger.warning("Invalid authorization code.")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")

View file

@ -23,9 +23,9 @@ class ProjectAuthorizer:
current_project = None current_project = None
if self.project_identifier == "projectId" \ if self.project_identifier == "projectId" \
and (isinstance(value, int) or isinstance(value, str) and value.isnumeric()): and (isinstance(value, int) or isinstance(value, str) and value.isnumeric()):
current_project = projects.get_project(project_id=value, tenant_id=current_user.tenant_id) current_project = await projects.get_project(project_id=value, tenant_id=current_user.tenant_id)
elif self.project_identifier == "projectKey": elif self.project_identifier == "projectKey":
current_project = projects.get_by_project_key(project_key=value) current_project = await projects.get_by_project_key(project_key=value)
if current_project is None: if current_project is None:
logger.debug(f"unauthorized project {self.project_identifier}:{value}") logger.debug(f"unauthorized project {self.project_identifier}:{value}")

View file

@ -13,21 +13,21 @@ from chalicelib.utils import pg_client, helper, email_helper, smtp
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def get(id): async def get(id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
SELECT * SELECT *
FROM public.alerts FROM public.alerts
WHERE alert_id =%(id)s;""", WHERE alert_id =%(id)s;""",
{"id": id}) {"id": id})
) )
a = helper.dict_to_camel_case(cur.fetchone()) a = helper.dict_to_camel_case(await cur.fetchone())
return helper.custom_alert_to_front(__process_circular(a)) return helper.custom_alert_to_front(__process_circular(a))
def get_all(project_id): async def get_all(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""\ query = cur.mogrify("""\
SELECT alerts.*, SELECT alerts.*,
COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count', COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count',
@ -39,8 +39,8 @@ def get_all(project_id):
AND alerts.deleted_at ISNULL AND alerts.deleted_at ISNULL
ORDER BY alerts.created_at;""", ORDER BY alerts.created_at;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
all = helper.list_to_camel_case(cur.fetchall()) all = helper.list_to_camel_case(await cur.fetchall())
for i in range(len(all)): for i in range(len(all)):
all[i] = helper.custom_alert_to_front(__process_circular(all[i])) all[i] = helper.custom_alert_to_front(__process_circular(all[i]))
return all return all
@ -54,29 +54,29 @@ def __process_circular(alert):
return alert return alert
def create(project_id, data: schemas.AlertSchema): async def create(project_id, data: schemas.AlertSchema):
data = data.model_dump() data = data.model_dump()
data["query"] = json.dumps(data["query"]) data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"]) data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id, change) INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id, change)
VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s, %(change)s) VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s, %(change)s)
RETURNING *;""", RETURNING *;""",
{"project_id": project_id, **data}) {"project_id": project_id, **data})
) )
a = helper.dict_to_camel_case(cur.fetchone()) a = helper.dict_to_camel_case(await cur.fetchone())
return {"data": helper.custom_alert_to_front(helper.dict_to_camel_case(__process_circular(a)))} return {"data": helper.custom_alert_to_front(helper.dict_to_camel_case(__process_circular(a)))}
def update(id, data: schemas.AlertSchema): async def update(id, data: schemas.AlertSchema):
data = data.model_dump() data = data.model_dump()
data["query"] = json.dumps(data["query"]) data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"]) data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""\ query = cur.mogrify("""\
UPDATE public.alerts UPDATE public.alerts
SET name = %(name)s, SET name = %(name)s,
@ -90,12 +90,12 @@ def update(id, data: schemas.AlertSchema):
WHERE alert_id =%(id)s AND deleted_at ISNULL WHERE alert_id =%(id)s AND deleted_at ISNULL
RETURNING *;""", RETURNING *;""",
{"id": id, **data}) {"id": id, **data})
cur.execute(query=query) await cur.execute(query=query)
a = helper.dict_to_camel_case(cur.fetchone()) a = helper.dict_to_camel_case(await cur.fetchone())
return {"data": helper.custom_alert_to_front(__process_circular(a))} return {"data": helper.custom_alert_to_front(__process_circular(a))}
def process_notifications(data): async def process_notifications(data):
full = {} full = {}
for n in data: for n in data:
if "message" in n["options"]: if "message" in n["options"]:
@ -112,7 +112,7 @@ def process_notifications(data):
}) })
elif c["type"] in ["webhook"]: elif c["type"] in ["webhook"]:
full[c["type"]].append({"data": webhook_data, "destination": c["value"]}) full[c["type"]].append({"data": webhook_data, "destination": c["value"]})
notifications.create(data) await notifications.create(data)
BATCH_SIZE = 200 BATCH_SIZE = 200
for t in full.keys(): for t in full.keys():
for i in range(0, len(full[t]), BATCH_SIZE): for i in range(0, len(full[t]), BATCH_SIZE):
@ -122,52 +122,51 @@ def process_notifications(data):
if t == "slack": if t == "slack":
try: try:
send_to_slack_batch(notifications_list=notifications_list) await send_to_slack_batch(notifications_list=notifications_list)
except Exception as e: except Exception as e:
logging.error("!!!Error while sending slack notifications batch") logging.error("!!!Error while sending slack notifications batch")
logging.error(str(e)) logging.error(str(e))
elif t == "msteams": elif t == "msteams":
try: try:
send_to_msteams_batch(notifications_list=notifications_list) await send_to_msteams_batch(notifications_list=notifications_list)
except Exception as e: except Exception as e:
logging.error("!!!Error while sending msteams notifications batch") logging.error("!!!Error while sending msteams notifications batch")
logging.error(str(e)) logging.error(str(e))
elif t == "email": elif t == "email":
try: try:
send_by_email_batch(notifications_list=notifications_list) await send_by_email_batch(notifications_list=notifications_list)
except Exception as e: except Exception as e:
logging.error("!!!Error while sending email notifications batch") logging.error("!!!Error while sending email notifications batch")
logging.error(str(e)) logging.error(str(e))
elif t == "webhook": elif t == "webhook":
try: try:
webhook.trigger_batch(data_list=notifications_list) await webhook.trigger_batch(data_list=notifications_list)
except Exception as e: except Exception as e:
logging.error("!!!Error while sending webhook notifications batch") logging.error("!!!Error while sending webhook notifications batch")
logging.error(str(e)) logging.error(str(e))
def send_by_email(notification, destination): async def send_by_email(notification, destination):
if notification is None: if notification is None:
return return
email_helper.alert_email(recipients=destination, await email_helper.alert_email(recipients=destination,
subject=f'"{notification["title"]}" has been triggered', subject=f'"{notification["title"]}" has been triggered',
data={ data={
"message": f'"{notification["title"]}" {notification["description"]}', "message": f'"{notification["title"]}" {notification["description"]}',
"project_id": notification["options"]["projectId"]}) "project_id": notification["options"]["projectId"]})
def send_by_email_batch(notifications_list): async def send_by_email_batch(notifications_list):
if not smtp.has_smtp(): if not smtp.has_smtp():
logging.info("no SMTP configuration for email notifications") logging.info("no SMTP configuration for email notifications")
if notifications_list is None or len(notifications_list) == 0: if notifications_list is None or len(notifications_list) == 0:
logging.info("no email notifications") logging.info("no email notifications")
return return
for n in notifications_list: for n in notifications_list:
send_by_email(notification=n.get("notification"), destination=n.get("destination")) await send_by_email(notification=n.get("notification"), destination=n.get("destination"))
time.sleep(1)
def send_to_slack_batch(notifications_list): async def send_to_slack_batch(notifications_list):
webhookId_map = {} webhookId_map = {}
for n in notifications_list: for n in notifications_list:
if n.get("destination") not in webhookId_map: if n.get("destination") not in webhookId_map:
@ -178,11 +177,11 @@ def send_to_slack_batch(notifications_list):
"title_link": n["notification"]["buttonUrl"], "title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()}) "ts": datetime.now().timestamp()})
for batch in webhookId_map.keys(): for batch in webhookId_map.keys():
Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, await Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"]) attachments=webhookId_map[batch]["batch"])
def send_to_msteams_batch(notifications_list): async def send_to_msteams_batch(notifications_list):
webhookId_map = {} webhookId_map = {}
for n in notifications_list: for n in notifications_list:
if n.get("destination") not in webhookId_map: if n.get("destination") not in webhookId_map:
@ -207,13 +206,13 @@ def send_to_msteams_batch(notifications_list):
} }
) )
for batch in webhookId_map.keys(): for batch in webhookId_map.keys():
MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch, await MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"]) attachments=webhookId_map[batch]["batch"])
def delete(project_id, alert_id): async def delete(project_id, alert_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(""" UPDATE public.alerts cur.mogrify(""" UPDATE public.alerts
SET deleted_at = timezone('utc'::text, now()), SET deleted_at = timezone('utc'::text, now()),
active = FALSE active = FALSE

View file

@ -1,8 +1,8 @@
from chalicelib.utils import pg_client, helper from chalicelib.utils import pg_client, helper
def get_all_alerts(): async def get_all_alerts():
with pg_client.PostgresClient(long_query=True) as cur: async with pg_client.cursor() as cur:
query = """SELECT -1 AS tenant_id, query = """SELECT -1 AS tenant_id,
alert_id, alert_id,
projects.project_id, projects.project_id,
@ -27,6 +27,6 @@ def get_all_alerts():
AND projects.deleted_at ISNULL AND projects.deleted_at ISNULL
AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL) AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
ORDER BY alerts.created_at;""" ORDER BY alerts.created_at;"""
cur.execute(query=query) await cur.execute(query=query)
all_alerts = helper.list_to_camel_case(cur.fetchall()) all_alerts = helper.list_to_camel_case(await cur.fetchall())
return all_alerts return all_alerts

View file

@ -102,7 +102,7 @@ def can_check(a) -> bool:
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000 and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
def Build(a): async def Build(a):
now = TimeUTC.now() now = TimeUTC.now()
params = {"project_id": a["projectId"], "now": now} params = {"project_id": a["projectId"], "now": now}
full_args = {} full_args = {}
@ -120,7 +120,7 @@ def Build(a):
logging.warning(a["filter"]) logging.warning(a["filter"])
raise raise
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False, full_args, query_part = await sessions.search_query_parts(data=data, error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, issue=None, project_id=a["projectId"], user_id=None,
favorite_only=False) favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value subQ = f"""SELECT COUNT(session_id) AS value
@ -187,12 +187,17 @@ def Build(a):
def process(): def process():
import asyncio
asyncio.run(_process())
async def _process():
notifications = [] notifications = []
all_alerts = alerts_listener.get_all_alerts() all_alerts = alerts_listener.get_all_alerts()
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
for alert in all_alerts: for alert in all_alerts:
if can_check(alert): if can_check(alert):
query, params = Build(alert) query, params = await Build(alert)
try: try:
query = cur.mogrify(query, params) query = cur.mogrify(query, params)
except Exception as e: except Exception as e:
@ -203,8 +208,8 @@ def process():
logging.debug(alert) logging.debug(alert)
logging.debug(query) logging.debug(query)
try: try:
cur.execute(query) await cur.execute(query)
result = cur.fetchone() result = await cur.fetchone()
if result["valid"]: if result["valid"]:
logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}") logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
notifications.append(generate_notification(alert, result)) notifications.append(generate_notification(alert, result))
@ -213,14 +218,13 @@ def process():
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(query) logging.error(query)
logging.error(e) logging.error(e)
cur = cur.recreate(rollback=True)
if len(notifications) > 0: if len(notifications) > 0:
cur.execute( await cur.execute(
cur.mogrify(f"""UPDATE public.alerts cur.mogrify(f"""UPDATE public.alerts
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])})) WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
if len(notifications) > 0: if len(notifications) > 0:
alerts.process_notifications(notifications) await alerts.process_notificationsq(notifications)
def __format_value(x): def __format_value(x):

View file

@ -4,8 +4,8 @@ from decouple import config
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def get_all(user_id): async def get_all(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(""" query = cur.mogrify("""
SELECT a.*, u.last >= (EXTRACT(EPOCH FROM a.created_at)*1000) AS viewed SELECT a.*, u.last >= (EXTRACT(EPOCH FROM a.created_at)*1000) AS viewed
FROM public.announcements AS a, FROM public.announcements AS a,
@ -15,10 +15,10 @@ def get_all(user_id):
LIMIT 1) AS u(last) LIMIT 1) AS u(last)
ORDER BY a.created_at DESC;""", ORDER BY a.created_at DESC;""",
{"userId": user_id}) {"userId": user_id})
cur.execute( await cur.execute(
query query
) )
announcements = helper.list_to_camel_case(cur.fetchall()) announcements = helper.list_to_camel_case(await cur.fetchall())
for a in announcements: for a in announcements:
a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"]) a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
if a["imageUrl"] is not None and len(a["imageUrl"]) > 0: if a["imageUrl"] is not None and len(a["imageUrl"]) > 0:
@ -26,8 +26,8 @@ def get_all(user_id):
return announcements return announcements
def view(user_id): async def view(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(""" query = cur.mogrify("""
UPDATE public.users UPDATE public.users
SET data=data || SET data=data ||
@ -36,7 +36,7 @@ def view(user_id):
'}')::jsonb '}')::jsonb
WHERE user_id = %(userId)s;""", WHERE user_id = %(userId)s;""",
{"userId": user_id}) {"userId": user_id})
cur.execute( await cur.execute(
query query
) )
return True return True

View file

@ -2,7 +2,7 @@ from os import access, R_OK
from os.path import exists as path_exists, getsize from os.path import exists as path_exists, getsize
import jwt import jwt
import requests import httpx
from decouple import config from decouple import config
from fastapi import HTTPException, status from fastapi import HTTPException, status
@ -28,24 +28,24 @@ SESSION_PROJECTION_COLS = """s.project_id,
""" """
def get_live_sessions_ws_user_id(project_id, user_id): async def get_live_sessions_ws_user_id(project_id, user_id):
data = { data = {
"filter": {"userId": user_id} if user_id else {} "filter": {"userId": user_id} if user_id else {}
} }
return __get_live_sessions_ws(project_id=project_id, data=data) return await __get_live_sessions_ws(project_id=project_id, data=data)
def get_live_sessions_ws_test_id(project_id, test_id): async def get_live_sessions_ws_test_id(project_id, test_id):
data = { data = {
"filter": { "filter": {
'uxtId': test_id, 'uxtId': test_id,
'operator': 'is' 'operator': 'is'
} }
} }
return __get_live_sessions_ws(project_id=project_id, data=data) return await __get_live_sessions_ws(project_id=project_id, data=data)
def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema): async def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema):
data = { data = {
"filter": {}, "filter": {},
"pagination": {"limit": body.limit, "page": body.page}, "pagination": {"limit": body.limit, "page": body.page},
@ -57,22 +57,20 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
else: else:
data["filter"][f.type] = {"values": f.value, "operator": f.operator} data["filter"][f.type] = {"values": f.value, "operator": f.operator}
return __get_live_sessions_ws(project_id=project_id, data=data) return await __get_live_sessions_ws(project_id=project_id, data=data)
def __get_live_sessions_ws(project_id, data): async def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id) project_key = await projects.get_project_key(project_id)
try: try:
results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}", async with httpx.AsyncClient() as client:
results = await client.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5)) json=data, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200: if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws") print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
print(results.text) print(results.text)
return {"total": 0, "sessions": []} return {"total": 0, "sessions": []}
live_peers = results.json().get("data", []) live_peers = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
live_peers = {"total": 0, "sessions": []}
except Exception as e: except Exception as e:
print("!! Issue getting Live-Assist response") print("!! Issue getting Live-Assist response")
print(str(e)) print(str(e))
@ -110,10 +108,11 @@ def __get_agent_token(project_id, project_key, session_id):
) )
def get_live_session_by_id(project_id, session_id): async def get_live_session_by_id(project_id, session_id):
project_key = projects.get_project_key(project_id) project_key = await projects.get_project_key(project_id)
try: try:
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}", async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5)) timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200: if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id") print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
@ -124,9 +123,6 @@ def get_live_session_by_id(project_id, session_id):
return None return None
results["live"] = True results["live"] = True
results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id) results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id)
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return None
except Exception as e: except Exception as e:
print("!! Issue getting Assist response") print("!! Issue getting Assist response")
print(str(e)) print(str(e))
@ -139,20 +135,18 @@ def get_live_session_by_id(project_id, session_id):
return results return results
def is_live(project_id, session_id, project_key=None): async def is_live(project_id, session_id, project_key=None):
if project_key is None: if project_key is None:
project_key = projects.get_project_key(project_id) project_key = await projects.get_project_key(project_id)
try: try:
results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}", async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5)) timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200: if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for is_live") print(f"!! issue with the peer-server code:{results.status_code} for is_live")
print(results.text) print(results.text)
return False return False
results = results.json().get("data") results = results.json().get("data")
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return False
except Exception as e: except Exception as e:
print("!! Issue getting Assist response") print("!! Issue getting Assist response")
print(str(e)) print(str(e))
@ -165,13 +159,14 @@ def is_live(project_id, session_id, project_key=None):
return str(session_id) == results return str(session_id) == results
def autocomplete(project_id, q: str, key: str = None): async def autocomplete(project_id, q: str, key: str = None):
project_key = projects.get_project_key(project_id) project_key = await projects.get_project_key(project_id)
params = {"q": q} params = {"q": q}
if key: if key:
params["key"] = key params["key"] = key
try: try:
results = requests.get( async with httpx.AsyncClient() as client:
results = await client.get(
ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete", ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5)) params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200: if results.status_code != 200:
@ -179,9 +174,6 @@ def autocomplete(project_id, q: str, key: str = None):
print(results.text) print(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]} return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", []) results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e: except Exception as e:
print("!! Issue getting Assist response") print("!! Issue getting Assist response")
print(str(e)) print(str(e))
@ -252,10 +244,11 @@ def get_raw_devtools_by_id(project_id, session_id):
return None return None
def session_exists(project_id, session_id): async def session_exists(project_id, session_id):
project_key = projects.get_project_key(project_id) project_key = await projects.get_project_key(project_id)
try: try:
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}", async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5)) timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200: if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for session_exists") print(f"!! issue with the peer-server code:{results.status_code} for session_exists")
@ -265,9 +258,6 @@ def session_exists(project_id, session_id):
if results is None: if results is None:
return False return False
return True return True
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return False
except Exception as e: except Exception as e:
print("!! Issue getting Assist response") print("!! Issue getting Assist response")
print(str(e)) print(str(e))

View file

@ -25,7 +25,7 @@ def jwt_authorizer(scheme: str, token: str, leeway=0):
except jwt.ExpiredSignatureError: except jwt.ExpiredSignatureError:
logger.debug("! JWT Expired signature") logger.debug("! JWT Expired signature")
return None return None
except BaseException as e: except Exception as e:
logger.warning("! JWT Base Exception") logger.warning("! JWT Base Exception")
logger.debug(e) logger.debug(e)
return None return None
@ -45,15 +45,15 @@ def jwt_refresh_authorizer(scheme: str, token: str):
except jwt.ExpiredSignatureError: except jwt.ExpiredSignatureError:
logger.debug("! JWT-refresh Expired signature") logger.debug("! JWT-refresh Expired signature")
return None return None
except BaseException as e: except Exception as e:
logger.warning("! JWT-refresh Base Exception") logger.warning("! JWT-refresh Base Exception")
logger.debug(e) logger.debug(e)
return None return None
return payload return payload
def jwt_context(context): async def jwt_context(context):
user = users.get(user_id=context["userId"], tenant_id=context["tenantId"]) user = await users.get(user_id=context["userId"], tenant_id=context["tenantId"])
if user is None: if user is None:
return None return None
return { return {
@ -96,8 +96,8 @@ def generate_jwt_refresh(user_id, tenant_id, iat, aud, jwt_jti):
return token return token
def api_key_authorizer(token): async def api_key_authorizer(token):
t = tenants.get_by_api_key(token) t = await tenants.get_by_api_key(token)
if t is not None: if t is not None:
t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"]) t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"])
return t return t

View file

@ -7,7 +7,7 @@ from chalicelib.utils.event_filter_definition import Event
TABLE = "public.autocomplete" TABLE = "public.autocomplete"
def __get_autocomplete_table(value, project_id): async def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id, autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click, schemas.EventType.click,
schemas.FilterType.user_device, schemas.FilterType.user_device,
@ -48,7 +48,7 @@ def __get_autocomplete_table(value, project_id):
AND value ILIKE %(value)s AND value ILIKE %(value)s
ORDER BY value ORDER BY value
LIMIT 5)""") LIMIT 5)""")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";", query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";",
{"project_id": project_id, {"project_id": project_id,
"value": helper.string_to_sql_like(value), "value": helper.string_to_sql_like(value),
@ -56,7 +56,7 @@ def __get_autocomplete_table(value, project_id):
"c_list": tuple(c_list) "c_list": tuple(c_list)
}) })
try: try:
cur.execute(query) await cur.execute(query)
except Exception as err: except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8')) print(query.decode('UTF-8'))
@ -64,7 +64,7 @@ def __get_autocomplete_table(value, project_id):
print(value) print(value)
print("--------------------") print("--------------------")
raise err raise err
results = cur.fetchall() results = await cur.fetchall()
for r in results: for r in results:
r["type"] = r.pop("_type") r["type"] = r.pop("_type")
results = helper.list_to_camel_case(results) results = helper.list_to_camel_case(results)
@ -110,20 +110,20 @@ def __generic_query(typename, value_length=None):
def __generic_autocomplete(event: Event): def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None): async def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = __generic_query(event.ui_type, value_length=len(value)) query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value), params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)} "svalue": helper.string_to_sql_like("^" + value)}
cur.execute(cur.mogrify(query, params)) await cur.execute(cur.mogrify(query, params))
return helper.list_to_camel_case(cur.fetchall()) return helper.list_to_camel_case(await cur.fetchall())
return f return f
def __generic_autocomplete_metas(typename): def __generic_autocomplete_metas(typename):
def f(project_id, text): async def f(project_id, text):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
params = {"project_id": project_id, "value": helper.string_to_sql_like(text), params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)} "svalue": helper.string_to_sql_like("^" + text)}
@ -133,8 +133,8 @@ def __generic_autocomplete_metas(typename):
return [] return []
query = cur.mogrify(__generic_query(typename, value_length=len(text)), params) query = cur.mogrify(__generic_query(typename, value_length=len(text)), params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
return f return f
@ -214,19 +214,19 @@ def __errors_query(source=None, value_length=None):
LIMIT 5));""" LIMIT 5));"""
def __search_errors(project_id, value, key=None, source=None): async def __search_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(__errors_query(source, cur.mogrify(__errors_query(source,
value_length=len(value)), value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value), {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value), "svalue": helper.string_to_sql_like("^" + value),
"source": source})) "source": source}))
results = helper.list_to_camel_case(cur.fetchall()) results = helper.list_to_camel_case(await cur.fetchall())
return results return results
def __search_errors_ios(project_id, value, key=None, source=None): async def __search_errors_ios(project_id, value, key=None, source=None):
if len(value) > 2: if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason) query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value, lg.reason AS value,
@ -287,15 +287,15 @@ def __search_errors_ios(project_id, value, key=None, source=None):
AND lg.project_id = %(project_id)s AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s AND lg.name ILIKE %(svalue)s
LIMIT 5);""" LIMIT 5);"""
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), await cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})) "svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall()) results = helper.list_to_camel_case(await cur.fetchall())
return results return results
def __search_metadata(project_id, value, key=None, source=None): async def __search_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys} meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys(): if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
return [] return []
@ -321,11 +321,11 @@ def __search_metadata(project_id, value, key=None, source=None):
FROM public.sessions FROM public.sessions
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""") AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(f"""\ await cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})) "svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall()) results = helper.list_to_camel_case(await cur.fetchall())
return results return results

View file

@ -4,23 +4,23 @@ from chalicelib.core import projects, log_tool_datadog, log_tool_stackdriver, lo
from chalicelib.core import users from chalicelib.core import users
def get_state(tenant_id): async def get_state(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id) pids = await projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
recorded = False recorded = False
meta = False meta = False
if len(pids) > 0: if len(pids) > 0:
cur.execute( await cur.execute(
cur.mogrify("""SELECT EXISTS(( SELECT 1 cur.mogrify("""SELECT EXISTS(( SELECT 1
FROM public.sessions AS s FROM public.sessions AS s
WHERE s.project_id IN %(ids)s)) AS exists;""", WHERE s.project_id IN %(ids)s)) AS exists;""",
{"ids": tuple(pids)}) {"ids": tuple(pids)})
) )
recorded = cur.fetchone()["exists"] recorded = await cur.fetchone()["exists"]
meta = False meta = False
if recorded: if recorded:
cur.execute("""SELECT EXISTS((SELECT 1 await cur.execute("""SELECT EXISTS((SELECT 1
FROM public.projects AS p FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1 LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions FROM public.sessions
@ -36,7 +36,7 @@ def get_state(tenant_id):
OR p.metadata_10 IS NOT NULL ) OR p.metadata_10 IS NOT NULL )
)) AS exists;""") )) AS exists;""")
meta = cur.fetchone()["exists"] meta = await cur.fetchone()["exists"]
return [ return [
{"task": "Install OpenReplay", {"task": "Install OpenReplay",
@ -46,38 +46,38 @@ def get_state(tenant_id):
"done": meta, "done": meta,
"URL": "https://docs.openreplay.com/data-privacy-security/metadata"}, "URL": "https://docs.openreplay.com/data-privacy-security/metadata"},
{"task": "Invite Team Members", {"task": "Invite Team Members",
"done": len(users.get_members(tenant_id=tenant_id)) > 1, "done": len(await users.get_members(tenant_id=tenant_id)) > 1,
"URL": "https://app.openreplay.com/client/manage-users"}, "URL": "https://app.openreplay.com/client/manage-users"},
{"task": "Integrations", {"task": "Integrations",
"done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ "done": len(await log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ or len(await log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, or len(await log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
"URL": "https://docs.openreplay.com/integrations"} "URL": "https://docs.openreplay.com/integrations"}
] ]
def get_state_installing(tenant_id): async def get_state_installing(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id) pids = await projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
recorded = False recorded = False
if len(pids) > 0: if len(pids) > 0:
cur.execute( await cur.execute(
cur.mogrify("""SELECT EXISTS(( SELECT 1 cur.mogrify("""SELECT EXISTS(( SELECT 1
FROM public.sessions AS s FROM public.sessions AS s
WHERE s.project_id IN %(ids)s)) AS exists;""", WHERE s.project_id IN %(ids)s)) AS exists;""",
{"ids": tuple(pids)}) {"ids": tuple(pids)})
) )
recorded = cur.fetchone()["exists"] recorded = await cur.fetchone()["exists"]
return {"task": "Install OpenReplay", return {"task": "Install OpenReplay",
"done": recorded, "done": recorded,
"URL": "https://docs.openreplay.com/getting-started/quick-start"} "URL": "https://docs.openreplay.com/getting-started/quick-start"}
def get_state_identify_users(tenant_id): async def get_state_identify_users(tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute("""SELECT EXISTS((SELECT 1 await cur.execute("""SELECT EXISTS((SELECT 1
FROM public.projects AS p FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1 LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions FROM public.sessions
@ -93,22 +93,22 @@ def get_state_identify_users(tenant_id):
OR p.metadata_10 IS NOT NULL ) OR p.metadata_10 IS NOT NULL )
)) AS exists;""") )) AS exists;""")
meta = cur.fetchone()["exists"] meta = await cur.fetchone()["exists"]
return {"task": "Identify Users", return {"task": "Identify Users",
"done": meta, "done": meta,
"URL": "https://docs.openreplay.com/data-privacy-security/metadata"} "URL": "https://docs.openreplay.com/data-privacy-security/metadata"}
def get_state_manage_users(tenant_id): async def get_state_manage_users(tenant_id):
return {"task": "Invite Team Members", return {"task": "Invite Team Members",
"done": len(users.get_members(tenant_id=tenant_id)) > 1, "done": len(await users.get_members(tenant_id=tenant_id)) > 1,
"URL": "https://app.openreplay.com/client/manage-users"} "URL": "https://app.openreplay.com/client/manage-users"}
def get_state_integrations(tenant_id): async def get_state_integrations(tenant_id):
return {"task": "Integrations", return {"task": "Integrations",
"done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ "done": len(await log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ or len(await log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, or len(await log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
"URL": "https://docs.openreplay.com/integrations"} "URL": "https://docs.openreplay.com/integrations"}

View file

@ -3,16 +3,16 @@ from chalicelib.utils.storage import StorageClient
from decouple import config from decouple import config
def get_canvas_presigned_urls(session_id, project_id): async def get_canvas_presigned_urls(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT * SELECT *
FROM events.canvas_recordings FROM events.canvas_recordings
WHERE session_id = %(session_id)s WHERE session_id = %(session_id)s
ORDER BY timestamp;""", ORDER BY timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows = cur.fetchall() rows = await cur.fetchall()
for i in range(len(rows)): for i in range(len(rows)):
params = { params = {
@ -21,7 +21,7 @@ def get_canvas_presigned_urls(session_id, project_id):
"recordingId": rows[i]["recording_id"] "recordingId": rows[i]["recording_id"]
} }
key = config("CANVAS_PATTERN", default="%(sessionId)s/%(recordingId)s.mp4") % params key = config("CANVAS_PATTERN", default="%(sessionId)s/%(recordingId)s.mp4") % params
rows[i] = StorageClient.get_presigned_url_for_sharing( rows[i] = await StorageClient.get_presigned_url_for_sharing(
bucket=config("CANVAS_BUCKET", default=config("sessions_bucket")), bucket=config("CANVAS_BUCKET", default=config("sessions_bucket")),
expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
key=key key=key

View file

@ -27,7 +27,7 @@ COALESCE((SELECT TRUE
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """
def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True): async def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
no_platform = True no_platform = True
for f in data.filters: for f in data.filters:
if f.type == schemas.FilterType.platform: if f.type == schemas.FilterType.platform:
@ -38,11 +38,11 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
value=[schemas.PlatformType.desktop], value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is)) operator=schemas.SearchEventOperator._is))
full_args, query_part = sessions_search.search_query_parts(data=data, error_status=None, errors_only=False, full_args, query_part = await sessions_search.search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None, favorite_only=data.bookmarked, issue=None,
project_id=project_id, user_id=user_id) project_id=project_id, user_id=user_id)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
data.order = schemas.SortOrderType.desc data.order = schemas.SortOrderType.desc
data.sort = 'duration' data.sort = 'duration'
@ -57,7 +57,7 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
# print(main_query) # print(main_query)
# print("--------------------") # print("--------------------")
try: try:
cur.execute(main_query) await cur.execute(main_query)
except Exception as err: except Exception as err:
print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------") print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
print(main_query.decode('UTF-8')) print(main_query.decode('UTF-8'))
@ -66,12 +66,12 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
print("--------------------") print("--------------------")
raise err raise err
session = cur.fetchone() session = await cur.fetchone()
if session: if session:
if include_mobs: if include_mobs:
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) session['domURL'] = await sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) session['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session["session_id"])
session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"], session['events'] = await events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
event_type=schemas.EventType.location) event_type=schemas.EventType.location)
return helper.dict_to_camel_case(session) return helper.dict_to_camel_case(session)

View file

@ -1,6 +1,6 @@
import logging import logging
import requests import httpx
from decouple import config from decouple import config
from fastapi import HTTPException, status from fastapi import HTTPException, status
@ -13,20 +13,21 @@ logger = logging.getLogger(__name__)
class MSTeams(BaseCollaboration): class MSTeams(BaseCollaboration):
@classmethod @classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema): async def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None, if await webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.msteams): webhook_type=schemas.WebhookType.msteams):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url): if await cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id, return await webhook.add(tenant_id=tenant_id,
endpoint=data.url.unicode_string(), endpoint=data.url.unicode_string(),
webhook_type=schemas.WebhookType.msteams, webhook_type=schemas.WebhookType.msteams,
name=data.name) name=data.name)
return None return None
@classmethod @classmethod
def say_hello(cls, url): async def say_hello(cls, url):
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=url, url=url,
json={ json={
"@type": "MessageCard", "@type": "MessageCard",
@ -41,12 +42,13 @@ class MSTeams(BaseCollaboration):
return True return True
@classmethod @classmethod
def send_raw(cls, tenant_id, webhook_id, body): async def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None: if integration is None:
return {"errors": ["msteams integration not found"]} return {"errors": ["msteams integration not found"]}
try: try:
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"], url=integration["endpoint"],
json=body, json=body,
timeout=5) timeout=5)
@ -54,9 +56,6 @@ class MSTeams(BaseCollaboration):
logging.warning(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}") logging.warning(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
logging.warning(r.text) logging.warning(r.text)
return None return None
except requests.exceptions.Timeout:
logging.warning(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
return None
except Exception as e: except Exception as e:
logging.warning(f"!! Issue sending msteams raw webhookId:{webhook_id}") logging.warning(f"!! Issue sending msteams raw webhookId:{webhook_id}")
logging.warning(e) logging.warning(e)
@ -64,8 +63,8 @@ class MSTeams(BaseCollaboration):
return {"data": r.text} return {"data": r.text}
@classmethod @classmethod
def send_batch(cls, tenant_id, webhook_id, attachments): async def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None: if integration is None:
return {"errors": ["msteams integration not found"]} return {"errors": ["msteams integration not found"]}
logging.debug(f"====> sending msteams batch notification: {len(attachments)}") logging.debug(f"====> sending msteams batch notification: {len(attachments)}")
@ -74,7 +73,8 @@ class MSTeams(BaseCollaboration):
for j in range(1, len(part), 2): for j in range(1, len(part), 2):
part.insert(j, {"text": "***"}) part.insert(j, {"text": "***"})
r = requests.post(url=integration["endpoint"], async with httpx.AsyncClient() as client:
r = await client.post(url=integration["endpoint"],
json={ json={
"@type": "MessageCard", "@type": "MessageCard",
"@context": "http://schema.org/extensions", "@context": "http://schema.org/extensions",
@ -86,13 +86,14 @@ class MSTeams(BaseCollaboration):
logging.warning(r.text) logging.warning(r.text)
@classmethod @classmethod
def __share(cls, tenant_id, integration_id, attachement, extra=None): async def __share(cls, tenant_id, integration_id, attachement, extra=None):
if extra is None: if extra is None:
extra = {} extra = {}
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None: if integration is None:
return {"errors": ["Microsoft Teams integration not found"]} return {"errors": ["Microsoft Teams integration not found"]}
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"], url=integration["endpoint"],
json={ json={
"@type": "MessageCard", "@type": "MessageCard",
@ -104,7 +105,7 @@ class MSTeams(BaseCollaboration):
return r.text return r.text
@classmethod @classmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): async def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None):
title = f"*{user}* has shared the below session!" title = f"*{user}* has shared the below session!"
link = f"{config('SITE_URL')}/{project_id}/session/{session_id}" link = f"{config('SITE_URL')}/{project_id}/session/{session_id}"
args = { args = {
@ -123,13 +124,13 @@ class MSTeams(BaseCollaboration):
"name": "Comment:", "name": "Comment:",
"value": comment "value": comment
}) })
data = cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) data = await cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title})
if "errors" in data: if "errors" in data:
return data return data
return {"data": data} return {"data": data}
@classmethod @classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): async def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None):
title = f"*{user}* has shared the below error!" title = f"*{user}* has shared the below error!"
link = f"{config('SITE_URL')}/{project_id}/errors/{error_id}" link = f"{config('SITE_URL')}/{project_id}/errors/{error_id}"
args = { args = {
@ -148,18 +149,18 @@ class MSTeams(BaseCollaboration):
"name": "Comment:", "name": "Comment:",
"value": comment "value": comment
}) })
data = cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title}) data = await cls.__share(tenant_id, integration_id, attachement=args, extra={"summary": title})
if "errors" in data: if "errors" in data:
return data return data
return {"data": data} return {"data": data}
@classmethod @classmethod
def get_integration(cls, tenant_id, integration_id=None): async def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None: if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id, return await webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.msteams) webhook_type=schemas.WebhookType.msteams)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams) integrations = await webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams)
if integrations is None or len(integrations) == 0: if integrations is None or len(integrations) == 0:
return None return None
return integrations[0] return integrations[0]

View file

@ -1,9 +1,9 @@
from datetime import datetime from datetime import datetime
import requests
from decouple import config from decouple import config
from fastapi import HTTPException, status from fastapi import HTTPException, status
import httpx
import schemas import schemas
from chalicelib.core import webhook from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration from chalicelib.core.collaboration_base import BaseCollaboration
@ -11,20 +11,21 @@ from chalicelib.core.collaboration_base import BaseCollaboration
class Slack(BaseCollaboration): class Slack(BaseCollaboration):
@classmethod @classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema): async def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None, if await webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.slack): webhook_type=schemas.WebhookType.slack):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url): if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id, return await webhook.add(tenant_id=tenant_id,
endpoint=data.url.unicode_string(), endpoint=data.url.unicode_string(),
webhook_type=schemas.WebhookType.slack, webhook_type=schemas.WebhookType.slack,
name=data.name) name=data.name)
return None return None
@classmethod @classmethod
def say_hello(cls, url): async def say_hello(cls, url):
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=url, url=url,
json={ json={
"attachments": [ "attachments": [
@ -41,12 +42,13 @@ class Slack(BaseCollaboration):
return True return True
@classmethod @classmethod
def send_raw(cls, tenant_id, webhook_id, body): async def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None: if integration is None:
return {"errors": ["slack integration not found"]} return {"errors": ["slack integration not found"]}
try: try:
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"], url=integration["endpoint"],
json=body, json=body,
timeout=5) timeout=5)
@ -54,9 +56,6 @@ class Slack(BaseCollaboration):
print(f"!! issue sending slack raw; webhookId:{webhook_id} code:{r.status_code}") print(f"!! issue sending slack raw; webhookId:{webhook_id} code:{r.status_code}")
print(r.text) print(r.text)
return None return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending slack raw webhookId:{webhook_id}")
return None
except Exception as e: except Exception as e:
print(f"!! Issue sending slack raw webhookId:{webhook_id}") print(f"!! Issue sending slack raw webhookId:{webhook_id}")
print(str(e)) print(str(e))
@ -64,13 +63,14 @@ class Slack(BaseCollaboration):
return {"data": r.text} return {"data": r.text}
@classmethod @classmethod
def send_batch(cls, tenant_id, webhook_id, attachments): async def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None: if integration is None:
return {"errors": ["slack integration not found"]} return {"errors": ["slack integration not found"]}
print(f"====> sending slack batch notification: {len(attachments)}") print(f"====> sending slack batch notification: {len(attachments)}")
for i in range(0, len(attachments), 100): for i in range(0, len(attachments), 100):
r = requests.post( async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"], url=integration["endpoint"],
json={"attachments": attachments[i:i + 100]}) json={"attachments": attachments[i:i + 100]})
if r.status_code != 200: if r.status_code != 200:
@ -80,47 +80,48 @@ class Slack(BaseCollaboration):
print(r.text) print(r.text)
@classmethod @classmethod
def __share(cls, tenant_id, integration_id, attachement, extra=None): async def __share(cls, tenant_id, integration_id, attachement, extra=None):
if extra is None: if extra is None:
extra = {} extra = {}
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id) integration = await cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None: if integration is None:
return {"errors": ["slack integration not found"]} return {"errors": ["slack integration not found"]}
attachement["ts"] = datetime.now().timestamp() attachement["ts"] = datetime.now().timestamp()
r = requests.post(url=integration["endpoint"], json={"attachments": [attachement], **extra}) async with httpx.AsyncClient() as client:
r = await client.post(url=integration["endpoint"], json={"attachments": [attachement], **extra})
return r.text return r.text
@classmethod @classmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None): async def share_session(cls, tenant_id, project_id, session_id, user, comment, project_name=None, integration_id=None):
args = {"fallback": f"{user} has shared the below session!", args = {"fallback": f"{user} has shared the below session!",
"pretext": f"{user} has shared the below session!", "pretext": f"{user} has shared the below session!",
"title": f"{config('SITE_URL')}/{project_id}/session/{session_id}", "title": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}", "title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"text": comment} "text": comment}
data = cls.__share(tenant_id, integration_id, attachement=args) data = await cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data: if "errors" in data:
return data return data
return {"data": data} return {"data": data}
@classmethod @classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None): async def share_error(cls, tenant_id, project_id, error_id, user, comment, project_name=None, integration_id=None):
args = {"fallback": f"{user} has shared the below error!", args = {"fallback": f"{user} has shared the below error!",
"pretext": f"{user} has shared the below error!", "pretext": f"{user} has shared the below error!",
"title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", "title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", "title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"text": comment} "text": comment}
data = cls.__share(tenant_id, integration_id, attachement=args) data = await cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data: if "errors" in data:
return data return data
return {"data": data} return {"data": data}
@classmethod @classmethod
def get_integration(cls, tenant_id, integration_id=None): async def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None: if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id, return await webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.slack) webhook_type=schemas.WebhookType.slack)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack) integrations = await webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack)
if integrations is None or len(integrations) == 0: if integrations is None or len(integrations) == 0:
return None return None
return integrations[0] return integrations[0]

View file

@ -19,10 +19,10 @@ PIE_CHART_GROUP = 5
# timeseries / # timeseries /
# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs # table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
# remove "table of" calls from this function # remove "table of" calls from this function
def __try_live(project_id, data: schemas.CardSchema): async def __try_live(project_id, data: schemas.CardSchema):
results = [] results = []
for i, s in enumerate(data.series): for i, s in enumerate(data.series):
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, results.append(await sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type, view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value)) metric_of=data.metric_of, metric_value=data.metric_value))
if data.view_type == schemas.MetricTimeseriesViewType.progress: if data.view_type == schemas.MetricTimeseriesViewType.progress:
@ -30,7 +30,7 @@ def __try_live(project_id, data: schemas.CardSchema):
diff = s.filter.endTimestamp - s.filter.startTimestamp diff = s.filter.endTimestamp - s.filter.startTimestamp
s.filter.endTimestamp = s.filter.startTimestamp s.filter.endTimestamp = s.filter.startTimestamp
s.filter.startTimestamp = s.filter.endTimestamp - diff s.filter.startTimestamp = s.filter.endTimestamp - diff
r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, r["previousCount"] = await sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type, view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value) metric_of=data.metric_of, metric_value=data.metric_value)
r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"]) r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
@ -48,64 +48,64 @@ def __try_live(project_id, data: schemas.CardSchema):
return results return results
def __get_table_of_series(project_id, data: schemas.CardSchema): async def __get_table_of_series(project_id, data: schemas.CardSchema):
results = [] results = []
for i, s in enumerate(data.series): for i, s in enumerate(data.series):
results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, results.append(await sessions.search2_table(data=s.filter, project_id=project_id, density=data.density,
metric_of=data.metric_of, metric_value=data.metric_value)) metric_of=data.metric_of, metric_value=data.metric_value))
return results return results
def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None): async def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None):
if len(data.series) == 0: if len(data.series) == 0:
return { return {
"stages": [], "stages": [],
"totalDropDueToIssues": 0 "totalDropDueToIssues": 0
} }
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) return await funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __get_errors_list(project_id, user_id, data: schemas.CardSchema): async def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
if len(data.series) == 0: if len(data.series) == 0:
return { return {
"total": 0, "total": 0,
"errors": [] "errors": []
} }
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) return await errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __get_sessions_list(project_id, user_id, data: schemas.CardSchema): async def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
if len(data.series) == 0: if len(data.series) == 0:
logger.debug("empty series") logger.debug("empty series")
return { return {
"total": 0, "total": 0,
"sessions": [] "sessions": []
} }
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id) return await sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True): async def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True):
if len(data.series) == 0: if len(data.series) == 0:
return None return None
return click_maps.search_short_session(project_id=project_id, user_id=user_id, return await click_maps.search_short_session(project_id=project_id, user_id=user_id,
data=schemas.ClickMapSessionsSearch( data=schemas.ClickMapSessionsSearch(
**data.series[0].filter.model_dump()), **data.series[0].filter.model_dump()),
include_mobs=include_mobs) include_mobs=include_mobs)
def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis): async def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0: if len(data.series) == 0:
data.series.append( data.series.append(
schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp)) schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema): elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
data.series[0].filter = schemas.PathAnalysisSchema() data.series[0].filter = schemas.PathAnalysisSchema()
return product_analytics.path_analysis(project_id=project_id, data=data) return await product_analytics.path_analysis(project_id=project_id, data=data)
def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None): async def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None):
series_charts = __try_live(project_id=project_id, data=data) series_charts = await __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress: if data.view_type == schemas.MetricTimeseriesViewType.progress:
return series_charts return series_charts
results = [{}] * len(series_charts[0]) results = [{}] * len(series_charts[0])
@ -120,39 +120,39 @@ def not_supported(**args):
raise Exception("not supported") raise Exception("not supported")
def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id): async def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) return await __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int): async def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data) return await __get_errors_list(project_id=project_id, user_id=user_id, data=data)
def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None): async def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
return __get_table_of_series(project_id=project_id, data=data) return await __get_table_of_series(project_id=project_id, data=data)
def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int): async def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
supported = { supported = {
schemas.MetricOfTable.sessions: __get_table_of_sessions, schemas.MetricOfTable.sessions: __get_table_of_sessions,
schemas.MetricOfTable.errors: __get_table_of_errors, schemas.MetricOfTable.errors: __get_table_of_errors,
@ -163,12 +163,12 @@ def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
schemas.MetricOfTable.user_country: __get_table_of_countries, schemas.MetricOfTable.user_country: __get_table_of_countries,
schemas.MetricOfTable.visited_url: __get_table_of_urls, schemas.MetricOfTable.visited_url: __get_table_of_urls,
} }
return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id) return await supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)
def get_chart(project_id: int, data: schemas.CardSchema, user_id: int): async def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
if data.is_predefined: if data.is_predefined:
return custom_metrics_predefined.get_metric(key=data.metric_of, return await custom_metrics_predefined.get_metric(key=data.metric_of,
project_id=project_id, project_id=project_id,
data=data.model_dump()) data=data.model_dump())
@ -180,7 +180,7 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
schemas.MetricType.insights: not_supported, schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_chart schemas.MetricType.pathAnalysis: __get_path_analysis_chart
} }
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id) return await supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
def __merge_metric_with_data(metric: schemas.CardSchema, def __merge_metric_with_data(metric: schemas.CardSchema,
@ -200,8 +200,8 @@ def __merge_metric_with_data(metric: schemas.CardSchema,
return metric return metric
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): async def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) card: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if card is None: if card is None:
return None return None
metric: schemas.CardSchema = schemas.CardSchema(**card) metric: schemas.CardSchema = schemas.CardSchema(**card)
@ -209,13 +209,13 @@ def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSe
results = [] results = []
for s in metric.series: for s in metric.series:
results.append({"seriesId": s.series_id, "seriesName": s.name, results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) **await sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): async def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) raw_metric: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None: if raw_metric is None:
return None return None
metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
@ -224,11 +224,11 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessions
return None return None
for s in metric.series: for s in metric.series:
return {"seriesId": s.series_id, "seriesName": s.name, return {"seriesId": s.series_id, "seriesName": s.name,
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)} **await funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): async def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) raw_metric: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None: if raw_metric is None:
return None return None
metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
@ -237,10 +237,10 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc
return None return None
for s in metric.series: for s in metric.series:
return {"seriesId": s.series_id, "seriesName": s.name, return {"seriesId": s.series_id, "seriesName": s.name,
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)} **await errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema): async def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = [] results = []
if len(data.series) == 0: if len(data.series) == 0:
return results return results
@ -250,21 +250,21 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True)) s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))
results.append({"seriesId": None, "seriesName": s.name, results.append({"seriesId": None, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) **await sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results return results
def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel): async def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
if len(data.series) == 0: if len(data.series) == 0:
return {"data": []} return {"data": []}
data.series[0].filter.startTimestamp = data.startTimestamp data.series[0].filter.startTimestamp = data.startTimestamp
data.series[0].filter.endTimestamp = data.endTimestamp data.series[0].filter.endTimestamp = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) data = await funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
return {"data": data} return {"data": data}
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis): async def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.filters) > 0 or len(data.series) > 0: if len(data.filters) > 0 or len(data.series) > 0:
filters = [f.model_dump(by_alias=True) for f in data.filters] \ filters = [f.model_dump(by_alias=True) for f in data.filters] \
+ [f.model_dump(by_alias=True) for f in data.series[0].filter.filters] + [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
@ -283,15 +283,15 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type, search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on, operator=schemas.SearchEventOperator._not_on,
value=s.value)) value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data) result = await sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result return result
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema): async def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
if data.is_predefined: if data.is_predefined:
return not_supported() return not_supported()
if data.metric_of == schemas.MetricOfTable.issues: if data.metric_of == schemas.MetricOfTable.issues:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data) return await __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
supported = { supported = {
schemas.MetricType.timeseries: not_supported, schemas.MetricType.timeseries: not_supported,
schemas.MetricType.table: not_supported, schemas.MetricType.table: not_supported,
@ -300,7 +300,7 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
schemas.MetricType.insights: not_supported, schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_issues, schemas.MetricType.pathAnalysis: __get_path_analysis_issues,
} }
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id) return await supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis): def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
@ -311,11 +311,11 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
return r return r
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False): async def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
session_data = None session_data = None
if data.metric_type == schemas.MetricType.click_map: if data.metric_type == schemas.MetricType.click_map:
session_data = __get_click_map_chart(project_id=project_id, user_id=user_id, session_data = await __get_click_map_chart(project_id=project_id, user_id=user_id,
data=data, include_mobs=False) data=data, include_mobs=False)
if session_data is not None: if session_data is not None:
session_data = json.dumps(session_data) session_data = json.dumps(session_data)
@ -330,7 +330,7 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
params["default_config"] = json.dumps(data.default_config.model_dump()) params["default_config"] = json.dumps(data.default_config.model_dump())
params["card_info"] = None params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis: if data.metric_type == schemas.MetricType.pathAnalysis:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) params["card_info"] = json.dumps(await __get_path_analysis_card_info(data=data))
query = """INSERT INTO metrics (project_id, user_id, name, is_public, query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value, view_type, metric_type, metric_of, metric_value,
@ -349,15 +349,15 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
RETURNING metric_id;""" RETURNING metric_id;"""
query = cur.mogrify(query, params) query = cur.mogrify(query, params)
cur.execute(query) await cur.execute(query)
r = cur.fetchone() r = await cur.fetchone()
if dashboard: if dashboard:
return r["metric_id"] return r["metric_id"]
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)} return {"data": await get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
def update_card(metric_id, user_id, project_id, data: schemas.CardSchema): async def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) metric: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None: if metric is None:
return None return None
series_ids = [r["seriesId"] for r in metric["series"]] series_ids = [r["seriesId"] for r in metric["series"]]
@ -391,9 +391,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
params["d_series_ids"] = tuple(d_series_ids) params["d_series_ids"] = tuple(d_series_ids)
params["card_info"] = None params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis: if data.metric_type == schemas.MetricType.pathAnalysis:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) params["card_info"] = json.dumps(await __get_path_analysis_card_info(data=data))
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_queries = [] sub_queries = []
if len(n_series) > 0: if len(n_series) > 0:
sub_queries.append(f"""\ sub_queries.append(f"""\
@ -430,11 +430,11 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
AND project_id = %(project_id)s AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public) AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params) RETURNING metric_id;""", params)
cur.execute(query) await cur.execute(query)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id) return await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False): async def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s", constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"] "metrics.deleted_at ISNULL"]
params = {"project_id": project_id, "user_id": user_id, params = {"project_id": project_id, "user_id": user_id,
@ -451,7 +451,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)") constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query, params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains) op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_join = "" sub_join = ""
if include_series: if include_series:
sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
@ -481,8 +481,8 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
WHERE {" AND ".join(constraints)} WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order.value} ORDER BY created_at {data.order.value}
LIMIT %(limit)s OFFSET %(offset)s;""", params) LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
if include_series: if include_series:
for r in rows: for r in rows:
for s in r["series"]: for s in r["series"]:
@ -495,20 +495,20 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
return rows return rows
def get_all(project_id, user_id): async def get_all(project_id, user_id):
default_search = schemas.SearchCardsSchema() default_search = schemas.SearchCardsSchema()
result = rows = search_all(project_id=project_id, user_id=user_id, data=default_search) result = rows = await search_all(project_id=project_id, user_id=user_id, data=default_search)
while len(rows) == default_search.limit: while len(rows) == default_search.limit:
default_search.page += 1 default_search.page += 1
rows = search_all(project_id=project_id, user_id=user_id, data=default_search) rows = await search_all(project_id=project_id, user_id=user_id, data=default_search)
result += rows result += rows
return result return result
def delete_card(project_id, metric_id, user_id): async def delete_card(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
UPDATE public.metrics UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
@ -530,8 +530,8 @@ def __get_path_analysis_attributes(row):
return row return row
def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False): async def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type, f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
view_type, metric_of, metric_value, metric_format, is_pinned, default_config, view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
@ -563,8 +563,8 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
ORDER BY created_at;""", ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id} {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
) )
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return None return None
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
@ -578,9 +578,9 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
return row return row
def get_series_for_alert(project_id, user_id): async def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT series_id AS value, """SELECT series_id AS value,
metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name, metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
@ -598,13 +598,13 @@ def get_series_for_alert(project_id, user_id):
{"project_id": project_id, "user_id": user_id} {"project_id": project_id, "user_id": user_id}
) )
) )
rows = cur.fetchall() rows = await cur.fetchall()
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)
def change_state(project_id, metric_id, user_id, status): async def change_state(project_id, metric_id, user_id, status):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
UPDATE public.metrics UPDATE public.metrics
SET active = %(status)s SET active = %(status)s
@ -612,14 +612,14 @@ def change_state(project_id, metric_id, user_id, status):
AND (user_id = %(user_id)s OR is_public);""", AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "status": status, "user_id": user_id}) {"metric_id": metric_id, "status": status, "user_id": user_id})
) )
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id) return await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, async def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CardSessionsSchema data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None # , range_value=None, start_date=None, end_date=None
): ):
card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) card: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if card is None: if card is None:
return None return None
metric: schemas.CardSchema = schemas.CardSchema(**card) metric: schemas.CardSchema = schemas.CardSchema(**card)
@ -631,7 +631,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
s.filter.endTimestamp = data.endTimestamp s.filter.endTimestamp = data.endTimestamp
s.filter.limit = data.limit s.filter.limit = data.limit
s.filter.page = data.page s.filter.page = data.page
issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) issues_list = await funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", []) issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
issue = None issue = None
for i in issues_list: for i in issues_list:
@ -639,7 +639,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
issue = i issue = i
break break
if issue is None: if issue is None:
issue = issues.get(project_id=project_id, issue_id=issue_id) issue = await issues.get(project_id=project_id, issue_id=issue_id)
if issue is not None: if issue is not None:
issue = {**issue, issue = {**issue,
"affectedSessions": 0, "affectedSessions": 0,
@ -648,14 +648,14 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
"lostConversions": 0, "lostConversions": 0,
"unaffectedSessions": 0} "unaffectedSessions": 0}
return {"seriesId": s.series_id, "seriesName": s.name, return {"seriesId": s.series_id, "seriesName": s.name,
"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, "sessions": await sessions.search_sessions(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter) issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []}, if issue is not None else {"total": 0, "sessions": []},
"issue": issue} "issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): async def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True) raw_metric: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
if raw_metric is None: if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found") raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
@ -666,7 +666,7 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
if metric.is_predefined: if metric.is_predefined:
return custom_metrics_predefined.get_metric(key=metric.metric_of, return await custom_metrics_predefined.get_metric(key=metric.metric_of,
project_id=project_id, project_id=project_id,
data=data.model_dump()) data=data.model_dump())
elif metric.metric_type == schemas.MetricType.click_map: elif metric.metric_type == schemas.MetricType.click_map:
@ -675,14 +675,14 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"]) __get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])
mob_exists = False mob_exists = False
for k in keys: for k in keys:
if StorageClient.exists(bucket=config("sessions_bucket"), key=k): if await StorageClient.exists(bucket=config("sessions_bucket"), key=k):
mob_exists = True mob_exists = True
break break
if mob_exists: if mob_exists:
raw_metric["data"]['domURL'] = sessions_mobs.get_urls(session_id=raw_metric["data"]["sessionId"], raw_metric["data"]['domURL'] = await sessions_mobs.get_urls(session_id=raw_metric["data"]["sessionId"],
project_id=project_id) project_id=project_id)
raw_metric["data"]['mobsUrl'] = sessions_mobs.get_urls_depercated( raw_metric["data"]['mobsUrl'] = await sessions_mobs.get_urls_depercated(
session_id=raw_metric["data"]["sessionId"]) session_id=raw_metric["data"]["sessionId"])
return raw_metric["data"] return raw_metric["data"]
return get_chart(project_id=project_id, data=metric, user_id=user_id) return await get_chart(project_id=project_id, data=metric, user_id=user_id)

View file

@ -10,7 +10,7 @@ from chalicelib.core import metrics
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ async def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict): schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions, supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
@ -58,4 +58,4 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data) return await supported.get(key, lambda *args: None)(project_id=project_id, **data)

View file

@ -7,8 +7,8 @@ from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): async def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description) pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s) VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
RETURNING *""" RETURNING *"""
@ -25,28 +25,28 @@ def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
# params[f"config_{i}"]["position"] = i # params[f"config_{i}"]["position"] = i
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"]) # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i}) params[f"config_{i}"] = json.dumps({"position": i})
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return {"errors": ["something went wrong while creating the dashboard"]} return {"errors": ["something went wrong while creating the dashboard"]}
return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])} return {"data": await get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
def get_dashboards(project_id, user_id): async def get_dashboards(project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = f"""SELECT * pg_query = f"""SELECT *
FROM dashboards FROM dashboards
WHERE deleted_at ISNULL WHERE deleted_at ISNULL
AND project_id = %(projectId)s AND project_id = %(projectId)s
AND (user_id = %(userId)s OR is_public);""" AND (user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id} params = {"userId": user_id, "projectId": project_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = await cur.fetchall()
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)
def get_dashboard(project_id, user_id, dashboard_id): async def get_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
FROM dashboards FROM dashboards
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
@ -77,8 +77,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
AND dashboard_id = %(dashboard_id)s AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);""" AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
if row is not None: if row is not None:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
for w in row["widgets"]: for w in row["widgets"]:
@ -92,26 +92,26 @@ def get_dashboard(project_id, user_id, dashboard_id):
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def delete_dashboard(project_id, user_id, dashboard_id): async def delete_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboards pg_query = """UPDATE dashboards
SET deleted_at = timezone('utc'::text, now()) SET deleted_at = timezone('utc'::text, now())
WHERE dashboards.project_id = %(projectId)s WHERE dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);""" AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}} return {"data": {"success": True}}
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema): async def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """SELECT COALESCE(COUNT(*),0) AS count pg_query = """SELECT COALESCE(COUNT(*),0) AS count
FROM dashboard_widgets FROM dashboard_widgets
WHERE dashboard_id = %(dashboard_id)s;""" WHERE dashboard_id = %(dashboard_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
offset = row["count"] offset = row["count"]
pg_query = f"""UPDATE dashboards pg_query = f"""UPDATE dashboards
SET name = %(name)s, SET name = %(name)s,
@ -137,15 +137,15 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"]) # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i + offset}) params[f"config_{i}"] = json.dumps({"position": i + offset})
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
if row: if row:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def get_widget(project_id, user_id, dashboard_id, widget_id): async def get_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """SELECT metrics.*, metric_series.series pg_query = """SELECT metrics.*, metric_series.series
FROM dashboard_widgets FROM dashboard_widgets
INNER JOIN dashboards USING (dashboard_id) INNER JOIN dashboards USING (dashboard_id)
@ -163,13 +163,13 @@ def get_widget(project_id, user_id, dashboard_id, widget_id):
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL) AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
AND (metrics.is_public OR metrics.user_id = %(userId)s);""" AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema): async def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id, SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
%(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
@ -180,13 +180,13 @@ def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashb
RETURNING *;""" RETURNING *;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
params["config"] = json.dumps(data.config) params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema): async def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboard_widgets pg_query = """UPDATE dashboard_widgets
SET config= %(config)s SET config= %(config)s
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
@ -194,22 +194,22 @@ def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.Up
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
"widget_id": widget_id, **data.model_dump()} "widget_id": widget_id, **data.model_dump()}
params["config"] = json.dumps(data.config) params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def remove_widget(project_id, user_id, dashboard_id, widget_id): async def remove_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """DELETE FROM dashboard_widgets pg_query = """DELETE FROM dashboard_widgets
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;""" WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id} params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}} return {"data": {"success": True}}
def pin_dashboard(project_id, user_id, dashboard_id): async def pin_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboards pg_query = """UPDATE dashboards
SET is_pinned = FALSE SET is_pinned = FALSE
WHERE project_id=%(project_id)s; WHERE project_id=%(project_id)s;
@ -218,14 +218,14 @@ def pin_dashboard(project_id, user_id, dashboard_id):
WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
RETURNING *;""" RETURNING *;"""
params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id} params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params)) await cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema): async def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema):
metric_id = custom_metrics.create_card(project_id=project_id, user_id=user_id, data=data, dashboard=True) metric_id = await custom_metrics.create_card(project_id=project_id, user_id=user_id, data=data, dashboard=True)
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id, return await add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id)) data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
# def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema): # def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema):

View file

@ -93,44 +93,44 @@ class DatabaseRequestHandler:
logging.info(f"Query: {query}") logging.info(f"Query: {query}")
return query return query
def execute_query(self, query, data=None): async def execute_query(self, query, data=None):
try: try:
with self.client.PostgresClient() as cur: async with self.client.cursor() as cur:
mogrified_query = cur.mogrify(query, {**data, **self.params} if data else self.params) mogrified_query = cur.mogrify(query, {**data, **self.params} if data else self.params)
cur.execute(mogrified_query) await cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None return await cur.fetchall() if cur.description else None
except Exception as e: except Exception as e:
self.logger.error(f"Database operation failed: {e}") self.logger.error(f"Database operation failed: {e}")
raise raise
def fetchall(self): async def fetchall(self):
query = self.build_query() query = self.build_query()
return self.execute_query(query) return await self.execute_query(query)
def fetchone(self): async def fetchone(self):
query = self.build_query() query = self.build_query()
result = self.execute_query(query) result = await self.execute_query(query)
return result[0] if result else None return result[0] if result else None
def insert(self, data): async def insert(self, data):
query = self.build_query(action="insert", data=data) query = self.build_query(action="insert", data=data)
query += " RETURNING *;" query += " RETURNING *;"
result = self.execute_query(query, data) result = await self.execute_query(query, data)
return result[0] if result else None return result[0] if result else None
def update(self, data): async def update(self, data):
query = self.build_query(action="update", data=data) query = self.build_query(action="update", data=data)
query += " RETURNING *;" query += " RETURNING *;"
result = self.execute_query(query, data) result = await self.execute_query(query, data)
return result[0] if result else None return result[0] if result else None
def delete(self): async def delete(self):
query = self.build_query(action="delete") query = self.build_query(action="delete")
return self.execute_query(query) return await self.execute_query(query)
def batch_insert(self, items): async def batch_insert(self, items):
if not items: if not items:
return None return None
@ -145,27 +145,27 @@ class DatabaseRequestHandler:
query = f"INSERT INTO {self.table_name} ({columns}) VALUES {all_values_query} RETURNING *;" query = f"INSERT INTO {self.table_name} ({columns}) VALUES {all_values_query} RETURNING *;"
try: try:
with self.client.PostgresClient() as cur: async with self.client.cursor() as cur:
# Flatten items into a single dictionary with unique keys # Flatten items into a single dictionary with unique keys
combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()} combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params) mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query) await cur.execute(mogrified_query)
return cur.fetchall() return await cur.fetchall()
except Exception as e: except Exception as e:
self.logger.error(f"Database batch insert operation failed: {e}") self.logger.error(f"Database batch insert operation failed: {e}")
raise raise
def raw_query(self, query, params=None): async def raw_query(self, query, params=None):
try: try:
with self.client.PostgresClient() as cur: async with self.client.cursor() as cur:
mogrified_query = cur.mogrify(query, params) mogrified_query = cur.mogrify(query, params)
cur.execute(mogrified_query) await cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None return await cur.fetchall() if cur.description else None
except Exception as e: except Exception as e:
self.logger.error(f"Database operation failed: {e}") self.logger.error(f"Database operation failed: {e}")
raise raise
def batch_update(self, items): async def batch_update(self, items):
if not items: if not items:
return None return None
@ -192,11 +192,11 @@ class DatabaseRequestHandler:
""" """
try: try:
with self.client.PostgresClient() as cur: async with self.client.cursor() as cur:
# Flatten items into a single dictionary for mogrify # Flatten items into a single dictionary for mogrify
combined_params = {k: v for item in items for k, v in item.items()} combined_params = {k: v for item in items for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params) mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query) await cur.execute(mogrified_query)
except Exception as e: except Exception as e:
self.logger.error(f"Database batch update operation failed: {e}") self.logger.error(f"Database batch update operation failed: {e}")
raise raise

View file

@ -8,24 +8,24 @@ from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size from chalicelib.utils.metrics_helper import __get_step_size
def get(error_id, family=False): async def get(error_id, family=False):
if family: if family:
return get_batch([error_id]) return get_batch([error_id])
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;", "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
{"error_id": error_id}) {"error_id": error_id})
cur.execute(query=query) await cur.execute(query=query)
result = cur.fetchone() result = await cur.fetchone()
if result is not None: if result is not None:
result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"]) result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
return helper.dict_to_camel_case(result) return helper.dict_to_camel_case(result)
def get_batch(error_ids): async def get_batch(error_ids):
if len(error_ids) == 0: if len(error_ids) == 0:
return [] return []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
""" """
WITH RECURSIVE error_family AS ( WITH RECURSIVE error_family AS (
@ -40,8 +40,8 @@ def get_batch(error_ids):
SELECT * SELECT *
FROM error_family;""", FROM error_family;""",
{"error_ids": tuple(error_ids)}) {"error_ids": tuple(error_ids)})
cur.execute(query=query) await cur.execute(query=query)
errors = cur.fetchall() errors = await cur.fetchall()
for e in errors: for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors) return helper.list_to_camel_case(errors)
@ -81,7 +81,7 @@ def __process_tags(row):
] ]
def get_details(project_id, error_id, user_id, **data): async def get_details(project_id, error_id, user_id, **data):
pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
pg_sub_query24.append("error_id = %(error_id)s") pg_sub_query24.append("error_id = %(error_id)s")
pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
@ -101,7 +101,7 @@ def get_details(project_id, error_id, user_id, **data):
pg_sub_query30.append("error_id = %(error_id)s") pg_sub_query30.append("error_id = %(error_id)s")
pg_basic_query = __get_basic_constraints(time_constraint=False) pg_basic_query = __get_basic_constraints(time_constraint=False)
pg_basic_query.append("error_id = %(error_id)s") pg_basic_query.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
data["startDate24"] = TimeUTC.now(-1) data["startDate24"] = TimeUTC.now(-1)
data["endDate24"] = TimeUTC.now() data["endDate24"] = TimeUTC.now()
data["startDate30"] = TimeUTC.now(-30) data["startDate30"] = TimeUTC.now(-30)
@ -253,8 +253,8 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------") # print("--------------------")
# print(cur.mogrify(main_pg_query, params)) # print(cur.mogrify(main_pg_query, params))
# print("--------------------") # print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params)) await cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return {"errors": ["error not found"]} return {"errors": ["error not found"]}
row["tags"] = __process_tags(row) row["tags"] = __process_tags(row)
@ -274,8 +274,8 @@ def get_details(project_id, error_id, user_id, **data):
ORDER BY start_ts DESC ORDER BY start_ts DESC
LIMIT 1;""", LIMIT 1;""",
{"project_id": project_id, "error_id": error_id, "user_id": user_id}) {"project_id": project_id, "error_id": error_id, "user_id": user_id})
cur.execute(query=query) await cur.execute(query=query)
status = cur.fetchone() status = await cur.fetchone()
if status is not None: if status is not None:
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack") row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
@ -294,12 +294,12 @@ def get_details(project_id, error_id, user_id, **data):
return {"data": helper.dict_to_camel_case(row)} return {"data": helper.dict_to_camel_case(row)}
def get_details_chart(project_id, error_id, user_id, **data): async def get_details_chart(project_id, error_id, user_id, **data):
pg_sub_query = __get_basic_constraints() pg_sub_query = __get_basic_constraints()
pg_sub_query.append("error_id = %(error_id)s") pg_sub_query.append("error_id = %(error_id)s")
pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True) pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
pg_sub_query_chart.append("error_id = %(error_id)s") pg_sub_query_chart.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if data.get("startDate") is None: if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7) data["startDate"] = TimeUTC.now(-7)
else: else:
@ -398,8 +398,8 @@ def get_details_chart(project_id, error_id, user_id, **data):
GROUP BY generated_timestamp GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);""" ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
cur.execute(cur.mogrify(main_pg_query, params)) await cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return {"errors": ["error not found"]} return {"errors": ["error not found"]}
row["tags"] = __process_tags(row) row["tags"] = __process_tags(row)
@ -434,7 +434,7 @@ def __get_sort_key(key):
}.get(key, 'max_datetime') }.get(key, 'max_datetime')
def search(data: schemas.SearchErrorsSchema, project_id, user_id): async def search(data: schemas.SearchErrorsSchema, project_id, user_id):
empty_response = { empty_response = {
'total': 0, 'total': 0,
'errors': [] 'errors': []
@ -460,12 +460,12 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
data.endTimestamp = TimeUTC.now(1) data.endTimestamp = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0: if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors") print("-- searching for sessions before errors")
statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, statuses = await sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status) error_status=data.status)
if len(statuses) == 0: if len(statuses) == 0:
return empty_response return empty_response
error_ids = [e["errorId"] for e in statuses] error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1) step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
sort = __get_sort_key('datetime') sort = __get_sort_key('datetime')
if data.sort is not None: if data.sort is not None:
@ -547,8 +547,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
# print(cur.mogrify(main_pg_query, params)) # print(cur.mogrify(main_pg_query, params))
# print("--------------------") # print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params)) await cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall() rows = await cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"] total = 0 if len(rows) == 0 else rows[0]["full_count"]
if total == 0: if total == 0:
@ -565,8 +565,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id}) "user_id": user_id})
cur.execute(query=query) await cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall()) statuses = helper.list_to_camel_case(await cur.fetchall())
statuses = { statuses = {
s["errorId"]: s for s in statuses s["errorId"]: s for s in statuses
} }
@ -584,18 +584,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
} }
def __save_stacktrace(error_id, data): async def __save_stacktrace(error_id, data):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""UPDATE public.errors """UPDATE public.errors
SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now()) SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
WHERE error_id = %(error_id)s;""", WHERE error_id = %(error_id)s;""",
{"error_id": error_id, "data": json.dumps(data)}) {"error_id": error_id, "data": json.dumps(data)})
cur.execute(query=query) await cur.execute(query=query)
def get_trace(project_id, error_id): async def get_trace(project_id, error_id):
error = get(error_id=error_id, family=False) error = await get(error_id=error_id, family=False)
if error is None: if error is None:
return {"errors": ["error not found"]} return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception": if error.get("source", "") != "js_exception":
@ -606,15 +606,15 @@ def get_trace(project_id, error_id):
return {"sourcemapUploaded": True, return {"sourcemapUploaded": True,
"trace": error.get("stacktrace"), "trace": error.get("stacktrace"),
"preparsed": True} "preparsed": True}
trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"]) trace, all_exists = await sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
if all_exists: if all_exists:
__save_stacktrace(error_id=error_id, data=trace) await __save_stacktrace(error_id=error_id, data=trace)
return {"sourcemapUploaded": all_exists, return {"sourcemapUploaded": all_exists,
"trace": trace, "trace": trace,
"preparsed": False} "preparsed": False}
def get_sessions(start_date, end_date, project_id, user_id, error_id): async def get_sessions(start_date, end_date, project_id, user_id, error_id):
extra_constraints = ["s.project_id = %(project_id)s", extra_constraints = ["s.project_id = %(project_id)s",
"s.start_ts >= %(startDate)s", "s.start_ts >= %(startDate)s",
"s.start_ts <= %(endDate)s", "s.start_ts <= %(endDate)s",
@ -630,7 +630,7 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
"project_id": project_id, "project_id": project_id,
"userId": user_id, "userId": user_id,
"error_id": error_id} "error_id": error_id}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
f"""SELECT s.project_id, f"""SELECT s.project_id,
s.session_id::text AS session_id, s.session_id::text AS session_id,
@ -659,13 +659,13 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
WHERE {" AND ".join(extra_constraints)} WHERE {" AND ".join(extra_constraints)}
ORDER BY s.start_ts DESC;""", ORDER BY s.start_ts DESC;""",
params) params)
cur.execute(query=query) await cur.execute(query=query)
sessions_list = [] sessions_list = []
total = cur.rowcount total = cur.rowcount
row = cur.fetchone() row = await cur.fetchone()
while row is not None and len(sessions_list) < 100: while row is not None and len(sessions_list) < 100:
sessions_list.append(row) sessions_list.append(row)
row = cur.fetchone() row = await cur.fetchone()
return { return {
'total': total, 'total': total,
@ -680,8 +680,8 @@ ACTION_STATE = {
} }
def change_state(project_id, user_id, error_id, action): async def change_state(project_id, user_id, error_id, action):
errors = get(error_id, family=True) errors = await get(error_id, family=True)
print(len(errors)) print(len(errors))
status = ACTION_STATE.get(action) status = ACTION_STATE.get(action)
if errors is None or len(errors) == 0: if errors is None or len(errors) == 0:
@ -696,15 +696,15 @@ def change_state(project_id, user_id, error_id, action):
"userId": user_id, "userId": user_id,
"error_ids": tuple([e["errorId"] for e in errors]), "error_ids": tuple([e["errorId"] for e in errors]),
"status": status} "status": status}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""UPDATE public.errors """UPDATE public.errors
SET status = %(status)s SET status = %(status)s
WHERE error_id IN %(error_ids)s WHERE error_id IN %(error_ids)s
RETURNING status""", RETURNING status""",
params) params)
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
if row is not None: if row is not None:
for e in errors: for e in errors:
e["status"] = row["status"] e["status"] = row["status"]

View file

@ -1,9 +1,9 @@
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def add_favorite_error(project_id, user_id, error_id): async def add_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""INSERT INTO public.user_favorite_errors(user_id, error_id) cur.mogrify(f"""INSERT INTO public.user_favorite_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""", VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id}) {"userId": user_id, "error_id": error_id})
@ -11,9 +11,9 @@ def add_favorite_error(project_id, user_id, error_id):
return {"errorId": error_id, "favorite": True} return {"errorId": error_id, "favorite": True}
def remove_favorite_error(project_id, user_id, error_id): async def remove_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""DELETE FROM public.user_favorite_errors cur.mogrify(f"""DELETE FROM public.user_favorite_errors
WHERE WHERE
user_id = %(userId)s user_id = %(userId)s
@ -23,18 +23,18 @@ def remove_favorite_error(project_id, user_id, error_id):
return {"errorId": error_id, "favorite": False} return {"errorId": error_id, "favorite": False}
def favorite_error(project_id, user_id, error_id): async def favorite_error(project_id, user_id, error_id):
exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id) exists, favorite = await error_exists_and_favorite(user_id=user_id, error_id=error_id)
if not exists: if not exists:
return {"errors": ["cannot bookmark non-rehydrated errors"]} return {"errors": ["cannot bookmark non-rehydrated errors"]}
if favorite: if favorite:
return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id) return await remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id) return await add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
def error_exists_and_favorite(user_id, error_id): async def error_exists_and_favorite(user_id, error_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT errors.error_id AS exists, ufe.error_id AS favorite """SELECT errors.error_id AS exists, ufe.error_id AS favorite
FROM public.errors FROM public.errors
@ -42,7 +42,7 @@ def error_exists_and_favorite(user_id, error_id):
WHERE error_id = %(error_id)s;""", WHERE error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id}) {"userId": user_id, "error_id": error_id})
) )
r = cur.fetchone() r = await cur.fetchone()
if r is None: if r is None:
return False, False return False, False
return True, r.get("favorite") is not None return True, r.get("favorite") is not None

View file

@ -1,17 +1,17 @@
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def add_viewed_error(project_id, user_id, error_id): async def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id) cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""", VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id}) {"userId": user_id, "error_id": error_id})
) )
def viewed_error_exists(user_id, error_id): async def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""SELECT """SELECT
errors.error_id AS hydrated, errors.error_id AS hydrated,
@ -22,16 +22,16 @@ def viewed_error_exists(user_id, error_id):
FROM public.errors FROM public.errors
WHERE error_id = %(error_id)s""", WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id}) {"userId": user_id, "error_id": error_id})
cur.execute( await cur.execute(
query=query query=query
) )
r = cur.fetchone() r = await cur.fetchone()
if r: if r:
return r.get("viewed") return r.get("viewed")
return True return True
def viewed_error(project_id, user_id, error_id): async def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id): if await viewed_error_exists(user_id=user_id, error_id=error_id):
return None return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id) return await add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)

View file

@ -9,9 +9,9 @@ from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event from chalicelib.utils.event_filter_definition import SupportedFilter, Event
def get_customs_by_session_id(session_id, project_id): async def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT SELECT
c.*, c.*,
'CUSTOM' AS type 'CUSTOM' AS type
@ -21,7 +21,7 @@ def get_customs_by_session_id(session_id, project_id):
ORDER BY c.timestamp;""", ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows = cur.fetchall() rows = await cur.fetchall()
return helper.dict_to_camel_case(rows) return helper.dict_to_camel_case(rows)
@ -31,8 +31,8 @@ def __merge_cells(rows, start, count, replacement):
return rows return rows
def __get_grouped_clickrage(rows, session_id, project_id): async def __get_grouped_clickrage(rows, session_id, project_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) click_rage_issues = await issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
if len(click_rage_issues) == 0: if len(click_rage_issues) == 0:
return rows return rows
@ -52,11 +52,11 @@ def __get_grouped_clickrage(rows, session_id, project_id):
return rows return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None): async def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
rows = [] rows = []
if event_type is None or event_type == schemas.EventType.click: if event_type is None or event_type == schemas.EventType.click:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT SELECT
c.*, c.*,
'CLICK' AS type 'CLICK' AS type
@ -66,11 +66,11 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
ORDER BY c.timestamp;""", ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows += cur.fetchall() rows += await cur.fetchall()
if group_clickrage: if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) rows = await __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input: if event_type is None or event_type == schemas.EventType.input:
cur.execute(cur.mogrify(""" await cur.execute(cur.mogrify("""
SELECT SELECT
i.*, i.*,
'INPUT' AS type 'INPUT' AS type
@ -80,9 +80,9 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
ORDER BY i.timestamp;""", ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows += cur.fetchall() rows += await cur.fetchall()
if event_type is None or event_type == schemas.EventType.location: if event_type is None or event_type == schemas.EventType.location:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT SELECT
l.*, l.*,
l.path AS value, l.path AS value,
@ -92,14 +92,14 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
WHERE WHERE
l.session_id = %(session_id)s l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall() rows += await cur.fetchall()
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"])) rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows return rows
def _search_tags(project_id, value, key=None, source=None): async def _search_tags(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = f""" query = f"""
SELECT public.tags.name SELECT public.tags.name
'{events.EventType.TAG.ui_type}' AS type '{events.EventType.TAG.ui_type}' AS type
@ -109,8 +109,8 @@ def _search_tags(project_id, value, key=None, source=None):
LIMIT 10 LIMIT 10
""" """
query = cur.mogrify(query, {'project_id': project_id, 'value': value}) query = cur.mogrify(query, {'project_id': project_id, 'value': value})
cur.execute(query) await cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall()) results = helper.list_to_camel_case(await cur.fetchall())
return results return results
@ -182,35 +182,35 @@ SUPPORTED_TYPES = {
} }
def get_errors_by_session_id(session_id, project_id): async def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(f"""\ await cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall() errors = await cur.fetchall()
for e in errors: for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors) return helper.list_to_camel_case(errors)
def search(text, event_type, project_id, source, key): async def search(text, event_type, project_id, source, key):
if not event_type: if not event_type:
return {"data": autocomplete.__get_autocomplete_table(text, project_id)} return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys(): if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) rows = await SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for IOS events autocomplete # for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys(): # if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source) # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) rows = await SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id) return await sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \ elif event_type.endswith("_IOS") \
and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys(): and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id) return await sessions_metas.search(text, event_type, project_id)
else: else:
return {"errors": ["unsupported event"]} return {"errors": ["unsupported event"]}

View file

@ -2,13 +2,13 @@ from chalicelib.utils import pg_client, helper
from chalicelib.core import events from chalicelib.core import events
def get_customs_by_session_id(session_id, project_id): async def get_customs_by_session_id(session_id, project_id):
return events.get_customs_by_session_id(session_id=session_id, project_id=project_id) return await events.get_customs_by_session_id(session_id=session_id, project_id=project_id)
def get_by_sessionId(session_id, project_id): async def get_by_sessionId(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(f""" await cur.execute(cur.mogrify(f"""
SELECT SELECT
c.*, c.*,
'TAP' AS type 'TAP' AS type
@ -18,9 +18,9 @@ def get_by_sessionId(session_id, project_id):
ORDER BY c.timestamp;""", ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows = cur.fetchall() rows = await cur.fetchall()
cur.execute(cur.mogrify(f""" await cur.execute(cur.mogrify(f"""
SELECT SELECT
i.*, i.*,
'INPUT' AS type 'INPUT' AS type
@ -30,8 +30,8 @@ def get_by_sessionId(session_id, project_id):
ORDER BY i.timestamp;""", ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows += cur.fetchall() rows += await cur.fetchall()
cur.execute(cur.mogrify(f""" await cur.execute(cur.mogrify(f"""
SELECT SELECT
v.*, v.*,
'VIEW' AS type 'VIEW' AS type
@ -39,8 +39,8 @@ def get_by_sessionId(session_id, project_id):
WHERE WHERE
v.session_id = %(session_id)s v.session_id = %(session_id)s
ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id})) ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall() rows += await cur.fetchall()
cur.execute(cur.mogrify(f""" await cur.execute(cur.mogrify(f"""
SELECT SELECT
s.*, s.*,
'SWIPE' AS type 'SWIPE' AS type
@ -48,15 +48,15 @@ def get_by_sessionId(session_id, project_id):
WHERE WHERE
s.session_id = %(session_id)s s.session_id = %(session_id)s
ORDER BY s.timestamp;""", {"project_id": project_id, "session_id": session_id})) ORDER BY s.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall() rows += await cur.fetchall()
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: k["timestamp"]) rows = sorted(rows, key=lambda k: k["timestamp"])
return rows return rows
def get_crashes_by_session_id(session_id): async def get_crashes_by_session_id(session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(f""" await cur.execute(cur.mogrify(f"""
SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
FROM {events.EventType.CRASH_IOS.table} AS cr FROM {events.EventType.CRASH_IOS.table} AS cr
INNER JOIN public.crashes_ios AS uc USING (crash_ios_id) INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
@ -64,5 +64,5 @@ def get_crashes_by_session_id(session_id):
WHERE WHERE
cr.session_id = %(session_id)s cr.session_id = %(session_id)s
ORDER BY timestamp;""", {"session_id": session_id})) ORDER BY timestamp;""", {"session_id": session_id}))
errors = cur.fetchall() errors = await cur.fetchall()
return helper.list_to_camel_case(errors) return helper.list_to_camel_case(errors)

View file

@ -22,8 +22,8 @@ feature_flag_columns = (
) )
def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) -> bool: async def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1 query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.feature_flags FROM public.feature_flags
WHERE deleted_at IS NULL WHERE deleted_at IS NULL
@ -31,29 +31,29 @@ def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) ->
{"AND feature_flag_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""", {"AND feature_flag_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"flag_key": flag_key, "exclude_id": exclude_id, "project_id": project_id}) {"flag_key": flag_key, "exclude_id": exclude_id, "project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
return row["exists"] return row["exists"]
def update_feature_flag_status(project_id: int, feature_flag_id: int, is_active: bool) -> Dict[str, Any]: async def update_feature_flag_status(project_id: int, feature_flag_id: int, is_active: bool) -> Dict[str, Any]:
try: try:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE feature_flags query = cur.mogrify(f"""UPDATE feature_flags
SET is_active = %(is_active)s, updated_at=NOW() SET is_active = %(is_active)s, updated_at=NOW()
WHERE feature_flag_id=%(feature_flag_id)s AND project_id=%(project_id)s WHERE feature_flag_id=%(feature_flag_id)s AND project_id=%(project_id)s
RETURNING is_active;""", RETURNING is_active;""",
{"feature_flag_id": feature_flag_id, "is_active": is_active, "project_id": project_id}) {"feature_flag_id": feature_flag_id, "is_active": is_active, "project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
return {"is_active": cur.fetchone()["is_active"]} return {"is_active": await cur.fetchone()["is_active"]}
except Exception as e: except Exception as e:
logging.error(f"Failed to update feature flag status: {e}") logging.error(f"Failed to update feature flag status: {e}")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Failed to update feature flag status") detail="Failed to update feature flag status")
def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]: async def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]:
""" """
Get all feature flags and their total count. Get all feature flags and their total count.
""" """
@ -67,10 +67,10 @@ def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlag
LIMIT %(limit)s OFFSET %(offset)s; LIMIT %(limit)s OFFSET %(offset)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
if len(rows) == 0: if len(rows) == 0:
return {"data": {"total": 0, "list": []}} return {"data": {"total": 0, "list": []}}
@ -110,12 +110,12 @@ def prepare_constraints_params_to_search(data, project_id, user_id):
return constraints, params return constraints, params
def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]: async def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant and len(feature_flag_data.variants) == 0: if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant and len(feature_flag_data.variants) == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Variants are required for multi variant flag") detail="Variants are required for multi variant flag")
validate_unique_flag_key(feature_flag_data, project_id) await validate_unique_flag_key(feature_flag_data, project_id)
validate_multi_variant_flag(feature_flag_data) validate_multi_variant_flag(feature_flag_data)
insert_columns = ( insert_columns = (
@ -176,19 +176,19 @@ def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schema
SELECT feature_flag_id FROM inserted_flag; SELECT feature_flag_id FROM inserted_flag;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, params) query = cur.mogrify(query, params)
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return None return None
return get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"]) return await get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"])
def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None): async def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None):
if exists_by_name(project_id=project_id, flag_key=feature_flag_data.flag_key, exclude_id=exclude_id): if await exists_by_name(project_id=project_id, flag_key=feature_flag_data.flag_key, exclude_id=exclude_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Feature flag with key already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Feature flag with key already exists.")
@ -238,7 +238,7 @@ def prepare_conditions_values(feature_flag_data):
return conditions_data return conditions_data
def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]: async def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]:
conditions_query = """ conditions_query = """
SELECT COALESCE(jsonb_agg(ffc ORDER BY condition_id), '[]'::jsonb) AS conditions SELECT COALESCE(jsonb_agg(ffc ORDER BY condition_id), '[]'::jsonb) AS conditions
FROM feature_flags_conditions AS ffc FROM feature_flags_conditions AS ffc
@ -261,10 +261,10 @@ def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str
AND ff.deleted_at IS NULL; AND ff.deleted_at IS NULL;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "project_id": project_id}) query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "project_id": project_id})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
return {"errors": ["Feature flag not found"]} return {"errors": ["Feature flag not found"]}
@ -275,7 +275,7 @@ def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str
return {"data": helper.dict_to_camel_case(row)} return {"data": helper.dict_to_camel_case(row)}
def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> List[Dict[str, Any]]: async def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> List[Dict[str, Any]]:
""" """
Create new feature flag conditions and return their data. Create new feature flag conditions and return their data.
""" """
@ -297,23 +297,23 @@ def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag
RETURNING condition_id, {", ".join(columns)} RETURNING condition_id, {", ".join(columns)}
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
params = [ params = [
(feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.model_dump() for filter_ in c.filters])) (feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.model_dump() for filter_ in c.filters]))
for c in conditions] for c in conditions]
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
def update_feature_flag(project_id: int, feature_flag_id: int, async def update_feature_flag(project_id: int, feature_flag_id: int,
feature_flag: schemas.FeatureFlagSchema, user_id: int): feature_flag: schemas.FeatureFlagSchema, user_id: int):
""" """
Update an existing feature flag and return its updated data. Update an existing feature flag and return its updated data.
""" """
validate_unique_flag_key(feature_flag_data=feature_flag, project_id=project_id, exclude_id=feature_flag_id) await validate_unique_flag_key(feature_flag_data=feature_flag, project_id=project_id, exclude_id=feature_flag_id)
validate_multi_variant_flag(feature_flag_data=feature_flag) validate_multi_variant_flag(feature_flag_data=feature_flag)
columns = ( columns = (
@ -342,23 +342,23 @@ def update_feature_flag(project_id: int, feature_flag_id: int,
RETURNING feature_flag_id, {", ".join(columns)}, created_at, updated_at RETURNING feature_flag_id, {", ".join(columns)}, created_at, updated_at
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is None: if row is None:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag not found") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag not found")
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"]) row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
row['conditions'] = check_conditions(feature_flag_id, feature_flag.conditions) row['conditions'] = await check_conditions(feature_flag_id, feature_flag.conditions)
row['variants'] = check_variants(feature_flag_id, feature_flag.variants) row['variants'] = await check_variants(feature_flag_id, feature_flag.variants)
return {"data": helper.dict_to_camel_case(row)} return {"data": helper.dict_to_camel_case(row)}
def get_conditions(feature_flag_id: int): async def get_conditions(feature_flag_id: int):
""" """
Get all conditions for a feature flag. Get all conditions for a feature flag.
""" """
@ -374,15 +374,15 @@ def get_conditions(feature_flag_id: int):
ORDER BY condition_id; ORDER BY condition_id;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id}) query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any: async def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
existing_ids = [ev.get("variant_id") for ev in get_variants(feature_flag_id)] existing_ids = [ev.get("variant_id") for ev in get_variants(feature_flag_id)]
to_be_deleted = [] to_be_deleted = []
to_be_updated = [] to_be_updated = []
@ -399,18 +399,18 @@ def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVaria
to_be_updated.append(variant) to_be_updated.append(variant)
if len(to_be_created) > 0: if len(to_be_created) > 0:
create_variants(feature_flag_id=feature_flag_id, variants=to_be_created) await create_variants(feature_flag_id=feature_flag_id, variants=to_be_created)
if len(to_be_updated) > 0: if len(to_be_updated) > 0:
update_variants(feature_flag_id=feature_flag_id, variants=to_be_updated) await update_variants(feature_flag_id=feature_flag_id, variants=to_be_updated)
if len(to_be_deleted) > 0: if len(to_be_deleted) > 0:
delete_variants(feature_flag_id=feature_flag_id, ids=to_be_deleted) await delete_variants(feature_flag_id=feature_flag_id, ids=to_be_deleted)
return get_variants(feature_flag_id) return await get_variants(feature_flag_id)
def get_variants(feature_flag_id: int): async def get_variants(feature_flag_id: int):
sql = """ sql = """
SELECT SELECT
variant_id, variant_id,
@ -423,15 +423,15 @@ def get_variants(feature_flag_id: int):
ORDER BY variant_id; ORDER BY variant_id;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id}) query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> List[Dict[str, Any]]: async def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> List[Dict[str, Any]]:
""" """
Create new feature flag variants and return their data. Create new feature flag variants and return their data.
""" """
@ -454,16 +454,16 @@ def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVari
RETURNING variant_id, {", ".join(columns)} RETURNING variant_id, {", ".join(columns)}
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
params = [(feature_flag_id, v.value, v.description, json.dumps(v.payload), v.rollout_percentage) for v in variants] params = [(feature_flag_id, v.value, v.description, json.dumps(v.payload), v.rollout_percentage) for v in variants]
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any: async def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
""" """
Update existing feature flag variants and return their updated data. Update existing feature flag variants and return their updated data.
""" """
@ -485,12 +485,12 @@ def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVari
WHERE c.variant_id = feature_flags_variants.variant_id AND feature_flag_id = %(feature_flag_id)s; WHERE c.variant_id = feature_flags_variants.variant_id AND feature_flag_id = %(feature_flag_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
def delete_variants(feature_flag_id: int, ids: List[int]) -> None: async def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
""" """
Delete existing feature flag variants and return their data. Delete existing feature flag variants and return their data.
""" """
@ -500,12 +500,12 @@ def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
AND feature_flag_id= %(feature_flag_id)s; AND feature_flag_id= %(feature_flag_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)}) query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
cur.execute(query) await cur.execute(query)
def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any: async def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
existing_ids = [ec.get("condition_id") for ec in get_conditions(feature_flag_id)] existing_ids = [ec.get("condition_id") for ec in get_conditions(feature_flag_id)]
to_be_deleted = [] to_be_deleted = []
to_be_updated = [] to_be_updated = []
@ -522,18 +522,18 @@ def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagC
to_be_updated.append(condition) to_be_updated.append(condition)
if len(to_be_created) > 0: if len(to_be_created) > 0:
create_conditions(feature_flag_id=feature_flag_id, conditions=to_be_created) await create_conditions(feature_flag_id=feature_flag_id, conditions=to_be_created)
if len(to_be_updated) > 0: if len(to_be_updated) > 0:
update_conditions(feature_flag_id=feature_flag_id, conditions=to_be_updated) await update_conditions(feature_flag_id=feature_flag_id, conditions=to_be_updated)
if len(to_be_deleted) > 0: if len(to_be_deleted) > 0:
delete_conditions(feature_flag_id=feature_flag_id, ids=to_be_deleted) await delete_conditions(feature_flag_id=feature_flag_id, ids=to_be_deleted)
return get_conditions(feature_flag_id) return await get_conditions(feature_flag_id)
def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any: async def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
""" """
Update existing feature flag conditions and return their updated data. Update existing feature flag conditions and return their updated data.
""" """
@ -555,12 +555,12 @@ def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag
WHERE c.condition_id = feature_flags_conditions.condition_id AND feature_flag_id = %(feature_flag_id)s; WHERE c.condition_id = feature_flags_conditions.condition_id AND feature_flag_id = %(feature_flag_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
def delete_conditions(feature_flag_id: int, ids: List[int]) -> None: async def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
""" """
Delete feature flag conditions. Delete feature flag conditions.
""" """
@ -570,12 +570,12 @@ def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
AND feature_flag_id= %(feature_flag_id)s; AND feature_flag_id= %(feature_flag_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)}) query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
cur.execute(query) await cur.execute(query)
def delete_feature_flag(project_id: int, feature_flag_id: int): async def delete_feature_flag(project_id: int, feature_flag_id: int):
""" """
Delete a feature flag. Delete a feature flag.
""" """
@ -584,10 +584,10 @@ def delete_feature_flag(project_id: int, feature_flag_id: int):
"feature_flags.feature_flag_id=%(feature_flag_id)s" "feature_flags.feature_flag_id=%(feature_flag_id)s"
] ]
params = {"project_id": project_id, "feature_flag_id": feature_flag_id} params = {"project_id": project_id, "feature_flag_id": feature_flag_id}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE feature_flags query = cur.mogrify(f"""UPDATE feature_flags
SET deleted_at= (now() at time zone 'utc'), is_active=false SET deleted_at= (now() at time zone 'utc'), is_active=false
WHERE {" AND ".join(conditions)};""", params) WHERE {" AND ".join(conditions)};""", params)
cur.execute(query) await cur.execute(query)
return {"state": "success"} return {"state": "success"}

View file

@ -36,12 +36,12 @@ def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): # def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): async def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events)) data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events) data.events = __fix_stages(data.events)
if len(data.events) == 0: if len(data.events) == 0:
return {"stages": [], "totalDropDueToIssues": 0} return {"stages": [], "totalDropDueToIssues": 0}
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id) insights, total_drop_due_to_issues = await significance.get_top_insights(filter_d=data, project_id=project_id)
insights = helper.list_to_camel_case(insights) insights = helper.list_to_camel_case(insights)
if len(insights) > 0: if len(insights) > 0:
# TODO: check if this correct # TODO: check if this correct
@ -56,7 +56,7 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): # def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): async def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events) data.events = filter_stages(data.events)
data.events = __fix_stages(data.events) data.events = __fix_stages(data.events)
if len(data.events) < 0: if len(data.events) < 0:
@ -64,5 +64,5 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem
return { return {
"issues": helper.dict_to_camel_case( "issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1, await significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1,
last_stage=len(data.events)))} last_stage=len(data.events)))}

View file

@ -1,7 +1,7 @@
from urllib.parse import urlparse from urllib.parse import urlparse
import redis import redis
import requests import httpx
from decouple import config from decouple import config
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
@ -34,24 +34,24 @@ HEALTH_ENDPOINTS = {
} }
def __check_database_pg(*_): async def __check_database_pg(*_):
fail_response = { fail_response = {
"health": False, "health": False,
"details": { "details": {
"errors": ["Postgres health-check failed"] "errors": ["Postgres health-check failed"]
} }
} }
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
try: try:
cur.execute("SHOW server_version;") await cur.execute("SHOW server_version;")
server_version = cur.fetchone() server_version = await cur.fetchone()
except Exception as e: except Exception as e:
print("!! health failed: postgres not responding") print("!! health failed: postgres not responding")
print(str(e)) print(str(e))
return fail_response return fail_response
try: try:
cur.execute("SELECT openreplay_version() AS version;") await cur.execute("SELECT openreplay_version() AS version;")
schema_version = cur.fetchone() schema_version = await cur.fetchone()
except Exception as e: except Exception as e:
print("!! health failed: openreplay_version not defined") print("!! health failed: openreplay_version not defined")
print(str(e)) print(str(e))
@ -77,7 +77,7 @@ def __always_healthy(*_):
def __check_be_service(service_name): def __check_be_service(service_name):
def fn(*_): async def fn(*_):
fail_response = { fail_response = {
"health": False, "health": False,
"details": { "details": {
@ -85,16 +85,13 @@ def __check_be_service(service_name):
} }
} }
try: try:
results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) async with httpx.AsyncClient() as client:
results = await client.get(HEALTH_ENDPOINTS.get(service_name), timeout=2)
if results.status_code != 200: if results.status_code != 200:
print(f"!! issue with the {service_name}-health code:{results.status_code}") print(f"!! issue with the {service_name}-health code:{results.status_code}")
print(results.text) print(results.text)
# fail_response["details"]["errors"].append(results.text) # fail_response["details"]["errors"].append(results.text)
return fail_response return fail_response
except requests.exceptions.Timeout:
print(f"!! Timeout getting {service_name}-health")
# fail_response["details"]["errors"].append("timeout")
return fail_response
except Exception as e: except Exception as e:
print(f"!! Issue getting {service_name}-health response") print(f"!! Issue getting {service_name}-health response")
print(str(e)) print(str(e))
@ -113,7 +110,7 @@ def __check_be_service(service_name):
return fn return fn
def __check_redis(*_): async def __check_redis(*_):
fail_response = { fail_response = {
"health": False, "health": False,
"details": {"errors": ["server health-check failed"]} "details": {"errors": ["server health-check failed"]}
@ -139,7 +136,7 @@ def __check_redis(*_):
} }
def __check_SSL(*_): async def __check_SSL(*_):
fail_response = { fail_response = {
"health": False, "health": False,
"details": { "details": {
@ -147,7 +144,8 @@ def __check_SSL(*_):
} }
} }
try: try:
requests.get(config("SITE_URL"), verify=True, allow_redirects=True) async with httpx.AsyncClient() as client:
await client.get(config("SITE_URL"), follow_redirects=True)
except Exception as e: except Exception as e:
print("!! health failed: SSL Certificate") print("!! health failed: SSL Certificate")
print(str(e)) print(str(e))
@ -158,23 +156,23 @@ def __check_SSL(*_):
} }
def __get_sessions_stats(*_): async def __get_sessions_stats(*_):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
constraints = ["projects.deleted_at IS NULL"] constraints = ["projects.deleted_at IS NULL"]
query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c, query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c,
COALESCE(SUM(events_count),0) AS e_c COALESCE(SUM(events_count),0) AS e_c
FROM public.projects_stats FROM public.projects_stats
INNER JOIN public.projects USING(project_id) INNER JOIN public.projects USING(project_id)
WHERE {" AND ".join(constraints)};""") WHERE {" AND ".join(constraints)};""")
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return { return {
"numberOfSessionsCaptured": row["s_c"], "numberOfSessionsCaptured": row["s_c"],
"numberOfEventCaptured": row["e_c"] "numberOfEventCaptured": row["e_c"]
} }
def get_health(): async def get_health():
health_map = { health_map = {
"databases": { "databases": {
"postgres": __check_database_pg "postgres": __check_database_pg
@ -202,10 +200,10 @@ def get_health():
"details": __get_sessions_stats, "details": __get_sessions_stats,
"ssl": __check_SSL "ssl": __check_SSL
} }
return __process_health(health_map=health_map) return await __process_health(health_map=health_map)
def __process_health(health_map): async def __process_health(health_map):
response = dict(health_map) response = dict(health_map)
for parent_key in health_map.keys(): for parent_key in health_map.keys():
if config(f"SKIP_H_{parent_key.upper()}", cast=bool, default=False): if config(f"SKIP_H_{parent_key.upper()}", cast=bool, default=False):
@ -215,14 +213,14 @@ def __process_health(health_map):
if config(f"SKIP_H_{parent_key.upper()}_{element_key.upper()}", cast=bool, default=False): if config(f"SKIP_H_{parent_key.upper()}_{element_key.upper()}", cast=bool, default=False):
response[parent_key].pop(element_key) response[parent_key].pop(element_key)
else: else:
response[parent_key][element_key] = health_map[parent_key][element_key]() response[parent_key][element_key] = await health_map[parent_key][element_key]()
else: else:
response[parent_key] = health_map[parent_key]() response[parent_key] = await health_map[parent_key]()
return response return response
def cron(): async def cron():
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT projects.project_id, query = cur.mogrify("""SELECT projects.project_id,
projects.created_at, projects.created_at,
projects.sessions_last_check_at, projects.sessions_last_check_at,
@ -232,8 +230,8 @@ def cron():
LEFT JOIN public.projects_stats USING (project_id) LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL WHERE projects.deleted_at IS NULL
ORDER BY project_id;""") ORDER BY project_id;""")
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
for r in rows: for r in rows:
insert = False insert = False
if r["last_update_at"] is None: if r["last_update_at"] is None:
@ -266,8 +264,8 @@ def cron():
AND start_ts<=%(end_ts)s AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""", AND duration IS NOT NULL;""",
params) params)
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is not None: if row is not None:
params["sessions_count"] = row["sessions_count"] params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"] params["events_count"] = row["events_count"]
@ -283,20 +281,20 @@ def cron():
last_update_at=(now() AT TIME ZONE 'utc'::text) last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""", WHERE project_id=%(project_id)s;""",
params) params)
cur.execute(query) await cur.execute(query)
# this cron is used to correct the sessions&events count every week # this cron is used to correct the sessions&events count every week
def weekly_cron(): async def weekly_cron():
with pg_client.PostgresClient(long_query=True) as cur: async with pg_client.cursor(long_query=True) as cur:
query = cur.mogrify("""SELECT project_id, query = cur.mogrify("""SELECT project_id,
projects_stats.last_update_at projects_stats.last_update_at
FROM public.projects FROM public.projects
LEFT JOIN public.projects_stats USING (project_id) LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL WHERE projects.deleted_at IS NULL
ORDER BY project_id;""") ORDER BY project_id;""")
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
for r in rows: for r in rows:
if r["last_update_at"] is None: if r["last_update_at"] is None:
continue continue
@ -313,16 +311,16 @@ def weekly_cron():
AND start_ts<=%(end_ts)s AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""", AND duration IS NOT NULL;""",
params) params)
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
if row is not None: if row is not None:
params["sessions_count"] = row["sessions_count"] params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"] params["events_count"] = row["events_count"]
query = cur.mogrify("""UPDATE public.projects_stats query = await cur.mogrify("""UPDATE public.projects_stats
SET sessions_count=%(sessions_count)s, SET sessions_count=%(sessions_count)s,
events_count=%(events_count)s, events_count=%(events_count)s,
last_update_at=(now() AT TIME ZONE 'utc'::text) last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""", WHERE project_id=%(project_id)s;""",
params) params)
cur.execute(query) await cur.execute(query)

View file

@ -3,7 +3,7 @@ import schemas
from chalicelib.utils import helper, pg_client from chalicelib.utils import helper, pg_client
def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema): async def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp, args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
"project_id": project_id, "url": data.url} "project_id": project_id, "url": data.url}
constraints = ["sessions.project_id = %(project_id)s", constraints = ["sessions.project_id = %(project_id)s",
@ -52,7 +52,7 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage" q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage"
query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id) query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
LEFT JOIN issues AS mis USING (issue_id)""" LEFT JOIN issues AS mis USING (issue_id)"""
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT selector, {q_count} query = cur.mogrify(f"""SELECT selector, {q_count}
FROM {query_from} FROM {query_from}
WHERE {" AND ".join(constraints)} WHERE {" AND ".join(constraints)}
@ -62,7 +62,7 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
# print(query.decode('UTF-8')) # print(query.decode('UTF-8'))
# print("---------") # print("---------")
try: try:
cur.execute(query) await cur.execute(query)
except Exception as err: except Exception as err:
print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------") print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8')) print(query.decode('UTF-8'))
@ -70,5 +70,5 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
print(data) print(data)
print("--------------------") print("--------------------")
raise err raise err
rows = cur.fetchall() rows = await cur.fetchall()
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)

View file

@ -8,6 +8,11 @@ class BaseIntegration(ABC):
def __init__(self, user_id, ISSUE_CLASS): def __init__(self, user_id, ISSUE_CLASS):
self._user_id = user_id self._user_id = user_id
self._issue_handler = ISSUE_CLASS(self.integration_token) self._issue_handler = ISSUE_CLASS(self.integration_token)
self.integration = None
async def init():
integration = await self.get()
self.integration = integration
@property @property
@abstractmethod @abstractmethod
@ -21,39 +26,38 @@ class BaseIntegration(ABC):
@property @property
def integration_token(self): def integration_token(self):
integration = self.get()
if integration is None: if integration is None:
print("no token configured yet") print("no token configured yet")
return None return None
return integration["token"] return integration["token"]
def get(self): async def get(self):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT * """SELECT *
FROM public.oauth_authentication FROM public.oauth_authentication
WHERE user_id=%(user_id)s AND provider=%(provider)s;""", WHERE user_id=%(user_id)s AND provider=%(provider)s;""",
{"user_id": self._user_id, "provider": self.provider.lower()}) {"user_id": self._user_id, "provider": self.provider.lower()})
) )
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
@abstractmethod @abstractmethod
def get_obfuscated(self): def get_obfuscated(self):
pass pass
@abstractmethod @abstractmethod
def update(self, changes, obfuscate=False): async def update(self, changes, obfuscate=False):
pass pass
@abstractmethod @abstractmethod
def _add(self, data): async def _add(self, data):
pass pass
@abstractmethod @abstractmethod
def delete(self): async def delete(self):
pass pass
@abstractmethod @abstractmethod
def add_edit(self, data): async def add_edit(self, data):
pass pass

View file

@ -20,16 +20,16 @@ class GitHubIntegration(integration_base.BaseIntegration):
def issue_handler(self): def issue_handler(self):
return self._issue_handler return self._issue_handler
def get_obfuscated(self): async def get_obfuscated(self):
integration = self.get() integration = await self.get()
if integration is None: if integration is None:
return None return None
return {"token": helper.obfuscate(text=integration["token"]), "provider": self.provider.lower()} return {"token": helper.obfuscate(text=integration["token"]), "provider": self.provider.lower()}
def update(self, changes, obfuscate=False): async def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()] sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
UPDATE public.oauth_authentication UPDATE public.oauth_authentication
SET {','.join(sub_query)} SET {','.join(sub_query)}
@ -38,7 +38,7 @@ class GitHubIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id, {"user_id": self._user_id,
**changes}) **changes})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if w and w.get("token") and obfuscate: if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"]) w["token"] = helper.obfuscate(w["token"])
return w return w
@ -46,9 +46,9 @@ class GitHubIntegration(integration_base.BaseIntegration):
def _add(self, data): def _add(self, data):
pass pass
def add(self, token, obfuscate=False): async def add(self, token, obfuscate=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
INSERT INTO public.oauth_authentication(user_id, provider, provider_user_id, token) INSERT INTO public.oauth_authentication(user_id, provider, provider_user_id, token)
VALUES(%(user_id)s, 'github', '', %(token)s) VALUES(%(user_id)s, 'github', '', %(token)s)
@ -56,15 +56,15 @@ class GitHubIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id, {"user_id": self._user_id,
"token": token}) "token": token})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if w and w.get("token") and obfuscate: if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"]) w["token"] = helper.obfuscate(w["token"])
return w return w
# TODO: make a revoke token call # TODO: make a revoke token call
def delete(self): async def delete(self):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
DELETE FROM public.oauth_authentication DELETE FROM public.oauth_authentication
WHERE user_id=%(user_id)s AND provider=%(provider)s;""", WHERE user_id=%(user_id)s AND provider=%(provider)s;""",
@ -72,10 +72,10 @@ class GitHubIntegration(integration_base.BaseIntegration):
) )
return {"state": "success"} return {"state": "success"}
def add_edit(self, data: schemas.IssueTrackingGithubSchema): async def add_edit(self, data: schemas.IssueTrackingGithubSchema):
s = self.get() s = await self.get()
if s is not None: if s is not None:
return self.update( return await self.update(
changes={ changes={
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
else s.token else s.token
@ -83,4 +83,4 @@ class GitHubIntegration(integration_base.BaseIntegration):
obfuscate=True obfuscate=True
) )
else: else:
return self.add(token=data.token, obfuscate=True) return await self.add(token=data.token, obfuscate=True)

View file

@ -8,13 +8,13 @@ class GithubIntegrationIssue(BaseIntegrationIssue):
self.__client = github_client_v3.githubV3Request(integration_token) self.__client = github_client_v3.githubV3Request(integration_token)
super(GithubIntegrationIssue, self).__init__("GITHUB", integration_token) super(GithubIntegrationIssue, self).__init__("GITHUB", integration_token)
def get_current_user(self): async def get_current_user(self):
return formatter.user(self.__client.get("/user")) return formatter.user(await self.__client.get("/user"))
def get_meta(self, repoId): async def get_meta(self, repoId):
current_user = self.get_current_user() current_user = await self.get_current_user()
try: try:
users = self.__client.get(f"/repositories/{repoId}/collaborators") users = await self.__client.get(f"/repositories/{repoId}/collaborators")
except Exception as e: except Exception as e:
users = [] users = []
users = [formatter.user(u) for u in users] users = [formatter.user(u) for u in users]
@ -23,18 +23,18 @@ class GithubIntegrationIssue(BaseIntegrationIssue):
meta = { meta = {
'users': users, 'users': users,
'issueTypes': [formatter.label(l) for l in 'issueTypes': [formatter.label(l) for l in
self.__client.get(f"/repositories/{repoId}/labels")] await self.__client.get(f"/repositories/{repoId}/labels")]
} }
return meta return meta
def create_new_assignment(self, integration_project_id, title, description, assignee, async def create_new_assignment(self, integration_project_id, title, description, assignee,
issue_type): issue_type):
repoId = integration_project_id repoId = integration_project_id
assignees = [assignee] assignees = [assignee]
labels = [str(issue_type)] labels = [str(issue_type)]
metas = self.get_meta(repoId) metas = await self.get_meta(repoId)
real_assignees = [] real_assignees = []
for a in assignees: for a in assignees:
for u in metas["users"]: for u in metas["users"]:
@ -51,38 +51,38 @@ class GithubIntegrationIssue(BaseIntegrationIssue):
break break
if not found: if not found:
real_labels.append(l) real_labels.append(l)
issue = self.__client.post(f"/repositories/{repoId}/issues", body={"title": title, "body": description, issue = await self.__client.post(f"/repositories/{repoId}/issues", body={"title": title, "body": description,
"assignees": real_assignees, "assignees": real_assignees,
"labels": real_labels}) "labels": real_labels})
return formatter.issue(issue) return formatter.issue(issue)
def get_by_ids(self, saved_issues): async def get_by_ids(self, saved_issues):
results = [] results = []
for i in saved_issues: for i in saved_issues:
results.append(self.get(integration_project_id=i["integrationProjectId"], assignment_id=i["id"])) results.append(await self.get(integration_project_id=i["integrationProjectId"], assignment_id=i["id"]))
return {"issues": results} return {"issues": results}
def get(self, integration_project_id, assignment_id): async def get(self, integration_project_id, assignment_id):
repoId = integration_project_id repoId = integration_project_id
issueNumber = assignment_id issueNumber = assignment_id
issue = self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}") issue = await self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}")
issue = formatter.issue(issue) issue = formatter.issue(issue)
if issue["commentsCount"] > 0: if issue["commentsCount"] > 0:
issue["comments"] = [formatter.comment(c) for c in issue["comments"] = [formatter.comment(c) for c in
self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}/comments")] await self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}/comments")]
return issue return issue
def comment(self, integration_project_id, assignment_id, comment): async def comment(self, integration_project_id, assignment_id, comment):
repoId = integration_project_id repoId = integration_project_id
issueNumber = assignment_id issueNumber = assignment_id
commentCreated = self.__client.post(f"/repositories/{repoId}/issues/{issueNumber}/comments", commentCreated = await self.__client.post(f"/repositories/{repoId}/issues/{issueNumber}/comments",
body={"body": comment}) body={"body": comment})
return formatter.comment(commentCreated) return formatter.comment(commentCreated)
def get_metas(self, integration_project_id): async def get_metas(self, integration_project_id):
current_user = self.get_current_user() current_user = await self.get_current_user()
try: try:
users = self.__client.get(f"/repositories/{integration_project_id}/collaborators") users = await self.__client.get(f"/repositories/{integration_project_id}/collaborators")
except Exception as e: except Exception as e:
users = [] users = []
users = [formatter.user(u) for u in users] users = [formatter.user(u) for u in users]
@ -92,9 +92,9 @@ class GithubIntegrationIssue(BaseIntegrationIssue):
return {"provider": self.provider.lower(), return {"provider": self.provider.lower(),
'users': users, 'users': users,
'issueTypes': [formatter.label(l) for l in 'issueTypes': [formatter.label(l) for l in
self.__client.get(f"/repositories/{integration_project_id}/labels")] await self.__client.get(f"/repositories/{integration_project_id}/labels")]
} }
def get_projects(self): async def get_projects(self):
repos = self.__client.get("/user/repos") repos = await self.__client.get("/user/repos")
return [formatter.repo(r) for r in repos] return [formatter.repo(r) for r in repos]

View file

@ -11,14 +11,19 @@ def obfuscate_string(string):
class JIRAIntegration(integration_base.BaseIntegration): class JIRAIntegration(integration_base.BaseIntegration):
def __init__(self, tenant_id, user_id): def __init__(self, tenant_id, user_id):
self.__tenant_id = tenant_id self.__tenant_id = tenant_id
# TODO: enable super-constructor when OAuth is done # TODO: enable super-constructor when OAuth is done
# super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy) # super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy)
self._issue_handler = None self._issue_handler = None
self._user_id = user_id self._user_id = user_id
self.integration = self.get() self.integeration = None
async def init(self):
if self.integration is not None:
return
self.integration = await self.get()
if self.integration is None: if self.integration is None:
return return
self.integration["valid"] = True self.integration["valid"] = True
@ -42,16 +47,16 @@ class JIRAIntegration(integration_base.BaseIntegration):
return self._issue_handler return self._issue_handler
# TODO: remove this once jira-oauth is done # TODO: remove this once jira-oauth is done
def get(self): async def get(self):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT username, token, url """SELECT username, token, url
FROM public.jira_cloud FROM public.jira_cloud
WHERE user_id=%(user_id)s;""", WHERE user_id=%(user_id)s;""",
{"user_id": self._user_id}) {"user_id": self._user_id})
) )
data = helper.dict_to_camel_case(cur.fetchone()) data = helper.dict_to_camel_case(await cur.fetchone())
if data is None: if data is None:
return return
@ -68,10 +73,10 @@ class JIRAIntegration(integration_base.BaseIntegration):
integration["provider"] = self.provider.lower() integration["provider"] = self.provider.lower()
return integration return integration
def update(self, changes, obfuscate=False): async def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()] sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
UPDATE public.jira_cloud UPDATE public.jira_cloud
SET {','.join(sub_query)} SET {','.join(sub_query)}
@ -80,19 +85,19 @@ class JIRAIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id, {"user_id": self._user_id,
**changes}) **changes})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if obfuscate: if obfuscate:
w["token"] = obfuscate_string(w["token"]) w["token"] = obfuscate_string(w["token"])
return self.get() return await self.get()
# TODO: make this generic for all issue tracking integrations # TODO: make this generic for all issue tracking integrations
def _add(self, data): def _add(self, data):
print("a pretty defined abstract method") print("a pretty defined abstract method")
return return
def add(self, username, token, url): async def add(self, username, token, url):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
INSERT INTO public.jira_cloud(username, token, user_id,url) INSERT INTO public.jira_cloud(username, token, user_id,url)
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s) VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
@ -100,12 +105,12 @@ class JIRAIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id, "username": username, {"user_id": self._user_id, "username": username,
"token": token, "url": url}) "token": token, "url": url})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
return self.get() return self.get()
def delete(self): async def delete(self):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
DELETE FROM public.jira_cloud DELETE FROM public.jira_cloud
WHERE user_id=%(user_id)s;""", WHERE user_id=%(user_id)s;""",
@ -113,9 +118,9 @@ class JIRAIntegration(integration_base.BaseIntegration):
) )
return {"state": "success"} return {"state": "success"}
def add_edit(self, data: schemas.IssueTrackingJiraSchema): async def add_edit(self, data: schemas.IssueTrackingJiraSchema):
if self.integration is not None: if self.integration is not None:
return self.update( return await self.update(
changes={ changes={
"username": data.username, "username": data.username,
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
@ -125,7 +130,7 @@ class JIRAIntegration(integration_base.BaseIntegration):
obfuscate=True obfuscate=True
) )
else: else:
return self.add( return await self.add(
username=data.username, username=data.username,
token=data.token, token=data.token,
url=str(data.url) url=str(data.url)

View file

@ -9,7 +9,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
self._client = jira_client.JiraManager(self.url, self.username, token, None) self._client = jira_client.JiraManager(self.url, self.username, token, None)
super(JIRACloudIntegrationIssue, self).__init__("JIRA", token) super(JIRACloudIntegrationIssue, self).__init__("JIRA", token)
def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type): async def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type):
self._client.set_jira_project_id(integration_project_id) self._client.set_jira_project_id(integration_project_id)
data = { data = {
'summary': title, 'summary': title,
@ -20,7 +20,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
} }
return self._client.create_issue(data) return self._client.create_issue(data)
def get_by_ids(self, saved_issues): async def get_by_ids(self, saved_issues):
projects_map = {} projects_map = {}
for i in saved_issues: for i in saved_issues:
if i["integrationProjectId"] not in projects_map.keys(): if i["integrationProjectId"] not in projects_map.keys():
@ -37,20 +37,20 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
results += issues results += issues
return {"issues": results} return {"issues": results}
def get(self, integration_project_id, assignment_id): async def get(self, integration_project_id, assignment_id):
self._client.set_jira_project_id(integration_project_id) self._client.set_jira_project_id(integration_project_id)
return self._client.get_issue_v3(assignment_id) return self._client.get_issue_v3(assignment_id)
def comment(self, integration_project_id, assignment_id, comment): async def comment(self, integration_project_id, assignment_id, comment):
self._client.set_jira_project_id(integration_project_id) self._client.set_jira_project_id(integration_project_id)
return self._client.add_comment_v3(assignment_id, comment) return self._client.add_comment_v3(assignment_id, comment)
def get_metas(self, integration_project_id): async def get_metas(self, integration_project_id):
meta = {} meta = {}
self._client.set_jira_project_id(integration_project_id) self._client.set_jira_project_id(integration_project_id)
meta['issueTypes'] = self._client.get_issue_types() meta['issueTypes'] = self._client.get_issue_types()
meta['users'] = self._client.get_assignable_users() meta['users'] = self._client.get_assignable_users()
return {"provider": self.provider.lower(), **meta} return {"provider": self.provider.lower(), **meta}
def get_projects(self): async def get_projects(self):
return self._client.get_projects() return self._client.get_projects()

View file

@ -2,9 +2,9 @@ import schemas
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def get_global_integrations_status(tenant_id, user_id, project_id): async def get_global_integrations_status(tenant_id, user_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
SELECT EXISTS((SELECT 1 SELECT EXISTS((SELECT 1
FROM public.oauth_authentication FROM public.oauth_authentication
@ -57,7 +57,7 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""", WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id}) {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
) )
current_integrations = cur.fetchone() current_integrations = await cur.fetchone()
result = [] result = []
for k in current_integrations.keys(): for k in current_integrations.keys():
result.append({"name": k, "integrated": current_integrations[k]}) result.append({"name": k, "integrated": current_integrations[k]})

View file

@ -4,9 +4,9 @@ from chalicelib.utils import pg_client
SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER] SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER]
def get_available_integrations(user_id): async def get_available_integrations(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
SELECT EXISTS((SELECT 1 SELECT EXISTS((SELECT 1
FROM public.oauth_authentication FROM public.oauth_authentication
@ -17,19 +17,19 @@ def get_available_integrations(user_id):
WHERE user_id = %(user_id)s)) AS jira;""", WHERE user_id = %(user_id)s)) AS jira;""",
{"user_id": user_id}) {"user_id": user_id})
) )
current_integrations = cur.fetchone() current_integrations = await cur.fetchone()
return dict(current_integrations) return dict(current_integrations)
def __get_default_integration(user_id): async def __get_default_integration(user_id):
current_integrations = get_available_integrations(user_id) current_integrations = await get_available_integrations(user_id)
return integration_github.PROVIDER if current_integrations["github"] else integration_jira_cloud.PROVIDER if \ return integration_github.PROVIDER if current_integrations["github"] else integration_jira_cloud.PROVIDER if \
current_integrations["jira"] else None current_integrations["jira"] else None
def get_integration(tenant_id, user_id, tool=None, for_delete=False): async def get_integration(tenant_id, user_id, tool=None, for_delete=False):
if tool is None: if tool is None:
tool = __get_default_integration(user_id=user_id) tool = await __get_default_integration(user_id=user_id)
if tool is None: if tool is None:
return {"errors": [f"no issue tracking tool found"]}, None return {"errors": [f"no issue tracking tool found"]}, None
tool = tool.upper() tool = tool.upper()
@ -37,6 +37,7 @@ def get_integration(tenant_id, user_id, tool=None, for_delete=False):
return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED_TOOLS}"]}, None return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED_TOOLS}"]}, None
if tool == integration_jira_cloud.PROVIDER: if tool == integration_jira_cloud.PROVIDER:
integration = integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id) integration = integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id)
await integration.init()
if not for_delete and integration.integration is not None and not integration.integration.get("valid", True): if not for_delete and integration.integration is not None and not integration.integration.get("valid", True):
return {"errors": ["JIRA: connexion issue/unauthorized"]}, integration return {"errors": ["JIRA: connexion issue/unauthorized"]}, integration
return None, integration return None, integration

View file

@ -28,8 +28,8 @@ NAME_QUERY = """\
""" """
def get(project_id, issue_id): async def get(project_id, issue_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""\ """\
SELECT SELECT
@ -39,16 +39,16 @@ def get(project_id, issue_id):
AND issue_id = %(issue_id)s;""", AND issue_id = %(issue_id)s;""",
{"project_id": project_id, "issue_id": issue_id} {"project_id": project_id, "issue_id": issue_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchone() data = await cur.fetchone()
if data is not None: if data is not None:
data["title"] = helper.get_issue_title(data["type"]) data["title"] = helper.get_issue_title(data["type"])
return helper.dict_to_camel_case(data) return helper.dict_to_camel_case(data)
def get_by_session_id(session_id, project_id, issue_type=None): async def get_by_session_id(session_id, project_id, issue_type=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
SELECT * SELECT *
FROM events_common.issues FROM events_common.issues
@ -59,12 +59,12 @@ def get_by_session_id(session_id, project_id, issue_type=None):
ORDER BY timestamp;""", ORDER BY timestamp;""",
{"session_id": session_id, "project_id": project_id, "type": issue_type}) {"session_id": session_id, "project_id": project_id, "type": issue_type})
) )
return helper.list_to_camel_case(cur.fetchall()) return helper.list_to_camel_case(await cur.fetchall())
def get_types_by_project(project_id): async def get_types_by_project(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""SELECT type, cur.mogrify(f"""SELECT type,
{ORDER_QUERY}>=0 AS visible, {ORDER_QUERY}>=0 AS visible,
{ORDER_QUERY} AS order, {ORDER_QUERY} AS order,
@ -73,7 +73,7 @@ def get_types_by_project(project_id):
FROM public.issues FROM public.issues
WHERE project_id = %(project_id)s) AS types WHERE project_id = %(project_id)s) AS types
ORDER BY "order";""", {"project_id": project_id})) ORDER BY "order";""", {"project_id": project_id}))
return helper.list_to_camel_case(cur.fetchall()) return helper.list_to_camel_case(await cur.fetchall())
def get_all_types(): def get_all_types():

View file

@ -14,8 +14,8 @@ class JobStatus:
CANCELLED = "cancelled" CANCELLED = "cancelled"
def get(job_id, project_id): async def get(job_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""SELECT * """SELECT *
FROM public.jobs FROM public.jobs
@ -23,8 +23,8 @@ def get(job_id, project_id):
AND project_id= %(project_id)s;""", AND project_id= %(project_id)s;""",
{"job_id": job_id, "project_id": project_id} {"job_id": job_id, "project_id": project_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchone() data = await cur.fetchone()
if data is None: if data is None:
return {} return {}
@ -33,23 +33,23 @@ def get(job_id, project_id):
return helper.dict_to_camel_case(data) return helper.dict_to_camel_case(data)
def get_all(project_id): async def get_all(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""SELECT * """SELECT *
FROM public.jobs FROM public.jobs
WHERE project_id = %(project_id)s;""", WHERE project_id = %(project_id)s;""",
{"project_id": project_id} {"project_id": project_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchall() data = await cur.fetchall()
for record in data: for record in data:
format_datetime(record) format_datetime(record)
return helper.list_to_camel_case(data) return helper.list_to_camel_case(data)
def create(project_id, user_id): async def create(project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
job = {"status": "scheduled", job = {"status": "scheduled",
"project_id": project_id, "project_id": project_id,
"action": Actions.DELETE_USER_DATA, "action": Actions.DELETE_USER_DATA,
@ -62,21 +62,21 @@ def create(project_id, user_id):
VALUES (%(project_id)s, %(description)s, %(status)s, %(action)s,%(reference_id)s, %(start_at)s) VALUES (%(project_id)s, %(description)s, %(status)s, %(action)s,%(reference_id)s, %(start_at)s)
RETURNING *;""", job) RETURNING *;""", job)
cur.execute(query=query) await cur.execute(query=query)
r = cur.fetchone() r = await cur.fetchone()
format_datetime(r) format_datetime(r)
record = helper.dict_to_camel_case(r) record = helper.dict_to_camel_case(r)
return record return record
def cancel_job(job_id, job): async def cancel_job(job_id, job):
job["status"] = JobStatus.CANCELLED job["status"] = JobStatus.CANCELLED
update(job_id=job_id, job=job) await update(job_id=job_id, job=job)
def update(job_id, job): async def update(job_id, job):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
job_data = { job_data = {
"job_id": job_id, "job_id": job_id,
"errors": job.get("errors"), "errors": job.get("errors"),
@ -91,9 +91,9 @@ def update(job_id, job):
WHERE job_id = %(job_id)s WHERE job_id = %(job_id)s
RETURNING *;""", job_data) RETURNING *;""", job_data)
cur.execute(query=query) await cur.execute(query=query)
r = cur.fetchone() r = await cur.fetchone()
format_datetime(r) format_datetime(r)
record = helper.dict_to_camel_case(r) record = helper.dict_to_camel_case(r)
return record return record
@ -105,8 +105,8 @@ def format_datetime(r):
r["start_at"] = TimeUTC.datetime_to_timestamp(r["start_at"]) r["start_at"] = TimeUTC.datetime_to_timestamp(r["start_at"])
def __get_session_ids_by_user_ids(project_id, user_ids): async def __get_session_ids_by_user_ids(project_id, user_ids):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""SELECT session_id """SELECT session_id
FROM public.sessions FROM public.sessions
@ -114,51 +114,51 @@ def __get_session_ids_by_user_ids(project_id, user_ids):
AND user_id IN %(userId)s AND user_id IN %(userId)s
LIMIT 1000;""", LIMIT 1000;""",
{"project_id": project_id, "userId": tuple(user_ids)}) {"project_id": project_id, "userId": tuple(user_ids)})
cur.execute(query=query) await cur.execute(query=query)
ids = cur.fetchall() ids = await cur.fetchall()
return [s["session_id"] for s in ids] return [s["session_id"] for s in ids]
def __delete_sessions_by_session_ids(session_ids): async def __delete_sessions_by_session_ids(session_ids):
with pg_client.PostgresClient(unlimited_query=True) as cur: async with pg_client.cursor(unlimited_query=True) as cur:
query = cur.mogrify( query = cur.mogrify(
"""DELETE FROM public.sessions """DELETE FROM public.sessions
WHERE session_id IN %(session_ids)s""", WHERE session_id IN %(session_ids)s""",
{"session_ids": tuple(session_ids)} {"session_ids": tuple(session_ids)}
) )
cur.execute(query=query) await cur.execute(query=query)
def __delete_session_mobs_by_session_ids(session_ids, project_id): async def __delete_session_mobs_by_session_ids(session_ids, project_id):
sessions_mobs.delete_mobs(session_ids=session_ids, project_id=project_id) await sessions_mobs.delete_mobs(session_ids=session_ids, project_id=project_id)
sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id) await sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id)
def get_scheduled_jobs(): async def get_scheduled_jobs():
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""SELECT * """SELECT *
FROM public.jobs FROM public.jobs
WHERE status = %(status)s WHERE status = %(status)s
AND start_at <= (now() at time zone 'utc');""", AND start_at <= (now() at time zone 'utc');""",
{"status": JobStatus.SCHEDULED}) {"status": JobStatus.SCHEDULED})
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchall() data = await cur.fetchall()
return helper.list_to_camel_case(data) return helper.list_to_camel_case(data)
def execute_jobs(): async def execute_jobs():
jobs = get_scheduled_jobs() jobs = await get_scheduled_jobs()
for job in jobs: for job in jobs:
print(f"Executing jobId:{job['jobId']}") print(f"Executing jobId:{job['jobId']}")
try: try:
if job["action"] == Actions.DELETE_USER_DATA: if job["action"] == Actions.DELETE_USER_DATA:
session_ids = __get_session_ids_by_user_ids(project_id=job["projectId"], session_ids = await __get_session_ids_by_user_ids(project_id=job["projectId"],
user_ids=[job["referenceId"]]) user_ids=[job["referenceId"]])
if len(session_ids) > 0: if len(session_ids) > 0:
print(f"Deleting {len(session_ids)} sessions") print(f"Deleting {len(session_ids)} sessions")
__delete_sessions_by_session_ids(session_ids=session_ids) await __delete_sessions_by_session_ids(session_ids=session_ids)
__delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"]) await __delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
else: else:
raise Exception(f"The action '{job['action']}' not supported.") raise Exception(f"The action '{job['action']}' not supported.")
@ -169,4 +169,4 @@ def execute_jobs():
job["errors"] = str(e) job["errors"] = str(e)
print(f"Job failed {job['jobId']}") print(f"Job failed {job['jobId']}")
update(job["jobId"], job) await update(job["jobId"], job)

View file

@ -1,13 +1,14 @@
from chalicelib.core import log_tools from chalicelib.core import log_tools
import requests import httpx
from schemas import schemas from schemas import schemas
IN_TY = "bugsnag" IN_TY = "bugsnag"
def list_projects(auth_token): async def list_projects(auth_token):
r = requests.get(url="https://api.bugsnag.com/user/organizations", async with httpx.AsyncClient() as client:
r = await client.get(url="https://api.bugsnag.com/user/organizations",
params={"per_page": "100"}, params={"per_page": "100"},
headers={"Authorization": "token " + auth_token, "X-Version": "2"}) headers={"Authorization": "token " + auth_token, "X-Version": "2"})
if r.status_code != 200: if r.status_code != 200:
@ -19,8 +20,8 @@ def list_projects(auth_token):
orgs = [] orgs = []
for i in r.json(): for i in r.json():
async with httpx.AsyncClient() as client:
pr = requests.get(url="https://api.bugsnag.com/organizations/%s/projects" % i["id"], pr = await client.get(url="https://api.bugsnag.com/organizations/%s/projects" % i["id"],
params={"per_page": "100"}, params={"per_page": "100"},
headers={"Authorization": "token " + auth_token, "X-Version": "2"}) headers={"Authorization": "token " + auth_token, "X-Version": "2"})
if pr.status_code != 200: if pr.status_code != 200:
@ -33,43 +34,43 @@ def list_projects(auth_token):
return orgs return orgs
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "authorizationToken" in changes: if "authorizationToken" in changes:
options["authorizationToken"] = changes.pop("authorizationToken") options["authorizationToken"] = changes.pop("authorizationToken")
if "bugsnagProjectId" in changes: if "bugsnagProjectId" in changes:
options["bugsnagProjectId"] = changes.pop("bugsnagProjectId") options["bugsnagProjectId"] = changes.pop("bugsnagProjectId")
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, authorization_token, bugsnag_project_id): async def add(tenant_id, project_id, authorization_token, bugsnag_project_id):
options = { options = {
"bugsnagProjectId": bugsnag_project_id, "bugsnagProjectId": bugsnag_project_id,
"authorizationToken": authorization_token, "authorizationToken": authorization_token,
} }
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data:schemas.IntegrationBugsnagSchema ): async def add_edit(tenant_id, project_id, data:schemas.IntegrationBugsnagSchema ):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"authorizationToken": data.authorization_token, changes={"authorizationToken": data.authorization_token,
"bugsnagProjectId": data.bugsnag_project_id}) "bugsnagProjectId": data.bugsnag_project_id})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
authorization_token=data.authorization_token, authorization_token=data.authorization_token,
bugsnag_project_id=data.bugsnag_project_id) bugsnag_project_id=data.bugsnag_project_id)

View file

@ -29,14 +29,14 @@ def __make_stream_filter(start_time, end_time):
return __valid_stream return __valid_stream
def __find_streams(project_id, log_group, client, token, stream_filter): async def __find_streams(project_id, log_group, client, token, stream_filter):
d_args = {"logGroupName": log_group, "orderBy": 'LastEventTime', 'limit': 50} d_args = {"logGroupName": log_group, "orderBy": 'LastEventTime', 'limit': 50}
if token is not None and len(token) > 0: if token is not None and len(token) > 0:
d_args["nextToken"] = token d_args["nextToken"] = token
data = client.describe_log_streams(**d_args) data = client.describe_log_streams(**d_args)
streams = list(filter(stream_filter, data['logStreams'])) streams = list(filter(stream_filter, data['logStreams']))
if 'nextToken' not in data: if 'nextToken' not in data:
save_new_token(project_id=project_id, token=token) await save_new_token(project_id=project_id, token=token)
return streams return streams
return streams + __find_streams(project_id, log_group, client, data['nextToken'], stream_filter) return streams + __find_streams(project_id, log_group, client, data['nextToken'], stream_filter)
@ -69,15 +69,15 @@ def list_log_groups(aws_access_key_id, aws_secret_access_key, region):
return __find_groups(logs, None) return __find_groups(logs, None)
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "authorization_token" in changes: if "authorization_token" in changes:
options["authorization_token"] = changes.pop("authorization_token") options["authorization_token"] = changes.pop("authorization_token")
@ -85,34 +85,34 @@ def update(tenant_id, project_id, changes):
options["project_id"] = changes.pop("project_id") options["project_id"] = changes.pop("project_id")
if len(options.keys()) > 0: if len(options.keys()) > 0:
changes["options"] = options changes["options"] = options
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
def add(tenant_id, project_id, aws_access_key_id, aws_secret_access_key, log_group_name, region): async def add(tenant_id, project_id, aws_access_key_id, aws_secret_access_key, log_group_name, region):
return log_tools.add(project_id=project_id, integration=IN_TY, return await log_tools.add(project_id=project_id, integration=IN_TY,
options={"awsAccessKeyId": aws_access_key_id, options={"awsAccessKeyId": aws_access_key_id,
"awsSecretAccessKey": aws_secret_access_key, "awsSecretAccessKey": aws_secret_access_key,
"logGroupName": log_group_name, "region": region}) "logGroupName": log_group_name, "region": region})
def save_new_token(project_id, token): async def save_new_token(project_id, token):
update(tenant_id=None, project_id=project_id, changes={"last_token": token}) await update(tenant_id=None, project_id=project_id, changes={"last_token": token})
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"awsAccessKeyId": data.aws_access_key_id, changes={"awsAccessKeyId": data.aws_access_key_id,
"awsSecretAccessKey": data.aws_secret_access_key, "awsSecretAccessKey": data.aws_secret_access_key,
"logGroupName": data.log_group_name, "logGroupName": data.log_group_name,
"region": data.region}) "region": data.region})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
aws_access_key_id=data.aws_access_key_id, aws_access_key_id=data.aws_access_key_id,
aws_secret_access_key=data.aws_secret_access_key, aws_secret_access_key=data.aws_secret_access_key,

View file

@ -4,41 +4,41 @@ from schemas import schemas
IN_TY = "datadog" IN_TY = "datadog"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "apiKey" in changes: if "apiKey" in changes:
options["apiKey"] = changes["apiKey"] options["apiKey"] = changes["apiKey"]
if "applicationKey" in changes: if "applicationKey" in changes:
options["applicationKey"] = changes["applicationKey"] options["applicationKey"] = changes["applicationKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, api_key, application_key): async def add(tenant_id, project_id, api_key, application_key):
options = {"apiKey": api_key, "applicationKey": application_key} options = {"apiKey": api_key, "applicationKey": application_key}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"apiKey": data.api_key, changes={"apiKey": data.api_key,
"applicationKey": data.application_key}) "applicationKey": data.application_key})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
api_key=data.api_key, api_key=data.api_key,
application_key=data.application_key) application_key=data.application_key)

View file

@ -9,15 +9,15 @@ logging.getLogger('elasticsearch').level = logging.ERROR
IN_TY = "elasticsearch" IN_TY = "elasticsearch"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "host" in changes: if "host" in changes:
@ -31,28 +31,28 @@ def update(tenant_id, project_id, changes):
if "port" in changes: if "port" in changes:
options["port"] = changes["port"] options["port"] = changes["port"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, host, api_key_id, api_key, indexes, port): async def add(tenant_id, project_id, host, api_key_id, api_key, indexes, port):
options = { options = {
"host": host, "apiKeyId": api_key_id, "apiKey": api_key, "indexes": indexes, "port": port "host": host, "apiKeyId": api_key_id, "apiKey": api_key, "indexes": indexes, "port": port
} }
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"host": data.host, "apiKeyId": data.api_key_id, "apiKey": data.api_key, changes={"host": data.host, "apiKeyId": data.api_key_id, "apiKey": data.api_key,
"indexes": data.indexes, "port": data.port}) "indexes": data.indexes, "port": data.port})
else: else:
return add(tenant_id=tenant_id, project_id=project_id, return await add(tenant_id=tenant_id, project_id=project_id,
host=data.host, api_key=data.api_key, api_key_id=data.api_key_id, host=data.host, api_key=data.api_key, api_key_id=data.api_key_id,
indexes=data.indexes, port=data.port) indexes=data.indexes, port=data.port)

View file

@ -4,15 +4,15 @@ from schemas import schemas
IN_TY = "newrelic" IN_TY = "newrelic"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "region" in changes: if "region" in changes:
options["region"] = changes["region"] options["region"] = changes["region"]
@ -21,28 +21,28 @@ def update(tenant_id, project_id, changes):
if "xQueryKey" in changes: if "xQueryKey" in changes:
options["xQueryKey"] = changes["xQueryKey"] options["xQueryKey"] = changes["xQueryKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, application_id, x_query_key, region): async def add(tenant_id, project_id, application_id, x_query_key, region):
# region=False => US; region=True => EU # region=False => US; region=True => EU
options = {"applicationId": application_id, "xQueryKey": x_query_key, "region": region} options = {"applicationId": application_id, "xQueryKey": x_query_key, "region": region}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"applicationId": data.application_id, changes={"applicationId": data.application_id,
"xQueryKey": data.x_query_key, "xQueryKey": data.x_query_key,
"region": data.region}) "region": data.region})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
application_id=data.application_id, application_id=data.application_id,
x_query_key=data.x_query_key, x_query_key=data.x_query_key,

View file

@ -4,36 +4,36 @@ from schemas import schemas
IN_TY = "rollbar" IN_TY = "rollbar"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "accessToken" in changes: if "accessToken" in changes:
options["accessToken"] = changes["accessToken"] options["accessToken"] = changes["accessToken"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, access_token): async def add(tenant_id, project_id, access_token):
options = {"accessToken": access_token} options = {"accessToken": access_token}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"accessToken": data.access_token}) changes={"accessToken": data.access_token})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
access_token=data.access_token) access_token=data.access_token)

View file

@ -1,19 +1,20 @@
import requests import httpx
from chalicelib.core import log_tools from chalicelib.core import log_tools
from schemas import schemas from schemas import schemas
IN_TY = "sentry" IN_TY = "sentry"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "organizationSlug" in changes: if "organizationSlug" in changes:
options["organizationSlug"] = changes["organizationSlug"] options["organizationSlug"] = changes["organizationSlug"]
@ -22,40 +23,41 @@ def update(tenant_id, project_id, changes):
if "token" in changes: if "token" in changes:
options["token"] = changes["token"] options["token"] = changes["token"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
def add(tenant_id, project_id, project_slug, organization_slug, token): async def add(tenant_id, project_id, project_slug, organization_slug, token):
options = { options = {
"organizationSlug": organization_slug, "projectSlug": project_slug, "token": token "organizationSlug": organization_slug, "projectSlug": project_slug, "token": token
} }
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"projectSlug": data.project_slug, changes={"projectSlug": data.project_slug,
"organizationSlug": data.organization_slug, "organizationSlug": data.organization_slug,
"token": data.token}) "token": data.token})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
project_slug=data.project_slug, project_slug=data.project_slug,
organization_slug=data.organization_slug, organization_slug=data.organization_slug,
token=data.token) token=data.token)
def proxy_get(tenant_id, project_id, event_id): async def proxy_get(tenant_id, project_id, event_id):
i = get(project_id) i = await get(project_id)
if i is None: if i is None:
return {} return {}
r = requests.get( async with httpx.AsyncClient() as client:
r = await client.get(
url="https://sentry.io/api/0/projects/%(organization_slug)s/%(project_slug)s/events/%(event_id)s/" % { url="https://sentry.io/api/0/projects/%(organization_slug)s/%(project_slug)s/events/%(event_id)s/" % {
"organization_slug": i["organizationSlug"], "project_slug": i["projectSlug"], "event_id": event_id}, "organization_slug": i["organizationSlug"], "project_slug": i["projectSlug"], "event_id": event_id},
headers={"Authorization": "Bearer " + i["token"]}) headers={"Authorization": "Bearer " + i["token"]})

View file

@ -4,39 +4,39 @@ from schemas import schemas
IN_TY = "stackdriver" IN_TY = "stackdriver"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "serviceAccountCredentials" in changes: if "serviceAccountCredentials" in changes:
options["serviceAccountCredentials"] = changes["serviceAccountCredentials"] options["serviceAccountCredentials"] = changes["serviceAccountCredentials"]
if "logName" in changes: if "logName" in changes:
options["logName"] = changes["logName"] options["logName"] = changes["logName"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, service_account_credentials, log_name): async def add(tenant_id, project_id, service_account_credentials, log_name):
options = {"serviceAccountCredentials": service_account_credentials, "logName": log_name} options = {"serviceAccountCredentials": service_account_credentials, "logName": log_name}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"serviceAccountCredentials": data.service_account_credentials, changes={"serviceAccountCredentials": data.service_account_credentials,
"logName": data.log_name}) "logName": data.log_name})
else: else:
return add(tenant_id=tenant_id, project_id=project_id, return await add(tenant_id=tenant_id, project_id=project_id,
service_account_credentials=data.service_account_credentials, service_account_credentials=data.service_account_credentials,
log_name=data.log_name) log_name=data.log_name)

View file

@ -4,15 +4,15 @@ from schemas import schemas
IN_TY = "sumologic" IN_TY = "sumologic"
def get_all(tenant_id): async def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY) return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id): async def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY) return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes): async def update(tenant_id, project_id, changes):
options = {} options = {}
if "region" in changes: if "region" in changes:
@ -23,31 +23,31 @@ def update(tenant_id, project_id, changes):
if "accessKey" in changes: if "accessKey" in changes:
options["accessKey"] = changes["accessKey"] options["accessKey"] = changes["accessKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options) return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, access_id, access_key, region): async def add(tenant_id, project_id, access_id, access_key, region):
options = { options = {
"accessId": access_id, "accessId": access_id,
"accessKey": access_key, "accessKey": access_key,
"region": region "region": region
} }
return log_tools.add(project_id=project_id, integration=IN_TY, options=options) return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id): async def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY) return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema): async def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema):
s = get(project_id) s = await get(project_id)
if s is not None: if s is not None:
return update(tenant_id=tenant_id, project_id=project_id, return await update(tenant_id=tenant_id, project_id=project_id,
changes={"accessId": data.access_id, changes={"accessId": data.access_id,
"accessKey": data.access_key, "accessKey": data.access_key,
"region": data.region}) "region": data.region})
else: else:
return add(tenant_id=tenant_id, return await add(tenant_id=tenant_id,
project_id=project_id, project_id=project_id,
access_id=data.access_id, access_id=data.access_id,
access_key=data.access_key, access_key=data.access_key,

View file

@ -4,10 +4,10 @@ import json
EXCEPT = ["jira_server", "jira_cloud"] EXCEPT = ["jira_server", "jira_cloud"]
def search(project_id): async def search(project_id):
result = [] result = []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""\ """\
SELECT supported_integrations.name, SELECT supported_integrations.name,
@ -21,17 +21,17 @@ def search(project_id):
FROM unnest(enum_range(NULL::integration_provider)) AS supported_integrations(name);""", FROM unnest(enum_range(NULL::integration_provider)) AS supported_integrations(name);""",
{"project_id": project_id}) {"project_id": project_id})
) )
r = cur.fetchall() r = await cur.fetchall()
for k in r: for k in r:
if k["count"] > 0 and k["name"] not in EXCEPT: if k["count"] > 0 and k["name"] not in EXCEPT:
result.append({"value": helper.key_to_camel_case(k["name"]), "type": "logTool"}) result.append({"value": helper.key_to_camel_case(k["name"]), "type": "logTool"})
return {"data": result} return {"data": result}
def add(project_id, integration, options): async def add(project_id, integration, options):
options = json.dumps(options) options = json.dumps(options)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""\ """\
INSERT INTO public.integrations(project_id, provider, options) INSERT INTO public.integrations(project_id, provider, options)
@ -39,13 +39,13 @@ def add(project_id, integration, options):
RETURNING *;""", RETURNING *;""",
{"project_id": project_id, "provider": integration, "options": options}) {"project_id": project_id, "provider": integration, "options": options})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(helper.flatten_nested_dicts(r)) return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))
def get(project_id, integration): async def get(project_id, integration):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""\ """\
SELECT integrations.* SELECT integrations.*
@ -56,13 +56,13 @@ def get(project_id, integration):
LIMIT 1;""", LIMIT 1;""",
{"project_id": project_id, "provider": integration}) {"project_id": project_id, "provider": integration})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(helper.flatten_nested_dicts(r)) return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))
def get_all_by_type(integration): async def get_all_by_type(integration):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""\ """\
SELECT integrations.* SELECT integrations.*
@ -70,19 +70,19 @@ def get_all_by_type(integration):
WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""", WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""",
{"provider": integration}) {"provider": integration})
) )
r = cur.fetchall() r = await cur.fetchall()
return helper.list_to_camel_case(r, flatten=True) return helper.list_to_camel_case(r, flatten=True)
def edit(project_id, integration, changes): async def edit(project_id, integration, changes):
if "projectId" in changes: if "projectId" in changes:
changes.pop("project_id") changes.pop("project_id")
if "integration" in changes: if "integration" in changes:
changes.pop("integration") changes.pop("integration")
if len(changes.keys()) == 0: if len(changes.keys()) == 0:
return None return None
with pg_client.PostgresClient() as cur: with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
UPDATE public.integrations UPDATE public.integrations
SET options=options||%(changes)s SET options=options||%(changes)s
@ -90,12 +90,12 @@ def edit(project_id, integration, changes):
RETURNING *;""", RETURNING *;""",
{"project_id": project_id, "provider": integration, "changes": json.dumps(changes)}) {"project_id": project_id, "provider": integration, "changes": json.dumps(changes)})
) )
return helper.dict_to_camel_case(helper.flatten_nested_dicts(cur.fetchone())) return helper.dict_to_camel_case(helper.flatten_nested_dicts(await cur.fetchone()))
def delete(project_id, integration): async def delete(project_id, integration):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
DELETE FROM public.integrations DELETE FROM public.integrations
WHERE project_id=%(project_id)s AND provider=%(provider)s;""", WHERE project_id=%(project_id)s AND provider=%(provider)s;""",
@ -104,9 +104,9 @@ def delete(project_id, integration):
return {"state": "success"} return {"state": "success"}
def get_all_by_tenant(tenant_id, integration): async def get_all_by_tenant(tenant_id, integration):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT integrations.* """SELECT integrations.*
FROM public.integrations INNER JOIN public.projects USING(project_id) FROM public.integrations INNER JOIN public.projects USING(project_id)
@ -114,5 +114,5 @@ def get_all_by_tenant(tenant_id, integration):
AND projects.deleted_at ISNULL;""", AND projects.deleted_at ISNULL;""",
{"provider": integration}) {"provider": integration})
) )
r = cur.fetchall() r = await cur.fetchall()
return helper.list_to_camel_case(r, flatten=True) return helper.list_to_camel_case(r, flatten=True)

View file

@ -13,8 +13,8 @@ def column_names():
return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)] return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]
def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool: async def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
constraints = column_names() constraints = column_names()
if exclude_index: if exclude_index:
del constraints[exclude_index - 1] del constraints[exclude_index - 1]
@ -26,21 +26,21 @@ def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -
AND deleted_at ISNULL AND deleted_at ISNULL
AND ({" OR ".join(constraints)})) AS exists;""", AND ({" OR ".join(constraints)})) AS exists;""",
{"project_id": project_id, "name": name}) {"project_id": project_id, "name": name})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
return row["exists"] return row["exists"]
def get(project_id): async def get(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(column_names())} query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects FROM public.projects
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
AND deleted_at ISNULL AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id}) LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
metas = cur.fetchone() metas = await cur.fetchone()
results = [] results = []
if metas is not None: if metas is not None:
for i, k in enumerate(metas.keys()): for i, k in enumerate(metas.keys()):
@ -49,17 +49,17 @@ def get(project_id):
return results return results
def get_batch(project_ids): async def get_batch(project_ids):
if project_ids is None or len(project_ids) == 0: if project_ids is None or len(project_ids) == 0:
return [] return []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT project_id, {",".join(column_names())} query = cur.mogrify(f"""SELECT project_id, {",".join(column_names())}
FROM public.projects FROM public.projects
WHERE project_id IN %(project_ids)s WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)}) {"project_ids": tuple(project_ids)})
cur.execute(query=query) await cur.execute(query=query)
full_metas = cur.fetchall() full_metas = await cur.fetchall()
results = {} results = {}
if full_metas is not None and len(full_metas) > 0: if full_metas is not None and len(full_metas) > 0:
for metas in full_metas: for metas in full_metas:
@ -79,8 +79,8 @@ def index_to_colname(index):
return f"metadata_{index}" return f"metadata_{index}"
def __get_available_index(project_id): async def __get_available_index(project_id):
used_indexs = get(project_id) used_indexs = await get(project_id)
used_indexs = [i["index"] for i in used_indexs] used_indexs = [i["index"] for i in used_indexs]
if len(used_indexs) >= MAX_INDEXES: if len(used_indexs) >= MAX_INDEXES:
return -1 return -1
@ -90,15 +90,15 @@ def __get_available_index(project_id):
return i return i
def __edit(project_id, col_index, colname, new_name): async def __edit(project_id, col_index, colname, new_name):
if new_name is None or len(new_name) == 0: if new_name is None or len(new_name) == 0:
return {"errors": ["key value invalid"]} return {"errors": ["key value invalid"]}
old_metas = get(project_id) old_metas = await get(project_id)
old_metas = {k["index"]: k for k in old_metas} old_metas = {k["index"]: k for k in old_metas}
if col_index not in list(old_metas.keys()): if col_index not in list(old_metas.keys()):
return {"errors": ["custom field not found"]} return {"errors": ["custom field not found"]}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if old_metas[col_index]["key"] != new_name: if old_metas[col_index]["key"] != new_name:
query = cur.mogrify(f"""UPDATE public.projects query = cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s SET {colname} = %(value)s
@ -106,76 +106,76 @@ def __edit(project_id, col_index, colname, new_name):
AND deleted_at ISNULL AND deleted_at ISNULL
RETURNING {colname};""", RETURNING {colname};""",
{"project_id": project_id, "value": new_name}) {"project_id": project_id, "value": new_name})
cur.execute(query=query) await cur.execute(query=query)
new_name = cur.fetchone()[colname] new_name = await cur.fetchone()[colname]
old_metas[col_index]["key"] = new_name old_metas[col_index]["key"] = new_name
return {"data": old_metas[col_index]} return {"data": old_metas[col_index]}
def edit(tenant_id, project_id, index: int, new_name: str): async def edit(tenant_id, project_id, index: int, new_name: str):
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=index): if await __exists_by_name(project_id=project_id, name=new_name, exclude_index=index):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
return __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name) return await __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)
async def delete(tenant_id, project_id, index: int):
    """Clear the metadata column at `index`: NULL it on the project row and on
    every session row that still carries a value, then return the refreshed
    metadata list.
    """
    index = int(index)
    existing_indexes = [m["index"] for m in await get(project_id)]
    if index not in existing_indexes:
        return {"errors": ["custom field not found"]}
    colname = index_to_colname(index)
    async with pg_client.cursor() as cur:
        # Drop the key definition from the project itself.
        await cur.execute(query=cur.mogrify(f"""UPDATE public.projects
                                                SET {colname}= NULL
                                                WHERE project_id = %(project_id)s AND deleted_at ISNULL;""",
                                            {"project_id": project_id}))
        # Wipe any recorded values on sessions of this project.
        await cur.execute(query=cur.mogrify(f"""UPDATE public.sessions
                                                SET {colname}= NULL
                                                WHERE project_id = %(project_id)s
                                                  AND {colname} IS NOT NULL
                                                """,
                                            {"project_id": project_id}))
        return {"data": await get(project_id)}
async def add(tenant_id, project_id, new_name):
    """Register a new metadata key on the first free metadata column.

    Returns ``{"data": {"key": ..., "index": ...}}`` on success, or an
    ``errors`` payload when all metadata slots are used. Raises HTTP 400 on a
    duplicate key name.
    """
    index = await __get_available_index(project_id=project_id)
    if index < 1:
        return {"errors": ["maximum allowed metadata reached"]}
    # BUG FIX: __exists_by_name is a coroutine function; without `await` the
    # bare coroutine object is always truthy, so every call raised
    # "name already exists".
    if await __exists_by_name(project_id=project_id, name=new_name, exclude_index=None):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
    async with pg_client.cursor() as cur:
        colname = index_to_colname(index)
        query = cur.mogrify(f"""UPDATE public.projects
                                SET {colname}= %(key)s
                                WHERE project_id =%(project_id)s
                                RETURNING {colname};""",
                            {"key": new_name, "project_id": project_id})
        await cur.execute(query=query)
        # BUG FIX: `await cur.fetchone()[colname]` subscripts the coroutine
        # before awaiting it (TypeError); await first, then index the row.
        col_val = (await cur.fetchone())[colname]
    return {"data": {"key": col_val, "index": index}}
def search(tenant_id, project_id, key, value): async def search(tenant_id, project_id, key, value):
value = value + "%" value = value + "%"
s_query = [] s_query = []
for f in column_names(): for f in column_names():
s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}") s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(s_query)} query = cur.mogrify(f"""SELECT {",".join(s_query)}
FROM public.projects FROM public.projects
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
AND deleted_at ISNULL AND deleted_at ISNULL
LIMIT 1;""", LIMIT 1;""",
{"key": key, "project_id": project_id}) {"key": key, "project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
all_metas = cur.fetchone() all_metas = await cur.fetchone()
key = None key = None
for c in all_metas: for c in all_metas:
if all_metas[c]: if all_metas[c]:
@ -189,29 +189,29 @@ def search(tenant_id, project_id, key, value):
ORDER BY "{key}" ORDER BY "{key}"
LIMIT 20;""", LIMIT 20;""",
{"value": value, "project_id": project_id}) {"value": value, "project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
value = cur.fetchall() value = await cur.fetchall()
return {"data": [k[key] for k in value]} return {"data": [k[key] for k in value]}
async def get_available_keys(project_id):
    """Return just the key names of every metadata entry defined on the project."""
    return [meta["key"] for meta in await get(project_id=project_id)]
def get_by_session_id(project_id, session_id): async def get_by_session_id(project_id, session_id):
all_metas = get(project_id=project_id) all_metas = await get(project_id=project_id)
if len(all_metas) == 0: if len(all_metas) == 0:
return [] return []
keys = {index_to_colname(k["index"]): k["key"] for k in all_metas} keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(keys.keys())} query = cur.mogrify(f"""SELECT {",".join(keys.keys())}
FROM public.sessions FROM public.sessions
WHERE project_id= %(project_id)s WHERE project_id= %(project_id)s
AND session_id=%(session_id)s;""", AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id}) {"session_id": session_id, "project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
session_metas = cur.fetchall() session_metas = await cur.fetchall()
results = [] results = []
for m in session_metas: for m in session_metas:
r = {} r = {}
@ -221,18 +221,18 @@ def get_by_session_id(project_id, session_id):
return results return results
def get_keys_by_projects(project_ids): async def get_keys_by_projects(project_ids):
if project_ids is None or len(project_ids) == 0: if project_ids is None or len(project_ids) == 0:
return {} return {}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT project_id,{",".join(column_names())} query = cur.mogrify(f"""SELECT project_id,{",".join(column_names())}
FROM public.projects FROM public.projects
WHERE project_id IN %(project_ids)s WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)}) {"project_ids": tuple(project_ids)})
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
results = {} results = {}
for r in rows: for r in rows:
project_id = r.pop("project_id") project_id = r.pop("project_id")
@ -270,7 +270,7 @@ def get_keys_by_projects(project_ids):
# return {"errors": ["duplicate keys"]} # return {"errors": ["duplicate keys"]}
# to_delete = list(set(old_indexes) - set(new_indexes)) # to_delete = list(set(old_indexes) - set(new_indexes))
# #
# with pg_client.PostgresClient() as cur: # async with pg_client.cursor() as cur:
# for d in to_delete: # for d in to_delete:
# delete(tenant_id=tenant_id, project_id=project_id, index=d) # delete(tenant_id=tenant_id, project_id=project_id, index=d)
# #
@ -284,10 +284,10 @@ def get_keys_by_projects(project_ids):
# return {"data": get(project_id)} # return {"data": get(project_id)}
def get_remaining_metadata_with_count(tenant_id): async def get_remaining_metadata_with_count(tenant_id):
all_projects = projects.get_projects(tenant_id=tenant_id) all_projects = await projects.get_projects(tenant_id=tenant_id)
results = [] results = []
used_metas = get_batch([p["projectId"] for p in all_projects]) used_metas = await get_batch([p["projectId"] for p in all_projects])
for p in all_projects: for p in all_projects:
if MAX_INDEXES < 0: if MAX_INDEXES < 0:
remaining = -1 remaining = -1

File diff suppressed because it is too large Load diff

View file

@ -3,11 +3,11 @@ from chalicelib.utils.storage import StorageClient
from decouple import config from decouple import config
async def sign_keys(project_id, session_id, keys):
    """Produce a pre-signed sharing URL (1 hour expiry) for each mobile-session
    file key, scoped under ``<project_key>/<session_id>/``.
    """
    project_key = await projects.get_project_key(project_id)
    return [
        await StorageClient.get_presigned_url_for_sharing(
            bucket=config("iosBucket"),
            key=f"{project_key}/{session_id}/{k}",
            expires_in=60 * 60,
        )
        for k in keys
    ]

View file

@ -4,9 +4,9 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def get_all(tenant_id, user_id): async def get_all(tenant_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
SELECT notifications.*, SELECT notifications.*,
user_viewed_notifications.notification_id NOTNULL AS viewed user_viewed_notifications.notification_id NOTNULL AS viewed
@ -19,15 +19,15 @@ def get_all(tenant_id, user_id):
LIMIT 100;""", LIMIT 100;""",
{"user_id": user_id}) {"user_id": user_id})
) )
rows = helper.list_to_camel_case(cur.fetchall()) rows = helper.list_to_camel_case(await cur.fetchall())
for r in rows: for r in rows:
r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
return rows return rows
def get_all_count(tenant_id, user_id): async def get_all_count(tenant_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
SELECT COALESCE(COUNT(notifications.*),0) AS count SELECT COALESCE(COUNT(notifications.*),0) AS count
FROM public.notifications FROM public.notifications
@ -37,19 +37,19 @@ def get_all_count(tenant_id, user_id):
WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""", WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""",
{"user_id": user_id}) {"user_id": user_id})
) )
row = cur.fetchone() row = await cur.fetchone()
return row return row
def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): async def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
if len(notification_ids) == 0 and endTimestamp is None: if len(notification_ids) == 0 and endTimestamp is None:
return False return False
if startTimestamp is None: if startTimestamp is None:
startTimestamp = 0 startTimestamp = 0
notification_ids = [(user_id, id) for id in notification_ids] notification_ids = [(user_id, id) for id in notification_ids]
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if len(notification_ids) > 0: if len(notification_ids) > 0:
cur.executemany( await cur.executemany(
"INSERT INTO public.user_viewed_notifications(user_id, notification_id) VALUES (%s,%s) ON CONFLICT DO NOTHING;", "INSERT INTO public.user_viewed_notifications(user_id, notification_id) VALUES (%s,%s) ON CONFLICT DO NOTHING;",
notification_ids) notification_ids)
else: else:
@ -64,14 +64,14 @@ def view_notification(user_id, notification_ids=[], tenant_id=None, startTimesta
"endTimestamp": endTimestamp} "endTimestamp": endTimestamp}
# print('-------------------') # print('-------------------')
# print(cur.mogrify(query, params)) # print(cur.mogrify(query, params))
cur.execute(cur.mogrify(query, params)) await cur.execute(cur.mogrify(query, params))
return True return True
def create(notifications): async def create(notifications):
if len(notifications) == 0: if len(notifications) == 0:
return [] return []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
values = [] values = []
for n in notifications: for n in notifications:
clone = dict(n) clone = dict(n)
@ -86,10 +86,10 @@ def create(notifications):
"(%(userId)s, %(title)s, %(description)s, %(buttonText)s, %(buttonUrl)s, %(imageUrl)s,%(options)s)", "(%(userId)s, %(title)s, %(description)s, %(buttonText)s, %(buttonUrl)s, %(imageUrl)s,%(options)s)",
clone).decode('UTF-8') clone).decode('UTF-8')
) )
cur.execute( await cur.execute(
f"""INSERT INTO public.notifications(user_id, title, description, button_text, button_url, image_url, options) f"""INSERT INTO public.notifications(user_id, title, description, button_text, button_url, image_url, options)
VALUES {",".join(values)} RETURNING *;""") VALUES {",".join(values)} RETURNING *;""")
rows = helper.list_to_camel_case(cur.fetchall()) rows = helper.list_to_camel_case(await cur.fetchall())
for r in rows: for r in rows:
r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"]) r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
r["viewed"] = False r["viewed"] = False

View file

@ -84,7 +84,7 @@ JOURNEY_TYPES = {
# sort by top 5 according to sessions_count at the CTE level # sort by top 5 according to sessions_count at the CTE level
# final part project data without grouping # final part project data without grouping
# if start-point is selected, the selected event is ranked n°1 # if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis): async def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sub_events = [] sub_events = []
start_points_from = "pre_ranked_events" start_points_from = "pre_ranked_events"
sub_sessions_extra_projection = "" sub_sessions_extra_projection = ""
@ -377,7 +377,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
avg_time_from_previous avg_time_from_previous
FROM n{i})""") FROM n{i})""")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
pg_query = f"""\ pg_query = f"""\
WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection} WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
FROM public.sessions {" ".join(start_join)} FROM public.sessions {" ".join(start_join)}
@ -413,13 +413,13 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
logger.debug("----------------------") logger.debug("----------------------")
logger.debug(query) logger.debug(query)
logger.debug("----------------------") logger.debug("----------------------")
cur.execute(query) await cur.execute(query)
if time() - _now > 2: if time() - _now > 2:
logger.warning(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<") logger.warning(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
logger.warning("----------------------") logger.warning("----------------------")
logger.warning(query) logger.warning(query)
logger.warning("----------------------") logger.warning("----------------------")
rows = cur.fetchall() rows = await cur.fetchall()
return __transform_journey(rows=rows, reverse_path=reverse) return __transform_journey(rows=rows, reverse_path=reverse)
@ -515,7 +515,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# time_constraint=True) # time_constraint=True)
# pg_sub_query.append("user_id IS NOT NULL") # pg_sub_query.append("user_id IS NOT NULL")
# pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") # pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)")
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, # pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week,
# COUNT(DISTINCT connexions_list.user_id) AS users_count, # COUNT(DISTINCT connexions_list.user_id) AS users_count,
# ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users # ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
@ -562,7 +562,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", # pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
# time_constraint=True) # time_constraint=True)
# pg_sub_query.append("user_id IS NOT NULL") # pg_sub_query.append("user_id IS NOT NULL")
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, # pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
# FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, # FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
# COUNT(DISTINCT connexions_list.user_id) AS users_count, # COUNT(DISTINCT connexions_list.user_id) AS users_count,
@ -628,7 +628,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# event_column = JOURNEY_TYPES[event_type]["column"] # event_column = JOURNEY_TYPES[event_type]["column"]
# pg_sub_query.append(f"feature.{event_column} = %(value)s") # pg_sub_query.append(f"feature.{event_column} = %(value)s")
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# if default: # if default:
# # get most used value # # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count # pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -718,7 +718,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# #
# pg_sub_query.append(f"feature.{event_column} = %(value)s") # pg_sub_query.append(f"feature.{event_column} = %(value)s")
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# if default: # if default:
# # get most used value # # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count # pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -800,7 +800,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query.append(f"sessions.user_id = %(user_id)s") # pg_sub_query.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count # pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
# FROM sessions # FROM sessions
# WHERE {" AND ".join(pg_sub_query)} # WHERE {" AND ".join(pg_sub_query)}
@ -866,7 +866,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"] # event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"] # event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count # pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
# FROM sessions # FROM sessions
# WHERE {" AND ".join(pg_sub_query)} # WHERE {" AND ".join(pg_sub_query)}
@ -935,7 +935,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"] # event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"] # event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") # pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
# pg_sub_query.append("feature.timestamp < %(endTimestamp)s") # pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
# if default: # if default:
@ -995,7 +995,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"] # event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"] # event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") # pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s")
# pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") # pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s")
# pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") # pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
@ -1056,7 +1056,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query.append(f"sessions.user_id = %(user_id)s") # pg_sub_query.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# pg_sub_query.append(f"length({event_column})>2") # pg_sub_query.append(f"length({event_column})>2")
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg # pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg
# FROM {event_table} AS feature INNER JOIN sessions USING (session_id) # FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
# WHERE {" AND ".join(pg_sub_query)} # WHERE {" AND ".join(pg_sub_query)}
@ -1090,7 +1090,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") # pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"] # extra_values["user_id"] = f["value"]
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart # pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart
# FROM (SELECT generated_timestamp AS timestamp, # FROM (SELECT generated_timestamp AS timestamp,
# COALESCE(COUNT(users), 0) AS count # COALESCE(COUNT(users), 0) AS count
@ -1120,7 +1120,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) # pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
# pg_sub_query.append("user_id IS NOT NULL") # pg_sub_query.append("user_id IS NOT NULL")
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition # pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition
# FROM (SELECT number_of_days, COUNT(user_id) AS count # FROM (SELECT number_of_days, COUNT(user_id) AS count
# FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days # FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days
@ -1163,7 +1163,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# event_column = JOURNEY_TYPES[event_type]["column"] # event_column = JOURNEY_TYPES[event_type]["column"]
# pg_sub_query.append(f"feature.{event_column} = %(value)s") # pg_sub_query.append(f"feature.{event_column} = %(value)s")
# #
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# if default: # if default:
# # get most used value # # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count # pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -1218,7 +1218,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# "value": helper.string_to_sql_like(text.lower()), # "value": helper.string_to_sql_like(text.lower()),
# "platform_0": platform} # "platform_0": platform}
# if feature_type == "ALL": # if feature_type == "ALL":
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# sub_queries = [] # sub_queries = []
# for e in JOURNEY_TYPES: # for e in JOURNEY_TYPES:
# sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" # sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type"
@ -1230,7 +1230,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# cur.execute(cur.mogrify(pg_query, params)) # cur.execute(cur.mogrify(pg_query, params))
# rows = cur.fetchall() # rows = cur.fetchall()
# elif JOURNEY_TYPES.get(feature_type) is not None: # elif JOURNEY_TYPES.get(feature_type) is not None:
# with pg_client.PostgresClient() as cur: # with pg_client.cursor() as cur:
# pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" # pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type"
# FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) # FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id)
# WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s # WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s

View file

@ -10,8 +10,8 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool: async def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1 query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects FROM public.projects
WHERE deleted_at IS NULL WHERE deleted_at IS NULL
@ -19,42 +19,42 @@ def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
{"AND project_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""", {"AND project_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id}) {"name": name, "exclude_id": exclude_id})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
return row["exists"] return row["exists"]
async def __update(tenant_id, project_id, changes):
    """Apply a dict of column changes to a non-deleted project row.

    Keys are camelCase and converted to snake_case column names; returns the
    updated (project_id, name, gdpr) record camelCased, or None when there is
    nothing to change.
    """
    if not changes:
        return None
    assignments = [f"{helper.key_to_snake_case(key)} = %({key})s" for key in changes]
    async with pg_client.cursor() as cur:
        query = cur.mogrify(f"""UPDATE public.projects
                                SET {" ,".join(assignments)}
                                WHERE project_id = %(project_id)s
                                AND deleted_at ISNULL
                                RETURNING project_id,name,gdpr;""",
                            {"project_id": project_id, **changes})
        await cur.execute(query=query)
        return helper.dict_to_camel_case(await cur.fetchone())
async def __create(tenant_id, data):
    """Insert a new active project row and return the full project record
    (including GDPR settings) via `get_project`.
    """
    async with pg_client.cursor() as cur:
        insert_query = cur.mogrify(f"""INSERT INTO public.projects (name, platform, active)
                                       VALUES (%(name)s,%(platform)s,TRUE)
                                       RETURNING project_id;""",
                                   data)
        await cur.execute(query=insert_query)
        row = await cur.fetchone()
        return await get_project(tenant_id=tenant_id, project_id=row["project_id"], include_gdpr=True)
def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False): async def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
extra_projection = "" extra_projection = ""
if gdpr: if gdpr:
extra_projection += ',s.gdpr' extra_projection += ',s.gdpr'
@ -77,8 +77,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
WHERE s.deleted_at IS NULL WHERE s.deleted_at IS NULL
ORDER BY s.name {") AS raw" if recorded else ""};""", ORDER BY s.name {") AS raw" if recorded else ""};""",
{"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4}) {"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4})
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
# if recorded is requested, check if it was saved or computed # if recorded is requested, check if it was saved or computed
if recorded: if recorded:
u_values = [] u_values = []
@ -100,7 +100,7 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded) FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded)
WHERE projects.project_id=u.project_id;""", params) WHERE projects.project_id=u.project_id;""", params)
cur.execute(query) await cur.execute(query)
else: else:
for r in rows: for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
@ -109,8 +109,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None): async def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
extra_select = "" extra_select = ""
if include_last_session: if include_last_session:
extra_select += """,(SELECT max(ss.start_ts) extra_select += """,(SELECT max(ss.start_ts)
@ -129,69 +129,70 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
AND s.deleted_at IS NULL AND s.deleted_at IS NULL
LIMIT 1;""", LIMIT 1;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False): async def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if __exists_by_name(name=data.name, exclude_id=None): if await __exists_by_name(name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not skip_authorization: if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id) admin = await users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]: if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
return {"data": __create(tenant_id=tenant_id, data=data.model_dump())} return {"data": await __create(tenant_id=tenant_id, data=data.model_dump())}
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): async def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
if __exists_by_name(name=data.name, exclude_id=project_id): if __exists_by_name(name=data.name, exclude_id=project_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
admin = users.get(user_id=user_id, tenant_id=tenant_id) admin = await users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]: if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
return {"data": __update(tenant_id=tenant_id, project_id=project_id, return {"data": __update(tenant_id=tenant_id, project_id=project_id,
changes=data.model_dump())} changes=data.model_dump())}
def delete(tenant_id, user_id, project_id): async def delete(tenant_id, user_id, project_id):
admin = users.get(user_id=user_id, tenant_id=tenant_id) admin = await users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]: if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects query = cur.mogrify("""UPDATE public.projects
SET deleted_at = timezone('utc'::text, now()), SET deleted_at = timezone('utc'::text, now()),
active = FALSE active = FALSE
WHERE project_id = %(project_id)s;""", WHERE project_id = %(project_id)s;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
return {"data": {"state": "success"}} return {"data": {"state": "success"}}
def get_gdpr(project_id): async def get_gdpr(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT gdpr query = cur.mogrify("""SELECT gdpr
FROM public.projects AS s FROM public.projects AS s
WHERE s.project_id =%(project_id)s WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""", AND s.deleted_at IS NULL;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone()["gdpr"] row = await cur.fetchone()
row = row["gdpr"]
row["projectId"] = project_id row["projectId"] = project_id
return row return row
def edit_gdpr(project_id, gdpr: schemas.GdprSchema): async def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects query = cur.mogrify("""UPDATE public.projects
SET gdpr = gdpr|| %(gdpr)s::jsonb SET gdpr = gdpr|| %(gdpr)s::jsonb
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
AND deleted_at ISNULL AND deleted_at ISNULL
RETURNING gdpr;""", RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump())}) {"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump())})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
if not row: if not row:
return {"errors": ["something went wrong"]} return {"errors": ["something went wrong"]}
row = row["gdpr"] row = row["gdpr"]
@ -199,8 +200,8 @@ def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
return row return row
def get_by_project_key(project_key): async def get_by_project_key(project_key):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT project_id, query = cur.mogrify("""SELECT project_id,
project_key, project_key,
platform, platform,
@ -209,51 +210,51 @@ def get_by_project_key(project_key):
WHERE project_key =%(project_key)s WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_key": project_key}) {"project_key": project_key})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
def get_project_key(project_id): async def get_project_key(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT project_key query = cur.mogrify("""SELECT project_key
FROM public.projects FROM public.projects
WHERE project_id =%(project_id)s WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
project = cur.fetchone() project = await cur.fetchone()
return project["project_key"] if project is not None else None return project["project_key"] if project is not None else None
def get_capture_status(project_id): async def get_capture_status(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects FROM public.projects
WHERE project_id =%(project_id)s WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def update_capture_status(project_id, changes: schemas.SampleRateSchema): async def update_capture_status(project_id, changes: schemas.SampleRateSchema):
sample_rate = changes.rate sample_rate = changes.rate
if changes.capture_all: if changes.capture_all:
sample_rate = 100 sample_rate = 100
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects query = cur.mogrify("""UPDATE public.projects
SET sample_rate= %(sample_rate)s SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""", AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate}) {"project_id": project_id, "sample_rate": sample_rate})
cur.execute(query=query) await cur.execute(query=query)
return changes return changes
def get_conditions(project_id): async def get_conditions(project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT p.sample_rate AS rate, p.conditional_capture, query = cur.mogrify("""SELECT p.sample_rate AS rate, p.conditional_capture,
COALESCE( COALESCE(
array_agg( array_agg(
@ -275,15 +276,15 @@ def get_conditions(project_id):
AND p.deleted_at IS NULL AND p.deleted_at IS NULL
GROUP BY p.sample_rate, p.conditional_capture;""", GROUP BY p.sample_rate, p.conditional_capture;""",
{"project_id": project_id}) {"project_id": project_id})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
row = helper.dict_to_camel_case(row) row = helper.dict_to_camel_case(row)
row["conditions"] = [schemas.ProjectConditions(**c) for c in row["conditions"]] row["conditions"] = [schemas.ProjectConditions(**c) for c in row["conditions"]]
return row return row
def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str]: async def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str]:
errors = [] errors = []
names = [condition.name for condition in conditions] names = [condition.name for condition in conditions]
@ -300,7 +301,7 @@ def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str
return errors return errors
def update_conditions(project_id, changes: schemas.ProjectSettings): async def update_conditions(project_id, changes: schemas.ProjectSettings):
validation_errors = validate_conditions(changes.conditions) validation_errors = validate_conditions(changes.conditions)
if validation_errors: if validation_errors:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=validation_errors) raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=validation_errors)
@ -309,7 +310,7 @@ def update_conditions(project_id, changes: schemas.ProjectSettings):
for condition in changes.conditions: for condition in changes.conditions:
conditions.append(condition.model_dump()) conditions.append(condition.model_dump())
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects query = cur.mogrify("""UPDATE public.projects
SET SET
sample_rate= %(sample_rate)s, sample_rate= %(sample_rate)s,
@ -321,12 +322,12 @@ def update_conditions(project_id, changes: schemas.ProjectSettings):
"sample_rate": changes.rate, "sample_rate": changes.rate,
"conditional_capture": changes.conditional_capture "conditional_capture": changes.conditional_capture
}) })
cur.execute(query=query) await cur.execute(query=query)
return update_project_conditions(project_id, changes.conditions) return update_project_conditions(project_id, changes.conditions)
def create_project_conditions(project_id, conditions): async def create_project_conditions(project_id, conditions):
rows = [] rows = []
# insert all conditions rows with single sql query # insert all conditions rows with single sql query
@ -345,18 +346,18 @@ def create_project_conditions(project_id, conditions):
RETURNING condition_id, {", ".join(columns)} RETURNING condition_id, {", ".join(columns)}
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
params = [ params = [
(project_id, c.name, c.capture_rate, json.dumps([filter_.model_dump() for filter_ in c.filters])) (project_id, c.name, c.capture_rate, json.dumps([filter_.model_dump() for filter_ in c.filters]))
for c in conditions] for c in conditions]
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return rows return rows
def update_project_condition(project_id, conditions): async def update_project_condition(project_id, conditions):
values = [] values = []
params = { params = {
"project_id": project_id, "project_id": project_id,
@ -375,28 +376,28 @@ def update_project_condition(project_id, conditions):
WHERE c.condition_id = projects_conditions.condition_id AND project_id = %(project_id)s; WHERE c.condition_id = projects_conditions.condition_id AND project_id = %(project_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params) query = cur.mogrify(sql, params)
cur.execute(query) await cur.execute(query)
def delete_project_condition(project_id, ids): async def delete_project_condition(project_id, ids):
sql = """ sql = """
DELETE FROM projects_conditions DELETE FROM projects_conditions
WHERE condition_id IN %(ids)s WHERE condition_id IN %(ids)s
AND project_id= %(project_id)s; AND project_id= %(project_id)s;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"project_id": project_id, "ids": tuple(ids)}) query = cur.mogrify(sql, {"project_id": project_id, "ids": tuple(ids)})
cur.execute(query) await cur.execute(query)
def update_project_conditions(project_id, conditions): async def update_project_conditions(project_id, conditions):
if conditions is None: if conditions is None:
return return
existing = get_conditions(project_id)["conditions"] existing = await get_conditions(project_id)["conditions"]
existing_ids = {c.condition_id for c in existing} existing_ids = {c.condition_id for c in existing}
to_be_updated = [c for c in conditions if c.condition_id in existing_ids] to_be_updated = [c for c in conditions if c.condition_id in existing_ids]
@ -404,24 +405,23 @@ def update_project_conditions(project_id, conditions):
to_be_deleted = existing_ids - {c.condition_id for c in conditions} to_be_deleted = existing_ids - {c.condition_id for c in conditions}
if to_be_deleted: if to_be_deleted:
delete_project_condition(project_id, to_be_deleted) await delete_project_condition(project_id, to_be_deleted)
if to_be_created: if to_be_created:
create_project_conditions(project_id, to_be_created) await create_project_conditions(project_id, to_be_created)
if to_be_updated: if to_be_updated:
print(to_be_updated) await update_project_condition(project_id, to_be_updated)
update_project_condition(project_id, to_be_updated)
return get_conditions(project_id) return await get_conditions(project_id)
def get_projects_ids(tenant_id): async def get_projects_ids(tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = f"""SELECT s.project_id query = f"""SELECT s.project_id
FROM public.projects AS s FROM public.projects AS s
WHERE s.deleted_at IS NULL WHERE s.deleted_at IS NULL
ORDER BY s.project_id;""" ORDER BY s.project_id;"""
cur.execute(query=query) await cur.execute(query=query)
rows = cur.fetchall() rows = await cur.fetchall()
return [r["project_id"] for r in rows] return [r["project_id"] for r in rows]

View file

@ -3,17 +3,17 @@ from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper, smtp from chalicelib.utils import email_helper, captcha, helper, smtp
def reset(data: schemas.ForgetPasswordPayloadSchema): async def reset(data: schemas.ForgetPasswordPayloadSchema):
print(f"====================== reset password {data.email}") print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.") print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]} return {"errors": ["Invalid captcha."]}
if not smtp.has_smtp(): if not smtp.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_users = users.get_by_email_only(data.email) a_users = await users.get_by_email_only(data.email)
if a_users: if a_users:
invitation_link = users.generate_new_invitation(user_id=a_users["userId"]) invitation_link = await users.generate_new_invitation(user_id=a_users["userId"])
email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) await email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link)
else: else:
print(f"!!!invalid email address [{data.email}]") print(f"!!!invalid email address [{data.email}]")
return {"data": {"state": "A reset link will be sent if this email exists in our system."}} return {"data": {"state": "A reset link will be sent if this email exists in our system."}}

View file

@ -2,8 +2,8 @@ from chalicelib.utils import helper, pg_client
from decouple import config from decouple import config
def get_by_session_id(session_id, project_id, start_ts, duration): async def get_by_session_id(session_id, project_id, start_ts, duration):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0: if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
duration = 0 duration = 0
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000 delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
@ -27,6 +27,6 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
AND resources.timestamp<=%(res_end_ts)s;""" AND resources.timestamp<=%(res_end_ts)s;"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration, params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, } "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
cur.execute(cur.mogrify(ch_query, params)) await cur.execute(cur.mogrify(ch_query, params))
rows = cur.fetchall() rows = await cur.fetchall()
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)

View file

@ -5,26 +5,26 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def create(project_id, user_id, data: schemas.SavedSearchSchema): async def create(project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
data = data.model_dump() data = data.model_dump()
data["filter"] = json.dumps(data["filter"]) data["filter"] = json.dumps(data["filter"])
query = cur.mogrify("""\ query = cur.mogrify("""\
INSERT INTO public.searches (project_id, user_id, name, filter,is_public) INSERT INTO public.searches (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s) VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""", {"user_id": user_id, "project_id": project_id, **data}) RETURNING *;""", {"user_id": user_id, "project_id": project_id, **data})
cur.execute( await cur.execute(
query query
) )
r = cur.fetchone() r = await cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"]) r["filter"] = helper.old_search_payload_to_flat(r["filter"])
r = helper.dict_to_camel_case(r) r = helper.dict_to_camel_case(r)
return {"data": r} return {"data": r}
def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema): async def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
data = data.model_dump() data = data.model_dump()
data["filter"] = json.dumps(data["filter"]) data["filter"] = json.dumps(data["filter"])
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
@ -36,19 +36,19 @@ def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
AND project_id= %(project_id)s AND project_id= %(project_id)s
AND (user_id = %(user_id)s OR is_public) AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"search_id": search_id, "project_id": project_id, "user_id": user_id, **data}) RETURNING *;""", {"search_id": search_id, "project_id": project_id, "user_id": user_id, **data})
cur.execute( await cur.execute(
query query
) )
r = cur.fetchone() r = await cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"]) r["filter"] = helper.old_search_payload_to_flat(r["filter"])
r = helper.dict_to_camel_case(r) r = helper.dict_to_camel_case(r)
return r return r
def get_all(project_id, user_id, details=False): async def get_all(project_id, user_id, details=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""\ f"""\
SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
@ -61,7 +61,7 @@ def get_all(project_id, user_id, details=False):
) )
) )
rows = cur.fetchall() rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
for row in rows: for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
@ -72,9 +72,9 @@ def get_all(project_id, user_id, details=False):
return rows return rows
def delete(project_id, search_id, user_id): async def delete(project_id, search_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
UPDATE public.searches UPDATE public.searches
SET deleted_at = timezone('utc'::text, now()) SET deleted_at = timezone('utc'::text, now())
@ -87,9 +87,9 @@ def delete(project_id, search_id, user_id):
return {"state": "success"} return {"state": "success"}
def get(search_id, project_id, user_id): async def get(search_id, project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT """SELECT
* *
@ -102,7 +102,7 @@ def get(search_id, project_id, user_id):
) )
) )
f = helper.dict_to_camel_case(cur.fetchone()) f = helper.dict_to_camel_case(await cur.fetchone())
if f is None: if f is None:
return None return None

View file

@ -39,13 +39,13 @@ COALESCE((SELECT TRUE
# This function executes the query and return result # This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, async def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
platform="web"): platform="web"):
if data.bookmarked: if data.bookmarked:
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id) data.startTimestamp, data.endTimestamp = await sessions_favorite.get_start_end_timestamp(project_id, user_id)
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, full_args, query_part = await search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id, favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id, platform=platform) user_id=user_id, platform=platform)
if data.limit is not None and data.page is not None: if data.limit is not None and data.page is not None:
@ -58,7 +58,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args["sessions_limit_e"] = 200 full_args["sessions_limit_e"] = 200
meta_keys = [] meta_keys = []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if errors_only: if errors_only:
main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id,
COALESCE((SELECT TRUE COALESCE((SELECT TRUE
@ -83,7 +83,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
else: else:
sort = 'start_ts' sort = 'start_ts'
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(users_sessions) COALESCE(JSONB_AGG(users_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
@ -117,7 +117,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
sort = helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort)
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions) COALESCE(JSONB_AGG(full_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
@ -132,8 +132,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.debug(main_query) logging.debug(main_query)
logging.debug("--------------------") logging.debug("--------------------")
try: try:
cur.execute(main_query) await cur.execute(main_query)
sessions = cur.fetchone() sessions = await cur.fetchone()
except Exception as err: except Exception as err:
logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
logging.warning(main_query.decode('UTF-8')) logging.warning(main_query.decode('UTF-8'))
@ -142,7 +142,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.warning("--------------------") logging.warning("--------------------")
raise err raise err
if errors_only or ids_only: if errors_only or ids_only:
return helper.list_to_camel_case(cur.fetchall()) return helper.list_to_camel_case(await cur.fetchall())
if count_only: if count_only:
return helper.dict_to_camel_case(sessions) return helper.dict_to_camel_case(sessions)
@ -170,7 +170,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
# TODO: remove "table of" search from this function # TODO: remove "table of" search from this function
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, async def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.MetricOfTable, metric_value: List): metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
@ -181,12 +181,12 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is)) operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, full_args, query_part = await search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id, favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event) user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size full_args["step_size"] = step_size
sessions = [] sessions = []
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if metric_type == schemas.MetricType.timeseries: if metric_type == schemas.MetricType.timeseries:
if view_type == schemas.MetricTimeseriesViewType.line_chart: if view_type == schemas.MetricTimeseriesViewType.line_chart:
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
@ -208,7 +208,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.debug(main_query) logging.debug(main_query)
logging.debug("--------------------") logging.debug("--------------------")
try: try:
cur.execute(main_query) await cur.execute(main_query)
except Exception as err: except Exception as err:
logging.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------") logging.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
logging.warning(main_query.decode('UTF-8')) logging.warning(main_query.decode('UTF-8'))
@ -217,9 +217,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.warning("--------------------") logging.warning("--------------------")
raise err raise err
if view_type == schemas.MetricTimeseriesViewType.line_chart: if view_type == schemas.MetricTimeseriesViewType.line_chart:
sessions = cur.fetchall() sessions = await cur.fetchall()
else: else:
sessions = cur.fetchone()["count"] sessions = await cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table: elif metric_type == schemas.MetricType.table:
if isinstance(metric_of, schemas.MetricOfTable): if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id" main_col = "user_id"
@ -269,15 +269,15 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.debug("--------------------") logging.debug("--------------------")
logging.debug(main_query) logging.debug(main_query)
logging.debug("--------------------") logging.debug("--------------------")
cur.execute(main_query) await cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone()) sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]: for s in sessions["values"]:
s.pop("rn") s.pop("rn")
return sessions return sessions
def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, async def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_of: schemas.MetricOfTable, metric_value: List): metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True)) density=density, factor=1, decimal=True))
@ -287,11 +287,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is)) operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, full_args, query_part = await search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id, favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event) user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size full_args["step_size"] = step_size
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if isinstance(metric_of, schemas.MetricOfTable): if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id" main_col = "user_id"
extra_col = "" extra_col = ""
@ -340,20 +340,20 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
logging.debug("--------------------") logging.debug("--------------------")
logging.debug(main_query) logging.debug(main_query)
logging.debug("--------------------") logging.debug("--------------------")
cur.execute(main_query) await cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone()) sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]: for s in sessions["values"]:
s.pop("rn") s.pop("rn")
return sessions return sessions
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int): async def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, full_args, query_part = await search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id, favorite_only=False, issue=None, project_id=project_id,
user_id=None) user_id=None)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
full_args["issues_limit"] = data.limit full_args["issues_limit"] = data.limit
full_args["issues_limit_s"] = (data.page - 1) * data.limit full_args["issues_limit_s"] = (data.page - 1) * data.limit
full_args["issues_limit_e"] = data.page * data.limit full_args["issues_limit_e"] = data.page * data.limit
@ -377,8 +377,8 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema,
logging.debug("--------------------") logging.debug("--------------------")
logging.debug(main_query) logging.debug(main_query)
logging.debug("--------------------") logging.debug("--------------------")
cur.execute(main_query) await cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone()) sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]: for s in sessions["values"]:
s.pop("rn") s.pop("rn")
@ -399,7 +399,7 @@ def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
# this function generates the query and return the generated-query with the dict of query arguments # this function generates the query and return the generated-query with the dict of query arguments
def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, async def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, platform="web", extra_event=None): project_id, user_id, platform="web", extra_event=None):
ss_constraints = [] ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp, full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
@ -556,7 +556,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
elif filter_type == events.EventType.METADATA.ui_type: elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it # get metadata list only if you need it
if meta_keys is None: if meta_keys is None:
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys} meta_keys = {m["key"]: m["index"] for m in meta_keys}
if f.source in meta_keys.keys(): if f.source in meta_keys.keys():
if is_any: if is_any:
@ -1095,18 +1095,18 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
return full_args, query_part return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): async def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
if project_id is None: if project_id is None:
all_projects = projects.get_projects(tenant_id=tenant_id) all_projects = await projects.get_projects(tenant_id=tenant_id)
else: else:
all_projects = [ all_projects = [
projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, await projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,
include_gdpr=False)] include_gdpr=False)]
all_projects = {int(p["projectId"]): p["name"] for p in all_projects} all_projects = {int(p["projectId"]): p["name"] for p in all_projects}
project_ids = list(all_projects.keys()) project_ids = list(all_projects.keys())
available_keys = metadata.get_keys_by_projects(project_ids) available_keys = await metadata.get_keys_by_projects(project_ids)
for i in available_keys: for i in available_keys:
available_keys[i]["user_id"] = schemas.FilterType.user_id available_keys[i]["user_id"] = schemas.FilterType.user_id
available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id
@ -1117,7 +1117,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
results[i] = {"total": 0, "sessions": [], "missingMetadata": True} results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
project_ids = list(available_keys.keys()) project_ids = list(available_keys.keys())
if len(project_ids) > 0: if len(project_ids) > 0:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_queries = [] sub_queries = []
for i in project_ids: for i in project_ids:
col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
@ -1125,9 +1125,9 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
{"id": i, "value": m_value}).decode('UTF-8')) {"id": i, "value": m_value}).decode('UTF-8'))
query = f"""SELECT {", ".join(sub_queries)};""" query = f"""SELECT {", ".join(sub_queries)};"""
cur.execute(query=query) await cur.execute(query=query)
rows = cur.fetchone() rows = await cur.fetchone()
sub_queries = [] sub_queries = []
for i in rows.keys(): for i in rows.keys():
@ -1151,15 +1151,15 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
)""", )""",
{"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
if len(sub_queries) > 0: if len(sub_queries) > 0:
cur.execute("\nUNION\n".join(sub_queries)) await cur.execute("\nUNION\n".join(sub_queries))
rows = cur.fetchall() rows = await cur.fetchall()
for i in rows: for i in rows:
results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
return results return results
def get_user_sessions(project_id, user_id, start_date, end_date): async def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
if start_date is not None: if start_date is not None:
constraints.append("s.start_ts >= %(startDate)s") constraints.append("s.start_ts >= %(startDate)s")
@ -1170,7 +1170,7 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
FROM public.sessions AS s FROM public.sessions AS s
WHERE {" AND ".join(constraints)}""" WHERE {" AND ".join(constraints)}"""
cur.execute(cur.mogrify(f"""\ await cur.execute(cur.mogrify(f"""\
SELECT s.project_id, SELECT s.project_id,
s.session_id::text AS session_id, s.session_id::text AS session_id,
s.user_uuid, s.user_uuid,
@ -1193,12 +1193,12 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
"endDate": end_date "endDate": end_date
})) }))
sessions = cur.fetchall() sessions = await cur.fetchall()
return helper.list_to_camel_case(sessions) return helper.list_to_camel_case(sessions)
def get_session_user(project_id, user_id): async def get_session_user(project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""\ """\
SELECT SELECT
@ -1216,32 +1216,32 @@ def get_session_user(project_id, user_id):
""", """,
{"project_id": project_id, "userId": user_id} {"project_id": project_id, "userId": user_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchone() data = await cur.fetchone()
return helper.dict_to_camel_case(data) return helper.dict_to_camel_case(data)
def count_all(): async def count_all():
with pg_client.PostgresClient(unlimited_query=True) as cur: async with pg_client.cursor(unlimited_query=True) as cur:
cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") await cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
row = cur.fetchone() row = await cur.fetchone()
return row.get("count", 0) if row else 0 return row.get("count", 0) if row else 0
def session_exists(project_id, session_id): async def session_exists(project_id, session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT 1 query = cur.mogrify("""SELECT 1
FROM public.sessions FROM public.sessions
WHERE session_id=%(session_id)s WHERE session_id=%(session_id)s
AND project_id=%(project_id)s AND project_id=%(project_id)s
LIMIT 1;""", LIMIT 1;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return row is not None return row is not None
def check_recording_status(project_id: int) -> dict: async def check_recording_status(project_id: int) -> dict:
query = f""" query = f"""
WITH project_sessions AS (SELECT COUNT(1) AS full_count, WITH project_sessions AS (SELECT COUNT(1) AS full_count,
COUNT(1) FILTER ( WHERE duration IS NOT NULL) AS nn_duration_count COUNT(1) FILTER ( WHERE duration IS NOT NULL) AS nn_duration_count
@ -1258,10 +1258,10 @@ def check_recording_status(project_id: int) -> dict:
FROM project_sessions; FROM project_sessions;
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, {"project_id": project_id}) query = cur.mogrify(query, {"project_id": project_id})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return { return {
"recordingStatus": row["recording_status"], "recordingStatus": row["recording_status"],
@ -1269,12 +1269,12 @@ def check_recording_status(project_id: int) -> dict:
} }
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id', async def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
ascending: bool = False) -> dict: ascending: bool = False) -> dict:
if session_ids is None or len(session_ids) == 0: if session_ids is None or len(session_ids) == 0:
return {"total": 0, "sessions": []} return {"total": 0, "sessions": []}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
params = {"project_id": project_id, "session_ids": tuple(session_ids)} params = {"project_id": project_id, "session_ids": tuple(session_ids)}
order_direction = 'ASC' if ascending else 'DESC' order_direction = 'ASC' if ascending else 'DESC'
main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_BASE_COLS} main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_BASE_COLS}
@ -1284,8 +1284,8 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
AND session_id IN %(session_ids)s AND session_id IN %(session_ids)s
ORDER BY {sort_by} {order_direction};""", params) ORDER BY {sort_by} {order_direction};""", params)
cur.execute(main_query) await cur.execute(main_query)
rows = cur.fetchall() rows = await cur.fetchall()
if len(meta_keys) > 0: if len(meta_keys) > 0:
for s in rows: for s in rows:
s["metadata"] = {} s["metadata"] = {}

View file

@ -6,8 +6,8 @@ from chalicelib.core import integrations_manager, integration_base_issue
import json import json
def __get_saved_data(project_id, session_id, issue_id, tool): async def __get_saved_data(project_id, session_id, issue_id, tool):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
SELECT * SELECT *
FROM public.assigned_sessions FROM public.assigned_sessions
@ -17,33 +17,33 @@ def __get_saved_data(project_id, session_id, issue_id, tool):
AND provider = %(provider)s;\ AND provider = %(provider)s;\
""", """,
{"session_id": session_id, "issue_id": issue_id, "provider": tool.lower()}) {"session_id": session_id, "issue_id": issue_id, "provider": tool.lower()})
cur.execute( await cur.execute(
query query
) )
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type, async def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type,
integration_project_id): integration_project_id):
error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id) error, integration = await integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id)
if error is not None: if error is not None:
return error return error
i = integration.get() i = await integration.get()
if i is None: if i is None:
return {"errors": [f"integration not found"]} return {"errors": [f"integration not found"]}
link = config("SITE_URL") + f"/{project_id}/session/{session_id}" link = config("SITE_URL") + f"/{project_id}/session/{session_id}"
description += f"\n> {link}" description += f"\n> {link}"
try: try:
issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description, issue = await integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description,
issue_type=issue_type, issue_type=issue_type,
integration_project_id=integration_project_id) integration_project_id=integration_project_id)
except integration_base_issue.RequestException as e: except integration_base_issue.RequestException as e:
return integration_base_issue.proxy_issues_handler(e) return integration_base_issue.proxy_issues_handler(e)
if issue is None or "id" not in issue: if issue is None or "id" not in issue:
return {"errors": ["something went wrong while creating the issue"]} return {"errors": ["something went wrong while creating the issue"]}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""\ query = cur.mogrify("""\
INSERT INTO public.assigned_sessions(session_id, issue_id, created_by, provider,provider_data) INSERT INTO public.assigned_sessions(session_id, issue_id, created_by, provider,provider_data)
VALUES (%(session_id)s, %(issue_id)s, %(creator_id)s, %(provider)s,%(provider_data)s);\ VALUES (%(session_id)s, %(issue_id)s, %(creator_id)s, %(provider)s,%(provider_data)s);\
@ -51,15 +51,15 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne
{"session_id": session_id, "creator_id": creator_id, {"session_id": session_id, "creator_id": creator_id,
"issue_id": issue["id"], "provider": integration.provider.lower(), "issue_id": issue["id"], "provider": integration.provider.lower(),
"provider_data": json.dumps({"integrationProjectId": integration_project_id})}) "provider_data": json.dumps({"integrationProjectId": integration_project_id})})
cur.execute( await cur.execute(
query query
) )
issue["provider"] = integration.provider.lower() issue["provider"] = integration.provider.lower()
return issue return issue
def get_all(project_id, user_id): async def get_all(project_id, user_id):
available_integrations = integrations_manager.get_available_integrations(user_id=user_id) available_integrations = await integrations_manager.get_available_integrations(user_id=user_id)
no_integration = not any(available_integrations.values()) no_integration = not any(available_integrations.values())
if no_integration: if no_integration:
return [] return []
@ -67,7 +67,7 @@ def get_all(project_id, user_id):
extra_query = ["sessions.project_id = %(project_id)s"] extra_query = ["sessions.project_id = %(project_id)s"]
if not all_integrations: if not all_integrations:
extra_query.append("provider IN %(providers)s") extra_query.append("provider IN %(providers)s")
with pg_client.PostgresClient() as cur: with pg_client.cursor() as cur:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
SELECT assigned_sessions.* SELECT assigned_sessions.*
FROM public.assigned_sessions FROM public.assigned_sessions
@ -76,31 +76,31 @@ def get_all(project_id, user_id):
""", """,
{"project_id": project_id, {"project_id": project_id,
"providers": tuple(d for d in available_integrations if available_integrations[d])}) "providers": tuple(d for d in available_integrations if available_integrations[d])})
cur.execute( await cur.execute(
query query
) )
assignments = helper.list_to_camel_case(cur.fetchall()) assignments = helper.list_to_camel_case(await cur.fetchall())
for a in assignments: for a in assignments:
a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"]) a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
return assignments return assignments
def get_by_session(tenant_id, user_id, project_id, session_id): async def get_by_session(tenant_id, user_id, project_id, session_id):
available_integrations = integrations_manager.get_available_integrations(user_id=user_id) available_integrations = await integrations_manager.get_available_integrations(user_id=user_id)
if not any(available_integrations.values()): if not any(available_integrations.values()):
return [] return []
extra_query = ["session_id = %(session_id)s", "provider IN %(providers)s"] extra_query = ["session_id = %(session_id)s", "provider IN %(providers)s"]
with pg_client.PostgresClient() as cur: with pg_client.cursor() as cur:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
SELECT * SELECT *
FROM public.assigned_sessions FROM public.assigned_sessions
WHERE {" AND ".join(extra_query)};""", WHERE {" AND ".join(extra_query)};""",
{"session_id": session_id, {"session_id": session_id,
"providers": tuple([k for k in available_integrations if available_integrations[k]])}) "providers": tuple([k for k in available_integrations if available_integrations[k]])})
cur.execute( await cur.execute(
query query
) )
results = cur.fetchall() results = await cur.fetchall()
issues = {} issues = {}
for i in results: for i in results:
if i["provider"] not in issues.keys(): if i["provider"] not in issues.keys():
@ -110,49 +110,49 @@ def get_by_session(tenant_id, user_id, project_id, session_id):
"id": i["issue_id"]}) "id": i["issue_id"]})
results = [] results = []
for tool in issues.keys(): for tool in issues.keys():
error, integration = integrations_manager.get_integration(tool=tool, tenant_id=tenant_id, user_id=user_id) error, integration = await integrations_manager.get_integration(tool=tool, tenant_id=tenant_id, user_id=user_id)
if error is not None: if error is not None:
return error return error
i = integration.get() i = await integration.get()
if i is None: if i is None:
print("integration not found") print("integration not found")
continue continue
r = integration.issue_handler.get_by_ids(saved_issues=issues[tool]) r = await integration.issue_handler.get_by_ids(saved_issues=issues[tool])
for i in r["issues"]: for i in r["issues"]:
i["provider"] = tool i["provider"] = tool
results += r["issues"] results += r["issues"]
return results return results
def get(tenant_id, user_id, project_id, session_id, assignment_id): async def get(tenant_id, user_id, project_id, session_id, assignment_id):
error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) error, integration = await integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id)
if error is not None: if error is not None:
return error return error
l = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) l = await __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider)
if l is None: if l is None:
return {"errors": ["issue not found"]} return {"errors": ["issue not found"]}
i = integration.get() i = await integration.get()
if i is None: if i is None:
return {"errors": ["integration not found"]} return {"errors": ["integration not found"]}
r = integration.issue_handler.get(integration_project_id=l["providerData"]["integrationProjectId"], r = await integration.issue_handler.get(integration_project_id=l["providerData"]["integrationProjectId"],
assignment_id=assignment_id) assignment_id=assignment_id)
r["provider"] = integration.provider.lower() r["provider"] = integration.provider.lower()
return r return r
def comment(tenant_id, user_id, project_id, session_id, assignment_id, message): async def comment(tenant_id, user_id, project_id, session_id, assignment_id, message):
error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id) error, integration = await integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id)
if error is not None: if error is not None:
return error return error
i = integration.get() i = await integration.get()
if i is None: if i is None:
return {"errors": [f"integration not found"]} return {"errors": [f"integration not found"]}
l = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider) l = await __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider)
return integration.issue_handler.comment(integration_project_id=l["providerData"]["integrationProjectId"], return await integration.issue_handler.comment(integration_project_id=l["providerData"]["integrationProjectId"],
assignment_id=assignment_id, assignment_id=assignment_id,
comment=message) comment=message)

View file

@ -13,12 +13,12 @@ def __get_devtools_keys(project_id, session_id):
] ]
def get_urls(session_id, project_id, check_existence: bool = True): async def get_urls(session_id, project_id, check_existence: bool = True):
results = [] results = []
for k in __get_devtools_keys(project_id=project_id, session_id=session_id): for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k):
continue continue
results.append(StorageClient.get_presigned_url_for_sharing( results.append(await StorageClient.get_presigned_url_for_sharing(
bucket=config("sessions_bucket"), bucket=config("sessions_bucket"),
expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
key=k key=k
@ -26,7 +26,7 @@ def get_urls(session_id, project_id, check_existence: bool = True):
return results return results
def delete_mobs(project_id, session_ids): async def delete_mobs(project_id, session_ids):
for session_id in session_ids: for session_id in session_ids:
for k in __get_devtools_keys(project_id=project_id, session_id=session_id): for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) await StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -2,24 +2,24 @@ import schemas
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def add_favorite_session(context: schemas.CurrentContext, project_id, session_id): async def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions(user_id, session_id) INSERT INTO public.user_favorite_sessions(user_id, session_id)
VALUES (%(userId)s,%(session_id)s) VALUES (%(userId)s,%(session_id)s)
RETURNING session_id;""", RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id}) {"userId": context.user_id, "session_id": session_id})
) )
row = cur.fetchone() row = await cur.fetchone()
if row: if row:
return {"data": {"sessionId": session_id}} return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]} return {"errors": ["something went wrong"]}
def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): async def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions DELETE FROM public.user_favorite_sessions
WHERE user_id = %(userId)s WHERE user_id = %(userId)s
@ -27,13 +27,13 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session
RETURNING session_id;""", RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id}) {"userId": context.user_id, "session_id": session_id})
) )
row = cur.fetchone() row = await cur.fetchone()
if row: if row:
return {"data": {"sessionId": session_id}} return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]} return {"errors": ["something went wrong"]}
def favorite_session(context: schemas.CurrentContext, project_id, session_id): async def favorite_session(context: schemas.CurrentContext, project_id, session_id):
if favorite_session_exists(user_id=context.user_id, session_id=session_id): if favorite_session_exists(user_id=context.user_id, session_id=session_id):
return remove_favorite_session(context=context, project_id=project_id, return remove_favorite_session(context=context, project_id=project_id,
session_id=session_id) session_id=session_id)
@ -41,9 +41,9 @@ def favorite_session(context: schemas.CurrentContext, project_id, session_id):
return add_favorite_session(context=context, project_id=project_id, session_id=session_id) return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
def favorite_session_exists(session_id, user_id=None): async def favorite_session_exists(session_id, user_id=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT session_id f"""SELECT session_id
FROM public.user_favorite_sessions FROM public.user_favorite_sessions
@ -52,13 +52,13 @@ def favorite_session_exists(session_id, user_id=None):
{'AND user_id = %(userId)s' if user_id else ''};""", {'AND user_id = %(userId)s' if user_id else ''};""",
{"userId": user_id, "session_id": session_id}) {"userId": user_id, "session_id": session_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return r is not None return r is not None
def get_start_end_timestamp(project_id, user_id): async def get_start_end_timestamp(project_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts """SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id) FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
@ -67,5 +67,5 @@ def get_start_end_timestamp(project_id, user_id):
AND project_id = %(project_id)s;""", AND project_id = %(project_id)s;""",
{"userId": user_id, "project_id": project_id}) {"userId": user_id, "project_id": project_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"]) return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])

View file

@ -66,11 +66,11 @@ SUPPORTED_TYPES = {
} }
def search(text: str, meta_type: schemas.FilterType, project_id: int): async def search(text: str, meta_type: schemas.FilterType, project_id: int):
rows = [] rows = []
if meta_type not in list(SUPPORTED_TYPES.keys()): if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]} return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) rows += await SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
# for IOS events autocomplete # for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)

View file

@ -28,12 +28,12 @@ def __get_mob_keys_deprecated(session_id):
return [str(session_id), str(session_id) + "e"] return [str(session_id), str(session_id) + "e"]
def get_urls(project_id, session_id, check_existence: bool = True): async def get_urls(project_id, session_id, check_existence: bool = True):
results = [] results = []
for k in __get_mob_keys(project_id=project_id, session_id=session_id): for k in __get_mob_keys(project_id=project_id, session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k):
continue continue
results.append(StorageClient.get_presigned_url_for_sharing( results.append(await StorageClient.get_presigned_url_for_sharing(
bucket=config("sessions_bucket"), bucket=config("sessions_bucket"),
expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
key=k key=k
@ -41,12 +41,12 @@ def get_urls(project_id, session_id, check_existence: bool = True):
return results return results
def get_urls_depercated(session_id, check_existence: bool = True): async def get_urls_depercated(session_id, check_existence: bool = True):
results = [] results = []
for k in __get_mob_keys_deprecated(session_id=session_id): for k in __get_mob_keys_deprecated(session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): if check_existence and not await StorageClient.exists(bucket=config("sessions_bucket"), key=k):
continue continue
results.append(StorageClient.get_presigned_url_for_sharing( results.append(await StorageClient.get_presigned_url_for_sharing(
bucket=config("sessions_bucket"), bucket=config("sessions_bucket"),
expires_in=100000, expires_in=100000,
key=k key=k
@ -54,12 +54,12 @@ def get_urls_depercated(session_id, check_existence: bool = True):
return results return results
def get_ios_videos(session_id, project_id, check_existence=False): async def get_ios_videos(session_id, project_id, check_existence=False):
results = [] results = []
for k in __get_ios_video_keys(project_id=project_id, session_id=session_id): for k in __get_ios_video_keys(project_id=project_id, session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k): if check_existence and not await StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k):
continue continue
results.append(StorageClient.get_presigned_url_for_sharing( results.append(await StorageClient.get_presigned_url_for_sharing(
bucket=config("IOS_VIDEO_BUCKET"), bucket=config("IOS_VIDEO_BUCKET"),
expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
key=k key=k
@ -67,8 +67,8 @@ def get_ios_videos(session_id, project_id, check_existence=False):
return results return results
def delete_mobs(project_id, session_ids): async def delete_mobs(project_id, session_ids):
for session_id in session_ids: for session_id in session_ids:
for k in __get_mob_keys(project_id=project_id, session_id=session_id) \ for k in __get_mob_keys(project_id=project_id, session_id=session_id) \
+ __get_mob_keys_deprecated(session_id=session_id): + __get_mob_keys_deprecated(session_id=session_id):
StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) await StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -10,8 +10,8 @@ from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def get_note(tenant_id, project_id, user_id, note_id, share=None): async def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""} {",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id) FROM sessions_notes INNER JOIN users USING (user_id)
@ -22,16 +22,16 @@ def get_note(tenant_id, project_id, user_id, note_id, share=None):
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
"note_id": note_id, "share": share}) "note_id": note_id, "share": share})
cur.execute(query=query) await cur.execute(query=query)
row = cur.fetchone() row = await cur.fetchone()
row = helper.dict_to_camel_case(row) row = helper.dict_to_camel_case(row)
if row: if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row return row
def get_session_notes(tenant_id, project_id, session_id, user_id): async def get_session_notes(tenant_id, project_id, session_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
FROM sessions_notes INNER JOIN users USING (user_id) FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s WHERE sessions_notes.project_id = %(project_id)s
@ -43,16 +43,16 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
{"project_id": project_id, "user_id": user_id, {"project_id": project_id, "user_id": user_id,
"tenant_id": tenant_id, "session_id": session_id}) "tenant_id": tenant_id, "session_id": session_id})
cur.execute(query=query) await cur.execute(query=query)
rows = cur.fetchall() rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
for row in rows: for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows return rows
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): async def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"] conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"]
extra_params = {} extra_params = {}
if data.tags and len(data.tags) > 0: if data.tags and len(data.tags) > 0:
@ -73,29 +73,29 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
cur.execute(query=query) await cur.execute(query=query)
rows = cur.fetchall() rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
for row in rows: for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows return rows
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): async def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public) query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""", RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"user_id": user_id, "project_id": project_id, "session_id": session_id, {"user_id": user_id, "project_id": project_id, "session_id": session_id,
**data.model_dump()}) **data.model_dump()})
cur.execute(query) await cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone()) result = helper.dict_to_camel_case(await cur.fetchone())
if result: if result:
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"]) result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
return result return result
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): async def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
sub_query = [] sub_query = []
if data.message is not None: if data.message is not None:
sub_query.append("message = %(message)s") sub_query.append("message = %(message)s")
@ -105,8 +105,8 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
sub_query.append("is_public = %(is_public)s") sub_query.append("is_public = %(is_public)s")
if data.timestamp is not None: if data.timestamp is not None:
sub_query.append("timestamp = %(timestamp)s") sub_query.append("timestamp = %(timestamp)s")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""UPDATE public.sessions_notes cur.mogrify(f"""UPDATE public.sessions_notes
SET SET
{" ,".join(sub_query)} {" ,".join(sub_query)}
@ -118,16 +118,16 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""", RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()}) {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
) )
row = helper.dict_to_camel_case(cur.fetchone()) row = helper.dict_to_camel_case(await cur.fetchone())
if row: if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row return row
return {"errors": ["Note not found"]} return {"errors": ["Note not found"]}
def delete(tenant_id, user_id, project_id, note_id): async def delete(tenant_id, user_id, project_id, note_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(""" UPDATE public.sessions_notes cur.mogrify(""" UPDATE public.sessions_notes
SET deleted_at = timezone('utc'::text, now()) SET deleted_at = timezone('utc'::text, now())
WHERE note_id = %(note_id)s WHERE note_id = %(note_id)s
@ -139,8 +139,8 @@ def delete(tenant_id, user_id, project_id, note_id):
return {"data": {"state": "success"}} return {"data": {"state": "success"}}
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id): async def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) note = await get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None: if note is None:
return {"errors": ["Note not found"]} return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}") session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
@ -171,8 +171,8 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
) )
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id): async def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id) note = await get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None: if note is None:
return {"errors": ["Note not found"]} return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}") session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
@ -205,7 +205,7 @@ def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
"text": bottom, "text": bottom,
"size": "Small", "size": "Small",
"fontType": "Monospace"}) "fontType": "Monospace"})
return MSTeams.send_raw( return await MSTeams.send_raw(
tenant_id=tenant_id, tenant_id=tenant_id,
webhook_id=webhook_id, webhook_id=webhook_id,
body={"type": "message", body={"type": "message",

View file

@ -5,19 +5,19 @@ from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper from chalicelib.utils import pg_client, helper
def __group_metadata(session, project_metadata): async def __group_metadata(session, project_metadata):
meta = {} meta = {}
for m in project_metadata.keys(): for m in project_metadata.keys():
if project_metadata[m] is not None and session.get(m) is not None: if project_metadata[m] is not None and await session.get(m) is not None:
meta[project_metadata[m]] = session[m] meta[project_metadata[m]] = session[m]
session.pop(m) session.pop(m)
return meta return meta
# for backward compatibility # for backward compatibility
def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, async def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True): group_metadata=False, live=True):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
extra_query = [] extra_query = []
if include_fav_viewed: if include_fav_viewed:
extra_query.append("""COALESCE((SELECT TRUE extra_query.append("""COALESCE((SELECT TRUE
@ -41,57 +41,57 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
AND s.session_id = %(session_id)s;""", AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id, "userId": context.user_id} {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchone() data = await cur.fetchone()
if data is not None: if data is not None:
data = helper.dict_to_camel_case(data) data = helper.dict_to_camel_case(data)
if full_data: if full_data:
if data["platform"] == 'ios': if data["platform"] == 'ios':
data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) data['events'] = await events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
for e in data['events']: for e in data['events']:
if e["type"].endswith("_IOS"): if e["type"].endswith("_IOS"):
e["type"] = e["type"][:-len("_IOS")] e["type"] = e["type"][:-len("_IOS")]
data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) data['crashes'] = await events_ios.get_crashes_by_session_id(session_id=session_id)
data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, data['userEvents'] = await events_ios.get_customs_by_session_id(project_id=project_id,
session_id=session_id) session_id=session_id)
data['mobsUrl'] = [] data['mobsUrl'] = []
else: else:
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, data['events'] = await events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True) group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) all_errors = await events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack # to keep only the first stack
# limit the number of errors to reduce the response-body size # limit the number of errors to reduce the response-body size
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500] if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, data['userEvents'] = await events.get_customs_by_session_id(project_id=project_id,
session_id=session_id) session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, data['domURL'] = await sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) data['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, data['resources'] = await resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=data["startTs"], duration=data["duration"]) start_ts=data["startTs"], duration=data["duration"])
data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, data['notes'] = await sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
session_id=session_id, user_id=context.user_id) session_id=session_id, user_id=context.user_id)
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['metadata'] = await __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['issues'] = await issues.get_by_session_id(session_id=session_id, project_id=project_id)
data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, data['live'] = live and await assist.is_live(project_id=project_id, session_id=session_id,
project_key=data["projectKey"]) project_key=data["projectKey"])
data["inDB"] = True data["inDB"] = True
return data return data
elif live: elif live:
return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) return await assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
else: else:
return None return None
def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, async def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True): group_metadata=False, live=True):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
extra_query = [] extra_query = []
if include_fav_viewed: if include_fav_viewed:
extra_query.append("""COALESCE((SELECT TRUE extra_query.append("""COALESCE((SELECT TRUE
@ -115,43 +115,43 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
AND s.session_id = %(session_id)s;""", AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id, "userId": context.user_id} {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
) )
cur.execute(query=query) await cur.execute(query=query)
data = cur.fetchone() data = await cur.fetchone()
if data is not None: if data is not None:
data = helper.dict_to_camel_case(data) data = helper.dict_to_camel_case(data)
if full_data: if full_data:
if data["platform"] == 'ios': if data["platform"] == 'ios':
data['mobsUrl'] = [] data['mobsUrl'] = []
data['videoURL'] = sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id, data['videoURL'] = await sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
else: else:
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) data['mobsUrl'] = await sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) data['canvasURL'] = await canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
if user_testing.has_test_signals(session_id=session_id, project_id=project_id): if await user_testing.has_test_signals(session_id=session_id, project_id=project_id):
data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id, data['utxVideo'] = await user_testing.get_ux_webcam_signed_url(session_id=session_id,
project_id=project_id, project_id=project_id,
check_existence=False) check_existence=False)
else: else:
data['utxVideo'] = [] data['utxVideo'] = []
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, data['domURL'] = await sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['metadata'] = await __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, data['live'] = live and await assist.is_live(project_id=project_id, session_id=session_id,
project_key=data["projectKey"]) project_key=data["projectKey"])
data["inDB"] = True data["inDB"] = True
return data return data
elif live: elif live:
return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) return await assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
else: else:
return None return None
def get_events(project_id, session_id): async def get_events(project_id, session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
f"""SELECT session_id, platform, start_ts, duration f"""SELECT session_id, platform, start_ts, duration
FROM public.sessions AS s FROM public.sessions AS s
@ -159,37 +159,37 @@ def get_events(project_id, session_id):
AND s.session_id = %(session_id)s;""", AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id} {"project_id": project_id, "session_id": session_id}
) )
cur.execute(query=query) await cur.execute(query=query)
s_data = cur.fetchone() s_data = await cur.fetchone()
if s_data is not None: if s_data is not None:
s_data = helper.dict_to_camel_case(s_data) s_data = helper.dict_to_camel_case(s_data)
data = {} data = {}
if s_data["platform"] == 'ios': if s_data["platform"] == 'ios':
data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) data['events'] = await events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
for e in data['events']: for e in data['events']:
if e["type"].endswith("_IOS"): if e["type"].endswith("_IOS"):
e["type"] = e["type"][:-len("_IOS")] e["type"] = e["type"][:-len("_IOS")]
data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) data['crashes'] = await events_ios.get_crashes_by_session_id(session_id=session_id)
data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, data['userEvents'] = await events_ios.get_customs_by_session_id(project_id=project_id,
session_id=session_id) session_id=session_id)
data['userTesting'] = [] data['userTesting'] = []
else: else:
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, data['events'] = await events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True) group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) all_errors = await events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack # to keep only the first stack
# limit the number of errors to reduce the response-body size # limit the number of errors to reduce the response-body size
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500] if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, data['userEvents'] = await events.get_customs_by_session_id(project_id=project_id,
session_id=session_id) session_id=session_id)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, data['resources'] = await resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=s_data["startTs"], duration=s_data["duration"]) start_ts=s_data["startTs"], duration=s_data["duration"])
data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id) data['userTesting'] = await user_testing.get_test_signals(session_id=session_id, project_id=project_id)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['issues'] = await issues.get_by_session_id(session_id=session_id, project_id=project_id)
data['issues'] = reduce_issues(data['issues']) data['issues'] = reduce_issues(data['issues'])
return data return data
else: else:

View file

@ -1,9 +1,9 @@
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def view_session(project_id, user_id, session_id): async def view_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_sessions(user_id, session_id) cur.mogrify("""INSERT INTO public.user_viewed_sessions(user_id, session_id)
VALUES (%(userId)s,%(session_id)s) VALUES (%(userId)s,%(session_id)s)
ON CONFLICT DO NOTHING;""", ON CONFLICT DO NOTHING;""",

View file

@ -28,7 +28,7 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
30: 2.042} 30: 2.042}
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]: async def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
""" """
Add minimal timestamp Add minimal timestamp
:param filter_d: dict contains events&filters&... :param filter_d: dict contains events&filters&...
@ -96,7 +96,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k)) sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type: elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None: if meta_keys is None:
meta_keys = metadata.get(project_id=project_id) meta_keys = await metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys} meta_keys = {m["key"]: m["index"] for m in meta_keys}
# op = sessions.__get_sql_operator(f["operator"]) # op = sessions.__get_sql_operator(f["operator"])
if f.source in meta_keys.keys(): if f.source in meta_keys.keys():
@ -232,14 +232,14 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp, params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
"endTimestamp": filter_d.endTimestamp, "endTimestamp": filter_d.endTimestamp,
"issueTypes": tuple(filter_issues), **values} "issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(n_stages_query, params) query = cur.mogrify(n_stages_query, params)
logging.debug("---------------------------------------------------") logging.debug("---------------------------------------------------")
logging.debug(query) logging.debug(query)
logging.debug("---------------------------------------------------") logging.debug("---------------------------------------------------")
try: try:
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
except Exception as err: except Exception as err:
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------") logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
logging.warning(query.decode('UTF-8')) logging.warning(query.decode('UTF-8'))
@ -550,7 +550,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues return n_critical_issues, issues_dict, total_drop_due_to_issues
def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): async def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
output = [] output = []
stages = filter_d.events stages = filter_d.events
# TODO: handle 1 stage alone # TODO: handle 1 stage alone
@ -577,13 +577,13 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
# counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d), # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
# project_id=project_id, user_id=None, count_only=True) # project_id=project_id, user_id=None, count_only=True)
# last change # last change
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), counts = await sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
project_id=project_id, user_id=None, count_only=True) project_id=project_id, user_id=None, count_only=True)
output[0]["sessionsCount"] = counts["countSessions"] output[0]["sessionsCount"] = counts["countSessions"]
output[0]["usersCount"] = counts["countUsers"] output[0]["usersCount"] = counts["countUsers"]
return output, 0 return output, 0
# The result of the multi-stage query # The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) rows = await get_stages_and_events(filter_d=filter_d, project_id=project_id)
if len(rows) == 0: if len(rows) == 0:
return get_stages(stages, []), 0 return get_stages(stages, []), 0
# Obtain the first part of the output # Obtain the first part of the output
@ -596,11 +596,11 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
return stages_list, total_drop_due_to_issues return stages_list, total_drop_due_to_issues
def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None): async def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
stages = filter_d.events stages = filter_d.events
# The result of the multi-stage query # The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) rows = await get_stages_and_events(filter_d=filter_d, project_id=project_id)
if len(rows) == 0: if len(rows) == 0:
return output return output
# Obtain the second part of the output # Obtain the second part of the output

View file

@ -24,9 +24,9 @@ async def create_tenant(data: schemas.UserSignupSchema):
if email is None or len(email) < 5: if email is None or len(email) < 5:
errors.append("Invalid email address.") errors.append("Invalid email address.")
else: else:
if users.email_exists(email): if await users.email_exists(email):
errors.append("Email address already in use.") errors.append("Email address already in use.")
if users.get_deleted_user_by_email(email) is not None: if await users.get_deleted_user_by_email(email) is not None:
errors.append("Email address previously deleted.") errors.append("Email address previously deleted.")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
@ -72,11 +72,10 @@ async def create_tenant(data: schemas.UserSignupSchema):
VALUES (%(projectName)s, TRUE) VALUES (%(projectName)s, TRUE)
RETURNING project_id, (SELECT api_key FROM t) AS api_key;""" RETURNING project_id, (SELECT api_key FROM t) AS api_key;"""
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify(query, params)) await cur.execute(cur.mogrify(query, params))
await telemetry.new_client()
telemetry.new_client() r = await users.authenticate(email, password)
r = users.authenticate(email, password)
r["smtp"] = smtp.has_smtp() r["smtp"] = smtp.has_smtp()
return { return {

View file

@ -1,17 +1,17 @@
import requests
from decouple import config from decouple import config
from chalicelib.core import projects from chalicelib.core import projects
def start_replay(project_id, session_id, device, os_version, mob_url): async def start_replay(project_id, session_id, device, os_version, mob_url):
r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={ async with httpx.AsyncClient() as client:
"projectId": project_id, r = await client.post(config("IOS_MIDDLEWARE") + "/replay", json={
"projectKey": projects.get_project_key(project_id), "projectId": project_id,
"session_id": session_id, "projectKey": await projects.get_project_key(project_id),
"device": device, "session_id": session_id,
"osVersion": os_version, "device": device,
"mobUrl": mob_url "osVersion": os_version,
}) "mobUrl": mob_url
})
if r.status_code != 200: if r.status_code != 200:
print("failed replay middleware") print("failed replay middleware")
print("status code: %s" % r.status_code) print("status code: %s" % r.status_code)

View file

@ -1,25 +1,25 @@
from urllib.parse import urlparse from urllib.parse import urlparse
import requests import httpx
from decouple import config from decouple import config
from chalicelib.core import sourcemaps_parser from chalicelib.core import sourcemaps_parser
from chalicelib.utils.storage import StorageClient, generators from chalicelib.utils.storage import StorageClient, generators
def presign_share_urls(project_id, urls): async def presign_share_urls(project_id, urls):
results = [] results = []
for u in urls: for u in urls:
results.append(StorageClient.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120, results.append(await StorageClient.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120,
key=generators.generate_file_key_from_url(project_id, u), key=generators.generate_file_key_from_url(project_id, u),
check_exists=True)) check_exists=True))
return results return results
def presign_upload_urls(project_id, urls): async def presign_upload_urls(project_id, urls):
results = [] results = []
for u in urls: for u in urls:
results.append(StorageClient.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'), results.append(await StorageClient.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'),
expires_in=1800, expires_in=1800,
key=generators.generate_file_key_from_url(project_id, u))) key=generators.generate_file_key_from_url(project_id, u)))
return results return results
@ -67,9 +67,10 @@ def format_payload(p, truncate_to_first=False):
return [] return []
def url_exists(url): async def url_exists(url):
try: try:
r = requests.head(url, allow_redirects=False) async with httpx.AsyncClient() as client:
r = await client.head(url, follow_redirects=False)
return r.status_code == 200 and "text/html" not in r.headers.get("Content-Type", "") return r.status_code == 200 and "text/html" not in r.headers.get("Content-Type", "")
except Exception as e: except Exception as e:
print(f"!! Issue checking if URL exists: {url}") print(f"!! Issue checking if URL exists: {url}")
@ -77,7 +78,7 @@ def url_exists(url):
return False return False
def get_traces_group(project_id, payload): async def get_traces_group(project_id, payload):
frames = format_payload(payload) frames = format_payload(payload)
results = [{}] * len(frames) results = [{}] * len(frames)
@ -95,12 +96,12 @@ def get_traces_group(project_id, payload):
payloads[key] = None payloads[key] = None
if key not in payloads: if key not in payloads:
file_exists_in_bucket = len(file_url) > 0 and StorageClient.exists(config('sourcemaps_bucket'), key) file_exists_in_bucket = len(file_url) > 0 and await StorageClient.exists(config('sourcemaps_bucket'), key)
if len(file_url) > 0 and not file_exists_in_bucket: if len(file_url) > 0 and not file_exists_in_bucket:
print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 looking in server") print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 looking in server")
if not file_url.endswith(".map"): if not file_url.endswith(".map"):
file_url += '.map' file_url += '.map'
file_exists_in_server = url_exists(file_url) file_exists_in_server = await StorageClienturl_exists(file_url)
file_exists_in_bucket = file_exists_in_server file_exists_in_bucket = file_exists_in_server
all_exists = all_exists and file_exists_in_bucket all_exists = all_exists and file_exists_in_bucket
if not file_exists_in_bucket and not file_exists_in_server: if not file_exists_in_bucket and not file_exists_in_server:
@ -118,7 +119,7 @@ def get_traces_group(project_id, payload):
for key in payloads.keys(): for key in payloads.keys():
if payloads[key] is None: if payloads[key] is None:
continue continue
key_results = sourcemaps_parser.get_original_trace( key_results = await sourcemaps_parser.get_original_trace(
key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key, key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key,
positions=[o["position"] for o in payloads[key]], positions=[o["position"] for o in payloads[key]],
is_url=payloads[key][0]["isURL"]) is_url=payloads[key][0]["isURL"])
@ -143,7 +144,7 @@ def get_js_cache_path(fullURL):
MAX_COLUMN_OFFSET = 60 MAX_COLUMN_OFFSET = 60
def fetch_missed_contexts(frames): async def fetch_missed_contexts(frames):
source_cache = {} source_cache = {}
for i in range(len(frames)): for i in range(len(frames)):
if frames[i] and frames[i].get("context") and len(frames[i]["context"]) > 0: if frames[i] and frames[i].get("context") and len(frames[i]["context"]) > 0:
@ -153,7 +154,7 @@ def fetch_missed_contexts(frames):
file = source_cache[file_abs_path] file = source_cache[file_abs_path]
else: else:
file_path = get_js_cache_path(file_abs_path) file_path = get_js_cache_path(file_abs_path)
file = StorageClient.get_file(config('js_cache_bucket'), file_path) file = await StorageClient.get_file(config('js_cache_bucket'), file_path)
if file is None: if file is None:
print(f"Missing abs_path: {file_abs_path}, file {file_path} not found in {config('js_cache_bucket')}") print(f"Missing abs_path: {file_abs_path}, file {file_path} not found in {config('js_cache_bucket')}")
source_cache[file_abs_path] = file source_cache[file_abs_path] = file

View file

@ -1,5 +1,4 @@
import requests import httpx
from decouple import config from decouple import config
SMR_URL = config("sourcemaps_reader") SMR_URL = config("sourcemaps_reader")
@ -11,7 +10,7 @@ if '%s' in SMR_URL:
SMR_URL = SMR_URL % "smr" SMR_URL = SMR_URL % "smr"
def get_original_trace(key, positions, is_url=False): async def get_original_trace(key, positions, is_url=False):
payload = { payload = {
"key": key, "key": key,
"positions": positions, "positions": positions,
@ -20,14 +19,12 @@ def get_original_trace(key, positions, is_url=False):
"isURL": is_url "isURL": is_url
} }
try: try:
r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5)) async with httpx.AsyncClient() as client:
r = await client.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
if r.status_code != 200: if r.status_code != 200:
print(f"Issue getting sourcemap status_code:{r.status_code}") print(f"Issue getting sourcemap status_code:{r.status_code}")
return None return None
return r.json() return r.json()
except requests.exceptions.Timeout:
print("Timeout getting sourcemap")
return None
except Exception as e: except Exception as e:
print("Issue getting sourcemap") print("Issue getting sourcemap")
print(e) print(e)

View file

@ -3,7 +3,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def create_tag(project_id: int, data: schemas.TagCreate) -> int: async def create_tag(project_id: int, data: schemas.TagCreate) -> int:
query = """ query = """
INSERT INTO public.tags (project_id, name, selector, ignore_click_rage, ignore_dead_click) INSERT INTO public.tags (project_id, name, selector, ignore_click_rage, ignore_dead_click)
VALUES (%(project_id)s, %(name)s, %(selector)s, %(ignore_click_rage)s, %(ignore_dead_click)s) VALUES (%(project_id)s, %(name)s, %(selector)s, %(ignore_click_rage)s, %(ignore_dead_click)s)
@ -18,15 +18,15 @@ def create_tag(project_id: int, data: schemas.TagCreate) -> int:
'ignore_dead_click': data.ignoreDeadClick 'ignore_dead_click': data.ignoreDeadClick
} }
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, data) query = cur.mogrify(query, data)
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return row['tag_id'] return row['tag_id']
def list_tags(project_id: int): async def list_tags(project_id: int):
query = """ query = """
SELECT tag_id, name, selector, ignore_click_rage, ignore_dead_click SELECT tag_id, name, selector, ignore_click_rage, ignore_dead_click
FROM public.tags FROM public.tags
@ -34,36 +34,36 @@ def list_tags(project_id: int):
AND deleted_at IS NULL AND deleted_at IS NULL
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'project_id': project_id}) query = cur.mogrify(query, {'project_id': project_id})
cur.execute(query) await cur.execute(query)
rows = cur.fetchall() rows = await cur.fetchall()
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)
def update_tag(project_id: int, tag_id: int, data: schemas.TagUpdate): async def update_tag(project_id: int, tag_id: int, data: schemas.TagUpdate):
query = """ query = """
UPDATE public.tags UPDATE public.tags
SET name = %(name)s SET name = %(name)s
WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'tag_id': tag_id, 'name': data.name, 'project_id': project_id}) query = cur.mogrify(query, {'tag_id': tag_id, 'name': data.name, 'project_id': project_id})
cur.execute(query) await cur.execute(query)
return True return True
def delete_tag(project_id: int, tag_id: int): async def delete_tag(project_id: int, tag_id: int):
query = """ query = """
UPDATE public.tags UPDATE public.tags
SET deleted_at = now() at time zone 'utc' SET deleted_at = now() at time zone 'utc'
WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s
""" """
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'tag_id': tag_id, 'project_id': project_id}) query = cur.mogrify(query, {'tag_id': tag_id, 'project_id': project_id})
cur.execute(query) await cur.execute(query)
return True return True

View file

@ -1,5 +1,5 @@
import httpx
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
import requests
from chalicelib.core import license from chalicelib.core import license
@ -19,9 +19,9 @@ def process_data(data):
} }
def compute(): async def compute():
with pg_client.PostgresClient(long_query=True) as cur: async with pg_client.cursor(long_query=True) as cur:
cur.execute( await cur.execute(
f"""UPDATE public.tenants f"""UPDATE public.tenants
SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) + SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) +
(SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') + (SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') +
@ -36,17 +36,19 @@ def compute():
RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out, RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
(SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);""" (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);"""
) )
data = cur.fetchone() data = await cur.fetchone()
if len(data) > 0: if len(data) > 0:
requests.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(data)]}) async with httpx.AsyncClient() as client:
await client.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(data)]})
def new_client(): async def new_client():
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
f"""SELECT *, openreplay_version() AS version_number, f"""SELECT *, openreplay_version() AS version_number,
(SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email
FROM public.tenants FROM public.tenants
LIMIT 1;""") LIMIT 1;""")
data = cur.fetchone() data = await cur.fetchone()
requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) async with httpx.AsyncClient() as client:
await client.post('https://api.openreplay.com/os/signup', json=process_data(data))

View file

@ -3,8 +3,8 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
def get_by_tenant_id(tenant_id): async def get_by_tenant_id(tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT tenants.tenant_id, query = cur.mogrify(f"""SELECT tenants.tenant_id,
tenants.name, tenants.name,
tenants.api_key, tenants.api_key,
@ -15,12 +15,12 @@ def get_by_tenant_id(tenant_id):
FROM public.tenants FROM public.tenants
LIMIT 1;""", LIMIT 1;""",
{"tenantId": tenant_id}) {"tenantId": tenant_id})
cur.execute(query=query) await cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def get_by_api_key(api_key): async def get_by_api_key(api_key):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT 1 AS tenant_id, query = cur.mogrify(f"""SELECT 1 AS tenant_id,
tenants.name, tenants.name,
tenants.created_at tenants.created_at
@ -28,28 +28,28 @@ def get_by_api_key(api_key):
WHERE tenants.api_key = %(api_key)s WHERE tenants.api_key = %(api_key)s
LIMIT 1;""", LIMIT 1;""",
{"api_key": api_key}) {"api_key": api_key})
cur.execute(query=query) await cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def generate_new_api_key(tenant_id): async def generate_new_api_key(tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.tenants query = cur.mogrify(f"""UPDATE public.tenants
SET api_key=generate_api_key(20) SET api_key=generate_api_key(20)
RETURNING api_key;""", RETURNING api_key;""",
{"tenant_id": tenant_id}) {"tenant_id": tenant_id})
cur.execute(query=query) await cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def edit_tenant(tenant_id, changes): async def edit_tenant(tenant_id, changes):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.tenants query = cur.mogrify(f"""UPDATE public.tenants
SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])} SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])}
RETURNING name, opt_out;""", RETURNING name, opt_out;""",
{"tenant_id": tenant_id, **changes}) {"tenant_id": tenant_id, **changes})
cur.execute(query=query) await cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def tenants_exists_sync(use_pool=True): def tenants_exists_sync(use_pool=True):
@ -60,9 +60,7 @@ def tenants_exists_sync(use_pool=True):
async def tenants_exists(use_pool=True): async def tenants_exists(use_pool=True):
from app import app async with pg_client.cursor() as cur:
async with app.state.postgresql.connection() as cnx: await cur.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
async with cnx.transaction() as txn: row = await cur.fetchone()
row = await cnx.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)") return row["exists"]
row = await row.fetchone()
return row["exists"]

View file

@ -3,9 +3,9 @@ from chalicelib.utils.storage import StorageClient
from decouple import config from decouple import config
def get_test_signals(session_id, project_id): async def get_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT * SELECT *
FROM public.ut_tests_signals FROM public.ut_tests_signals
LEFT JOIN public.ut_tests_tasks USING (task_id) LEFT JOIN public.ut_tests_tasks USING (task_id)
@ -13,28 +13,28 @@ def get_test_signals(session_id, project_id):
ORDER BY timestamp;""", ORDER BY timestamp;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
rows = cur.fetchall() rows = await cur.fetchall()
return helper.dict_to_camel_case(rows) return helper.dict_to_camel_case(rows)
def has_test_signals(session_id, project_id): async def has_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT EXISTS(SELECT 1 FROM public.ut_tests_signals SELECT EXISTS(SELECT 1 FROM public.ut_tests_signals
WHERE session_id = %(session_id)s) AS has;""", WHERE session_id = %(session_id)s) AS has;""",
{"project_id": project_id, "session_id": session_id}) {"project_id": project_id, "session_id": session_id})
) )
row = cur.fetchone() row = await cur.fetchone()
return row.get("has") return row.get("has")
def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool = True): async def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool = True):
results = [] results = []
bucket_name = "uxtesting-records" # config("sessions_bucket") bucket_name = "uxtesting-records" # config("sessions_bucket")
k = f'{session_id}/ux_webcam_record.webm' k = f'{session_id}/ux_webcam_record.webm'
if check_existence and not StorageClient.exists(bucket=bucket_name, key=k): if check_existence and not await StorageClient.exists(bucket=bucket_name, key=k):
return [] return []
results.append(StorageClient.get_presigned_url_for_sharing( results.append(await StorageClient.get_presigned_url_for_sharing(
bucket=bucket_name, bucket=bucket_name,
expires_in=100000, expires_in=100000,
key=k key=k

View file

@ -17,8 +17,8 @@ def __generate_invitation_token():
return secrets.token_urlsafe(64) return secrets.token_urlsafe(64)
def create_new_member(email, invitation_token, admin, name, owner=False): async def create_new_member(email, invitation_token, admin, name, owner=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
WITH u AS (INSERT INTO public.users (email, role, name, data) WITH u AS (INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s) VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
@ -41,15 +41,15 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
{"email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name, {"email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"invitation_token": invitation_token}) "invitation_token": invitation_token})
cur.execute(query) await cur.execute(query)
row = helper.dict_to_camel_case(cur.fetchone()) row = helper.dict_to_camel_case(await cur.fetchone())
if row: if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row return row
def restore_member(user_id, email, invitation_token, admin, name, owner=False): async def restore_member(user_id, email, invitation_token, admin, name, owner=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
WITH ua AS (UPDATE public.basic_authentication WITH ua AS (UPDATE public.basic_authentication
SET invitation_token = %(invitation_token)s, SET invitation_token = %(invitation_token)s,
@ -78,16 +78,16 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
{"user_id": user_id, "email": email, {"user_id": user_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "role": "owner" if owner else "admin" if admin else "member",
"name": name, "invitation_token": invitation_token}) "name": name, "invitation_token": invitation_token})
cur.execute(query) await cur.execute(query)
result = cur.fetchone() result = await cur.fetchone()
cur.execute(query) await cur.execute(query)
result["created_at"] = TimeUTC.datetime_to_timestamp(result["created_at"]) result["created_at"] = TimeUTC.datetime_to_timestamp(result["created_at"])
return helper.dict_to_camel_case(result) return helper.dict_to_camel_case(result)
def generate_new_invitation(user_id): async def generate_new_invitation(user_id):
invitation_token = __generate_invitation_token() invitation_token = __generate_invitation_token()
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""\ query = cur.mogrify("""\
UPDATE public.basic_authentication UPDATE public.basic_authentication
SET invitation_token = %(invitation_token)s, SET invitation_token = %(invitation_token)s,
@ -97,23 +97,23 @@ def generate_new_invitation(user_id):
WHERE user_id=%(user_id)s WHERE user_id=%(user_id)s
RETURNING invitation_token;""", RETURNING invitation_token;""",
{"user_id": user_id, "invitation_token": invitation_token}) {"user_id": user_id, "invitation_token": invitation_token})
cur.execute( await cur.execute(
query query
) )
return __get_invitation_link(cur.fetchone().pop("invitation_token")) return __get_invitation_link(await cur.fetchone().pop("invitation_token"))
def reset_member(tenant_id, editor_id, user_id_to_update): async def reset_member(tenant_id, editor_id, user_id_to_update):
admin = get(tenant_id=tenant_id, user_id=editor_id) admin = await get(tenant_id=tenant_id, user_id=editor_id)
if not admin["admin"] and not admin["superAdmin"]: if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
user = get(tenant_id=tenant_id, user_id=user_id_to_update) user = await get(tenant_id=tenant_id, user_id=user_id_to_update)
if not user: if not user:
return {"errors": ["user not found"]} return {"errors": ["user not found"]}
return {"data": {"invitationLink": generate_new_invitation(user_id_to_update)}} return {"data": {"invitationLink": generate_new_invitation(user_id_to_update)}}
def update(tenant_id, user_id, changes, output=True): async def update(tenant_id, user_id, changes, output=True):
AUTH_KEYS = ["password", "invitationToken", "invitedAt", "changePwdExpireAt", "changePwdToken"] AUTH_KEYS = ["password", "invitationToken", "invitedAt", "changePwdExpireAt", "changePwdToken"]
if len(changes.keys()) == 0: if len(changes.keys()) == 0:
return None return None
@ -130,51 +130,51 @@ def update(tenant_id, user_id, changes, output=True):
else: else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s") sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
if len(sub_query_users) > 0: if len(sub_query_users) > 0:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
UPDATE public.users UPDATE public.users
SET {" ,".join(sub_query_users)} SET {" ,".join(sub_query_users)}
WHERE users.user_id = %(user_id)s;""", WHERE users.user_id = %(user_id)s;""",
{"user_id": user_id, **changes}) {"user_id": user_id, **changes})
cur.execute(query) await cur.execute(query)
if len(sub_query_bauth) > 0: if len(sub_query_bauth) > 0:
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
UPDATE public.basic_authentication UPDATE public.basic_authentication
SET {" ,".join(sub_query_bauth)} SET {" ,".join(sub_query_bauth)}
WHERE basic_authentication.user_id = %(user_id)s;""", WHERE basic_authentication.user_id = %(user_id)s;""",
{"user_id": user_id, **changes}) {"user_id": user_id, **changes})
cur.execute(query) await cur.execute(query)
if not output: if not output:
return None return None
return get(user_id=user_id, tenant_id=tenant_id) return get(user_id=user_id, tenant_id=tenant_id)
def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks): async def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks):
admin = get(tenant_id=tenant_id, user_id=user_id) admin = await get(tenant_id=tenant_id, user_id=user_id)
if not admin["admin"] and not admin["superAdmin"]: if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
if data.user_id is not None: if data.user_id is not None:
return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]} return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]}
user = get_by_email_only(email=data.email) user = await get_by_email_only(email=data.email)
if user: if user:
return {"errors": ["user already exists"]} return {"errors": ["user already exists"]}
if data.name is None or len(data.name) == 0: if data.name is None or len(data.name) == 0:
data.name = data.email data.name = data.email
invitation_token = __generate_invitation_token() invitation_token = __generate_invitation_token()
user = get_deleted_user_by_email(email=data.email) user = await get_deleted_user_by_email(email=data.email)
if user is not None: if user is not None:
new_member = restore_member(email=data.email, invitation_token=invitation_token, new_member = await restore_member(email=data.email, invitation_token=invitation_token,
admin=data.admin, name=data.name, user_id=user["userId"]) admin=data.admin, name=data.name, user_id=user["userId"])
else: else:
new_member = create_new_member(email=data.email, invitation_token=invitation_token, new_member = await create_new_member(email=data.email, invitation_token=invitation_token,
admin=data.admin, name=data.name) admin=data.admin, name=data.name)
new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
background_tasks.add_task(email_helper.send_team_invitation, **{ background_tasks.add_task(email_helper.send_team_invitation, **{
"recipient": data.email, "recipient": data.email,
"invitation_link": new_member["invitationLink"], "invitation_link": new_member["invitationLink"],
"client_id": tenants.get_by_tenant_id(tenant_id)["name"], "client_id": (await tenants.get_by_tenant_id(tenant_id))["name"],
"sender_name": admin["name"] "sender_name": admin["name"]
}) })
return {"data": new_member} return {"data": new_member}
@ -184,23 +184,23 @@ def __get_invitation_link(invitation_token):
return config("SITE_URL") + config("invitation_link") % invitation_token return config("SITE_URL") + config("invitation_link") % invitation_token
def allow_password_change(user_id, delta_min=10): async def allow_password_change(user_id, delta_min=10):
pass_token = secrets.token_urlsafe(8) pass_token = secrets.token_urlsafe(8)
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.basic_authentication query = cur.mogrify(f"""UPDATE public.basic_authentication
SET change_pwd_expire_at = timezone('utc'::text, now()+INTERVAL '%(delta)s MINUTES'), SET change_pwd_expire_at = timezone('utc'::text, now()+INTERVAL '%(delta)s MINUTES'),
change_pwd_token = %(pass_token)s change_pwd_token = %(pass_token)s
WHERE user_id = %(user_id)s""", WHERE user_id = %(user_id)s""",
{"user_id": user_id, "delta": delta_min, "pass_token": pass_token}) {"user_id": user_id, "delta": delta_min, "pass_token": pass_token})
cur.execute( await cur.execute(
query query
) )
return pass_token return pass_token
def get(user_id, tenant_id): async def get(user_id, tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
@ -219,13 +219,13 @@ def get(user_id, tenant_id):
LIMIT 1;""", LIMIT 1;""",
{"userId": user_id}) {"userId": user_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def generate_new_api_key(user_id): async def generate_new_api_key(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""UPDATE public.users f"""UPDATE public.users
SET api_key=generate_api_key(20) SET api_key=generate_api_key(20)
@ -234,13 +234,13 @@ def generate_new_api_key(user_id):
RETURNING api_key;""", RETURNING api_key;""",
{"userId": user_id}) {"userId": user_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def __get_account_info(tenant_id, user_id): async def __get_account_info(tenant_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT users.name, f"""SELECT users.name,
tenants.name AS tenant_name, tenants.name AS tenant_name,
@ -250,18 +250,18 @@ def __get_account_info(tenant_id, user_id):
AND users.deleted_at IS NULL;""", AND users.deleted_at IS NULL;""",
{"tenantId": tenant_id, "userId": user_id}) {"tenantId": tenant_id, "userId": user_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema): async def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
if changes.opt_out is not None or changes.tenantName is not None and len(changes.tenantName) > 0: if changes.opt_out is not None or changes.tenantName is not None and len(changes.tenantName) > 0:
user = get(user_id=user_id, tenant_id=tenant_id) user = await get(user_id=user_id, tenant_id=tenant_id)
if not user["superAdmin"] and not user["admin"]: if not user["superAdmin"] and not user["admin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
if changes.name is not None and len(changes.name) > 0: if changes.name is not None and len(changes.name) > 0:
update(tenant_id=tenant_id, user_id=user_id, changes={"name": changes.name}) await update(tenant_id=tenant_id, user_id=user_id, changes={"name": changes.name})
_tenant_changes = {} _tenant_changes = {}
if changes.tenantName is not None and len(changes.tenantName) > 0: if changes.tenantName is not None and len(changes.tenantName) > 0:
@ -272,14 +272,14 @@ def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
if len(_tenant_changes.keys()) > 0: if len(_tenant_changes.keys()) > 0:
tenants.edit_tenant(tenant_id=tenant_id, changes=_tenant_changes) tenants.edit_tenant(tenant_id=tenant_id, changes=_tenant_changes)
return {"data": __get_account_info(tenant_id=tenant_id, user_id=user_id)} return {"data": await __get_account_info(tenant_id=tenant_id, user_id=user_id)}
def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id): async def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id):
user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) user = await get_member(user_id=user_id_to_update, tenant_id=tenant_id)
_changes = {} _changes = {}
if editor_id != user_id_to_update: if editor_id != user_id_to_update:
admin = get_user_role(tenant_id=tenant_id, user_id=editor_id) admin = await get_user_role(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]: if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
if admin["admin"] and user["superAdmin"]: if admin["admin"] and user["superAdmin"]:
@ -297,14 +297,14 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema,
_changes["role"] = "admin" if changes.admin else "member" _changes["role"] = "admin" if changes.admin else "member"
if len(_changes.keys()) > 0: if len(_changes.keys()) > 0:
update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes, output=False) await update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes, output=False)
return {"data": get_member(user_id=user_id_to_update, tenant_id=tenant_id)} return {"data": await get_member(user_id=user_id_to_update, tenant_id=tenant_id)}
return {"data": user} return {"data": user}
def get_by_email_only(email): async def get_by_email_only(email):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
@ -322,13 +322,13 @@ def get_by_email_only(email):
LIMIT 1;""", LIMIT 1;""",
{"email": email}) {"email": email})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def get_member(tenant_id, user_id): async def get_member(tenant_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
@ -348,7 +348,7 @@ def get_member(tenant_id, user_id):
ORDER BY name, user_id""", ORDER BY name, user_id""",
{"user_id": user_id}) {"user_id": user_id})
) )
u = helper.dict_to_camel_case(cur.fetchone()) u = helper.dict_to_camel_case(await cur.fetchone())
if u: if u:
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
if u["invitationToken"]: if u["invitationToken"]:
@ -359,9 +359,9 @@ def get_member(tenant_id, user_id):
return u return u
def get_members(tenant_id): async def get_members(tenant_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
users.email, users.email,
@ -379,7 +379,7 @@ def get_members(tenant_id):
WHERE users.deleted_at IS NULL WHERE users.deleted_at IS NULL
ORDER BY name, user_id""" ORDER BY name, user_id"""
) )
r = cur.fetchall() r = await cur.fetchall()
if len(r): if len(r):
r = helper.list_to_camel_case(r) r = helper.list_to_camel_case(r)
for u in r: for u in r:
@ -393,71 +393,71 @@ def get_members(tenant_id):
return [] return []
def delete_member(user_id, tenant_id, id_to_delete): async def delete_member(user_id, tenant_id, id_to_delete):
if user_id == id_to_delete: if user_id == id_to_delete:
return {"errors": ["unauthorized, cannot delete self"]} return {"errors": ["unauthorized, cannot delete self"]}
admin = get(user_id=user_id, tenant_id=tenant_id) admin = await get(user_id=user_id, tenant_id=tenant_id)
if admin["member"]: if admin["member"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
to_delete = get(user_id=id_to_delete, tenant_id=tenant_id) to_delete = await get(user_id=id_to_delete, tenant_id=tenant_id)
if to_delete is None: if to_delete is None:
return {"errors": ["not found"]} return {"errors": ["not found"]}
if to_delete["superAdmin"]: if to_delete["superAdmin"]:
return {"errors": ["cannot delete super admin"]} return {"errors": ["cannot delete super admin"]}
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""UPDATE public.users cur.mogrify(f"""UPDATE public.users
SET deleted_at = timezone('utc'::text, now()), SET deleted_at = timezone('utc'::text, now()),
jwt_iat= NULL, jwt_refresh_jti= NULL, jwt_iat= NULL, jwt_refresh_jti= NULL,
jwt_refresh_iat= NULL jwt_refresh_iat= NULL
WHERE user_id=%(user_id)s;""", WHERE user_id=%(user_id)s;""",
{"user_id": id_to_delete})) {"user_id": id_to_delete}))
cur.execute( await cur.execute(
cur.mogrify(f"""UPDATE public.basic_authentication cur.mogrify(f"""UPDATE public.basic_authentication
SET password= NULL, invitation_token= NULL, SET password= NULL, invitation_token= NULL,
invited_at= NULL, changed_at= NULL, invited_at= NULL, changed_at= NULL,
change_pwd_expire_at= NULL, change_pwd_token= NULL change_pwd_expire_at= NULL, change_pwd_token= NULL
WHERE user_id=%(user_id)s;""", WHERE user_id=%(user_id)s;""",
{"user_id": id_to_delete})) {"user_id": id_to_delete}))
return {"data": get_members(tenant_id=tenant_id)} return {"data": await get_members(tenant_id=tenant_id)}
def change_password(tenant_id, user_id, email, old_password, new_password): async def change_password(tenant_id, user_id, email, old_password, new_password):
item = get(tenant_id=tenant_id, user_id=user_id) item = await get(tenant_id=tenant_id, user_id=user_id)
if item is None: if item is None:
return {"errors": ["access denied"]} return {"errors": ["access denied"]}
if old_password == new_password: if old_password == new_password:
return {"errors": ["old and new password are the same"]} return {"errors": ["old and new password are the same"]}
auth = authenticate(email, old_password, for_change_password=True) auth = await authenticate(email, old_password, for_change_password=True)
if auth is None: if auth is None:
return {"errors": ["wrong password"]} return {"errors": ["wrong password"]}
changes = {"password": new_password} changes = {"password": new_password}
user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) user = await update(tenant_id=tenant_id, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password) r = await authenticate(user['email'], new_password)
return { return {
'jwt': r.pop('jwt') 'jwt': r.pop('jwt')
} }
def set_password_invitation(user_id, new_password): async def set_password_invitation(user_id, new_password):
changes = {"password": new_password} changes = {"password": new_password}
user = update(tenant_id=-1, user_id=user_id, changes=changes) user = await update(tenant_id=-1, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password) r = await authenticate(user['email'], new_password)
tenant_id = r.pop("tenantId") tenant_id = r.pop("tenantId")
r["limits"] = { r["limits"] = {
"teamMember": -1, "teamMember": -1,
"projects": -1, "projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)} "metadata": await metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id) c = await tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt") c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recorded=True) c["projects"] = await projects.get_projects(tenant_id=tenant_id, recorded=True)
c["smtp"] = smtp.has_smtp() c["smtp"] = smtp.has_smtp()
c["iceServers"] = assist.get_ice_servers() c["iceServers"] = assist.get_ice_servers()
return { return {
@ -469,9 +469,9 @@ def set_password_invitation(user_id, new_password):
} }
def email_exists(email): async def email_exists(email):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
count(user_id) count(user_id)
@ -482,13 +482,13 @@ def email_exists(email):
LIMIT 1;""", LIMIT 1;""",
{"email": email}) {"email": email})
) )
r = cur.fetchone() r = await cur.fetchone()
return r["count"] > 0 return r["count"] > 0
def get_deleted_user_by_email(email): async def get_deleted_user_by_email(email):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
* *
@ -499,13 +499,13 @@ def get_deleted_user_by_email(email):
LIMIT 1;""", LIMIT 1;""",
{"email": email}) {"email": email})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def get_by_invitation_token(token, pass_token=None): async def get_by_invitation_token(token, pass_token=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
*, *,
@ -518,13 +518,13 @@ def get_by_invitation_token(token, pass_token=None):
LIMIT 1;""", LIMIT 1;""",
{"token": token, "pass_token": pass_token}) {"token": token, "pass_token": pass_token})
) )
r = cur.fetchone() r = await cur.fetchone()
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def auth_exists(user_id, jwt_iat): async def auth_exists(user_id, jwt_iat):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""SELECT user_id, EXTRACT(epoch FROM jwt_iat)::BIGINT AS jwt_iat cur.mogrify(f"""SELECT user_id, EXTRACT(epoch FROM jwt_iat)::BIGINT AS jwt_iat
FROM public.users FROM public.users
WHERE user_id = %(userId)s WHERE user_id = %(userId)s
@ -532,15 +532,15 @@ def auth_exists(user_id, jwt_iat):
LIMIT 1;""", LIMIT 1;""",
{"userId": user_id}) {"userId": user_id})
) )
r = cur.fetchone() r = await cur.fetchone()
return r is not None \ return r is not None \
and r.get("jwt_iat") is not None \ and r.get("jwt_iat") is not None \
and abs(jwt_iat - r["jwt_iat"]) <= 1 and abs(jwt_iat - r["jwt_iat"]) <= 1
def refresh_auth_exists(user_id, jwt_jti=None): async def refresh_auth_exists(user_id, jwt_jti=None):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify(f"""SELECT user_id cur.mogrify(f"""SELECT user_id
FROM public.users FROM public.users
WHERE user_id = %(userId)s WHERE user_id = %(userId)s
@ -549,12 +549,12 @@ def refresh_auth_exists(user_id, jwt_jti=None):
LIMIT 1;""", LIMIT 1;""",
{"userId": user_id, "jwt_jti": jwt_jti}) {"userId": user_id, "jwt_jti": jwt_jti})
) )
r = cur.fetchone() r = await cur.fetchone()
return r is not None return r is not None
def change_jwt_iat_jti(user_id): async def change_jwt_iat_jti(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.users query = cur.mogrify(f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'), SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'),
jwt_refresh_jti = 0, jwt_refresh_jti = 0,
@ -564,13 +564,13 @@ def change_jwt_iat_jti(user_id):
jwt_refresh_jti, jwt_refresh_jti,
EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""", EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
{"user_id": user_id}) {"user_id": user_id})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat") return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
def refresh_jwt_iat_jti(user_id): async def refresh_jwt_iat_jti(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.users query = cur.mogrify(f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'), SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'),
jwt_refresh_jti = jwt_refresh_jti + 1 jwt_refresh_jti = jwt_refresh_jti + 1
@ -579,13 +579,13 @@ def refresh_jwt_iat_jti(user_id):
jwt_refresh_jti, jwt_refresh_jti,
EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""", EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
{"user_id": user_id}) {"user_id": user_id})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat") return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
def authenticate(email, password, for_change_password=False) -> dict | bool | None: async def authenticate(email, password, for_change_password=False) -> dict | bool | None:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
@ -602,14 +602,14 @@ def authenticate(email, password, for_change_password=False) -> dict | bool | No
LIMIT 1;""", LIMIT 1;""",
{"email": email, "password": password}) {"email": email, "password": password})
cur.execute(query) await cur.execute(query)
r = cur.fetchone() r = await cur.fetchone()
if r is not None: if r is not None:
if for_change_password: if for_change_password:
return True return True
r = helper.dict_to_camel_case(r) r = helper.dict_to_camel_case(r)
jwt_iat, jwt_r_jti, jwt_r_iat = change_jwt_iat_jti(user_id=r['userId']) jwt_iat, jwt_r_jti, jwt_r_iat = await change_jwt_iat_jti(user_id=r['userId'])
return { return {
"jwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], iat=jwt_iat, "jwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], iat=jwt_iat,
aud=f"front:{helper.get_stage_name()}"), aud=f"front:{helper.get_stage_name()}"),
@ -623,18 +623,18 @@ def authenticate(email, password, for_change_password=False) -> dict | bool | No
return None return None
def logout(user_id: int): async def logout(user_id: int):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify( query = cur.mogrify(
"""UPDATE public.users """UPDATE public.users
SET jwt_iat = NULL, jwt_refresh_jti = NULL, jwt_refresh_iat = NULL SET jwt_iat = NULL, jwt_refresh_jti = NULL, jwt_refresh_iat = NULL
WHERE user_id = %(user_id)s;""", WHERE user_id = %(user_id)s;""",
{"user_id": user_id}) {"user_id": user_id})
cur.execute(query) await cur.execute(query)
def refresh(user_id: int, tenant_id: int = -1) -> dict: async def refresh(user_id: int, tenant_id: int = -1) -> dict:
jwt_iat, jwt_r_jti, jwt_r_iat = refresh_jwt_iat_jti(user_id=user_id) jwt_iat, jwt_r_jti, jwt_r_iat = await refresh_jwt_iat_jti(user_id=user_id)
return { return {
"jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat, "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
aud=f"front:{helper.get_stage_name()}"), aud=f"front:{helper.get_stage_name()}"),
@ -645,9 +645,9 @@ def refresh(user_id: int, tenant_id: int = -1) -> dict:
} }
def get_user_role(tenant_id, user_id): async def get_user_role(tenant_id, user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
users.user_id, users.user_id,
@ -664,13 +664,13 @@ def get_user_role(tenant_id, user_id):
LIMIT 1""", LIMIT 1""",
{"user_id": user_id}) {"user_id": user_id})
) )
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def get_user_settings(user_id): async def get_user_settings(user_id):
# read user settings from users.settings:jsonb column # read user settings from users.settings:jsonb column
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
settings settings
@ -680,16 +680,16 @@ def get_user_settings(user_id):
LIMIT 1""", LIMIT 1""",
{"user_id": user_id}) {"user_id": user_id})
) )
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())
def update_user_module(user_id, data: schemas.ModuleStatus): async def update_user_module(user_id, data: schemas.ModuleStatus):
# example data = {"settings": {"modules": ['ASSIST', 'METADATA']} # example data = {"settings": {"modules": ['ASSIST', 'METADATA']}
# update user settings from users.settings:jsonb column only update settings.modules # update user settings from users.settings:jsonb column only update settings.modules
# if module property is not exists, it will be created # if module property is not exists, it will be created
# if module property exists, it will be updated, modify here and call update_user_settings # if module property exists, it will be updated, modify here and call update_user_settings
# module is a single element to be added or removed # module is a single element to be added or removed
settings = get_user_settings(user_id)["settings"] settings = (await get_user_settings(user_id))["settings"]
if settings is None: if settings is None:
settings = {} settings = {}
@ -702,13 +702,13 @@ def update_user_module(user_id, data: schemas.ModuleStatus):
elif not data.status and data.module in settings["modules"]: elif not data.status and data.module in settings["modules"]:
settings["modules"].remove(data.module) settings["modules"].remove(data.module)
return update_user_settings(user_id, settings) return await update_user_settings(user_id, settings)
def update_user_settings(user_id, settings): async def update_user_settings(user_id, settings):
# update user settings from users.settings:jsonb column # update user settings from users.settings:jsonb column
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify( cur.mogrify(
f"""UPDATE public.users f"""UPDATE public.users
SET settings = %(settings)s SET settings = %(settings)s
@ -717,4 +717,4 @@ def update_user_settings(user_id, settings):
RETURNING settings;""", RETURNING settings;""",
{"user_id": user_id, "settings": json.dumps(settings)}) {"user_id": user_id, "settings": json.dumps(settings)})
) )
return helper.dict_to_camel_case(cur.fetchone()) return helper.dict_to_camel_case(await cur.fetchone())

View file

@ -1,7 +1,7 @@
import logging import logging
from typing import Optional from typing import Optional
import requests import httpx
from fastapi import HTTPException, status from fastapi import HTTPException, status
import schemas import schemas
@ -9,66 +9,66 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
def get_by_id(webhook_id): async def get_by_id(webhook_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
SELECT w.* SELECT w.*
FROM public.webhooks AS w FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""", WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id}) {"webhook_id": webhook_id})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if w: if w:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return w return w
def get_webhook(tenant_id, webhook_id, webhook_type='webhook'): async def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""SELECT w.* cur.mogrify("""SELECT w.*
FROM public.webhooks AS w FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s WHERE w.webhook_id =%(webhook_id)s
AND deleted_at ISNULL AND type=%(webhook_type)s;""", AND deleted_at ISNULL AND type=%(webhook_type)s;""",
{"webhook_id": webhook_id, "webhook_type": webhook_type}) {"webhook_id": webhook_id, "webhook_type": webhook_type})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if w: if w:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return w return w
def get_by_type(tenant_id, webhook_type): async def get_by_type(tenant_id, webhook_type):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""", WHERE w.type =%(type)s AND deleted_at ISNULL;""",
{"type": webhook_type}) {"type": webhook_type})
) )
webhooks = helper.list_to_camel_case(cur.fetchall()) webhooks = helper.list_to_camel_case(await cur.fetchall())
for w in webhooks: for w in webhooks:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return webhooks return webhooks
def get_by_tenant(tenant_id, replace_none=False): async def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute("""SELECT w.* await cur.execute("""SELECT w.*
FROM public.webhooks AS w FROM public.webhooks AS w
WHERE deleted_at ISNULL;""") WHERE deleted_at ISNULL;""")
all = helper.list_to_camel_case(cur.fetchall()) all = helper.list_to_camel_case(await cur.fetchall())
for w in all: for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return all return all
def update(tenant_id, webhook_id, changes, replace_none=False): async def update(tenant_id, webhook_id, changes, replace_none=False):
allow_update = ["name", "index", "authHeader", "endpoint"] allow_update = ["name", "index", "authHeader", "endpoint"]
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update] sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update]
cur.execute( await cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
UPDATE public.webhooks UPDATE public.webhooks
SET {','.join(sub_query)} SET {','.join(sub_query)}
@ -76,7 +76,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
RETURNING *;""", RETURNING *;""",
{"id": webhook_id, **changes}) {"id": webhook_id, **changes})
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
if w is None: if w is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.") raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.")
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
@ -87,18 +87,18 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
return w return w
def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", replace_none=False): async def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", replace_none=False):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify("""\ query = cur.mogrify("""\
INSERT INTO public.webhooks(endpoint,auth_header,type,name) INSERT INTO public.webhooks(endpoint,auth_header,type,name)
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s) VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
RETURNING *;""", RETURNING *;""",
{"endpoint": endpoint, "auth_header": auth_header, {"endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name}) "type": webhook_type, "name": name})
cur.execute( await cur.execute(
query query
) )
w = helper.dict_to_camel_case(cur.fetchone()) w = helper.dict_to_camel_case(await cur.fetchone())
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
if replace_none: if replace_none:
for k in w.keys(): for k in w.keys():
@ -107,9 +107,9 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w return w
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook, async def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
tenant_id: Optional[int] = None) -> bool: tenant_id: Optional[int] = None) -> bool:
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1 query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.webhooks FROM public.webhooks
WHERE name ILIKE %(name)s WHERE name ILIKE %(name)s
@ -117,12 +117,12 @@ def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = sch
AND type=%(webhook_type)s AND type=%(webhook_type)s
{"AND webhook_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""", {"AND webhook_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id, "webhook_type": webhook_type}) {"name": name, "exclude_id": exclude_id, "webhook_type": webhook_type})
cur.execute(query) await cur.execute(query)
row = cur.fetchone() row = await cur.fetchone()
return row["exists"] return row["exists"]
def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None): async def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
if len(data.name) > 0 \ if len(data.name) > 0 \
and exists_by_name(name=data.name, exclude_id=data.webhook_id): and exists_by_name(name=data.name, exclude_id=data.webhook_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
@ -140,9 +140,9 @@ def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
replace_none=replace_none) replace_none=replace_none)
def delete(tenant_id, webhook_id): async def delete(tenant_id, webhook_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute( await cur.execute(
cur.mogrify("""\ cur.mogrify("""\
UPDATE public.webhooks UPDATE public.webhooks
SET deleted_at = (now() at time zone 'utc') SET deleted_at = (now() at time zone 'utc')
@ -153,7 +153,7 @@ def delete(tenant_id, webhook_id):
return {"data": {"state": "success"}} return {"data": {"state": "success"}}
def trigger_batch(data_list): async def trigger_batch(data_list):
webhooks_map = {} webhooks_map = {}
for w in data_list: for w in data_list:
if w["destination"] not in webhooks_map: if w["destination"] not in webhooks_map:
@ -164,13 +164,14 @@ def trigger_batch(data_list):
__trigger(hook=webhooks_map[w["destination"]], data=w["data"]) __trigger(hook=webhooks_map[w["destination"]], data=w["data"])
def __trigger(hook, data): async def __trigger(hook, data):
if hook is not None and hook["type"] == 'webhook': if hook is not None and hook["type"] == 'webhook':
headers = {} headers = {}
if hook["authHeader"] is not None and len(hook["authHeader"]) > 0: if hook["authHeader"] is not None and len(hook["authHeader"]) > 0:
headers = {"Authorization": hook["authHeader"]} headers = {"Authorization": hook["authHeader"]}
r = requests.post(url=hook["endpoint"], json=data, headers=headers) async with httpx.AsyncClient() as client:
r = await client.post(url=hook["endpoint"], json=data, headers=headers)
if r.status_code != 200: if r.status_code != 200:
logging.error("=======> webhook: something went wrong for:") logging.error("=======> webhook: something went wrong for:")
logging.error(hook) logging.error(hook)

View file

@ -5,41 +5,41 @@ from chalicelib.utils.helper import get_issue_title
LOWEST_BAR_VALUE = 3 LOWEST_BAR_VALUE = 3
def get_config(user_id): async def get_config(user_id):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT users.weekly_report SELECT users.weekly_report
FROM public.users FROM public.users
WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s
LIMIT 1;""", {"user_id": user_id})) LIMIT 1;""", {"user_id": user_id}))
result = cur.fetchone() result = await cur.fetchone()
return helper.dict_to_camel_case(result) return helper.dict_to_camel_case(result)
def edit_config(user_id, weekly_report): async def edit_config(user_id, weekly_report):
with pg_client.PostgresClient() as cur: async with pg_client.cursor() as cur:
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
UPDATE public.users UPDATE public.users
SET weekly_report= %(weekly_report)s SET weekly_report= %(weekly_report)s
WHERE users.deleted_at ISNULL WHERE users.deleted_at ISNULL
AND users.user_id=%(user_id)s AND users.user_id=%(user_id)s
RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report})) RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report}))
result = cur.fetchone() result = await cur.fetchone()
return helper.dict_to_camel_case(result) return helper.dict_to_camel_case(result)
def cron(): async def cron():
if not smtp.has_smtp(): if not smtp.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report") print("!!! No SMTP configuration found, ignoring weekly report")
return return
_now = TimeUTC.now() _now = TimeUTC.now()
with pg_client.PostgresClient(unlimited_query=True) as cur: async with pg_client.cursor(unlimited_query=True) as cur:
params = {"tomorrow": TimeUTC.midnight(delta_days=1), params = {"tomorrow": TimeUTC.midnight(delta_days=1),
"3_days_ago": TimeUTC.midnight(delta_days=-3), "3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7), "1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14), "2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)} "5_week_ago": TimeUTC.midnight(delta_days=-35)}
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT project_id, SELECT project_id,
name AS project_name, name AS project_name,
users.emails AS emails, users.emails AS emails,
@ -86,7 +86,7 @@ def cron():
AND issues.timestamp <= %(1_week_ago)s AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(5_week_ago)s AND issues.timestamp >= %(5_week_ago)s
) AS month_1_issues ON (TRUE);"""), params) ) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall() projects_data = await cur.fetchall()
_now2 = TimeUTC.now() _now2 = TimeUTC.now()
print(f">> Weekly report query: {_now2 - _now} ms") print(f">> Weekly report query: {_now2 - _now} ms")
_now = _now2 _now = _now2
@ -103,7 +103,7 @@ def cron():
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1) helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
p["past_month_issues_evolution"] = helper.__decimal_limit( p["past_month_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1) helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1)
cur.execute(cur.mogrify(""" await cur.execute(cur.mogrify("""
SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short, SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long, TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
( (
@ -119,7 +119,7 @@ def cron():
'1 day'::INTERVAL '1 day'::INTERVAL
) AS timestamp_i ) AS timestamp_i
ORDER BY timestamp_i;""", params)) ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall() days_partition = await cur.fetchall()
_now2 = TimeUTC.now() _now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}") print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2 _now = _now2
@ -130,7 +130,7 @@ def cron():
else: else:
d["value"] = d["issues_count"] * 100 / max_days_partition d["value"] = d["issues_count"] * 100 / max_days_partition
d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT type, COUNT(*) AS count SELECT type, COUNT(*) AS count
FROM events_common.issues INNER JOIN public.issues USING (issue_id) FROM events_common.issues INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
@ -138,7 +138,7 @@ def cron():
GROUP BY type GROUP BY type
ORDER BY count DESC, type ORDER BY count DESC, type
LIMIT 4;""", params)) LIMIT 4;""", params))
issues_by_type = cur.fetchall() issues_by_type = await cur.fetchall()
_now2 = TimeUTC.now() _now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}") print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2 _now = _now2
@ -149,7 +149,7 @@ def cron():
i["value"] = LOWEST_BAR_VALUE i["value"] = LOWEST_BAR_VALUE
else: else:
i["value"] = i["count"] * 100 / max_issues_by_type i["value"] = i["count"] * 100 / max_issues_by_type
cur.execute(cur.mogrify("""\ await cur.execute(cur.mogrify("""\
SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short, SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long, TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
COALESCE((SELECT JSONB_AGG(sub) COALESCE((SELECT JSONB_AGG(sub)
@ -170,7 +170,7 @@ def cron():
) AS timestamp_i ) AS timestamp_i
GROUP BY timestamp_i GROUP BY timestamp_i
ORDER BY timestamp_i;""", params)) ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall() issues_breakdown_by_day = await cur.fetchall()
_now2 = TimeUTC.now() _now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}") print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2 _now = _now2
@ -186,7 +186,7 @@ def cron():
else: else:
j["value"] = j["count"] * 100 / max_days_partition j["value"] = j["count"] * 100 / max_days_partition
j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify(""" await cur.execute(cur.mogrify("""
SELECT type, SELECT type,
COUNT(*) AS issue_count, COUNT(*) AS issue_count,
COUNT(DISTINCT session_id) AS sessions_count, COUNT(DISTINCT session_id) AS sessions_count,
@ -219,7 +219,7 @@ def cron():
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
GROUP BY type GROUP BY type
ORDER BY issue_count DESC;""", params)) ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall() issues_breakdown_list = await cur.fetchall()
_now2 = TimeUTC.now() _now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}") print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2 _now = _now2

View file

@ -1,8 +1,7 @@
import logging import logging
import requests import httpx
from decouple import config from decouple import config
from chalicelib.utils import helper from chalicelib.utils import helper
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -12,12 +11,13 @@ def __get_captcha_config():
return config("captcha_server"), config("captcha_key") return config("captcha_server"), config("captcha_key")
def is_valid(response): async def is_valid(response):
if not helper.allow_captcha(): if not helper.allow_captcha():
logger.info("!! Captcha is disabled") logger.info("!! Captcha is disabled")
return True return True
url, secret = __get_captcha_config() url, secret = __get_captcha_config()
r = requests.post(url=url, data={"secret": secret, "response": response}) async with httpx.AsyncClient() as client:
r = await client.post(url=url, data={"secret": secret, "response": response})
if r.status_code != 200: if r.status_code != 200:
logger.warning("something went wrong") logger.warning("something went wrong")
logger.error(r) logger.error(r)

View file

@ -48,7 +48,7 @@ def __replace_images(HTML):
return HTML, mime_img return HTML, mime_img
def send_html(BODY_HTML, SUBJECT, recipient, bcc=None): async def send_html(BODY_HTML, SUBJECT, recipient, bcc=None):
BODY_HTML, mime_img = __replace_images(BODY_HTML) BODY_HTML, mime_img = __replace_images(BODY_HTML)
if not isinstance(recipient, list): if not isinstance(recipient, list):
recipient = [recipient] recipient = [recipient]

View file

@ -27,9 +27,9 @@ def send_assign_session(recipient, message, link):
send_html(BODY_HTML, SUBJECT, recipient) send_html(BODY_HTML, SUBJECT, recipient)
def alert_email(recipients, subject, data): async def alert_email(recipients, subject, data):
BODY_HTML = __get_html_from_file("chalicelib/utils/html/alert_notification.html", formatting_variables=data) BODY_HTML = __get_html_from_file("chalicelib/utils/html/alert_notification.html", formatting_variables=data)
send_html(BODY_HTML=BODY_HTML, SUBJECT=subject, recipient=recipients) await send_html(BODY_HTML=BODY_HTML, SUBJECT=subject, recipient=recipients)
def __get_color(idx): def __get_color(idx):

View file

@ -1,7 +1,7 @@
import logging import logging
from datetime import datetime from datetime import datetime
import requests import httpx
from fastapi import HTTPException, status from fastapi import HTTPException, status
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -116,12 +116,13 @@ class githubV3Request:
def __get_request_header(self): def __get_request_header(self):
return {"Accept": "application/vnd.github.v3+json", 'Authorization': f'token {self.__token}'} return {"Accept": "application/vnd.github.v3+json", 'Authorization': f'token {self.__token}'}
def get(self, url, params={}): async def get(self, url, params={}):
results = [] results = []
params = {"per_page": 100, **params} params = {"per_page": 100, **params}
pages = {"next": f"{self.__base}{url}", "last": ""} pages = {"next": f"{self.__base}{url}", "last": ""}
while len(pages.keys()) > 0 and pages["next"] != pages["last"]: while len(pages.keys()) > 0 and pages["next"] != pages["last"]:
response = requests.get(pages["next"], headers=self.__get_request_header(), params=params) async with httpx.AsyncClient() as client:
response = await client.get(pages["next"], headers=self.__get_request_header(), params=params)
pages = get_response_links(response) pages = get_response_links(response)
result = response.json() result = response.json()
if response.status_code != 200: if response.status_code != 200:
@ -133,6 +134,7 @@ class githubV3Request:
results += result results += result
return results return results
def post(self, url, body): async def post(self, url, body):
response = requests.post(f"{self.__base}{url}", headers=self.__get_request_header(), json=body) async with httpx.AsyncClient() as client:
response = await client.post(f"{self.__base}{url}", headers=self.__get_request_header(), json=body)
return response.json() return response.json()

View file

@ -1,12 +1,11 @@
import logging import logging
import time import time
import httpx
from datetime import datetime from datetime import datetime
import requests
from fastapi import HTTPException, status from fastapi import HTTPException, status
from jira import JIRA from jira import JIRA
from jira.exceptions import JIRAError from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels" fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
@ -91,11 +90,12 @@ class JiraManager:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_issue_info(issue) return self.__parser_issue_info(issue)
def get_issue_v3(self, issue_id: str): async def get_issue_v3(self, issue_id: str):
try: try:
url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}?fields={fields}" url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}?fields={fields}"
auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD']) auth = (self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
issue = requests.get( async with httpx.AsyncClient() as client:
issue = await client.get(
url, url,
headers={ headers={
"Accept": "application/json" "Accept": "application/json"
@ -159,11 +159,12 @@ class JiraManager:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_comment_info(comment) return self.__parser_comment_info(comment)
def add_comment_v3(self, issue_id: str, comment: str): async def add_comment_v3(self, issue_id: str, comment: str):
try: try:
url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}/comment" url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}/comment"
auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD']) auth = (self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
comment_response = requests.post( async with httpx.AsyncClient() as client:
comment_response = await client.post(
url, url,
headers={ headers={
"Accept": "application/json" "Accept": "application/json"

View file

@ -6,6 +6,8 @@ import psycopg2
import psycopg2.extras import psycopg2.extras
from decouple import config from decouple import config
from psycopg2 import pool from psycopg2 import pool
import contextlib
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -165,13 +167,13 @@ class PostgresClient:
return self.__enter__() return self.__enter__()
async def init(): def init():
logging.info(f">PG_POOL:{config('PG_POOL', default=None)}") logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
if config('PG_POOL', cast=bool, default=True): if config('PG_POOL', cast=bool, default=True):
make_pool() make_pool()
async def terminate(): def terminate():
global postgreSQL_pool global postgreSQL_pool
if postgreSQL_pool is not None: if postgreSQL_pool is not None:
try: try:
@ -179,3 +181,12 @@ async def terminate():
logging.info("Closed all connexions to PostgreSQL") logging.info("Closed all connexions to PostgreSQL")
except (Exception, psycopg2.DatabaseError) as error: except (Exception, psycopg2.DatabaseError) as error:
logging.error("Error while closing all connexions to PostgreSQL", error) logging.error("Error while closing all connexions to PostgreSQL", error)
@contextlib.asynccontextmanager
async def cursor():
from app import app
import psycopg
async with app.state.postgresql.connection() as cnx:
async with psycopg.AsyncClientCursor(cnx) as cur:
yield cur

View file

@ -25,7 +25,7 @@ class AmazonS3Storage(ObjectStorage):
region_name=config("sessions_region"), region_name=config("sessions_region"),
verify=not config("S3_DISABLE_SSL_VERIFY", default=False, cast=bool)) verify=not config("S3_DISABLE_SSL_VERIFY", default=False, cast=bool))
def exists(self, bucket, key): async def exists(self, bucket, key):
try: try:
self.resource.Object(bucket, key).load() self.resource.Object(bucket, key).load()
except botocore.exceptions.ClientError as e: except botocore.exceptions.ClientError as e:
@ -36,8 +36,8 @@ class AmazonS3Storage(ObjectStorage):
raise raise
return True return True
def get_presigned_url_for_sharing(self, bucket, expires_in, key, check_exists=False): async def get_presigned_url_for_sharing(self, bucket, expires_in, key, check_exists=False):
if check_exists and not self.exists(bucket, key): if check_exists and not await self.exists(bucket, key):
return None return None
return self.client.generate_presigned_url( return self.client.generate_presigned_url(
@ -79,7 +79,7 @@ class AmazonS3Storage(ObjectStorage):
f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields']) f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields'])
return req.url return req.url
def get_file(self, source_bucket, source_key): async def get_file(self, source_bucket, source_key):
try: try:
result = self.client.get_object( result = self.client.get_object(
Bucket=source_bucket, Bucket=source_bucket,
@ -92,7 +92,7 @@ class AmazonS3Storage(ObjectStorage):
raise ex raise ex
return result["Body"].read().decode() return result["Body"].read().decode()
def tag_for_deletion(self, bucket, key): async def tag_for_deletion(self, bucket, key):
if not self.exists(bucket, key): if not self.exists(bucket, key):
return False return False
# Copy the file to change the creation date, so it can be deleted X days after the tag's creation # Copy the file to change the creation date, so it can be deleted X days after the tag's creation
@ -103,10 +103,10 @@ class AmazonS3Storage(ObjectStorage):
TaggingDirective='COPY' TaggingDirective='COPY'
) )
self.tag_file(bucket=bucket, file_key=key, tag_key='to_delete_in_days', await self.tag_file(bucket=bucket, file_key=key, tag_key='to_delete_in_days',
tag_value=config("SCH_DELETE_DAYS", default='7')) tag_value=config("SCH_DELETE_DAYS", default='7'))
def tag_file(self, file_key, bucket, tag_key, tag_value): async def tag_file(self, file_key, bucket, tag_key, tag_value):
return self.client.put_object_tagging( return self.client.put_object_tagging(
Bucket=bucket, Bucket=bucket,
Key=file_key, Key=file_key,

View file

@ -1,3 +1,4 @@
import asyncio
from apscheduler.triggers.cron import CronTrigger from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger from apscheduler.triggers.interval import IntervalTrigger
@ -5,24 +6,24 @@ from chalicelib.core import telemetry
from chalicelib.core import weekly_report, jobs, health from chalicelib.core import weekly_report, jobs, health
async def run_scheduled_jobs() -> None: def run_scheduled_jobs() -> None:
jobs.execute_jobs() asyncio.run(jobs.execute_jobs())
async def weekly_report_cron() -> None: def weekly_report_cron() -> None:
weekly_report.cron() asyncio.run(weekly_report.cron())
async def telemetry_cron() -> None: def telemetry_cron() -> None:
telemetry.compute() asyncio.run(telemetry.compute())
async def health_cron() -> None: def health_cron() -> None:
health.cron() asyncio.run(health.cron())
async def weekly_health_cron() -> None: def weekly_health_cron() -> None:
health.weekly_cron() asyncio.run(health.weekly_cron())
cron_jobs = [ cron_jobs = [

View file

@ -28,3 +28,47 @@ cd openreplay-dev/openreplay/scripts/helmcharts
# bash local_deploy.sh help # bash local_deploy.sh help
bash local_deploy.sh api bash local_deploy.sh api
``` ```
### autogenerated api frontend
The API can autogenerate a frontend that documents its interface and
allows you to experiment with it, in a limited way. Make sure you have
the following variables inside the current `.env`:
```
docs_url=/docs
root_path=''
```
If the `.env` that is in-use is based on `env.default` then it is
already the case. Start, or restart the http server, then go to
`https://127.0.0.1:8000/docs`. That is autogenerated documentation
based on pydantic schema, fastapi routes, and docstrings :wink:.
Happy experimenting — and then happy documenting!
### psycopg3 API
I often misremember the differences between the psycopg v2 and v3 APIs.
For the record, psycopg3's expected async API looks like the
following pseudo-code:
```python
async with app.state.postgresql.connection() as cnx:
async with cnx.transaction():
row = await cnx.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
row = await row.fetchone()
return row["exists"]
```
Mind the following:
- Where `app.state.postgresql` is the postgresql connection pooler.
- Wrap explicit transactions with `async with cnx.transaction():`
  around the statements that must run atomically;
- Most of the time the transaction object is not used;
- Execute `await` operations against `cnx`;
- `await cnx.execute` returns a cursor object;
- Make the `await cursor.fetchone()`/`fetchall()`-style calls against the
  object returned by a call to `execute`.

View file

@ -7,7 +7,7 @@ psycopg2-binary==2.9.9
psycopg[pool,binary]==3.1.15 psycopg[pool,binary]==3.1.15
elasticsearch==8.11.1 elasticsearch==8.11.1
jira==3.5.2 jira==3.5.2
httpx==0.26.0
fastapi==0.105.0 fastapi==0.105.0

File diff suppressed because it is too large Load diff

View file

@ -47,14 +47,14 @@ if not tenants.tenants_exists_sync(use_pool=False):
@public_app.post('/login', tags=["authentication"]) @public_app.post('/login', tags=["authentication"])
def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)): async def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): if helper.allow_captcha() and not await captcha.is_valid(data.g_recaptcha_response):
raise HTTPException( raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha." detail="Invalid captcha."
) )
r = users.authenticate(data.email, data.password.get_secret_value()) r = await users.authenticate(data.email, data.password.get_secret_value())
if r is None: if r is None:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, status_code=status.HTTP_401_UNAUTHORIZED,
@ -82,15 +82,15 @@ def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)
@app.get('/logout', tags=["login"]) @app.get('/logout', tags=["login"])
def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)): async def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
users.logout(user_id=context.user_id) await users.logout(user_id=context.user_id)
response.delete_cookie(key="refreshToken", path="/api/refresh") response.delete_cookie(key="refreshToken", path="/api/refresh")
return {"data": "success"} return {"data": "success"}
@app.get('/refresh', tags=["login"]) @app.get('/refresh', tags=["login"])
def refresh_login(context: schemas.CurrentContext = Depends(OR_context)): async def refresh_login(context: schemas.CurrentContext = Depends(OR_context)):
r = users.refresh(user_id=context.user_id) r = await users.refresh(user_id=context.user_id)
content = {"jwt": r.get("jwt")} content = {"jwt": r.get("jwt")}
response = JSONResponse(content=content) response = JSONResponse(content=content)
response.set_cookie(key="refreshToken", value=r.get("refreshToken"), path="/api/refresh", response.set_cookie(key="refreshToken", value=r.get("refreshToken"), path="/api/refresh",
@ -99,9 +99,9 @@ def refresh_login(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/account', tags=['accounts']) @app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)): async def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) r = await users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id) t = await tenants.get_by_tenant_id(context.tenant_id)
if t is not None: if t is not None:
t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"]) t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"])
t["tenantName"] = t.pop("name") t["tenantName"] = t.pop("name")
@ -116,16 +116,16 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/account', tags=["account"]) @app.post('/account', tags=["account"])
def edit_account(data: schemas.EditAccountSchema = Body(...), async def edit_account(data: schemas.EditAccountSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_account(tenant_id=context.tenant_id, user_id=context.user_id, changes=data) return await users.edit_account(tenant_id=context.tenant_id, user_id=context.user_id, changes=data)
@app.post('/integrations/slack', tags=['integrations']) @app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations'])
def add_slack_integration(data: schemas.AddCollaborationSchema, async def add_slack_integration(data: schemas.AddCollaborationSchema,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data) n = await Slack.add(tenant_id=context.tenant_id, data=data)
if n is None: if n is None:
return { return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
@ -134,14 +134,14 @@ def add_slack_integration(data: schemas.AddCollaborationSchema,
@app.post('/integrations/slack/{integrationId}', tags=['integrations']) @app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0: if len(data.url) > 0:
old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) old = await Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId)
if not old: if not old:
return {"errors": ["Slack integration not found."]} return {"errors": ["Slack integration not found."]}
if old["endpoint"] != data.url: if old["endpoint"] != data.url:
if not Slack.say_hello(data.url): if not await Slack.say_hello(data.url):
return { return {
"errors": [ "errors": [
"We couldn't send you a test message on your Slack channel. Please verify your webhook url."] "We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
@ -151,17 +151,17 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
@app.post('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")]) @app.post('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data, return await users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data,
background_tasks=background_tasks) background_tasks=background_tasks)
@public_app.get('/users/invitation', tags=['users']) @public_app.get('/users/invitation', tags=['users'])
def process_invitation_link(token: str): async def process_invitation_link(token: str):
if token is None or len(token) < 64: if token is None or len(token) < 64:
return {"errors": ["please provide a valid invitation"]} return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(token) user = await users.get_by_invitation_token(token)
if user is None: if user is None:
return {"errors": ["invitation not found"]} return {"errors": ["invitation not found"]}
if user["expiredInvitation"]: if user["expiredInvitation"]:
@ -170,32 +170,32 @@ def process_invitation_link(token: str):
and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60:
pass_token = user["changePwdToken"] pass_token = user["changePwdToken"]
else: else:
pass_token = users.allow_password_change(user_id=user["userId"]) pass_token = await users.allow_password_change(user_id=user["userId"])
return RedirectResponse(url=config("SITE_URL") + config("change_password_link") % (token, pass_token)) return RedirectResponse(url=config("SITE_URL") + config("change_password_link") % (token, pass_token))
@public_app.post('/password/reset', tags=["users"]) @public_app.post('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
return {"errors": ["please provide a valid invitation & pass"]} return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase) user = await users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase)
if user is None: if user is None:
return {"errors": ["invitation not found"]} return {"errors": ["invitation not found"]}
if user["expiredChange"]: if user["expiredChange"]:
return {"errors": ["expired change, please re-use the invitation link"]} return {"errors": ["expired change, please re-use the invitation link"]}
return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"]) return await users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"])
@app.put('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")]) @app.put('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")])
def edit_member(memberId: int, data: schemas.EditMemberSchema, async def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, return await users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId) user_id_to_update=memberId)
@app.get('/metadata/session_search', tags=["metadata"]) @app.get('/metadata/session_search', tags=["metadata"])
def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if key is None or value is None or len(value) == 0 and len(key) == 0: if key is None or value is None or len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]} return {"errors": ["please provide a key&value for search"]}
@ -204,24 +204,24 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
if len(key) == 0: if len(key) == 0:
return {"errors": ["please provide a key for search"]} return {"errors": ["please provide a key for search"]}
return { return {
"data": sessions.search_by_metadata(tenant_id=context.tenant_id, user_id=context.user_id, m_value=value, "data": await sessions.search_by_metadata(tenant_id=context.tenant_id, user_id=context.user_id, m_value=value,
m_key=key, project_id=projectId)} m_key=key, project_id=projectId)}
@app.get('/projects', tags=['projects']) @app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)): async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, gdpr=True, recorded=True)} return {"data": await projects.get_projects(tenant_id=context.tenant_id, gdpr=True, recorded=True)}
# for backward compatibility # for backward compatibility
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"]) @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric(): if not sessionId.isnumeric():
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
else: else:
sessionId = int(sessionId) sessionId = int(sessionId)
data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, data = await sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
include_fav_viewed=True, group_metadata=True, context=context) include_fav_viewed=True, group_metadata=True, context=context)
if data is None: if data is None:
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
@ -234,29 +234,29 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
@app.post('/{projectId}/sessions/search', tags=["sessions"]) @app.post('/{projectId}/sessions/search', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), async def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, data = await sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
platform=context.project.platform) platform=context.project.platform)
return {'data': data} return {'data': data}
@app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) @app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), async def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True, data = await sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True,
platform=context.project.platform) platform=context.project.platform)
return {'data': data} return {'data': data}
@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"]) @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"])
def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, async def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric(): if not sessionId.isnumeric():
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
else: else:
sessionId = int(sessionId) sessionId = int(sessionId)
data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, data = await sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True,
include_fav_viewed=True, group_metadata=True, context=context) include_fav_viewed=True, group_metadata=True, context=context)
if data is None: if data is None:
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
@ -269,13 +269,13 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta
@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"]) @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"])
def get_session_events(projectId: int, sessionId: Union[int, str], async def get_session_events(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric(): if not sessionId.isnumeric():
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
else: else:
sessionId = int(sessionId) sessionId = int(sessionId)
data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) data = await sessions_replay.get_events(project_id=projectId, session_id=sessionId)
if data is None: if data is None:
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
@ -285,9 +285,9 @@ def get_session_events(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
def get_error_trace(projectId: int, sessionId: int, errorId: str, async def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId) data = await errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data: if "errors" in data:
return data return data
return { return {
@ -296,9 +296,9 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}', tags=['errors']) @app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, data = await errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30}) **{"density24": density24, "density30": density30})
if data.get("data") is not None: if data.get("data") is not None:
background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id, background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id,
@ -307,9 +307,9 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'])
def errors_get_details_sourcemaps(projectId: int, errorId: str, async def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId) data = await errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data: if "errors" in data:
return data return data
return { return {
@ -318,29 +318,29 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite": if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) return await errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
elif action == "sessions": elif action == "sessions":
start_date = startDate start_date = startDate
end_date = endDate end_date = endDate
return { return {
"data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId, "data": await errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId,
start_date=start_date, end_date=end_date)} start_date=start_date, end_date=end_date)}
elif action in list(errors.ACTION_STATE.keys()): elif action in list(errors.ACTION_STATE.keys()):
return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action) return await errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action)
else: else:
return {"errors": ["undefined action"]} return {"errors": ["undefined action"]}
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) data = await assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
if data is None: if data is None:
data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, data = await sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId,
full_data=True, include_fav_viewed=True, group_metadata=True, live=False) full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
if data is None: if data is None:
return {"errors": ["session not found"]} return {"errors": ["session not found"]}
@ -351,20 +351,20 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"]) @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]} not_found = {"errors": ["Replay file not found"]}
if not sessionId.isnumeric(): if not sessionId.isnumeric():
return not_found return not_found
else: else:
sessionId = int(sessionId) sessionId = int(sessionId)
if not sessions.session_exists(project_id=projectId, session_id=sessionId): if not await sessions.session_exists(project_id=projectId, session_id=sessionId):
print(f"{projectId}/{sessionId} not found in DB.") print(f"{projectId}/{sessionId} not found in DB.")
if not assist.session_exists(project_id=projectId, session_id=sessionId): if not await assist.session_exists(project_id=projectId, session_id=sessionId):
print(f"{projectId}/{sessionId} not found in Assist.") print(f"{projectId}/{sessionId} not found in Assist.")
return not_found return not_found
path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId) path = await assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId)
if path is None: if path is None:
return not_found return not_found
@ -372,20 +372,20 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"]) @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]} not_found = {"errors": ["Devtools file not found"]}
if not sessionId.isnumeric(): if not sessionId.isnumeric():
return not_found return not_found
else: else:
sessionId = int(sessionId) sessionId = int(sessionId)
if not sessions.session_exists(project_id=projectId, session_id=sessionId): if not await sessions.session_exists(project_id=projectId, session_id=sessionId):
print(f"{projectId}/{sessionId} not found in DB.") print(f"{projectId}/{sessionId} not found in DB.")
if not assist.session_exists(project_id=projectId, session_id=sessionId): if not await assist.session_exists(project_id=projectId, session_id=sessionId):
print(f"{projectId}/{sessionId} not found in Assist.") print(f"{projectId}/{sessionId} not found in Assist.")
return not_found return not_found
path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId) path = await assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId)
if path is None: if path is None:
return {"errors": ["Devtools file not found"]} return {"errors": ["Devtools file not found"]}
@ -393,20 +393,20 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} return {"data": await heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
def add_remove_favorite_session2(projectId: int, sessionId: int, async def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId) return await sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): async def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, data = await sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context.tenant_id, tenant_id=context.tenant_id,
user_id=context.user_id) user_id=context.user_id)
if "errors" in data: if "errors" in data:
@ -417,9 +417,9 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"])
def assign_session(projectId: int, sessionId: int, issueId: str, async def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, data = await sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
tenant_id=context.tenant_id, user_id=context.user_id) tenant_id=context.tenant_id, user_id=context.user_id)
if "errors" in data: if "errors" in data:
return data return data
@ -429,10 +429,10 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
def comment_assignment(projectId: int, sessionId: int, issueId: str, async def comment_assignment(projectId: int, sessionId: int, issueId: str,
data: schemas.CommentAssignmentSchema = Body(...), data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, data = await sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, assignment_id=issueId, session_id=sessionId, assignment_id=issueId,
user_id=context.user_id, message=data.message) user_id=context.user_id, message=data.message)
if "errors" in data.keys(): if "errors" in data.keys():
@ -443,11 +443,11 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId): if not await sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]} return {"errors": ["Session not found"]}
data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, data = await sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id, data=data) session_id=sessionId, user_id=context.user_id, data=data)
if "errors" in data.keys(): if "errors" in data.keys():
return data return data
@ -457,8 +457,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) @app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, data = await sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id) session_id=sessionId, user_id=context.user_id)
if "errors" in data: if "errors" in data:
return data return data
@ -468,9 +468,9 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data = await sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data) note_id=noteId, data=data)
if "errors" in data.keys(): if "errors" in data.keys():
return data return data
@ -480,30 +480,30 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) @app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): async def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data = await sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId) note_id=noteId)
return data return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int, async def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, return await sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId) note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, return await sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId) note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"]) @app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, data = await sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data) user_id=context.user_id, data=data)
if "errors" in data: if "errors" in data:
return data return data
@ -511,43 +511,43 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
@app.post('/{projectId}/click_maps/search', tags=["click maps"]) @app.post('/{projectId}/click_maps/search', tags=["click maps"])
def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...), async def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} return {"data": await click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
@app.post('/{project_id}/feature-flags/search', tags=["feature flags"]) @app.post('/{project_id}/feature-flags/search', tags=["feature flags"])
def search_feature_flags(project_id: int, async def search_feature_flags(project_id: int,
data: schemas.SearchFlagsSchema = Body(...), data: schemas.SearchFlagsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return feature_flags.search_feature_flags(project_id=project_id, user_id=context.user_id, data=data) return await feature_flags.search_feature_flags(project_id=project_id, user_id=context.user_id, data=data)
@app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) @app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
def get_feature_flag(project_id: int, feature_flag_id: int): async def get_feature_flag(project_id: int, feature_flag_id: int):
return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id) return await feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
@app.post('/{project_id}/feature-flags', tags=["feature flags"]) @app.post('/{project_id}/feature-flags', tags=["feature flags"])
def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...), async def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data) return await feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data)
@app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) @app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...), async def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id, return await feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id,
user_id=context.user_id, feature_flag=data) user_id=context.user_id, feature_flag=data)
@app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)} return {"data": await feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)}
@app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"]) @app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"])
def update_feature_flag_status(project_id: int, feature_flag_id: int, async def update_feature_flag_status(project_id: int, feature_flag_id: int,
data: schemas.FeatureFlagStatus = Body(...)): data: schemas.FeatureFlagStatus = Body(...)):
return {"data": feature_flags.update_feature_flag_status(project_id=project_id, feature_flag_id=feature_flag_id, return {"data": await feature_flags.update_feature_flag_status(project_id=project_id, feature_flag_id=feature_flag_id,
is_active=data.is_active)} is_active=data.is_active)}

View file

@ -8,10 +8,10 @@ public_app, app, app_apikey = get_routers()
@app.get('/healthz', tags=["health-check"]) @app.get('/healthz', tags=["health-check"])
def get_global_health_status(): async def get_global_health_status():
if config("LOCAL_DEV", cast=bool, default=False): if config("LOCAL_DEV", cast=bool, default=False):
return {"data": ""} return {"data": ""}
return {"data": health.get_health()} return {"data": await health.get_health()}
if not tenants.tenants_exists_sync(use_pool=False): if not tenants.tenants_exists_sync(use_pool=False):
@ -20,4 +20,4 @@ if not tenants.tenants_exists_sync(use_pool=False):
if await tenants.tenants_exists(): if await tenants.tenants_exists():
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Not Found") raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Not Found")
return {"data": health.get_health()} return {"data": await health.get_health()}

View file

@ -11,18 +11,18 @@ public_app, app, app_apikey = get_routers()
@app.post('/{projectId}/dashboards', tags=["dashboard"]) @app.post('/{projectId}/dashboards', tags=["dashboard"])
def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), async def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data) return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/dashboards', tags=["dashboard"]) @app.get('/{projectId}/dashboards', tags=["dashboard"])
def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)} return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)}
@app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) @app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
if data is None: if data is None:
return {"errors": ["dashboard not found"]} return {"errors": ["dashboard not found"]}
@ -30,25 +30,25 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
@app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...), async def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id, return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id,
dashboard_id=dashboardId, data=data)} dashboard_id=dashboardId, data=data)}
@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) @app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def delete_dashboard(projectId: int, dashboardId: int, _=Body(None), async def delete_dashboard(projectId: int, dashboardId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId) return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
@app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"]) @app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"])
def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)): async def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)} return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
@app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"]) @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
def add_card_to_dashboard(projectId: int, dashboardId: int, async def add_card_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...), data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
@ -57,7 +57,7 @@ def add_card_to_dashboard(projectId: int, dashboardId: int,
@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) @app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) # @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, async def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
data: schemas.CardSchema = Body(...), data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id, return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
@ -65,7 +65,7 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, async def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
data: schemas.UpdateWidgetPayloadSchema = Body(...), data: schemas.UpdateWidgetPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
@ -73,50 +73,50 @@ def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) @app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None), async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId) widget_id=widgetId)
@app.post('/{projectId}/cards/try', tags=["cards"]) @app.post('/{projectId}/cards/try', tags=["cards"])
def try_card(projectId: int, data: schemas.CardSchema = Body(...), async def try_card(projectId: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_chart(project_id=projectId, data=data, user_id=context.user_id)} return {"data": custom_metrics.get_chart(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/cards/try/sessions', tags=["cards"]) @app.post('/{projectId}/cards/try/sessions', tags=["cards"])
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...), async def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, data=data) data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data} return {"data": data}
@app.post('/{projectId}/cards/try/issues', tags=["cards"]) @app.post('/{projectId}/cards/try/issues', tags=["cards"])
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...), async def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)} return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards', tags=["cards"]) @app.get('/{projectId}/cards', tags=["cards"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards', tags=["cards"]) @app.post('/{projectId}/cards', tags=["cards"])
def create_card(projectId: int, data: schemas.CardSchema = Body(...), async def create_card(projectId: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data) return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data)
@app.post('/{projectId}/cards/search', tags=["cards"]) @app.post('/{projectId}/cards/search', tags=["cards"])
def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...), async def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)} return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards/{metric_id}', tags=["cards"]) @app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): async def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if metric_id.isnumeric(): if metric_id.isnumeric():
metric_id = int(metric_id) metric_id = int(metric_id)
else: else:
@ -128,7 +128,7 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren
@app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"]) @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
def get_card_sessions(projectId: int, metric_id: int, async def get_card_sessions(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...), data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_sessions_by_card_id(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data = custom_metrics.get_sessions_by_card_id(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
@ -139,7 +139,7 @@ def get_card_sessions(projectId: int, metric_id: int,
@app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"]) @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], async def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
data: schemas.CardSessionsSchema = Body(...), data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if metric_id.isnumeric(): if metric_id.isnumeric():
@ -155,7 +155,7 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, async def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CardSessionsSchema = Body(...), data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id, data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
@ -166,7 +166,7 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"]) @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
def get_card_errors_list(projectId: int, metric_id: int, async def get_card_errors_list(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...), data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
@ -177,7 +177,7 @@ def get_card_errors_list(projectId: int, metric_id: int,
@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"]) @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...), async def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data) data=data)
@ -185,7 +185,7 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem
@app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"]) @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), async def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None: if data is None:
@ -194,7 +194,7 @@ def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(
@app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"]) @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
def update_card_state(projectId: int, metric_id: int, async def update_card_state(projectId: int, metric_id: int,
data: schemas.UpdateCardStatusSchema = Body(...), data: schemas.UpdateCardStatusSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return { return {
@ -203,6 +203,6 @@ def update_card_state(projectId: int, metric_id: int,
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"]) @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
def delete_card(projectId: int, metric_id: int, _=Body(None), async def delete_card(projectId: int, metric_id: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}

View file

@ -9,10 +9,10 @@ public_app, app, app_apikey = get_routers()
@app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"]) @app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"])
def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None, async def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return { return {
"data": sessions.get_user_sessions( "data": await sessions.get_user_sessions(
project_id=context.project.project_id, project_id=context.project.project_id,
user_id=userId, user_id=userId,
start_date=start_date, start_date=start_date,
@ -22,9 +22,9 @@ def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_
@app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"]) @app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"])
def get_session_events(projectKey: str, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_session_events(projectKey: str, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
return { return {
"data": events.get_by_session_id( "data": await events.get_by_session_id(
project_id=context.project.project_id, project_id=context.project.project_id,
session_id=sessionId session_id=sessionId
) )
@ -32,9 +32,9 @@ def get_session_events(projectKey: str, sessionId: int, context: schemas.Current
@app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"]) @app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"])
def get_user_details(projectKey: str, userId: str, context: schemas.CurrentContext = Depends(OR_context)): async def get_user_details(projectKey: str, userId: str, context: schemas.CurrentContext = Depends(OR_context)):
return { return {
"data": sessions.get_session_user( "data": await sessions.get_session_user(
project_id=context.project.project_id, project_id=context.project.project_id,
user_id=userId user_id=userId
) )
@ -42,25 +42,25 @@ def get_user_details(projectKey: str, userId: str, context: schemas.CurrentConte
@app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"]) @app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"])
def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None), async def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
record = jobs.create(project_id=context.project.project_id, user_id=userId) record = await jobs.create(project_id=context.project.project_id, user_id=userId)
return {"data": record} return {"data": record}
@app_apikey.get('/v1/{projectKey}/jobs', tags=["api"]) @app_apikey.get('/v1/{projectKey}/jobs', tags=["api"])
def get_jobs(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): async def get_jobs(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": jobs.get_all(project_id=context.project.project_id)} return {"data": await jobs.get_all(project_id=context.project.project_id)}
@app_apikey.get('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) @app_apikey.get('/v1/{projectKey}/jobs/{jobId}', tags=["api"])
def get_job(projectKey: str, jobId: int, context: schemas.CurrentContext = Depends(OR_context)): async def get_job(projectKey: str, jobId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": jobs.get(job_id=jobId, project_id=context.project.project_id)} return {"data": await jobs.get(job_id=jobId, project_id=context.project.project_id)}
@app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) @app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"])
def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): async def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
job = jobs.get(job_id=jobId, project_id=context.project.project_id) job = await jobs.get(job_id=jobId, project_id=context.project.project_id)
job_not_found = len(job.keys()) == 0 job_not_found = len(job.keys()) == 0
if job_not_found: if job_not_found:
@ -69,12 +69,12 @@ def cancel_job(projectKey: str, jobId: int, _=Body(None), context: schemas.Curre
return {"errors": ["The request job has already been canceled/completed."]} return {"errors": ["The request job has already been canceled/completed."]}
job["status"] = "cancelled" job["status"] = "cancelled"
return {"data": jobs.update(job_id=jobId, job=job)} return {"data": await jobs.update(job_id=jobId, job=job)}
@app_apikey.get('/v1/projects', tags=["api"]) @app_apikey.get('/v1/projects', tags=["api"])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)): async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
records = projects.get_projects(tenant_id=context.tenant_id) records = await projects.get_projects(tenant_id=context.tenant_id)
for record in records: for record in records:
del record['projectId'] del record['projectId']
@ -82,16 +82,16 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
@app_apikey.get('/v1/projects/{projectKey}', tags=["api"]) @app_apikey.get('/v1/projects/{projectKey}', tags=["api"])
def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): async def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)):
return { return {
"data": projects.get_by_project_key(project_key=projectKey) "data": await projects.get_by_project_key(project_key=projectKey)
} }
@app_apikey.post('/v1/projects', tags=["api"]) @app_apikey.post('/v1/projects', tags=["api"])
def create_project(data: schemas.CreateProjectSchema = Body(...), async def create_project(data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
record = projects.create( record = await projects.create(
tenant_id=context.tenant_id, tenant_id=context.tenant_id,
user_id=None, user_id=None,
data=data, data=data,

View file

@ -201,22 +201,22 @@ def get_errors_by_session_id(session_id, project_id):
return helper.list_to_camel_case(errors) return helper.list_to_camel_case(errors)
def search(text, event_type, project_id, source, key): async def search(text, event_type, project_id, source, key):
if not event_type: if not event_type:
return {"data": autocomplete.__get_autocomplete_table(text, project_id)} return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys(): if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) rows = await SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for IOS events autocomplete # for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys(): # if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source) # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) rows = await SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id) return await sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \ elif event_type.endswith("_IOS") \
and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys(): and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id) return await sessions_metas.search(text, event_type, project_id)
else: else:
return {"errors": ["unsupported event"]} return {"errors": ["unsupported event"]}

View file

@ -71,11 +71,11 @@ SUPPORTED_TYPES = {
} }
def search(text: str, meta_type: schemas.FilterType, project_id: int): async def search(text: str, meta_type: schemas.FilterType, project_id: int):
rows = [] rows = []
if meta_type not in list(SUPPORTED_TYPES.keys()): if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]} return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) rows += await SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
# for IOS events autocomplete # for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)

View file

@ -74,7 +74,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
check_existence=False) check_existence=False)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
context=context, check_existence=False) context=context, check_existence=False)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=data["startTs"], duration=data["duration"]) start_ts=data["startTs"], duration=data["duration"])
@ -135,7 +135,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
check_existence=False) check_existence=False)
else: else:
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
context=context, check_existence=False) context=context, check_existence=False)
data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
if user_testing.has_test_signals(session_id=session_id, project_id=project_id): if user_testing.has_test_signals(session_id=session_id, project_id=project_id):

View file

@ -4,13 +4,13 @@ from chalicelib.utils.storage import StorageClient
def tag_session(file_key, tag_key='retention', tag_value='vault'): def tag_session(file_key, tag_key='retention', tag_value='vault'):
bucket = config("sessions_bucket") bucket = config("sessions_bucket")
if not StorageClient.exists(bucket=bucket, key=file_key): if not await StorageClient.exists(bucket=bucket, key=file_key):
return None return None
return StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value) return await StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
def tag_record(file_key, tag_key='retention', tag_value='vault'): def tag_record(file_key, tag_key='retention', tag_value='vault'):
bucket = config('ASSIST_RECORDS_BUCKET') bucket = config('ASSIST_RECORDS_BUCKET')
if not StorageClient.exists(bucket=bucket, key=file_key): if not await StorageClient.exists(bucket=bucket, key=file_key):
return None return None
return StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value) return await StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)