Make async postgresql calls, and http calls.

This commit is contained in:
Amirouche 2024-01-29 14:21:55 +01:00
parent a26b603945
commit 0e2ae898c5
72 changed files with 1525 additions and 1522 deletions

View file

@ -13,21 +13,21 @@ from chalicelib.utils import pg_client, helper, email_helper, smtp
from chalicelib.utils.TimeUTC import TimeUTC
def get(id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get(id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
SELECT *
FROM public.alerts
WHERE alert_id =%(id)s;""",
{"id": id})
)
a = helper.dict_to_camel_case(cur.fetchone())
a = helper.dict_to_camel_case(await cur.fetchone())
return helper.custom_alert_to_front(__process_circular(a))
def get_all(project_id):
with pg_client.PostgresClient() as cur:
async def get_all(project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""\
SELECT alerts.*,
COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count',
@ -39,8 +39,8 @@ def get_all(project_id):
AND alerts.deleted_at ISNULL
ORDER BY alerts.created_at;""",
{"project_id": project_id})
cur.execute(query=query)
all = helper.list_to_camel_case(cur.fetchall())
await cur.execute(query=query)
all = helper.list_to_camel_case(await cur.fetchall())
for i in range(len(all)):
all[i] = helper.custom_alert_to_front(__process_circular(all[i]))
return all
@ -54,29 +54,29 @@ def __process_circular(alert):
return alert
def create(project_id, data: schemas.AlertSchema):
async def create(project_id, data: schemas.AlertSchema):
data = data.model_dump()
data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id, change)
VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s, %(change)s)
RETURNING *;""",
{"project_id": project_id, **data})
)
a = helper.dict_to_camel_case(cur.fetchone())
a = helper.dict_to_camel_case(await cur.fetchone())
return {"data": helper.custom_alert_to_front(helper.dict_to_camel_case(__process_circular(a)))}
def update(id, data: schemas.AlertSchema):
async def update(id, data: schemas.AlertSchema):
data = data.model_dump()
data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""\
UPDATE public.alerts
SET name = %(name)s,
@ -90,8 +90,8 @@ def update(id, data: schemas.AlertSchema):
WHERE alert_id =%(id)s AND deleted_at ISNULL
RETURNING *;""",
{"id": id, **data})
cur.execute(query=query)
a = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
a = helper.dict_to_camel_case(await cur.fetchone())
return {"data": helper.custom_alert_to_front(__process_circular(a))}
@ -211,9 +211,9 @@ def send_to_msteams_batch(notifications_list):
attachments=webhookId_map[batch]["batch"])
def delete(project_id, alert_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(project_id, alert_id):
with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(""" UPDATE public.alerts
SET deleted_at = timezone('utc'::text, now()),
active = FALSE

View file

@ -1,8 +1,8 @@
from chalicelib.utils import pg_client, helper
def get_all_alerts():
with pg_client.PostgresClient(long_query=True) as cur:
async def get_all_alerts():
async with pg_client.cursor(long_query=True) as cur:
query = """SELECT -1 AS tenant_id,
alert_id,
projects.project_id,
@ -27,6 +27,6 @@ def get_all_alerts():
AND projects.deleted_at ISNULL
AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
ORDER BY alerts.created_at;"""
cur.execute(query=query)
all_alerts = helper.list_to_camel_case(cur.fetchall())
await cur.execute(query=query)
all_alerts = helper.list_to_camel_case(await cur.fetchall())
return all_alerts

View file

@ -186,10 +186,10 @@ def Build(a):
return q, params
def process():
async def process():
notifications = []
all_alerts = alerts_listener.get_all_alerts()
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
for alert in all_alerts:
if can_check(alert):
query, params = Build(alert)
@ -203,8 +203,8 @@ def process():
logging.debug(alert)
logging.debug(query)
try:
cur.execute(query)
result = cur.fetchone()
await cur.execute(query)
result = await cur.fetchone()
if result["valid"]:
logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
notifications.append(generate_notification(alert, result))
@ -213,9 +213,8 @@ def process():
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(query)
logging.error(e)
cur = cur.recreate(rollback=True)
if len(notifications) > 0:
cur.execute(
await cur.execute(
cur.mogrify(f"""UPDATE public.alerts
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))

View file

@ -4,8 +4,8 @@ from decouple import config
from chalicelib.utils.TimeUTC import TimeUTC
def get_all(user_id):
with pg_client.PostgresClient() as cur:
async def get_all(user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""
SELECT a.*, u.last >= (EXTRACT(EPOCH FROM a.created_at)*1000) AS viewed
FROM public.announcements AS a,
@ -15,10 +15,10 @@ def get_all(user_id):
LIMIT 1) AS u(last)
ORDER BY a.created_at DESC;""",
{"userId": user_id})
cur.execute(
await cur.execute(
query
)
announcements = helper.list_to_camel_case(cur.fetchall())
announcements = helper.list_to_camel_case(await cur.fetchall())
for a in announcements:
a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
if a["imageUrl"] is not None and len(a["imageUrl"]) > 0:
@ -26,8 +26,8 @@ def get_all(user_id):
return announcements
def view(user_id):
with pg_client.PostgresClient() as cur:
async def view(user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""
UPDATE public.users
SET data=data ||
@ -36,7 +36,7 @@ def view(user_id):
'}')::jsonb
WHERE user_id = %(userId)s;""",
{"userId": user_id})
cur.execute(
await cur.execute(
query
)
return True

View file

@ -2,7 +2,7 @@ from os import access, R_OK
from os.path import exists as path_exists, getsize
import jwt
import requests
import httpx
from decouple import config
from fastapi import HTTPException, status
@ -63,16 +63,14 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id)
try:
results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
async with httpx.AsyncClient() as client:
results = await client.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
print(results.text)
return {"total": 0, "sessions": []}
live_peers = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
live_peers = {"total": 0, "sessions": []}
except Exception as e:
print("!! Issue getting Live-Assist response")
print(str(e))
@ -113,7 +111,8 @@ def __get_agent_token(project_id, project_key, session_id):
def get_live_session_by_id(project_id, session_id):
project_key = projects.get_project_key(project_id)
try:
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
@ -124,9 +123,6 @@ def get_live_session_by_id(project_id, session_id):
return None
results["live"] = True
results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id)
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return None
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
@ -143,16 +139,14 @@ def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for is_live")
print(results.text)
return False
results = results.json().get("data")
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
@ -165,13 +159,14 @@ def is_live(project_id, session_id, project_key=None):
return str(session_id) == results
def autocomplete(project_id, q: str, key: str = None):
project_key = projects.get_project_key(project_id)
async def autocomplete(project_id, q: str, key: str = None):
project_key = await projects.get_project_key(project_id)
params = {"q": q}
if key:
params["key"] = key
try:
results = requests.get(
async with httpx.AsyncClient() as client:
results = await client.get(
ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
@ -179,9 +174,6 @@ def autocomplete(project_id, q: str, key: str = None):
print(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
@ -255,7 +247,8 @@ def get_raw_devtools_by_id(project_id, session_id):
def session_exists(project_id, session_id):
project_key = projects.get_project_key(project_id)
try:
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
async with httpx.AsyncClient() as client:
results = await client.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for session_exists")
@ -265,9 +258,6 @@ def session_exists(project_id, session_id):
if results is None:
return False
return True
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))

View file

@ -7,7 +7,7 @@ from chalicelib.utils.event_filter_definition import Event
TABLE = "public.autocomplete"
def __get_autocomplete_table(value, project_id):
async def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
@ -48,7 +48,7 @@ def __get_autocomplete_table(value, project_id):
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";",
{"project_id": project_id,
"value": helper.string_to_sql_like(value),
@ -56,7 +56,7 @@ def __get_autocomplete_table(value, project_id):
"c_list": tuple(c_list)
})
try:
cur.execute(query)
await cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
@ -64,7 +64,7 @@ def __get_autocomplete_table(value, project_id):
print(value)
print("--------------------")
raise err
results = cur.fetchall()
results = await cur.fetchall()
for r in results:
r["type"] = r.pop("_type")
results = helper.list_to_camel_case(results)
@ -110,20 +110,20 @@ def __generic_query(typename, value_length=None):
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
async def f(project_id, value, key=None, source=None):
async with pg_client.cursor() as cur:
query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
cur.execute(cur.mogrify(query, params))
return helper.list_to_camel_case(cur.fetchall())
await cur.execute(cur.mogrify(query, params))
return helper.list_to_camel_case(await cur.fetchall())
return f
def __generic_autocomplete_metas(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
async def f(project_id, text):
async with pg_client.cursor() as cur:
params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)}
@ -133,8 +133,8 @@ def __generic_autocomplete_metas(typename):
return []
query = cur.mogrify(__generic_query(typename, value_length=len(text)), params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
return f
@ -214,19 +214,19 @@ def __errors_query(source=None, value_length=None):
LIMIT 5));"""
def __search_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
async def __search_errors(project_id, value, key=None, source=None):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(__errors_query(source,
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
results = helper.list_to_camel_case(await cur.fetchall())
return results
def __search_errors_ios(project_id, value, key=None, source=None):
async def __search_errors_ios(project_id, value, key=None, source=None):
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
@ -287,14 +287,14 @@ def __search_errors_ios(project_id, value, key=None, source=None):
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
results = helper.list_to_camel_case(await cur.fetchall())
return results
def __search_metadata(project_id, value, key=None, source=None):
async def __search_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
@ -321,11 +321,11 @@ def __search_metadata(project_id, value, key=None, source=None):
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
results = helper.list_to_camel_case(await cur.fetchall())
return results

View file

@ -4,23 +4,23 @@ from chalicelib.core import projects, log_tool_datadog, log_tool_stackdriver, lo
from chalicelib.core import users
def get_state(tenant_id):
async def get_state(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
recorded = False
meta = False
if len(pids) > 0:
cur.execute(
await cur.execute(
cur.mogrify("""SELECT EXISTS(( SELECT 1
FROM public.sessions AS s
WHERE s.project_id IN %(ids)s)) AS exists;""",
{"ids": tuple(pids)})
)
recorded = cur.fetchone()["exists"]
recorded = await cur.fetchone()["exists"]
meta = False
if recorded:
cur.execute("""SELECT EXISTS((SELECT 1
await cur.execute("""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
@ -36,7 +36,7 @@ def get_state(tenant_id):
OR p.metadata_10 IS NOT NULL )
)) AS exists;""")
meta = cur.fetchone()["exists"]
meta = await cur.fetchone()["exists"]
return [
{"task": "Install OpenReplay",
@ -46,7 +46,7 @@ def get_state(tenant_id):
"done": meta,
"URL": "https://docs.openreplay.com/data-privacy-security/metadata"},
{"task": "Invite Team Members",
"done": len(users.get_members(tenant_id=tenant_id)) > 1,
"done": len(await users.get_members(tenant_id=tenant_id)) > 1,
"URL": "https://app.openreplay.com/client/manage-users"},
{"task": "Integrations",
"done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
@ -58,7 +58,7 @@ def get_state(tenant_id):
def get_state_installing(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
with pg_client.cursor() as cur:
recorded = False
if len(pids) > 0:
@ -76,7 +76,7 @@ def get_state_installing(tenant_id):
def get_state_identify_users(tenant_id):
with pg_client.PostgresClient() as cur:
with pg_client.cursor() as cur:
cur.execute("""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1

View file

@ -3,16 +3,16 @@ from chalicelib.utils.storage import StorageClient
from decouple import config
def get_canvas_presigned_urls(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def get_canvas_presigned_urls(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
SELECT *
FROM events.canvas_recordings
WHERE session_id = %(session_id)s
ORDER BY timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
rows = await cur.fetchall()
for i in range(len(rows)):
params = {

View file

@ -27,7 +27,7 @@ COALESCE((SELECT TRUE
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """
def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
async def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
no_platform = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
@ -42,7 +42,7 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
favorite_only=data.bookmarked, issue=None,
project_id=project_id, user_id=user_id)
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
data.order = schemas.SortOrderType.desc
data.sort = 'duration'
@ -57,7 +57,7 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
# print(main_query)
# print("--------------------")
try:
cur.execute(main_query)
await cur.execute(main_query)
except Exception as err:
print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
print(main_query.decode('UTF-8'))
@ -66,7 +66,7 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
print("--------------------")
raise err
session = cur.fetchone()
session = await cur.fetchone()
if session:
if include_mobs:
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)

View file

@ -1,6 +1,6 @@
import logging
import requests
import httpx
from decouple import config
from fastapi import HTTPException, status
@ -26,7 +26,8 @@ class MSTeams(BaseCollaboration):
@classmethod
def say_hello(cls, url):
r = requests.post(
async with httpx.AsyncClient() as client:
r = await client.post(
url=url,
json={
"@type": "MessageCard",
@ -46,7 +47,8 @@ class MSTeams(BaseCollaboration):
if integration is None:
return {"errors": ["msteams integration not found"]}
try:
r = requests.post(
async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"],
json=body,
timeout=5)
@ -54,9 +56,6 @@ class MSTeams(BaseCollaboration):
logging.warning(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
logging.warning(r.text)
return None
except requests.exceptions.Timeout:
logging.warning(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
return None
except Exception as e:
logging.warning(f"!! Issue sending msteams raw webhookId:{webhook_id}")
logging.warning(e)
@ -74,7 +73,8 @@ class MSTeams(BaseCollaboration):
for j in range(1, len(part), 2):
part.insert(j, {"text": "***"})
r = requests.post(url=integration["endpoint"],
async with httpx.AsyncClient() as client:
r = await client.post(url=integration["endpoint"],
json={
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
@ -86,13 +86,14 @@ class MSTeams(BaseCollaboration):
logging.warning(r.text)
@classmethod
def __share(cls, tenant_id, integration_id, attachement, extra=None):
async def __share(cls, tenant_id, integration_id, attachement, extra=None):
if extra is None:
extra = {}
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["Microsoft Teams integration not found"]}
r = requests.post(
async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"],
json={
"@type": "MessageCard",

View file

@ -1,9 +1,9 @@
from datetime import datetime
import requests
from decouple import config
from fastapi import HTTPException, status
import httpx
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
@ -23,8 +23,9 @@ class Slack(BaseCollaboration):
return None
@classmethod
def say_hello(cls, url):
r = requests.post(
async def say_hello(cls, url):
async with httpx.AsyncClient() as client:
r = await client.post(
url=url,
json={
"attachments": [
@ -41,12 +42,13 @@ class Slack(BaseCollaboration):
return True
@classmethod
def send_raw(cls, tenant_id, webhook_id, body):
async def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
try:
r = requests.post(
async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"],
json=body,
timeout=5)
@ -54,9 +56,6 @@ class Slack(BaseCollaboration):
print(f"!! issue sending slack raw; webhookId:{webhook_id} code:{r.status_code}")
print(r.text)
return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending slack raw webhookId:{webhook_id}")
return None
except Exception as e:
print(f"!! Issue sending slack raw webhookId:{webhook_id}")
print(str(e))
@ -64,13 +63,14 @@ class Slack(BaseCollaboration):
return {"data": r.text}
@classmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
async def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
print(f"====> sending slack batch notification: {len(attachments)}")
for i in range(0, len(attachments), 100):
r = requests.post(
async with httpx.AsyncClient() as client:
r = await client.post(
url=integration["endpoint"],
json={"attachments": attachments[i:i + 100]})
if r.status_code != 200:
@ -80,14 +80,15 @@ class Slack(BaseCollaboration):
print(r.text)
@classmethod
def __share(cls, tenant_id, integration_id, attachement, extra=None):
async def __share(cls, tenant_id, integration_id, attachement, extra=None):
if extra is None:
extra = {}
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["slack integration not found"]}
attachement["ts"] = datetime.now().timestamp()
r = requests.post(url=integration["endpoint"], json={"attachments": [attachement], **extra})
async with httpx.AsyncClient() as client:
r = await client.post(url=integration["endpoint"], json={"attachments": [attachement], **extra})
return r.text
@classmethod

View file

@ -311,8 +311,8 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
return r
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
async def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
async with pg_client.cursor() as cur:
session_data = None
if data.metric_type == schemas.MetricType.click_map:
session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
@ -349,14 +349,14 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
RETURNING metric_id;"""
query = cur.mogrify(query, params)
cur.execute(query)
r = cur.fetchone()
await cur.execute(query)
r = await cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
async def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
@ -393,7 +393,7 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
if data.metric_type == schemas.MetricType.pathAnalysis:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
sub_queries = []
if len(n_series) > 0:
sub_queries.append(f"""\
@ -430,11 +430,11 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(query)
await cur.execute(query)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
async def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"]
params = {"project_id": project_id, "user_id": user_id,
@ -451,7 +451,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
sub_join = ""
if include_series:
sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
@ -481,8 +481,8 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order.value}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
if include_series:
for r in rows:
for s in r["series"]:
@ -506,9 +506,9 @@ def get_all(project_id, user_id):
return result
def delete_card(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete_card(project_id, metric_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
@ -530,8 +530,8 @@ def __get_path_analysis_attributes(row):
return row
def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
with pg_client.PostgresClient() as cur:
async def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
async with pg_client.cursor() as cur:
query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
@ -563,8 +563,8 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is None:
return None
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
@ -578,9 +578,9 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
return row
def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_series_for_alert(project_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT series_id AS value,
metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
@ -598,13 +598,13 @@ def get_series_for_alert(project_id, user_id):
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
rows = await cur.fetchall()
return helper.list_to_camel_case(rows)
def change_state(project_id, metric_id, user_id, status):
with pg_client.PostgresClient() as cur:
cur.execute(
async def change_state(project_id, metric_id, user_id, status):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
UPDATE public.metrics
SET active = %(status)s
@ -612,14 +612,14 @@ def change_state(project_id, metric_id, user_id, status):
AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "status": status, "user_id": user_id})
)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
return await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
async def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
card: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if card is None:
return None
metric: schemas.CardSchema = schemas.CardSchema(**card)
@ -654,8 +654,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
async def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = await get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")

View file

@ -7,8 +7,8 @@ from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur:
async def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
async with pg_client.cursor() as cur:
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
RETURNING *"""
@ -25,28 +25,28 @@ def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
# params[f"config_{i}"]["position"] = i
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i})
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
if row is None:
return {"errors": ["something went wrong while creating the dashboard"]}
return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
def get_dashboards(project_id, user_id):
with pg_client.PostgresClient() as cur:
async def get_dashboards(project_id, user_id):
async with pg_client.cursor() as cur:
pg_query = f"""SELECT *
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(projectId)s
AND (user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
await cur.execute(cur.mogrify(pg_query, params))
rows = await cur.fetchall()
return helper.list_to_camel_case(rows)
def get_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
async def get_dashboard(project_id, user_id, dashboard_id):
async with pg_client.cursor() as cur:
pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
FROM dashboards
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
@ -77,8 +77,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
if row is not None:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
for w in row["widgets"]:
@ -92,26 +92,26 @@ def get_dashboard(project_id, user_id, dashboard_id):
return helper.dict_to_camel_case(row)
def delete_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
async def delete_dashboard(project_id, user_id, dashboard_id):
async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboards
SET deleted_at = timezone('utc'::text, now())
WHERE dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
await cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}}
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
with pg_client.PostgresClient() as cur:
async def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
async with pg_client.cursor() as cur:
pg_query = """SELECT COALESCE(COUNT(*),0) AS count
FROM dashboard_widgets
WHERE dashboard_id = %(dashboard_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
offset = row["count"]
pg_query = f"""UPDATE dashboards
SET name = %(name)s,
@ -137,15 +137,15 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i + offset})
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
if row:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
def get_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur:
async def get_widget(project_id, user_id, dashboard_id, widget_id):
async with pg_client.cursor() as cur:
pg_query = """SELECT metrics.*, metric_series.series
FROM dashboard_widgets
INNER JOIN dashboards USING (dashboard_id)
@ -163,13 +163,13 @@ def get_widget(project_id, user_id, dashboard_id, widget_id):
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
with pg_client.PostgresClient() as cur:
async def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
async with pg_client.cursor() as cur:
pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
%(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
@ -180,13 +180,13 @@ def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashb
RETURNING *;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
with pg_client.PostgresClient() as cur:
async def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboard_widgets
SET config= %(config)s
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
@ -194,22 +194,22 @@ def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.Up
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
"widget_id": widget_id, **data.model_dump()}
params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def remove_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur:
async def remove_widget(project_id, user_id, dashboard_id, widget_id):
async with pg_client.cursor() as cur:
pg_query = """DELETE FROM dashboard_widgets
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params))
await cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}}
def pin_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
async def pin_dashboard(project_id, user_id, dashboard_id):
async with pg_client.cursor() as cur:
pg_query = """UPDATE dashboards
SET is_pinned = FALSE
WHERE project_id=%(project_id)s;
@ -218,14 +218,14 @@ def pin_dashboard(project_id, user_id, dashboard_id):
WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
RETURNING *;"""
params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(pg_query, params))
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema):
async def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema):
metric_id = custom_metrics.create_card(project_id=project_id, user_id=user_id, data=data, dashboard=True)
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
return await add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
# def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema):

View file

@ -93,44 +93,44 @@ class DatabaseRequestHandler:
logging.info(f"Query: {query}")
return query
def execute_query(self, query, data=None):
async def execute_query(self, query, data=None):
try:
with self.client.PostgresClient() as cur:
async with self.client.cursor() as cur:
mogrified_query = cur.mogrify(query, {**data, **self.params} if data else self.params)
cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None
await cur.execute(mogrified_query)
return await cur.fetchall() if cur.description else None
except Exception as e:
self.logger.error(f"Database operation failed: {e}")
raise
def fetchall(self):
query = self.build_query()
return self.execute_query(query)
return await self.execute_query(query)
def fetchone(self):
query = self.build_query()
result = self.execute_query(query)
result = await self.execute_query(query)
return result[0] if result else None
def insert(self, data):
query = self.build_query(action="insert", data=data)
query += " RETURNING *;"
result = self.execute_query(query, data)
result = await self.execute_query(query, data)
return result[0] if result else None
def update(self, data):
query = self.build_query(action="update", data=data)
query += " RETURNING *;"
result = self.execute_query(query, data)
result = await self.execute_query(query, data)
return result[0] if result else None
def delete(self):
query = self.build_query(action="delete")
return self.execute_query(query)
return await self.execute_query(query)
def batch_insert(self, items):
async def batch_insert(self, items):
if not items:
return None
@ -145,27 +145,27 @@ class DatabaseRequestHandler:
query = f"INSERT INTO {self.table_name} ({columns}) VALUES {all_values_query} RETURNING *;"
try:
with self.client.PostgresClient() as cur:
async with self.client.cursor() as cur:
# Flatten items into a single dictionary with unique keys
combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query)
return cur.fetchall()
await cur.execute(mogrified_query)
return await cur.fetchall()
except Exception as e:
self.logger.error(f"Database batch insert operation failed: {e}")
raise
def raw_query(self, query, params=None):
async def raw_query(self, query, params=None):
try:
with self.client.PostgresClient() as cur:
async with self.client.cursor() as cur:
mogrified_query = cur.mogrify(query, params)
cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None
await cur.execute(mogrified_query)
return await cur.fetchall() if cur.description else None
except Exception as e:
self.logger.error(f"Database operation failed: {e}")
raise
def batch_update(self, items):
async def batch_update(self, items):
if not items:
return None
@ -192,11 +192,11 @@ class DatabaseRequestHandler:
"""
try:
with self.client.PostgresClient() as cur:
async with self.client.cursor() as cur:
# Flatten items into a single dictionary for mogrify
combined_params = {k: v for item in items for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query)
await cur.execute(mogrified_query)
except Exception as e:
self.logger.error(f"Database batch update operation failed: {e}")
raise

View file

@ -8,24 +8,24 @@ from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
def get(error_id, family=False):
async def get(error_id, family=False):
if family:
return get_batch([error_id])
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(
"SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
{"error_id": error_id})
cur.execute(query=query)
result = cur.fetchone()
await cur.execute(query=query)
result = await cur.fetchone()
if result is not None:
result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
return helper.dict_to_camel_case(result)
def get_batch(error_ids):
async def get_batch(error_ids):
if len(error_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""
WITH RECURSIVE error_family AS (
@ -40,8 +40,8 @@ def get_batch(error_ids):
SELECT *
FROM error_family;""",
{"error_ids": tuple(error_ids)})
cur.execute(query=query)
errors = cur.fetchall()
await cur.execute(query=query)
errors = await cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
@ -81,7 +81,7 @@ def __process_tags(row):
]
def get_details(project_id, error_id, user_id, **data):
async def get_details(project_id, error_id, user_id, **data):
pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
pg_sub_query24.append("error_id = %(error_id)s")
pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
@ -101,7 +101,7 @@ def get_details(project_id, error_id, user_id, **data):
pg_sub_query30.append("error_id = %(error_id)s")
pg_basic_query = __get_basic_constraints(time_constraint=False)
pg_basic_query.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
data["startDate24"] = TimeUTC.now(-1)
data["endDate24"] = TimeUTC.now()
data["startDate30"] = TimeUTC.now(-30)
@ -253,8 +253,8 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(main_pg_query, params))
row = await cur.fetchone()
if row is None:
return {"errors": ["error not found"]}
row["tags"] = __process_tags(row)
@ -274,8 +274,8 @@ def get_details(project_id, error_id, user_id, **data):
ORDER BY start_ts DESC
LIMIT 1;""",
{"project_id": project_id, "error_id": error_id, "user_id": user_id})
cur.execute(query=query)
status = cur.fetchone()
await cur.execute(query=query)
status = await cur.fetchone()
if status is not None:
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
@ -294,12 +294,12 @@ def get_details(project_id, error_id, user_id, **data):
return {"data": helper.dict_to_camel_case(row)}
def get_details_chart(project_id, error_id, user_id, **data):
async def get_details_chart(project_id, error_id, user_id, **data):
pg_sub_query = __get_basic_constraints()
pg_sub_query.append("error_id = %(error_id)s")
pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
pg_sub_query_chart.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
else:
@ -398,8 +398,8 @@ def get_details_chart(project_id, error_id, user_id, **data):
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone()
await cur.execute(cur.mogrify(main_pg_query, params))
row = await cur.fetchone()
if row is None:
return {"errors": ["error not found"]}
row["tags"] = __process_tags(row)
@ -434,7 +434,7 @@ def __get_sort_key(key):
}.get(key, 'max_datetime')
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
async def search(data: schemas.SearchErrorsSchema, project_id, user_id):
empty_response = {
'total': 0,
'errors': []
@ -465,7 +465,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.sort is not None:
@ -547,8 +547,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
await cur.execute(cur.mogrify(main_pg_query, params))
rows = await cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if total == 0:
@ -565,8 +565,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
await cur.execute(query=query)
statuses = helper.list_to_camel_case(await cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
@ -584,14 +584,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
}
def __save_stacktrace(error_id, data):
with pg_client.PostgresClient() as cur:
async def __save_stacktrace(error_id, data):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
WHERE error_id = %(error_id)s;""",
{"error_id": error_id, "data": json.dumps(data)})
cur.execute(query=query)
await cur.execute(query=query)
def get_trace(project_id, error_id):
@ -614,7 +614,7 @@ def get_trace(project_id, error_id):
"preparsed": False}
def get_sessions(start_date, end_date, project_id, user_id, error_id):
async def get_sessions(start_date, end_date, project_id, user_id, error_id):
extra_constraints = ["s.project_id = %(project_id)s",
"s.start_ts >= %(startDate)s",
"s.start_ts <= %(endDate)s",
@ -630,7 +630,7 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
"project_id": project_id,
"userId": user_id,
"error_id": error_id}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(
f"""SELECT s.project_id,
s.session_id::text AS session_id,
@ -659,13 +659,13 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
WHERE {" AND ".join(extra_constraints)}
ORDER BY s.start_ts DESC;""",
params)
cur.execute(query=query)
await cur.execute(query=query)
sessions_list = []
total = cur.rowcount
row = cur.fetchone()
row = await cur.fetchone()
while row is not None and len(sessions_list) < 100:
sessions_list.append(row)
row = cur.fetchone()
row = await cur.fetchone()
return {
'total': total,
@ -680,7 +680,7 @@ ACTION_STATE = {
}
def change_state(project_id, user_id, error_id, action):
async def change_state(project_id, user_id, error_id, action):
errors = get(error_id, family=True)
print(len(errors))
status = ACTION_STATE.get(action)
@ -696,15 +696,15 @@ def change_state(project_id, user_id, error_id, action):
"userId": user_id,
"error_ids": tuple([e["errorId"] for e in errors]),
"status": status}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET status = %(status)s
WHERE error_id IN %(error_ids)s
RETURNING status""",
params)
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
if row is not None:
for e in errors:
e["status"] = row["status"]

View file

@ -1,9 +1,9 @@
from chalicelib.utils import pg_client
def add_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def add_favorite_error(project_id, user_id, error_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""INSERT INTO public.user_favorite_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
@ -11,9 +11,9 @@ def add_favorite_error(project_id, user_id, error_id):
return {"errorId": error_id, "favorite": True}
def remove_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def remove_favorite_error(project_id, user_id, error_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""DELETE FROM public.user_favorite_errors
WHERE
user_id = %(userId)s
@ -23,18 +23,18 @@ def remove_favorite_error(project_id, user_id, error_id):
return {"errorId": error_id, "favorite": False}
def favorite_error(project_id, user_id, error_id):
exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id)
async def favorite_error(project_id, user_id, error_id):
exists, favorite = await error_exists_and_favorite(user_id=user_id, error_id=error_id)
if not exists:
return {"errors": ["cannot bookmark non-rehydrated errors"]}
if favorite:
return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return await remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return await add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
def error_exists_and_favorite(user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def error_exists_and_favorite(user_id, error_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT errors.error_id AS exists, ufe.error_id AS favorite
FROM public.errors
@ -42,7 +42,7 @@ def error_exists_and_favorite(user_id, error_id):
WHERE error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
r = cur.fetchone()
r = await cur.fetchone()
if r is None:
return False, False
return True, r.get("favorite") is not None

View file

@ -1,17 +1,17 @@
from chalicelib.utils import pg_client
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def add_viewed_error(project_id, user_id, error_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
async def viewed_error_exists(user_id, error_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
@ -22,16 +22,16 @@ def viewed_error_exists(user_id, error_id):
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
await cur.execute(
query=query
)
r = cur.fetchone()
r = await cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
async def viewed_error(project_id, user_id, error_id):
if await viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
return await add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)

View file

@ -9,9 +9,9 @@ from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def get_customs_by_session_id(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
SELECT
c.*,
'CUSTOM' AS type
@ -21,7 +21,7 @@ def get_customs_by_session_id(session_id, project_id):
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
rows = await cur.fetchall()
return helper.dict_to_camel_case(rows)
@ -52,8 +52,8 @@ def __get_grouped_clickrage(rows, session_id, project_id):
return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
async def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
async with pg_client.cursor() as cur:
rows = []
if event_type is None or event_type == schemas.EventType.click:
cur.execute(cur.mogrify("""\
@ -98,8 +98,8 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
return rows
def _search_tags(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
async def _search_tags(project_id, value, key=None, source=None):
async with pg_client.cursor() as cur:
query = f"""
SELECT public.tags.name
'{events.EventType.TAG.ui_type}' AS type
@ -109,8 +109,8 @@ def _search_tags(project_id, value, key=None, source=None):
LIMIT 10
"""
query = cur.mogrify(query, {'project_id': project_id, 'value': value})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
await cur.execute(query)
results = helper.list_to_camel_case(await cur.fetchall())
return results
@ -182,14 +182,14 @@ SUPPORTED_TYPES = {
}
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
async def get_errors_by_session_id(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
errors = await cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)

View file

@ -2,13 +2,13 @@ from chalicelib.utils import pg_client, helper
from chalicelib.core import events
def get_customs_by_session_id(session_id, project_id):
return events.get_customs_by_session_id(session_id=session_id, project_id=project_id)
async def get_customs_by_session_id(session_id, project_id):
return await events.get_customs_by_session_id(session_id=session_id, project_id=project_id)
def get_by_sessionId(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""
async def get_by_sessionId(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(f"""
SELECT
c.*,
'TAP' AS type
@ -18,9 +18,9 @@ def get_by_sessionId(session_id, project_id):
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
rows = await cur.fetchall()
cur.execute(cur.mogrify(f"""
await cur.execute(cur.mogrify(f"""
SELECT
i.*,
'INPUT' AS type
@ -30,8 +30,8 @@ def get_by_sessionId(session_id, project_id):
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
cur.execute(cur.mogrify(f"""
rows += await cur.fetchall()
await cur.execute(cur.mogrify(f"""
SELECT
v.*,
'VIEW' AS type
@ -39,8 +39,8 @@ def get_by_sessionId(session_id, project_id):
WHERE
v.session_id = %(session_id)s
ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
cur.execute(cur.mogrify(f"""
rows += await cur.fetchall()
await cur.execute(cur.mogrify(f"""
SELECT
s.*,
'SWIPE' AS type
@ -48,15 +48,15 @@ def get_by_sessionId(session_id, project_id):
WHERE
s.session_id = %(session_id)s
ORDER BY s.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows += await cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: k["timestamp"])
return rows
def get_crashes_by_session_id(session_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""
async def get_crashes_by_session_id(session_id):
with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(f"""
SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
FROM {events.EventType.CRASH_IOS.table} AS cr
INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
@ -64,5 +64,5 @@ def get_crashes_by_session_id(session_id):
WHERE
cr.session_id = %(session_id)s
ORDER BY timestamp;""", {"session_id": session_id}))
errors = cur.fetchall()
errors = await cur.fetchall()
return helper.list_to_camel_case(errors)

View file

@ -22,8 +22,8 @@ feature_flag_columns = (
)
def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
async def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) -> bool:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.feature_flags
WHERE deleted_at IS NULL
@ -31,29 +31,29 @@ def exists_by_name(flag_key: str, project_id: int, exclude_id: Optional[int]) ->
{"AND feature_flag_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"flag_key": flag_key, "exclude_id": exclude_id, "project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
return row["exists"]
def update_feature_flag_status(project_id: int, feature_flag_id: int, is_active: bool) -> Dict[str, Any]:
async def update_feature_flag_status(project_id: int, feature_flag_id: int, is_active: bool) -> Dict[str, Any]:
try:
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE feature_flags
SET is_active = %(is_active)s, updated_at=NOW()
WHERE feature_flag_id=%(feature_flag_id)s AND project_id=%(project_id)s
RETURNING is_active;""",
{"feature_flag_id": feature_flag_id, "is_active": is_active, "project_id": project_id})
cur.execute(query=query)
await cur.execute(query=query)
return {"is_active": cur.fetchone()["is_active"]}
return {"is_active": await cur.fetchone()["is_active"]}
except Exception as e:
logging.error(f"Failed to update feature flag status: {e}")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Failed to update feature flag status")
def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]:
async def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlagsSchema) -> Dict[str, Any]:
"""
Get all feature flags and their total count.
"""
@ -67,10 +67,10 @@ def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlag
LIMIT %(limit)s OFFSET %(offset)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
if len(rows) == 0:
return {"data": {"total": 0, "list": []}}
@ -110,12 +110,12 @@ def prepare_constraints_params_to_search(data, project_id, user_id):
return constraints, params
def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
async def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant and len(feature_flag_data.variants) == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Variants are required for multi variant flag")
validate_unique_flag_key(feature_flag_data, project_id)
await validate_unique_flag_key(feature_flag_data, project_id)
validate_multi_variant_flag(feature_flag_data)
insert_columns = (
@ -176,19 +176,19 @@ def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schema
SELECT feature_flag_id FROM inserted_flag;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, params)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is None:
return None
return get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"])
return await get_feature_flag(project_id=project_id, feature_flag_id=row["feature_flag_id"])
async def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None):
    """Ensure no other flag in the project already uses this ``flag_key``.

    :param feature_flag_data: schema object carrying the candidate ``flag_key``.
    :param project_id: project scope for the uniqueness check.
    :param exclude_id: flag id to ignore — pass the flag's own id when updating.
    :raises HTTPException: 400 when the key is already taken.
    """
    if await exists_by_name(project_id=project_id, flag_key=feature_flag_data.flag_key, exclude_id=exclude_id):
        # plain string: the original f-string had no placeholders
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag with key already exists.")
@ -238,7 +238,7 @@ def prepare_conditions_values(feature_flag_data):
return conditions_data
def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]:
async def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str, Any]]:
conditions_query = """
SELECT COALESCE(jsonb_agg(ffc ORDER BY condition_id), '[]'::jsonb) AS conditions
FROM feature_flags_conditions AS ffc
@ -261,10 +261,10 @@ def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str
AND ff.deleted_at IS NULL;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "project_id": project_id})
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is None:
return {"errors": ["Feature flag not found"]}
@ -275,7 +275,7 @@ def get_feature_flag(project_id: int, feature_flag_id: int) -> Optional[Dict[str
return {"data": helper.dict_to_camel_case(row)}
def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> List[Dict[str, Any]]:
async def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> List[Dict[str, Any]]:
"""
Create new feature flag conditions and return their data.
"""
@ -297,18 +297,18 @@ def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag
RETURNING condition_id, {", ".join(columns)}
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
params = [
(feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.model_dump() for filter_ in c.filters]))
for c in conditions]
query = cur.mogrify(sql, params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
def update_feature_flag(project_id: int, feature_flag_id: int,
async def update_feature_flag(project_id: int, feature_flag_id: int,
feature_flag: schemas.FeatureFlagSchema, user_id: int):
"""
Update an existing feature flag and return its updated data.
@ -342,23 +342,23 @@ def update_feature_flag(project_id: int, feature_flag_id: int,
RETURNING feature_flag_id, {", ".join(columns)}, created_at, updated_at
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is None:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Feature flag not found")
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
row['conditions'] = check_conditions(feature_flag_id, feature_flag.conditions)
row['variants'] = check_variants(feature_flag_id, feature_flag.variants)
row['conditions'] = await check_conditions(feature_flag_id, feature_flag.conditions)
row['variants'] = await check_variants(feature_flag_id, feature_flag.variants)
return {"data": helper.dict_to_camel_case(row)}
def get_conditions(feature_flag_id: int):
async def get_conditions(feature_flag_id: int):
"""
Get all conditions for a feature flag.
"""
@ -374,15 +374,15 @@ def get_conditions(feature_flag_id: int):
ORDER BY condition_id;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
async def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
existing_ids = [ev.get("variant_id") for ev in get_variants(feature_flag_id)]
to_be_deleted = []
to_be_updated = []
@ -399,18 +399,18 @@ def check_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVaria
to_be_updated.append(variant)
if len(to_be_created) > 0:
create_variants(feature_flag_id=feature_flag_id, variants=to_be_created)
await create_variants(feature_flag_id=feature_flag_id, variants=to_be_created)
if len(to_be_updated) > 0:
update_variants(feature_flag_id=feature_flag_id, variants=to_be_updated)
await update_variants(feature_flag_id=feature_flag_id, variants=to_be_updated)
if len(to_be_deleted) > 0:
delete_variants(feature_flag_id=feature_flag_id, ids=to_be_deleted)
await delete_variants(feature_flag_id=feature_flag_id, ids=to_be_deleted)
return get_variants(feature_flag_id)
return await get_variants(feature_flag_id)
def get_variants(feature_flag_id: int):
async def get_variants(feature_flag_id: int):
sql = """
SELECT
variant_id,
@ -423,15 +423,15 @@ def get_variants(feature_flag_id: int):
ORDER BY variant_id;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id})
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> List[Dict[str, Any]]:
async def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> List[Dict[str, Any]]:
"""
Create new feature flag variants and return their data.
"""
@ -454,16 +454,16 @@ def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVari
RETURNING variant_id, {", ".join(columns)}
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
params = [(feature_flag_id, v.value, v.description, json.dumps(v.payload), v.rollout_percentage) for v in variants]
query = cur.mogrify(sql, params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
async def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVariant]) -> Any:
"""
Update existing feature flag variants and return their updated data.
"""
@ -485,12 +485,12 @@ def update_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVari
WHERE c.variant_id = feature_flags_variants.variant_id AND feature_flag_id = %(feature_flag_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params)
cur.execute(query)
await cur.execute(query)
def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
async def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
"""
Delete existing feature flag variants and return their data.
"""
@ -500,12 +500,12 @@ def delete_variants(feature_flag_id: int, ids: List[int]) -> None:
AND feature_flag_id= %(feature_flag_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
cur.execute(query)
await cur.execute(query)
def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
async def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
existing_ids = [ec.get("condition_id") for ec in get_conditions(feature_flag_id)]
to_be_deleted = []
to_be_updated = []
@ -522,18 +522,18 @@ def check_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagC
to_be_updated.append(condition)
if len(to_be_created) > 0:
create_conditions(feature_flag_id=feature_flag_id, conditions=to_be_created)
await create_conditions(feature_flag_id=feature_flag_id, conditions=to_be_created)
if len(to_be_updated) > 0:
update_conditions(feature_flag_id=feature_flag_id, conditions=to_be_updated)
await update_conditions(feature_flag_id=feature_flag_id, conditions=to_be_updated)
if len(to_be_deleted) > 0:
delete_conditions(feature_flag_id=feature_flag_id, ids=to_be_deleted)
await delete_conditions(feature_flag_id=feature_flag_id, ids=to_be_deleted)
return get_conditions(feature_flag_id)
return await get_conditions(feature_flag_id)
def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
async def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlagCondition]) -> Any:
"""
Update existing feature flag conditions and return their updated data.
"""
@ -555,12 +555,12 @@ def update_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag
WHERE c.condition_id = feature_flags_conditions.condition_id AND feature_flag_id = %(feature_flag_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params)
cur.execute(query)
await cur.execute(query)
def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
async def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
"""
Delete feature flag conditions.
"""
@ -570,12 +570,12 @@ def delete_conditions(feature_flag_id: int, ids: List[int]) -> None:
AND feature_flag_id= %(feature_flag_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"feature_flag_id": feature_flag_id, "ids": tuple(ids)})
cur.execute(query)
await cur.execute(query)
def delete_feature_flag(project_id: int, feature_flag_id: int):
async def delete_feature_flag(project_id: int, feature_flag_id: int):
"""
Delete a feature flag.
"""
@ -584,10 +584,10 @@ def delete_feature_flag(project_id: int, feature_flag_id: int):
"feature_flags.feature_flag_id=%(feature_flag_id)s"
]
params = {"project_id": project_id, "feature_flag_id": feature_flag_id}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE feature_flags
SET deleted_at= (now() at time zone 'utc'), is_active=false
WHERE {" AND ".join(conditions)};""", params)
cur.execute(query)
await cur.execute(query)
return {"state": "success"}

View file

@ -1,7 +1,7 @@
from urllib.parse import urlparse
import redis
import requests
import httpx
from decouple import config
from chalicelib.utils import pg_client
@ -34,24 +34,24 @@ HEALTH_ENDPOINTS = {
}
def __check_database_pg(*_):
async def __check_database_pg(*_):
fail_response = {
"health": False,
"details": {
"errors": ["Postgres health-check failed"]
}
}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
try:
cur.execute("SHOW server_version;")
server_version = cur.fetchone()
await cur.execute("SHOW server_version;")
server_version = await cur.fetchone()
except Exception as e:
print("!! health failed: postgres not responding")
print(str(e))
return fail_response
try:
cur.execute("SELECT openreplay_version() AS version;")
schema_version = cur.fetchone()
await cur.execute("SELECT openreplay_version() AS version;")
schema_version = await cur.fetchone()
except Exception as e:
print("!! health failed: openreplay_version not defined")
print(str(e))
@ -76,7 +76,7 @@ def __always_healthy(*_):
}
def __check_be_service(service_name):
async def __check_be_service(service_name):
def fn(*_):
fail_response = {
"health": False,
@ -85,16 +85,13 @@ def __check_be_service(service_name):
}
}
try:
results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2)
async with httpx.AsyncClient() as client:
results = await client.get(HEALTH_ENDPOINTS.get(service_name), timeout=2)
if results.status_code != 200:
print(f"!! issue with the {service_name}-health code:{results.status_code}")
print(results.text)
# fail_response["details"]["errors"].append(results.text)
return fail_response
except requests.exceptions.Timeout:
print(f"!! Timeout getting {service_name}-health")
# fail_response["details"]["errors"].append("timeout")
return fail_response
except Exception as e:
print(f"!! Issue getting {service_name}-health response")
print(str(e))
@ -139,7 +136,7 @@ def __check_redis(*_):
}
def __check_SSL(*_):
async def __check_SSL(*_):
fail_response = {
"health": False,
"details": {
@ -147,7 +144,8 @@ def __check_SSL(*_):
}
}
try:
requests.get(config("SITE_URL"), verify=True, allow_redirects=True)
async with httpx.AsyncClient() as client:
await client.get(config("SITE_URL"), follow_redirects=True)
except Exception as e:
print("!! health failed: SSL Certificate")
print(str(e))
@ -158,23 +156,23 @@ def __check_SSL(*_):
}
async def __get_sessions_stats(*_):
    """Return total captured sessions/events across all non-deleted projects.

    Accepts and ignores positional args so it can sit in the health-check
    dispatch map alongside the other probes.
    """
    # `async with` (not `await with`, which is a SyntaxError) is how an
    # asynchronous context manager is entered.
    async with pg_client.cursor() as cur:
        constraints = ["projects.deleted_at IS NULL"]
        query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c,
                                       COALESCE(SUM(events_count),0) AS e_c
                                FROM public.projects_stats
                                INNER JOIN public.projects USING(project_id)
                                WHERE {" AND ".join(constraints)};""")
        await cur.execute(query)
        row = await cur.fetchone()
    return {
        "numberOfSessionsCaptured": row["s_c"],
        "numberOfEventCaptured": row["e_c"]
    }
def get_health():
async def get_health():
health_map = {
"databases": {
"postgres": __check_database_pg
@ -205,7 +203,7 @@ def get_health():
return __process_health(health_map=health_map)
def __process_health(health_map):
async def __process_health(health_map):
response = dict(health_map)
for parent_key in health_map.keys():
if config(f"SKIP_H_{parent_key.upper()}", cast=bool, default=False):
@ -215,14 +213,14 @@ def __process_health(health_map):
if config(f"SKIP_H_{parent_key.upper()}_{element_key.upper()}", cast=bool, default=False):
response[parent_key].pop(element_key)
else:
response[parent_key][element_key] = health_map[parent_key][element_key]()
await response[parent_key][element_key] = health_map[parent_key][element_key]()
else:
response[parent_key] = health_map[parent_key]()
response[parent_key] = await health_map[parent_key]()
return response
def cron():
with pg_client.PostgresClient() as cur:
async def cron():
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT projects.project_id,
projects.created_at,
projects.sessions_last_check_at,
@ -232,8 +230,8 @@ def cron():
LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL
ORDER BY project_id;""")
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
for r in rows:
insert = False
if r["last_update_at"] is None:
@ -266,8 +264,8 @@ def cron():
AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""",
params)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is not None:
params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"]
@ -283,20 +281,20 @@ def cron():
last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""",
params)
cur.execute(query)
await cur.execute(query)
# this cron is used to correct the sessions&events count every week
def weekly_cron():
with pg_client.PostgresClient(long_query=True) as cur:
async def weekly_cron():
async with pg_client.cursor(long_query=True) as cur:
query = cur.mogrify("""SELECT project_id,
projects_stats.last_update_at
FROM public.projects
LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL
ORDER BY project_id;""")
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
for r in rows:
if r["last_update_at"] is None:
continue
@ -313,16 +311,16 @@ def weekly_cron():
AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""",
params)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
if row is not None:
params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"]
query = cur.mogrify("""UPDATE public.projects_stats
query = await cur.mogrify("""UPDATE public.projects_stats
SET sessions_count=%(sessions_count)s,
events_count=%(events_count)s,
last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""",
params)
cur.execute(query)
await cur.execute(query)

View file

@ -3,7 +3,7 @@ import schemas
from chalicelib.utils import helper, pg_client
def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
async def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
"project_id": project_id, "url": data.url}
constraints = ["sessions.project_id = %(project_id)s",
@ -52,7 +52,7 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage"
query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
LEFT JOIN issues AS mis USING (issue_id)"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT selector, {q_count}
FROM {query_from}
WHERE {" AND ".join(constraints)}
@ -62,7 +62,7 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
# print(query.decode('UTF-8'))
# print("---------")
try:
cur.execute(query)
await cur.execute(query)
except Exception as err:
print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
@ -70,5 +70,5 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
print(data)
print("--------------------")
raise err
rows = cur.fetchall()
rows = await cur.fetchall()
return helper.list_to_camel_case(rows)

View file

@ -27,16 +27,16 @@ class BaseIntegration(ABC):
return None
return integration["token"]
async def get(self):
    """Fetch this user's stored OAuth record for the current provider.

    :return: the ``oauth_authentication`` row camel-cased, or None when the
             user never authenticated with this provider.
    """
    async with pg_client.cursor() as cur:
        await cur.execute(
            cur.mogrify(
                """SELECT *
                   FROM public.oauth_authentication
                   WHERE user_id=%(user_id)s AND provider=%(provider)s;""",
                # provider names are stored lower-case
                {"user_id": self._user_id, "provider": self.provider.lower()})
        )
        return helper.dict_to_camel_case(await cur.fetchone())
@abstractmethod
def get_obfuscated(self):

View file

@ -26,10 +26,10 @@ class GitHubIntegration(integration_base.BaseIntegration):
return None
return {"token": helper.obfuscate(text=integration["token"]), "provider": self.provider.lower()}
def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur:
async def update(self, changes, obfuscate=False):
async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute(
await cur.execute(
cur.mogrify(f"""\
UPDATE public.oauth_authentication
SET {','.join(sub_query)}
@ -38,7 +38,7 @@ class GitHubIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id,
**changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"])
return w
@ -46,9 +46,9 @@ class GitHubIntegration(integration_base.BaseIntegration):
def _add(self, data):
pass
def add(self, token, obfuscate=False):
with pg_client.PostgresClient() as cur:
cur.execute(
async def add(self, token, obfuscate=False):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
INSERT INTO public.oauth_authentication(user_id, provider, provider_user_id, token)
VALUES(%(user_id)s, 'github', '', %(token)s)
@ -56,15 +56,15 @@ class GitHubIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id,
"token": token})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"])
return w
# TODO: make a revoke token call
def delete(self):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(self):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
DELETE FROM public.oauth_authentication
WHERE user_id=%(user_id)s AND provider=%(provider)s;""",

View file

@ -42,16 +42,16 @@ class JIRAIntegration(integration_base.BaseIntegration):
return self._issue_handler
# TODO: remove this once jira-oauth is done
def get(self):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get(self):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT username, token, url
FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",
{"user_id": self._user_id})
)
data = helper.dict_to_camel_case(cur.fetchone())
data = helper.dict_to_camel_case(await cur.fetchone())
if data is None:
return
@ -68,10 +68,10 @@ class JIRAIntegration(integration_base.BaseIntegration):
integration["provider"] = self.provider.lower()
return integration
def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur:
async def update(self, changes, obfuscate=False):
async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute(
await cur.execute(
cur.mogrify(f"""\
UPDATE public.jira_cloud
SET {','.join(sub_query)}
@ -80,19 +80,19 @@ class JIRAIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id,
**changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if obfuscate:
w["token"] = obfuscate_string(w["token"])
return self.get()
return await self.get()
# TODO: make this generic for all issue tracking integrations
def _add(self, data):
print("a pretty defined abstract method")
return
def add(self, username, token, url):
with pg_client.PostgresClient() as cur:
cur.execute(
async def add(self, username, token, url):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
INSERT INTO public.jira_cloud(username, token, user_id,url)
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
@ -100,12 +100,12 @@ class JIRAIntegration(integration_base.BaseIntegration):
{"user_id": self._user_id, "username": username,
"token": token, "url": url})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
return self.get()
def delete(self):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(self):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
DELETE FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",

View file

@ -2,9 +2,9 @@ import schemas
from chalicelib.utils import pg_client
def get_global_integrations_status(tenant_id, user_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_global_integrations_status(tenant_id, user_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""\
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
@ -57,7 +57,7 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()
current_integrations = await cur.fetchone()
result = []
for k in current_integrations.keys():
result.append({"name": k, "integrated": current_integrations[k]})

View file

@ -4,9 +4,9 @@ from chalicelib.utils import pg_client
SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER]
def get_available_integrations(user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_available_integrations(user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""\
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
@ -17,19 +17,19 @@ def get_available_integrations(user_id):
WHERE user_id = %(user_id)s)) AS jira;""",
{"user_id": user_id})
)
current_integrations = cur.fetchone()
current_integrations = await cur.fetchone()
return dict(current_integrations)
async def __get_default_integration(user_id):
    """Pick the user's default issue-tracking tool.

    GitHub takes precedence when both integrations exist; returns None when
    the user has configured neither.
    """
    current_integrations = await get_available_integrations(user_id)
    return integration_github.PROVIDER if current_integrations["github"] else integration_jira_cloud.PROVIDER if \
        current_integrations["jira"] else None
def get_integration(tenant_id, user_id, tool=None, for_delete=False):
async def get_integration(tenant_id, user_id, tool=None, for_delete=False):
if tool is None:
tool = __get_default_integration(user_id=user_id)
tool = await __get_default_integration(user_id=user_id)
if tool is None:
return {"errors": [f"no issue tracking tool found"]}, None
tool = tool.upper()

View file

@ -28,8 +28,8 @@ NAME_QUERY = """\
"""
def get(project_id, issue_id):
with pg_client.PostgresClient() as cur:
async def get(project_id, issue_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""\
SELECT
@ -39,16 +39,16 @@ def get(project_id, issue_id):
AND issue_id = %(issue_id)s;""",
{"project_id": project_id, "issue_id": issue_id}
)
cur.execute(query=query)
data = cur.fetchone()
await cur.execute(query=query)
data = await cur.fetchone()
if data is not None:
data["title"] = helper.get_issue_title(data["type"])
return helper.dict_to_camel_case(data)
def get_by_session_id(session_id, project_id, issue_type=None):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_by_session_id(session_id, project_id, issue_type=None):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""\
SELECT *
FROM events_common.issues
@ -59,12 +59,12 @@ def get_by_session_id(session_id, project_id, issue_type=None):
ORDER BY timestamp;""",
{"session_id": session_id, "project_id": project_id, "type": issue_type})
)
return helper.list_to_camel_case(cur.fetchall())
return helper.list_to_camel_case(await cur.fetchall())
def get_types_by_project(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_types_by_project(project_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""SELECT type,
{ORDER_QUERY}>=0 AS visible,
{ORDER_QUERY} AS order,
@ -73,7 +73,7 @@ def get_types_by_project(project_id):
FROM public.issues
WHERE project_id = %(project_id)s) AS types
ORDER BY "order";""", {"project_id": project_id}))
return helper.list_to_camel_case(cur.fetchall())
return helper.list_to_camel_case(await cur.fetchall())
def get_all_types():

View file

@ -14,8 +14,8 @@ class JobStatus:
CANCELLED = "cancelled"
def get(job_id, project_id):
with pg_client.PostgresClient() as cur:
async def get(job_id, project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""SELECT *
FROM public.jobs
@ -23,8 +23,8 @@ def get(job_id, project_id):
AND project_id= %(project_id)s;""",
{"job_id": job_id, "project_id": project_id}
)
cur.execute(query=query)
data = cur.fetchone()
await cur.execute(query=query)
data = await cur.fetchone()
if data is None:
return {}
@ -33,23 +33,23 @@ def get(job_id, project_id):
return helper.dict_to_camel_case(data)
async def get_all(project_id):
    """List every job belonging to a project.

    :return: camel-cased job rows, with datetime columns converted to
             epoch-ms timestamps for the front-end.
    """
    async with pg_client.cursor() as cur:
        query = cur.mogrify(
            """SELECT *
               FROM public.jobs
               WHERE project_id = %(project_id)s;""",
            {"project_id": project_id}
        )
        await cur.execute(query=query)
        data = await cur.fetchall()
    # mutate rows in place: datetime -> timestamp, then camel-case keys
    for record in data:
        format_datetime(record)
    return helper.list_to_camel_case(data)
def create(project_id, user_id):
with pg_client.PostgresClient() as cur:
async def create(project_id, user_id):
async with pg_client.cursor() as cur:
job = {"status": "scheduled",
"project_id": project_id,
"action": Actions.DELETE_USER_DATA,
@ -62,21 +62,21 @@ def create(project_id, user_id):
VALUES (%(project_id)s, %(description)s, %(status)s, %(action)s,%(reference_id)s, %(start_at)s)
RETURNING *;""", job)
cur.execute(query=query)
await cur.execute(query=query)
r = cur.fetchone()
r = await cur.fetchone()
format_datetime(r)
record = helper.dict_to_camel_case(r)
return record
async def cancel_job(job_id, job):
    """Mark a job as cancelled and persist the change via ``update``."""
    job["status"] = JobStatus.CANCELLED
    await update(job_id=job_id, job=job)
def update(job_id, job):
with pg_client.PostgresClient() as cur:
async def update(job_id, job):
async with pg_client.cursor() as cur:
job_data = {
"job_id": job_id,
"errors": job.get("errors"),
@ -91,9 +91,9 @@ def update(job_id, job):
WHERE job_id = %(job_id)s
RETURNING *;""", job_data)
cur.execute(query=query)
await cur.execute(query=query)
r = cur.fetchone()
r = await cur.fetchone()
format_datetime(r)
record = helper.dict_to_camel_case(r)
return record
@ -105,8 +105,8 @@ def format_datetime(r):
r["start_at"] = TimeUTC.datetime_to_timestamp(r["start_at"])
def __get_session_ids_by_user_ids(project_id, user_ids):
with pg_client.PostgresClient() as cur:
async def __get_session_ids_by_user_ids(project_id, user_ids):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""SELECT session_id
FROM public.sessions
@ -114,19 +114,19 @@ def __get_session_ids_by_user_ids(project_id, user_ids):
AND user_id IN %(userId)s
LIMIT 1000;""",
{"project_id": project_id, "userId": tuple(user_ids)})
cur.execute(query=query)
ids = cur.fetchall()
await cur.execute(query=query)
ids = await cur.fetchall()
return [s["session_id"] for s in ids]
async def __delete_sessions_by_session_ids(session_ids):
    """Hard-delete the given sessions.

    Uses an unlimited-timeout cursor because the IN-list (and the cascade)
    can be large.

    :param session_ids: iterable of session ids; no-op when empty.
    """
    # Guard: an empty tuple would render as invalid SQL (`IN ()`).
    if not session_ids:
        return
    async with pg_client.cursor(unlimited_query=True) as cur:
        query = cur.mogrify(
            """DELETE FROM public.sessions
               WHERE session_id IN %(session_ids)s""",
            {"session_ids": tuple(session_ids)}
        )
        await cur.execute(query=query)
def __delete_session_mobs_by_session_ids(session_ids, project_id):
@ -134,31 +134,31 @@ def __delete_session_mobs_by_session_ids(session_ids, project_id):
sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id)
async def get_scheduled_jobs():
    """Return all scheduled jobs whose start time has already passed (due jobs)."""
    async with pg_client.cursor() as cur:
        query = cur.mogrify(
            """SELECT *
               FROM public.jobs
               WHERE status = %(status)s
                 AND start_at <= (now() at time zone 'utc');""",
            {"status": JobStatus.SCHEDULED})
        await cur.execute(query=query)
        data = await cur.fetchall()
    return helper.list_to_camel_case(data)
def execute_jobs():
jobs = get_scheduled_jobs()
async def execute_jobs():
jobs = await get_scheduled_jobs()
for job in jobs:
print(f"Executing jobId:{job['jobId']}")
try:
if job["action"] == Actions.DELETE_USER_DATA:
session_ids = __get_session_ids_by_user_ids(project_id=job["projectId"],
session_ids = await __get_session_ids_by_user_ids(project_id=job["projectId"],
user_ids=[job["referenceId"]])
if len(session_ids) > 0:
print(f"Deleting {len(session_ids)} sessions")
__delete_sessions_by_session_ids(session_ids=session_ids)
__delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
await __delete_sessions_by_session_ids(session_ids=session_ids)
await __delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
else:
raise Exception(f"The action '{job['action']}' not supported.")
@ -169,4 +169,4 @@ def execute_jobs():
job["errors"] = str(e)
print(f"Job failed {job['jobId']}")
update(job["jobId"], job)
await update(job["jobId"], job)

View file

@ -1,13 +1,14 @@
from chalicelib.core import log_tools
import requests
import httpx
from schemas import schemas
IN_TY = "bugsnag"
def list_projects(auth_token):
r = requests.get(url="https://api.bugsnag.com/user/organizations",
async def list_projects(auth_token):
async with httpx.AsyncClient() as client:
r = await client.get(url="https://api.bugsnag.com/user/organizations",
params={"per_page": "100"},
headers={"Authorization": "token " + auth_token, "X-Version": "2"})
if r.status_code != 200:
@ -19,8 +20,8 @@ def list_projects(auth_token):
orgs = []
for i in r.json():
pr = requests.get(url="https://api.bugsnag.com/organizations/%s/projects" % i["id"],
async with httpx.AsyncClient() as client:
pr = await client.get(url="https://api.bugsnag.com/organizations/%s/projects" % i["id"],
params={"per_page": "100"},
headers={"Authorization": "token " + auth_token, "X-Version": "2"})
if pr.status_code != 200:
@ -33,43 +34,43 @@ def list_projects(auth_token):
return orgs
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "authorizationToken" in changes:
options["authorizationToken"] = changes.pop("authorizationToken")
if "bugsnagProjectId" in changes:
options["bugsnagProjectId"] = changes.pop("bugsnagProjectId")
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, authorization_token, bugsnag_project_id):
async def add(tenant_id, project_id, authorization_token, bugsnag_project_id):
options = {
"bugsnagProjectId": bugsnag_project_id,
"authorizationToken": authorization_token,
}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data:schemas.IntegrationBugsnagSchema ):
s = get(project_id)
async def add_edit(tenant_id, project_id, data:schemas.IntegrationBugsnagSchema ):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"authorizationToken": data.authorization_token,
"bugsnagProjectId": data.bugsnag_project_id})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
authorization_token=data.authorization_token,
bugsnag_project_id=data.bugsnag_project_id)

View file

@ -69,15 +69,15 @@ def list_log_groups(aws_access_key_id, aws_secret_access_key, region):
return __find_groups(logs, None)
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "authorization_token" in changes:
options["authorization_token"] = changes.pop("authorization_token")
@ -85,34 +85,34 @@ def update(tenant_id, project_id, changes):
options["project_id"] = changes.pop("project_id")
if len(options.keys()) > 0:
changes["options"] = options
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
def add(tenant_id, project_id, aws_access_key_id, aws_secret_access_key, log_group_name, region):
return log_tools.add(project_id=project_id, integration=IN_TY,
async def add(tenant_id, project_id, aws_access_key_id, aws_secret_access_key, log_group_name, region):
return await log_tools.add(project_id=project_id, integration=IN_TY,
options={"awsAccessKeyId": aws_access_key_id,
"awsSecretAccessKey": aws_secret_access_key,
"logGroupName": log_group_name, "region": region})
def save_new_token(project_id, token):
update(tenant_id=None, project_id=project_id, changes={"last_token": token})
async def save_new_token(project_id, token):
await update(tenant_id=None, project_id=project_id, changes={"last_token": token})
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"awsAccessKeyId": data.aws_access_key_id,
"awsSecretAccessKey": data.aws_secret_access_key,
"logGroupName": data.log_group_name,
"region": data.region})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
aws_access_key_id=data.aws_access_key_id,
aws_secret_access_key=data.aws_secret_access_key,

View file

@ -4,41 +4,41 @@ from schemas import schemas
IN_TY = "datadog"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "apiKey" in changes:
options["apiKey"] = changes["apiKey"]
if "applicationKey" in changes:
options["applicationKey"] = changes["applicationKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, api_key, application_key):
async def add(tenant_id, project_id, api_key, application_key):
options = {"apiKey": api_key, "applicationKey": application_key}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"apiKey": data.api_key,
"applicationKey": data.application_key})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
api_key=data.api_key,
application_key=data.application_key)

View file

@ -9,15 +9,15 @@ logging.getLogger('elasticsearch').level = logging.ERROR
IN_TY = "elasticsearch"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "host" in changes:
@ -31,28 +31,28 @@ def update(tenant_id, project_id, changes):
if "port" in changes:
options["port"] = changes["port"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, host, api_key_id, api_key, indexes, port):
async def add(tenant_id, project_id, host, api_key_id, api_key, indexes, port):
options = {
"host": host, "apiKeyId": api_key_id, "apiKey": api_key, "indexes": indexes, "port": port
}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"host": data.host, "apiKeyId": data.api_key_id, "apiKey": data.api_key,
"indexes": data.indexes, "port": data.port})
else:
return add(tenant_id=tenant_id, project_id=project_id,
return await add(tenant_id=tenant_id, project_id=project_id,
host=data.host, api_key=data.api_key, api_key_id=data.api_key_id,
indexes=data.indexes, port=data.port)

View file

@ -4,15 +4,15 @@ from schemas import schemas
IN_TY = "newrelic"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "region" in changes:
options["region"] = changes["region"]
@ -21,28 +21,28 @@ def update(tenant_id, project_id, changes):
if "xQueryKey" in changes:
options["xQueryKey"] = changes["xQueryKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, application_id, x_query_key, region):
async def add(tenant_id, project_id, application_id, x_query_key, region):
# region=False => US; region=True => EU
options = {"applicationId": application_id, "xQueryKey": x_query_key, "region": region}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"applicationId": data.application_id,
"xQueryKey": data.x_query_key,
"region": data.region})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
application_id=data.application_id,
x_query_key=data.x_query_key,

View file

@ -4,36 +4,36 @@ from schemas import schemas
IN_TY = "rollbar"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "accessToken" in changes:
options["accessToken"] = changes["accessToken"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, access_token):
async def add(tenant_id, project_id, access_token):
options = {"accessToken": access_token}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"accessToken": data.access_token})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
access_token=data.access_token)

View file

@ -1,19 +1,20 @@
import requests
import httpx
from chalicelib.core import log_tools
from schemas import schemas
IN_TY = "sentry"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "organizationSlug" in changes:
options["organizationSlug"] = changes["organizationSlug"]
@ -22,40 +23,41 @@ def update(tenant_id, project_id, changes):
if "token" in changes:
options["token"] = changes["token"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)
def add(tenant_id, project_id, project_slug, organization_slug, token):
async def add(tenant_id, project_id, project_slug, organization_slug, token):
options = {
"organizationSlug": organization_slug, "projectSlug": project_slug, "token": token
}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"projectSlug": data.project_slug,
"organizationSlug": data.organization_slug,
"token": data.token})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
project_slug=data.project_slug,
organization_slug=data.organization_slug,
token=data.token)
def proxy_get(tenant_id, project_id, event_id):
i = get(project_id)
async def proxy_get(tenant_id, project_id, event_id):
i = await get(project_id)
if i is None:
return {}
r = requests.get(
async with httpx.AsyncClient() as client:
r = await client.get(
url="https://sentry.io/api/0/projects/%(organization_slug)s/%(project_slug)s/events/%(event_id)s/" % {
"organization_slug": i["organizationSlug"], "project_slug": i["projectSlug"], "event_id": event_id},
headers={"Authorization": "Bearer " + i["token"]})

View file

@ -4,39 +4,39 @@ from schemas import schemas
IN_TY = "stackdriver"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "serviceAccountCredentials" in changes:
options["serviceAccountCredentials"] = changes["serviceAccountCredentials"]
if "logName" in changes:
options["logName"] = changes["logName"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, service_account_credentials, log_name):
async def add(tenant_id, project_id, service_account_credentials, log_name):
options = {"serviceAccountCredentials": service_account_credentials, "logName": log_name}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"serviceAccountCredentials": data.service_account_credentials,
"logName": data.log_name})
else:
return add(tenant_id=tenant_id, project_id=project_id,
return await add(tenant_id=tenant_id, project_id=project_id,
service_account_credentials=data.service_account_credentials,
log_name=data.log_name)

View file

@ -4,15 +4,15 @@ from schemas import schemas
IN_TY = "sumologic"
def get_all(tenant_id):
return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
async def get_all(tenant_id):
return await log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)
def get(project_id):
return log_tools.get(project_id=project_id, integration=IN_TY)
async def get(project_id):
return await log_tools.get(project_id=project_id, integration=IN_TY)
def update(tenant_id, project_id, changes):
async def update(tenant_id, project_id, changes):
options = {}
if "region" in changes:
@ -23,31 +23,31 @@ def update(tenant_id, project_id, changes):
if "accessKey" in changes:
options["accessKey"] = changes["accessKey"]
return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
return await log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)
def add(tenant_id, project_id, access_id, access_key, region):
async def add(tenant_id, project_id, access_id, access_key, region):
options = {
"accessId": access_id,
"accessKey": access_key,
"region": region
}
return log_tools.add(project_id=project_id, integration=IN_TY, options=options)
return await log_tools.add(project_id=project_id, integration=IN_TY, options=options)
def delete(tenant_id, project_id):
return log_tools.delete(project_id=project_id, integration=IN_TY)
async def delete(tenant_id, project_id):
return await log_tools.delete(project_id=project_id, integration=IN_TY)
def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema):
s = get(project_id)
async def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema):
s = await get(project_id)
if s is not None:
return update(tenant_id=tenant_id, project_id=project_id,
return await update(tenant_id=tenant_id, project_id=project_id,
changes={"accessId": data.access_id,
"accessKey": data.access_key,
"region": data.region})
else:
return add(tenant_id=tenant_id,
return await add(tenant_id=tenant_id,
project_id=project_id,
access_id=data.access_id,
access_key=data.access_key,

View file

@ -4,10 +4,10 @@ import json
EXCEPT = ["jira_server", "jira_cloud"]
def search(project_id):
async def search(project_id):
result = []
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""\
SELECT supported_integrations.name,
@ -21,17 +21,17 @@ def search(project_id):
FROM unnest(enum_range(NULL::integration_provider)) AS supported_integrations(name);""",
{"project_id": project_id})
)
r = cur.fetchall()
r = await cur.fetchall()
for k in r:
if k["count"] > 0 and k["name"] not in EXCEPT:
result.append({"value": helper.key_to_camel_case(k["name"]), "type": "logTool"})
return {"data": result}
def add(project_id, integration, options):
async def add(project_id, integration, options):
options = json.dumps(options)
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""\
INSERT INTO public.integrations(project_id, provider, options)
@ -39,13 +39,13 @@ def add(project_id, integration, options):
RETURNING *;""",
{"project_id": project_id, "provider": integration, "options": options})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))
def get(project_id, integration):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get(project_id, integration):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""\
SELECT integrations.*
@ -56,13 +56,13 @@ def get(project_id, integration):
LIMIT 1;""",
{"project_id": project_id, "provider": integration})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))
def get_all_by_type(integration):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_all_by_type(integration):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""\
SELECT integrations.*
@ -70,19 +70,19 @@ def get_all_by_type(integration):
WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""",
{"provider": integration})
)
r = cur.fetchall()
r = await cur.fetchall()
return helper.list_to_camel_case(r, flatten=True)
def edit(project_id, integration, changes):
async def edit(project_id, integration, changes):
if "projectId" in changes:
changes.pop("project_id")
if "integration" in changes:
changes.pop("integration")
if len(changes.keys()) == 0:
return None
with pg_client.PostgresClient() as cur:
cur.execute(
with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
UPDATE public.integrations
SET options=options||%(changes)s
@ -90,12 +90,12 @@ def edit(project_id, integration, changes):
RETURNING *;""",
{"project_id": project_id, "provider": integration, "changes": json.dumps(changes)})
)
return helper.dict_to_camel_case(helper.flatten_nested_dicts(cur.fetchone()))
return helper.dict_to_camel_case(helper.flatten_nested_dicts(await cur.fetchone()))
def delete(project_id, integration):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(project_id, integration):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
DELETE FROM public.integrations
WHERE project_id=%(project_id)s AND provider=%(provider)s;""",
@ -104,9 +104,9 @@ def delete(project_id, integration):
return {"state": "success"}
def get_all_by_tenant(tenant_id, integration):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_all_by_tenant(tenant_id, integration):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT integrations.*
FROM public.integrations INNER JOIN public.projects USING(project_id)
@ -114,5 +114,5 @@ def get_all_by_tenant(tenant_id, integration):
AND projects.deleted_at ISNULL;""",
{"provider": integration})
)
r = cur.fetchall()
r = await cur.fetchall()
return helper.list_to_camel_case(r, flatten=True)

View file

@ -13,8 +13,8 @@ def column_names():
return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]
def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
async def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool:
async with pg_client.cursor() as cur:
constraints = column_names()
if exclude_index:
del constraints[exclude_index - 1]
@ -26,21 +26,21 @@ def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -
AND deleted_at ISNULL
AND ({" OR ".join(constraints)})) AS exists;""",
{"project_id": project_id, "name": name})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
return row["exists"]
def get(project_id):
with pg_client.PostgresClient() as cur:
async def get(project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query)
metas = cur.fetchone()
await cur.execute(query=query)
metas = await cur.fetchone()
results = []
if metas is not None:
for i, k in enumerate(metas.keys()):
@ -49,17 +49,17 @@ def get(project_id):
return results
def get_batch(project_ids):
async def get_batch(project_ids):
if project_ids is None or len(project_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT project_id, {",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query=query)
full_metas = cur.fetchall()
await cur.execute(query=query)
full_metas = await cur.fetchall()
results = {}
if full_metas is not None and len(full_metas) > 0:
for metas in full_metas:
@ -80,7 +80,7 @@ def index_to_colname(index):
def __get_available_index(project_id):
used_indexs = get(project_id)
used_indexs = await get(project_id)
used_indexs = [i["index"] for i in used_indexs]
if len(used_indexs) >= MAX_INDEXES:
return -1
@ -90,15 +90,15 @@ def __get_available_index(project_id):
return i
def __edit(project_id, col_index, colname, new_name):
async def __edit(project_id, col_index, colname, new_name):
if new_name is None or len(new_name) == 0:
return {"errors": ["key value invalid"]}
old_metas = get(project_id)
old_metas = await get(project_id)
old_metas = {k["index"]: k for k in old_metas}
if col_index not in list(old_metas.keys()):
return {"errors": ["custom field not found"]}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if old_metas[col_index]["key"] != new_name:
query = cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s
@ -106,76 +106,76 @@ def __edit(project_id, col_index, colname, new_name):
AND deleted_at ISNULL
RETURNING {colname};""",
{"project_id": project_id, "value": new_name})
cur.execute(query=query)
new_name = cur.fetchone()[colname]
await cur.execute(query=query)
new_name = await cur.fetchone()[colname]
old_metas[col_index]["key"] = new_name
return {"data": old_metas[col_index]}
def edit(tenant_id, project_id, index: int, new_name: str):
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=index):
async def edit(tenant_id, project_id, index: int, new_name: str):
if await __exists_by_name(project_id=project_id, name=new_name, exclude_index=index):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
return __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)
return await __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)
def delete(tenant_id, project_id, index: int):
async def delete(tenant_id, project_id, index: int):
index = int(index)
old_segments = get(project_id)
old_segments = await get(project_id)
old_segments = [k["index"] for k in old_segments]
if index not in old_segments:
return {"errors": ["custom field not found"]}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
colname = index_to_colname(index)
query = cur.mogrify(f"""UPDATE public.projects
SET {colname}= NULL
WHERE project_id = %(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
await cur.execute(query=query)
query = cur.mogrify(f"""UPDATE public.sessions
SET {colname}= NULL
WHERE project_id = %(project_id)s
AND {colname} IS NOT NULL
""",
{"project_id": project_id})
cur.execute(query=query)
await cur.execute(query=query)
return {"data": get(project_id)}
return {"data": await get(project_id)}
def add(tenant_id, project_id, new_name):
async def add(tenant_id, project_id, new_name):
index = __get_available_index(project_id=project_id)
if index < 1:
return {"errors": ["maximum allowed metadata reached"]}
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
colname = index_to_colname(index)
query = cur.mogrify(f"""UPDATE public.projects
SET {colname}= %(key)s
WHERE project_id =%(project_id)s
RETURNING {colname};""",
{"key": new_name, "project_id": project_id})
cur.execute(query=query)
col_val = cur.fetchone()[colname]
await cur.execute(query=query)
col_val = await cur.fetchone()[colname]
return {"data": {"key": col_val, "index": index}}
def search(tenant_id, project_id, key, value):
async def search(tenant_id, project_id, key, value):
value = value + "%"
s_query = []
for f in column_names():
s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(s_query)}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""",
{"key": key, "project_id": project_id})
cur.execute(query=query)
all_metas = cur.fetchone()
await cur.execute(query=query)
all_metas = await cur.fetchone()
key = None
for c in all_metas:
if all_metas[c]:
@ -189,29 +189,29 @@ def search(tenant_id, project_id, key, value):
ORDER BY "{key}"
LIMIT 20;""",
{"value": value, "project_id": project_id})
cur.execute(query=query)
value = cur.fetchall()
await cur.execute(query=query)
value = await cur.fetchall()
return {"data": [k[key] for k in value]}
def get_available_keys(project_id):
all_metas = get(project_id=project_id)
async def get_available_keys(project_id):
all_metas = await get(project_id=project_id)
return [k["key"] for k in all_metas]
def get_by_session_id(project_id, session_id):
all_metas = get(project_id=project_id)
async def get_by_session_id(project_id, session_id):
all_metas = await get(project_id=project_id)
if len(all_metas) == 0:
return []
keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s
AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
cur.execute(query=query)
session_metas = cur.fetchall()
await cur.execute(query=query)
session_metas = await cur.fetchall()
results = []
for m in session_metas:
r = {}
@ -221,18 +221,18 @@ def get_by_session_id(project_id, session_id):
return results
def get_keys_by_projects(project_ids):
async def get_keys_by_projects(project_ids):
if project_ids is None or len(project_ids) == 0:
return {}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT project_id,{",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
results = {}
for r in rows:
project_id = r.pop("project_id")
@ -270,7 +270,7 @@ def get_keys_by_projects(project_ids):
# return {"errors": ["duplicate keys"]}
# to_delete = list(set(old_indexes) - set(new_indexes))
#
# with pg_client.PostgresClient() as cur:
# async with pg_client.cursor() as cur:
# for d in to_delete:
# delete(tenant_id=tenant_id, project_id=project_id, index=d)
#
@ -284,10 +284,10 @@ def get_keys_by_projects(project_ids):
# return {"data": get(project_id)}
def get_remaining_metadata_with_count(tenant_id):
all_projects = projects.get_projects(tenant_id=tenant_id)
async def get_remaining_metadata_with_count(tenant_id):
all_projects = await projects.get_projects(tenant_id=tenant_id)
results = []
used_metas = get_batch([p["projectId"] for p in all_projects])
used_metas = await get_batch([p["projectId"] for p in all_projects])
for p in all_projects:
if MAX_INDEXES < 0:
remaining = -1

File diff suppressed because it is too large Load diff

View file

@ -3,9 +3,9 @@ from chalicelib.utils.storage import StorageClient
from decouple import config
def sign_keys(project_id, session_id, keys):
async def sign_keys(project_id, session_id, keys):
result = []
project_key = projects.get_project_key(project_id)
project_key = await projects.get_project_key(project_id)
for k in keys:
result.append(StorageClient.get_presigned_url_for_sharing(bucket=config("iosBucket"),
key=f"{project_key}/{session_id}/{k}",

View file

@ -4,9 +4,9 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def get_all(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_all(tenant_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
SELECT notifications.*,
user_viewed_notifications.notification_id NOTNULL AS viewed
@ -19,15 +19,15 @@ def get_all(tenant_id, user_id):
LIMIT 100;""",
{"user_id": user_id})
)
rows = helper.list_to_camel_case(cur.fetchall())
rows = helper.list_to_camel_case(await cur.fetchall())
for r in rows:
r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
return rows
def get_all_count(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_all_count(tenant_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
SELECT COALESCE(COUNT(notifications.*),0) AS count
FROM public.notifications
@ -37,19 +37,19 @@ def get_all_count(tenant_id, user_id):
WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""",
{"user_id": user_id})
)
row = cur.fetchone()
row = await cur.fetchone()
return row
def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
async def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
if len(notification_ids) == 0 and endTimestamp is None:
return False
if startTimestamp is None:
startTimestamp = 0
notification_ids = [(user_id, id) for id in notification_ids]
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if len(notification_ids) > 0:
cur.executemany(
await cur.executemany(
"INSERT INTO public.user_viewed_notifications(user_id, notification_id) VALUES (%s,%s) ON CONFLICT DO NOTHING;",
notification_ids)
else:
@ -64,14 +64,14 @@ def view_notification(user_id, notification_ids=[], tenant_id=None, startTimesta
"endTimestamp": endTimestamp}
# print('-------------------')
# print(cur.mogrify(query, params))
cur.execute(cur.mogrify(query, params))
await cur.execute(cur.mogrify(query, params))
return True
def create(notifications):
async def create(notifications):
if len(notifications) == 0:
return []
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
values = []
for n in notifications:
clone = dict(n)
@ -86,10 +86,10 @@ def create(notifications):
"(%(userId)s, %(title)s, %(description)s, %(buttonText)s, %(buttonUrl)s, %(imageUrl)s,%(options)s)",
clone).decode('UTF-8')
)
cur.execute(
await cur.execute(
f"""INSERT INTO public.notifications(user_id, title, description, button_text, button_url, image_url, options)
VALUES {",".join(values)} RETURNING *;""")
rows = helper.list_to_camel_case(cur.fetchall())
rows = helper.list_to_camel_case(await cur.fetchall())
for r in rows:
r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
r["viewed"] = False

View file

@ -377,7 +377,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
avg_time_from_previous
FROM n{i})""")
with pg_client.PostgresClient() as cur:
    async with pg_client.cursor() as cur:
pg_query = f"""\
WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
FROM public.sessions {" ".join(start_join)}
@ -515,7 +515,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# time_constraint=True)
# pg_sub_query.append("user_id IS NOT NULL")
# pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)")
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week,
# COUNT(DISTINCT connexions_list.user_id) AS users_count,
# ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
@ -562,7 +562,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
# time_constraint=True)
# pg_sub_query.append("user_id IS NOT NULL")
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
# FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
# COUNT(DISTINCT connexions_list.user_id) AS users_count,
@ -628,7 +628,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# event_column = JOURNEY_TYPES[event_type]["column"]
# pg_sub_query.append(f"feature.{event_column} = %(value)s")
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# if default:
# # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -718,7 +718,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
#
# pg_sub_query.append(f"feature.{event_column} = %(value)s")
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# if default:
# # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -800,7 +800,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"]
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
# FROM sessions
# WHERE {" AND ".join(pg_sub_query)}
@ -866,7 +866,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
# FROM sessions
# WHERE {" AND ".join(pg_sub_query)}
@ -935,7 +935,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
# pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
# if default:
@ -995,7 +995,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# extra_values["user_id"] = f["value"]
# event_table = JOURNEY_TYPES[event_type]["table"]
# event_column = JOURNEY_TYPES[event_type]["column"]
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s")
# pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s")
# pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
@ -1056,7 +1056,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"]
# pg_sub_query.append(f"length({event_column})>2")
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg
# FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
# WHERE {" AND ".join(pg_sub_query)}
@ -1090,7 +1090,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
# extra_values["user_id"] = f["value"]
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart
# FROM (SELECT generated_timestamp AS timestamp,
# COALESCE(COUNT(users), 0) AS count
@ -1120,7 +1120,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
# pg_sub_query.append("user_id IS NOT NULL")
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition
# FROM (SELECT number_of_days, COUNT(user_id) AS count
# FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days
@ -1163,7 +1163,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# event_column = JOURNEY_TYPES[event_type]["column"]
# pg_sub_query.append(f"feature.{event_column} = %(value)s")
#
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# if default:
# # get most used value
# pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
@ -1218,7 +1218,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# "value": helper.string_to_sql_like(text.lower()),
# "platform_0": platform}
# if feature_type == "ALL":
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# sub_queries = []
# for e in JOURNEY_TYPES:
# sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type"
@ -1230,7 +1230,7 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
# cur.execute(cur.mogrify(pg_query, params))
# rows = cur.fetchall()
# elif JOURNEY_TYPES.get(feature_type) is not None:
# with pg_client.PostgresClient() as cur:
# with pg_client.cursor() as cur:
# pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type"
# FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id)
# WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s

View file

@ -10,8 +10,8 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
async def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE deleted_at IS NULL
@ -19,42 +19,42 @@ def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
{"AND project_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
return row["exists"]
def __update(tenant_id, project_id, changes):
async def __update(tenant_id, project_id, changes):
if len(changes.keys()) == 0:
return None
sub_query = []
for key in changes.keys():
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.projects
SET {" ,".join(sub_query)}
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def __create(tenant_id, data):
with pg_client.PostgresClient() as cur:
async def __create(tenant_id, data):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""INSERT INTO public.projects (name, platform, active)
VALUES (%(name)s,%(platform)s,TRUE)
RETURNING project_id;""",
data)
cur.execute(query=query)
project_id = cur.fetchone()["project_id"]
await cur.execute(query=query)
        project_id = (await cur.fetchone())["project_id"]
    return await get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
with pg_client.PostgresClient() as cur:
async def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
async with pg_client.cursor() as cur:
extra_projection = ""
if gdpr:
extra_projection += ',s.gdpr'
@ -77,8 +77,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
WHERE s.deleted_at IS NULL
ORDER BY s.name {") AS raw" if recorded else ""};""",
{"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4})
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
# if recorded is requested, check if it was saved or computed
if recorded:
u_values = []
@ -100,7 +100,7 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded)
WHERE projects.project_id=u.project_id;""", params)
cur.execute(query)
await cur.execute(query)
else:
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
@ -109,8 +109,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
return helper.list_to_camel_case(rows)
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
async def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
async with pg_client.cursor() as cur:
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
@ -129,12 +129,12 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
async def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
    if await __exists_by_name(name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not skip_authorization:
@ -144,7 +144,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
    return {"data": await __create(tenant_id=tenant_id, data=data.model_dump())}
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
async def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
    if await __exists_by_name(name=data.name, exclude_id=project_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
admin = users.get(user_id=user_id, tenant_id=tenant_id)
@ -154,44 +154,44 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
changes=data.model_dump())}
def delete(tenant_id, user_id, project_id):
async def delete(tenant_id, user_id, project_id):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE project_id = %(project_id)s;""",
{"project_id": project_id})
cur.execute(query=query)
await cur.execute(query=query)
return {"data": {"state": "success"}}
def get_gdpr(project_id):
with pg_client.PostgresClient() as cur:
async def get_gdpr(project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()["gdpr"]
await cur.execute(query=query)
        row = (await cur.fetchone())["gdpr"]
row["projectId"] = project_id
return row
def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
with pg_client.PostgresClient() as cur:
async def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects
SET gdpr = gdpr|| %(gdpr)s::jsonb
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump())})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
if not row:
return {"errors": ["something went wrong"]}
row = row["gdpr"]
@ -199,8 +199,8 @@ def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
return row
def get_by_project_key(project_key):
with pg_client.PostgresClient() as cur:
async def get_by_project_key(project_key):
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT project_id,
project_key,
platform,
@ -209,51 +209,51 @@ def get_by_project_key(project_key):
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",
{"project_key": project_key})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
return helper.dict_to_camel_case(row)
def get_project_key(project_id):
with pg_client.PostgresClient() as cur:
async def get_project_key(project_id):
    async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT project_key
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
project = cur.fetchone()
await cur.execute(query=query)
project = await cur.fetchone()
return project["project_key"] if project is not None else None
def get_capture_status(project_id):
with pg_client.PostgresClient() as cur:
async def get_capture_status(project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def update_capture_status(project_id, changes: schemas.SampleRateSchema):
async def update_capture_status(project_id, changes: schemas.SampleRateSchema):
sample_rate = changes.rate
if changes.capture_all:
sample_rate = 100
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
cur.execute(query=query)
await cur.execute(query=query)
return changes
def get_conditions(project_id):
with pg_client.PostgresClient() as cur:
async def get_conditions(project_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT p.sample_rate AS rate, p.conditional_capture,
COALESCE(
array_agg(
@ -275,15 +275,15 @@ def get_conditions(project_id):
AND p.deleted_at IS NULL
GROUP BY p.sample_rate, p.conditional_capture;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
row = helper.dict_to_camel_case(row)
row["conditions"] = [schemas.ProjectConditions(**c) for c in row["conditions"]]
return row
def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str]:
async def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str]:
errors = []
names = [condition.name for condition in conditions]
@ -300,7 +300,7 @@ def validate_conditions(conditions: List[schemas.ProjectConditions]) -> List[str
return errors
def update_conditions(project_id, changes: schemas.ProjectSettings):
async def update_conditions(project_id, changes: schemas.ProjectSettings):
    validation_errors = await validate_conditions(changes.conditions)
if validation_errors:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=validation_errors)
@ -309,7 +309,7 @@ def update_conditions(project_id, changes: schemas.ProjectSettings):
for condition in changes.conditions:
conditions.append(condition.model_dump())
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""UPDATE public.projects
SET
sample_rate= %(sample_rate)s,
@ -321,12 +321,12 @@ def update_conditions(project_id, changes: schemas.ProjectSettings):
"sample_rate": changes.rate,
"conditional_capture": changes.conditional_capture
})
cur.execute(query=query)
await cur.execute(query=query)
return update_project_conditions(project_id, changes.conditions)
def create_project_conditions(project_id, conditions):
async def create_project_conditions(project_id, conditions):
rows = []
# insert all conditions rows with single sql query
@ -345,18 +345,18 @@ def create_project_conditions(project_id, conditions):
RETURNING condition_id, {", ".join(columns)}
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
params = [
(project_id, c.name, c.capture_rate, json.dumps([filter_.model_dump() for filter_ in c.filters]))
for c in conditions]
query = cur.mogrify(sql, params)
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return rows
def update_project_condition(project_id, conditions):
async def update_project_condition(project_id, conditions):
values = []
params = {
"project_id": project_id,
@ -375,21 +375,21 @@ def update_project_condition(project_id, conditions):
WHERE c.condition_id = projects_conditions.condition_id AND project_id = %(project_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, params)
cur.execute(query)
await cur.execute(query)
def delete_project_condition(project_id, ids):
async def delete_project_condition(project_id, ids):
sql = """
DELETE FROM projects_conditions
WHERE condition_id IN %(ids)s
AND project_id= %(project_id)s;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(sql, {"project_id": project_id, "ids": tuple(ids)})
cur.execute(query)
await cur.execute(query)
def update_project_conditions(project_id, conditions):
@ -416,12 +416,12 @@ def update_project_conditions(project_id, conditions):
return get_conditions(project_id)
def get_projects_ids(tenant_id):
with pg_client.PostgresClient() as cur:
async def get_projects_ids(tenant_id):
async with pg_client.cursor() as cur:
query = f"""SELECT s.project_id
FROM public.projects AS s
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;"""
cur.execute(query=query)
rows = cur.fetchall()
await cur.execute(query=query)
rows = await cur.fetchall()
return [r["project_id"] for r in rows]

View file

@ -3,14 +3,14 @@ from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper, smtp
def reset(data: schemas.ForgetPasswordPayloadSchema):
async def reset(data: schemas.ForgetPasswordPayloadSchema):
print(f"====================== reset password {data.email}")
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}
if not smtp.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_users = users.get_by_email_only(data.email)
a_users = await users.get_by_email_only(data.email)
if a_users:
invitation_link = users.generate_new_invitation(user_id=a_users["userId"])
email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link)

View file

@ -2,8 +2,8 @@ from chalicelib.utils import helper, pg_client
from decouple import config
def get_by_session_id(session_id, project_id, start_ts, duration):
with pg_client.PostgresClient() as cur:
async def get_by_session_id(session_id, project_id, start_ts, duration):
async with pg_client.cursor() as cur:
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
duration = 0
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
@ -27,6 +27,6 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
AND resources.timestamp<=%(res_end_ts)s;"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
cur.execute(cur.mogrify(ch_query, params))
rows = cur.fetchall()
await cur.execute(cur.mogrify(ch_query, params))
rows = await cur.fetchall()
return helper.list_to_camel_case(rows)

View file

@ -5,26 +5,26 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def create(project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur:
async def create(project_id, user_id, data: schemas.SavedSearchSchema):
async with pg_client.cursor() as cur:
data = data.model_dump()
data["filter"] = json.dumps(data["filter"])
query = cur.mogrify("""\
INSERT INTO public.searches (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""", {"user_id": user_id, "project_id": project_id, **data})
cur.execute(
await cur.execute(
query
)
r = cur.fetchone()
r = await cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
r = helper.dict_to_camel_case(r)
return {"data": r}
def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur:
async def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
async with pg_client.cursor() as cur:
data = data.model_dump()
data["filter"] = json.dumps(data["filter"])
query = cur.mogrify(f"""\
@ -36,19 +36,19 @@ def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
AND project_id= %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"search_id": search_id, "project_id": project_id, "user_id": user_id, **data})
cur.execute(
await cur.execute(
query
)
r = cur.fetchone()
r = await cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
r = helper.dict_to_camel_case(r)
return r
def get_all(project_id, user_id, details=False):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_all(project_id, user_id, details=False):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""\
SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
@ -61,7 +61,7 @@ def get_all(project_id, user_id, details=False):
)
)
rows = cur.fetchall()
rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
@ -72,9 +72,9 @@ def get_all(project_id, user_id, details=False):
return rows
def delete(project_id, search_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(project_id, search_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
UPDATE public.searches
SET deleted_at = timezone('utc'::text, now())
@ -87,9 +87,9 @@ def delete(project_id, search_id, user_id):
return {"state": "success"}
def get(search_id, project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get(search_id, project_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT
*
@ -102,7 +102,7 @@ def get(search_id, project_id, user_id):
)
)
f = helper.dict_to_camel_case(cur.fetchone())
f = helper.dict_to_camel_case(await cur.fetchone())
if f is None:
return None

View file

@ -39,7 +39,7 @@ COALESCE((SELECT TRUE
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
async def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
platform="web"):
if data.bookmarked:
@ -58,7 +58,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args["sessions_limit_e"] = 200
meta_keys = []
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if errors_only:
main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id,
COALESCE((SELECT TRUE
@ -132,8 +132,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.debug(main_query)
logging.debug("--------------------")
try:
cur.execute(main_query)
sessions = cur.fetchone()
await cur.execute(main_query)
sessions = await cur.fetchone()
except Exception as err:
logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
logging.warning(main_query.decode('UTF-8'))
@ -142,7 +142,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.warning("--------------------")
raise err
if errors_only or ids_only:
return helper.list_to_camel_case(cur.fetchall())
return helper.list_to_camel_case(await cur.fetchall())
if count_only:
return helper.dict_to_camel_case(sessions)
@ -170,7 +170,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
# TODO: remove "table of" search from this function
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
async def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
@ -186,7 +186,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if metric_type == schemas.MetricType.timeseries:
if view_type == schemas.MetricTimeseriesViewType.line_chart:
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
@ -208,7 +208,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.debug(main_query)
logging.debug("--------------------")
try:
cur.execute(main_query)
await cur.execute(main_query)
except Exception as err:
logging.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
logging.warning(main_query.decode('UTF-8'))
@ -217,9 +217,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.warning("--------------------")
raise err
if view_type == schemas.MetricTimeseriesViewType.line_chart:
sessions = cur.fetchall()
sessions = await cur.fetchall()
else:
sessions = cur.fetchone()["count"]
sessions = await cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table:
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
@ -269,15 +269,15 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(main_query)
sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]:
s.pop("rn")
return sessions
def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
async def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
@ -291,7 +291,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = ""
@ -340,20 +340,20 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(main_query)
sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]:
s.pop("rn")
return sessions
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
async def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
full_args["issues_limit"] = data.limit
full_args["issues_limit_s"] = (data.page - 1) * data.limit
full_args["issues_limit_e"] = data.page * data.limit
@ -377,8 +377,8 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema,
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(main_query)
sessions = helper.dict_to_camel_case(await cur.fetchone())
for s in sessions["values"]:
s.pop("rn")
@ -1095,7 +1095,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
async def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
if project_id is None:
all_projects = projects.get_projects(tenant_id=tenant_id)
else:
@ -1117,7 +1117,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
project_ids = list(available_keys.keys())
if len(project_ids) > 0:
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
sub_queries = []
for i in project_ids:
col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
@ -1125,9 +1125,9 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
{"id": i, "value": m_value}).decode('UTF-8'))
query = f"""SELECT {", ".join(sub_queries)};"""
cur.execute(query=query)
await cur.execute(query=query)
rows = cur.fetchone()
rows = await cur.fetchone()
sub_queries = []
for i in rows.keys():
@ -1151,15 +1151,15 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
)""",
{"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
if len(sub_queries) > 0:
cur.execute("\nUNION\n".join(sub_queries))
rows = cur.fetchall()
await cur.execute("\nUNION\n".join(sub_queries))
rows = await cur.fetchall()
for i in rows:
results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
return results
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
async def get_user_sessions(project_id, user_id, start_date, end_date):
async with pg_client.cursor() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
if start_date is not None:
constraints.append("s.start_ts >= %(startDate)s")
@ -1170,7 +1170,7 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
FROM public.sessions AS s
WHERE {" AND ".join(constraints)}"""
cur.execute(cur.mogrify(f"""\
await cur.execute(cur.mogrify(f"""\
SELECT s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
@ -1193,12 +1193,12 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
"endDate": end_date
}))
sessions = cur.fetchall()
sessions = await cur.fetchall()
return helper.list_to_camel_case(sessions)
def get_session_user(project_id, user_id):
with pg_client.PostgresClient() as cur:
async def get_session_user(project_id, user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""\
SELECT
@ -1216,32 +1216,32 @@ def get_session_user(project_id, user_id):
""",
{"project_id": project_id, "userId": user_id}
)
cur.execute(query=query)
data = cur.fetchone()
await cur.execute(query=query)
data = await cur.fetchone()
return helper.dict_to_camel_case(data)
def count_all():
with pg_client.PostgresClient(unlimited_query=True) as cur:
cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
row = cur.fetchone()
async def count_all():
async with pg_client.cursor(unlimited_query=True) as cur:
await cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
row = await cur.fetchone()
return row.get("count", 0) if row else 0
def session_exists(project_id, session_id):
with pg_client.PostgresClient() as cur:
async def session_exists(project_id, session_id):
async with pg_client.cursor() as cur:
query = cur.mogrify("""SELECT 1
FROM public.sessions
WHERE session_id=%(session_id)s
AND project_id=%(project_id)s
LIMIT 1;""",
{"project_id": project_id, "session_id": session_id})
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
return row is not None
def check_recording_status(project_id: int) -> dict:
async def check_recording_status(project_id: int) -> dict:
query = f"""
WITH project_sessions AS (SELECT COUNT(1) AS full_count,
COUNT(1) FILTER ( WHERE duration IS NOT NULL) AS nn_duration_count
@ -1258,10 +1258,10 @@ def check_recording_status(project_id: int) -> dict:
FROM project_sessions;
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, {"project_id": project_id})
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
return {
"recordingStatus": row["recording_status"],
@ -1269,11 +1269,11 @@ def check_recording_status(project_id: int) -> dict:
}
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
async def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
ascending: bool = False) -> dict:
if session_ids is None or len(session_ids) == 0:
return {"total": 0, "sessions": []}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
meta_keys = metadata.get(project_id=project_id)
params = {"project_id": project_id, "session_ids": tuple(session_ids)}
order_direction = 'ASC' if ascending else 'DESC'
@ -1284,8 +1284,8 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
AND session_id IN %(session_ids)s
ORDER BY {sort_by} {order_direction};""", params)
cur.execute(main_query)
rows = cur.fetchall()
await cur.execute(main_query)
rows = await cur.fetchall()
if len(meta_keys) > 0:
for s in rows:
s["metadata"] = {}

View file

@ -6,8 +6,8 @@ from chalicelib.core import integrations_manager, integration_base_issue
import json
def __get_saved_data(project_id, session_id, issue_id, tool):
with pg_client.PostgresClient() as cur:
async def __get_saved_data(project_id, session_id, issue_id, tool):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\
SELECT *
FROM public.assigned_sessions
@ -17,13 +17,13 @@ def __get_saved_data(project_id, session_id, issue_id, tool):
AND provider = %(provider)s;\
""",
{"session_id": session_id, "issue_id": issue_id, "provider": tool.lower()})
cur.execute(
await cur.execute(
query
)
return helper.dict_to_camel_case(cur.fetchone())
return helper.dict_to_camel_case(await cur.fetchone())
def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type,
async def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type,
integration_project_id):
error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id)
if error is not None:
@ -43,7 +43,7 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne
return integration_base_issue.proxy_issues_handler(e)
if issue is None or "id" not in issue:
return {"errors": ["something went wrong while creating the issue"]}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""\
INSERT INTO public.assigned_sessions(session_id, issue_id, created_by, provider,provider_data)
VALUES (%(session_id)s, %(issue_id)s, %(creator_id)s, %(provider)s,%(provider_data)s);\
@ -51,14 +51,14 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne
{"session_id": session_id, "creator_id": creator_id,
"issue_id": issue["id"], "provider": integration.provider.lower(),
"provider_data": json.dumps({"integrationProjectId": integration_project_id})})
cur.execute(
await cur.execute(
query
)
issue["provider"] = integration.provider.lower()
return issue
def get_all(project_id, user_id):
async def get_all(project_id, user_id):
available_integrations = integrations_manager.get_available_integrations(user_id=user_id)
no_integration = not any(available_integrations.values())
if no_integration:
@ -67,7 +67,7 @@ def get_all(project_id, user_id):
extra_query = ["sessions.project_id = %(project_id)s"]
if not all_integrations:
extra_query.append("provider IN %(providers)s")
with pg_client.PostgresClient() as cur:
with pg_client.cursor() as cur:
query = cur.mogrify(f"""\
SELECT assigned_sessions.*
FROM public.assigned_sessions
@ -85,12 +85,12 @@ def get_all(project_id, user_id):
return assignments
def get_by_session(tenant_id, user_id, project_id, session_id):
async def get_by_session(tenant_id, user_id, project_id, session_id):
available_integrations = integrations_manager.get_available_integrations(user_id=user_id)
if not any(available_integrations.values()):
return []
extra_query = ["session_id = %(session_id)s", "provider IN %(providers)s"]
with pg_client.PostgresClient() as cur:
with pg_client.cursor() as cur:
query = cur.mogrify(f"""\
SELECT *
FROM public.assigned_sessions

View file

@ -1,4 +1,4 @@
from decouple import config
!from decouple import config
from chalicelib.utils.storage import StorageClient

View file

@ -2,24 +2,24 @@ import schemas
from chalicelib.utils import pg_client
def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions(user_id, session_id)
VALUES (%(userId)s,%(session_id)s)
RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id})
)
row = cur.fetchone()
row = await cur.fetchone()
if row:
return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]}
def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions
WHERE user_id = %(userId)s
@ -27,13 +27,13 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session
RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id})
)
row = cur.fetchone()
row = await cur.fetchone()
if row:
return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]}
def favorite_session(context: schemas.CurrentContext, project_id, session_id):
async def favorite_session(context: schemas.CurrentContext, project_id, session_id):
if favorite_session_exists(user_id=context.user_id, session_id=session_id):
return remove_favorite_session(context=context, project_id=project_id,
session_id=session_id)
@ -41,9 +41,9 @@ def favorite_session(context: schemas.CurrentContext, project_id, session_id):
return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
def favorite_session_exists(session_id, user_id=None):
with pg_client.PostgresClient() as cur:
cur.execute(
async def favorite_session_exists(session_id, user_id=None):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT session_id
FROM public.user_favorite_sessions
@ -52,13 +52,13 @@ def favorite_session_exists(session_id, user_id=None):
{'AND user_id = %(userId)s' if user_id else ''};""",
{"userId": user_id, "session_id": session_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return r is not None
def get_start_end_timestamp(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_start_end_timestamp(project_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
"""SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
@ -67,5 +67,5 @@ def get_start_end_timestamp(project_id, user_id):
AND project_id = %(project_id)s;""",
{"userId": user_id, "project_id": project_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])

View file

@ -10,8 +10,8 @@ from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur:
async def get_note(tenant_id, project_id, user_id, note_id, share=None):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id)
@ -22,16 +22,16 @@ def get_note(tenant_id, project_id, user_id, note_id, share=None):
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
"note_id": note_id, "share": share})
cur.execute(query=query)
row = cur.fetchone()
await cur.execute(query=query)
row = await cur.fetchone()
row = helper.dict_to_camel_case(row)
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
def get_session_notes(tenant_id, project_id, session_id, user_id):
with pg_client.PostgresClient() as cur:
async def get_session_notes(tenant_id, project_id, session_id, user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
@ -43,16 +43,16 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
{"project_id": project_id, "user_id": user_id,
"tenant_id": tenant_id, "session_id": session_id})
cur.execute(query=query)
rows = cur.fetchall()
await cur.execute(query=query)
rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
with pg_client.PostgresClient() as cur:
async def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
async with pg_client.cursor() as cur:
conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"]
extra_params = {}
if data.tags and len(data.tags) > 0:
@ -73,29 +73,29 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
cur.execute(query=query)
rows = cur.fetchall()
await cur.execute(query=query)
rows = await cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
with pg_client.PostgresClient() as cur:
async def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
**data.model_dump()})
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query)
result = helper.dict_to_camel_case(await cur.fetchone())
if result:
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
return result
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
async def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
sub_query = []
if data.message is not None:
sub_query.append("message = %(message)s")
@ -105,8 +105,8 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
sub_query.append("is_public = %(is_public)s")
if data.timestamp is not None:
sub_query.append("timestamp = %(timestamp)s")
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""UPDATE public.sessions_notes
SET
{" ,".join(sub_query)}
@ -118,16 +118,16 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
)
row = helper.dict_to_camel_case(cur.fetchone())
row = helper.dict_to_camel_case(await cur.fetchone())
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
return {"errors": ["Note not found"]}
def delete(tenant_id, user_id, project_id, note_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(tenant_id, user_id, project_id, note_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(""" UPDATE public.sessions_notes
SET deleted_at = timezone('utc'::text, now())
WHERE note_id = %(note_id)s
@ -139,7 +139,7 @@ def delete(tenant_id, user_id, project_id, note_id):
return {"data": {"state": "success"}}
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
async def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
@ -171,7 +171,7 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
)
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
async def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}

View file

@ -15,9 +15,9 @@ def __group_metadata(session, project_metadata):
# for backward compatibility
def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
async def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True):
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
extra_query = []
if include_fav_viewed:
extra_query.append("""COALESCE((SELECT TRUE
@ -41,9 +41,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id, "userId": context.user_id}
)
cur.execute(query=query)
await cur.execute(query=query)
data = cur.fetchone()
data = await cur.fetchone()
if data is not None:
data = helper.dict_to_camel_case(data)
if full_data:
@ -89,9 +89,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
return None
def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
async def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True):
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
extra_query = []
if include_fav_viewed:
extra_query.append("""COALESCE((SELECT TRUE
@ -115,9 +115,9 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id, "userId": context.user_id}
)
cur.execute(query=query)
await cur.execute(query=query)
data = cur.fetchone()
data = await cur.fetchone()
if data is not None:
data = helper.dict_to_camel_case(data)
if full_data:
@ -150,8 +150,8 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
return None
def get_events(project_id, session_id):
with pg_client.PostgresClient() as cur:
async def get_events(project_id, session_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(
f"""SELECT session_id, platform, start_ts, duration
FROM public.sessions AS s
@ -159,9 +159,9 @@ def get_events(project_id, session_id):
AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id}
)
cur.execute(query=query)
await cur.execute(query=query)
s_data = cur.fetchone()
s_data = await cur.fetchone()
if s_data is not None:
s_data = helper.dict_to_camel_case(s_data)
data = {}

View file

@ -1,9 +1,9 @@
from chalicelib.utils import pg_client
def view_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def view_session(project_id, user_id, session_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_sessions(user_id, session_id)
VALUES (%(userId)s,%(session_id)s)
ON CONFLICT DO NOTHING;""",

View file

@ -28,7 +28,7 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
30: 2.042}
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
async def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
"""
Add minimal timestamp
:param filter_d: dict contains events&filters&...
@ -232,14 +232,14 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
"endTimestamp": filter_d.endTimestamp,
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(n_stages_query, params)
logging.debug("---------------------------------------------------")
logging.debug(query)
logging.debug("---------------------------------------------------")
try:
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
except Exception as err:
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
logging.warning(query.decode('UTF-8'))

View file

@ -72,8 +72,8 @@ async def create_tenant(data: schemas.UserSignupSchema):
VALUES (%(projectName)s, TRUE)
RETURNING project_id, (SELECT api_key FROM t) AS api_key;"""
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(query, params))
with pg_client.cursor() as cur:
await cur.execute(cur.mogrify(query, params))
telemetry.new_client()
r = users.authenticate(email, password)

View file

@ -1,17 +1,17 @@
import requests
from decouple import config
from chalicelib.core import projects
def start_replay(project_id, session_id, device, os_version, mob_url):
r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={
"projectId": project_id,
"projectKey": projects.get_project_key(project_id),
"session_id": session_id,
"device": device,
"osVersion": os_version,
"mobUrl": mob_url
})
async with httpx.AsyncClient() as client:
r = await client.post(config("IOS_MIDDLEWARE") + "/replay", json={
"projectId": project_id,
"projectKey": projects.get_project_key(project_id),
"session_id": session_id,
"device": device,
"osVersion": os_version,
"mobUrl": mob_url
})
if r.status_code != 200:
print("failed replay middleware")
print("status code: %s" % r.status_code)

View file

@ -1,6 +1,6 @@
from urllib.parse import urlparse
import requests
import httpx
from decouple import config
from chalicelib.core import sourcemaps_parser
@ -67,9 +67,10 @@ def format_payload(p, truncate_to_first=False):
return []
def url_exists(url):
async def url_exists(url):
try:
r = requests.head(url, allow_redirects=False)
async with httpx.AsyncClient() as client:
r = await client.head(url, follow_redirects=False)
return r.status_code == 200 and "text/html" not in r.headers.get("Content-Type", "")
except Exception as e:
print(f"!! Issue checking if URL exists: {url}")

View file

@ -1,5 +1,4 @@
import requests
import httpx
from decouple import config
SMR_URL = config("sourcemaps_reader")
@ -20,14 +19,12 @@ def get_original_trace(key, positions, is_url=False):
"isURL": is_url
}
try:
r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
async with httpx.AsyncClient() as client:
r = await client.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
if r.status_code != 200:
print(f"Issue getting sourcemap status_code:{r.status_code}")
return None
return r.json()
except requests.exceptions.Timeout:
print("Timeout getting sourcemap")
return None
except Exception as e:
print("Issue getting sourcemap")
print(e)

View file

@ -3,7 +3,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client
def create_tag(project_id: int, data: schemas.TagCreate) -> int:
async def create_tag(project_id: int, data: schemas.TagCreate) -> int:
query = """
INSERT INTO public.tags (project_id, name, selector, ignore_click_rage, ignore_dead_click)
VALUES (%(project_id)s, %(name)s, %(selector)s, %(ignore_click_rage)s, %(ignore_dead_click)s)
@ -18,15 +18,15 @@ def create_tag(project_id: int, data: schemas.TagCreate) -> int:
'ignore_dead_click': data.ignoreDeadClick
}
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, data)
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
return row['tag_id']
def list_tags(project_id: int):
async def list_tags(project_id: int):
query = """
SELECT tag_id, name, selector, ignore_click_rage, ignore_dead_click
FROM public.tags
@ -34,36 +34,36 @@ def list_tags(project_id: int):
AND deleted_at IS NULL
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'project_id': project_id})
cur.execute(query)
rows = cur.fetchall()
await cur.execute(query)
rows = await cur.fetchall()
return helper.list_to_camel_case(rows)
def update_tag(project_id: int, tag_id: int, data: schemas.TagUpdate):
async def update_tag(project_id: int, tag_id: int, data: schemas.TagUpdate):
query = """
UPDATE public.tags
SET name = %(name)s
WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'tag_id': tag_id, 'name': data.name, 'project_id': project_id})
cur.execute(query)
await cur.execute(query)
return True
def delete_tag(project_id: int, tag_id: int):
async def delete_tag(project_id: int, tag_id: int):
query = """
UPDATE public.tags
SET deleted_at = now() at time zone 'utc'
WHERE tag_id = %(tag_id)s AND project_id = %(project_id)s
"""
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(query, {'tag_id': tag_id, 'project_id': project_id})
cur.execute(query)
await cur.execute(query)
return True

View file

@ -1,5 +1,5 @@
import httpx
from chalicelib.utils import pg_client
import requests
from chalicelib.core import license
@ -19,9 +19,9 @@ def process_data(data):
}
def compute():
with pg_client.PostgresClient(long_query=True) as cur:
cur.execute(
async def compute():
async with pg_client.cursor(long_query=True) as cur:
await cur.execute(
f"""UPDATE public.tenants
SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) +
(SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') +
@ -36,17 +36,19 @@ def compute():
RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
(SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);"""
)
data = cur.fetchone()
data = await cur.fetchone()
if len(data) > 0:
requests.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(data)]})
async with httpx.AsyncClient() as client:
await client.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(data)]})
def new_client():
with pg_client.PostgresClient() as cur:
cur.execute(
async def new_client():
async with pg_client.cursor() as cur:
await cur.execute(
f"""SELECT *, openreplay_version() AS version_number,
(SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email
FROM public.tenants
LIMIT 1;""")
data = cur.fetchone()
requests.post('https://api.openreplay.com/os/signup', json=process_data(data))
data = await cur.fetchone()
async with httpx.AsyncClient() as client:
await client.post('https://api.openreplay.com/os/signup', json=process_data(data))

View file

@ -3,8 +3,8 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client
def get_by_tenant_id(tenant_id):
with pg_client.PostgresClient() as cur:
async def get_by_tenant_id(tenant_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT tenants.tenant_id,
tenants.name,
tenants.api_key,
@ -15,12 +15,12 @@ def get_by_tenant_id(tenant_id):
FROM public.tenants
LIMIT 1;""",
{"tenantId": tenant_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def get_by_api_key(api_key):
with pg_client.PostgresClient() as cur:
async def get_by_api_key(api_key):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT 1 AS tenant_id,
tenants.name,
tenants.created_at
@ -28,41 +28,39 @@ def get_by_api_key(api_key):
WHERE tenants.api_key = %(api_key)s
LIMIT 1;""",
{"api_key": api_key})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def generate_new_api_key(tenant_id):
with pg_client.PostgresClient() as cur:
async def generate_new_api_key(tenant_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.tenants
SET api_key=generate_api_key(20)
RETURNING api_key;""",
{"tenant_id": tenant_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def edit_tenant(tenant_id, changes):
with pg_client.PostgresClient() as cur:
async def edit_tenant(tenant_id, changes):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.tenants
SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])}
RETURNING name, opt_out;""",
{"tenant_id": tenant_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query=query)
return helper.dict_to_camel_case(await cur.fetchone())
def tenants_exists_sync(use_pool=True):
async def tenants_exists_sync(use_pool=True):
async with pg_client.cursor(use_pool=use_pool) as cur:
cur.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
out = cur.fetchone()["exists"]
await cur.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
out = (await cur.fetchone())["exists"]
return out
async def tenants_exists(use_pool=True):
from app import app
async with app.state.postgresql.connection() as cnx:
async with cnx.transaction() as txn:
row = await cnx.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
row = await row.fetchone()
return row["exists"]
async with pg_client.cursor() as cur:
await cur.execute("SELECT EXISTS(SELECT 1 FROM public.tenants)")
row = await cur.fetchone()
return row["exists"]

View file

@ -3,9 +3,9 @@ from chalicelib.utils.storage import StorageClient
from decouple import config
def get_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def get_test_signals(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
SELECT *
FROM public.ut_tests_signals
LEFT JOIN public.ut_tests_tasks USING (task_id)
@ -13,22 +13,22 @@ def get_test_signals(session_id, project_id):
ORDER BY timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
rows = await cur.fetchall()
return helper.dict_to_camel_case(rows)
def has_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def has_test_signals(session_id, project_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
SELECT EXISTS(SELECT 1 FROM public.ut_tests_signals
WHERE session_id = %(session_id)s) AS has;""",
{"project_id": project_id, "session_id": session_id})
)
row = cur.fetchone()
row = await cur.fetchone()
return row.get("has")
def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool = True):
async def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool = True):
results = []
bucket_name = "uxtesting-records" # config("sessions_bucket")
k = f'{session_id}/ux_webcam_record.webm'

View file

@ -17,8 +17,8 @@ def __generate_invitation_token():
return secrets.token_urlsafe(64)
def create_new_member(email, invitation_token, admin, name, owner=False):
with pg_client.PostgresClient() as cur:
async def create_new_member(email, invitation_token, admin, name, owner=False):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\
WITH u AS (INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
@ -41,15 +41,15 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
{"email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"invitation_token": invitation_token})
cur.execute(query)
row = helper.dict_to_camel_case(cur.fetchone())
await cur.execute(query)
row = helper.dict_to_camel_case(await cur.fetchone())
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
def restore_member(user_id, email, invitation_token, admin, name, owner=False):
with pg_client.PostgresClient() as cur:
async def restore_member(user_id, email, invitation_token, admin, name, owner=False):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""\
WITH ua AS (UPDATE public.basic_authentication
SET invitation_token = %(invitation_token)s,
@ -78,16 +78,16 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
{"user_id": user_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member",
"name": name, "invitation_token": invitation_token})
cur.execute(query)
result = cur.fetchone()
cur.execute(query)
await cur.execute(query)
result = await cur.fetchone()
result["created_at"] = TimeUTC.datetime_to_timestamp(result["created_at"])
return helper.dict_to_camel_case(result)
def generate_new_invitation(user_id):
async def generate_new_invitation(user_id):
invitation_token = __generate_invitation_token()
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify("""\
UPDATE public.basic_authentication
SET invitation_token = %(invitation_token)s,
@ -97,13 +97,13 @@ def generate_new_invitation(user_id):
WHERE user_id=%(user_id)s
RETURNING invitation_token;""",
{"user_id": user_id, "invitation_token": invitation_token})
cur.execute(
await cur.execute(
query
)
return __get_invitation_link(cur.fetchone().pop("invitation_token"))
return __get_invitation_link((await cur.fetchone()).pop("invitation_token"))
def reset_member(tenant_id, editor_id, user_id_to_update):
async def reset_member(tenant_id, editor_id, user_id_to_update):
admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -113,7 +113,7 @@ def reset_member(tenant_id, editor_id, user_id_to_update):
return {"data": {"invitationLink": await generate_new_invitation(user_id_to_update)}}
def update(tenant_id, user_id, changes, output=True):
async def update(tenant_id, user_id, changes, output=True):
AUTH_KEYS = ["password", "invitationToken", "invitedAt", "changePwdExpireAt", "changePwdToken"]
if len(changes.keys()) == 0:
return None
@ -130,27 +130,27 @@ def update(tenant_id, user_id, changes, output=True):
else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
if len(sub_query_users) > 0:
query = cur.mogrify(f"""\
UPDATE public.users
SET {" ,".join(sub_query_users)}
WHERE users.user_id = %(user_id)s;""",
{"user_id": user_id, **changes})
cur.execute(query)
await cur.execute(query)
if len(sub_query_bauth) > 0:
query = cur.mogrify(f"""\
UPDATE public.basic_authentication
SET {" ,".join(sub_query_bauth)}
WHERE basic_authentication.user_id = %(user_id)s;""",
{"user_id": user_id, **changes})
cur.execute(query)
await cur.execute(query)
if not output:
return None
return get(user_id=user_id, tenant_id=tenant_id)
def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks):
async def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks):
admin = get(tenant_id=tenant_id, user_id=user_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -184,23 +184,23 @@ def __get_invitation_link(invitation_token):
return config("SITE_URL") + config("invitation_link") % invitation_token
def allow_password_change(user_id, delta_min=10):
async def allow_password_change(user_id, delta_min=10):
pass_token = secrets.token_urlsafe(8)
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.basic_authentication
SET change_pwd_expire_at = timezone('utc'::text, now()+INTERVAL '%(delta)s MINUTES'),
change_pwd_token = %(pass_token)s
WHERE user_id = %(user_id)s""",
{"user_id": user_id, "delta": delta_min, "pass_token": pass_token})
cur.execute(
await cur.execute(
query
)
return pass_token
def get(user_id, tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get(user_id, tenant_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
users.user_id,
@ -219,13 +219,13 @@ def get(user_id, tenant_id):
LIMIT 1;""",
{"userId": user_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def generate_new_api_key(user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def generate_new_api_key(user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""UPDATE public.users
SET api_key=generate_api_key(20)
@ -234,13 +234,13 @@ def generate_new_api_key(user_id):
RETURNING api_key;""",
{"userId": user_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def __get_account_info(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def __get_account_info(tenant_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT users.name,
tenants.name AS tenant_name,
@ -250,11 +250,11 @@ def __get_account_info(tenant_id, user_id):
AND users.deleted_at IS NULL;""",
{"tenantId": tenant_id, "userId": user_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
async def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
if changes.opt_out is not None or changes.tenantName is not None and len(changes.tenantName) > 0:
user = get(user_id=user_id, tenant_id=tenant_id)
if not user["superAdmin"] and not user["admin"]:
@ -275,7 +275,7 @@ def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
return {"data": __get_account_info(tenant_id=tenant_id, user_id=user_id)}
def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id):
async def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id):
user = get_member(user_id=user_id_to_update, tenant_id=tenant_id)
_changes = {}
if editor_id != user_id_to_update:
@ -302,9 +302,9 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema,
return {"data": user}
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_by_email_only(email):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
users.user_id,
@ -322,13 +322,13 @@ def get_by_email_only(email):
LIMIT 1;""",
{"email": email})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def get_member(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_member(tenant_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
users.user_id,
@ -348,7 +348,7 @@ def get_member(tenant_id, user_id):
ORDER BY name, user_id""",
{"user_id": user_id})
)
u = helper.dict_to_camel_case(cur.fetchone())
u = helper.dict_to_camel_case(await cur.fetchone())
if u:
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
if u["invitationToken"]:
@ -359,9 +359,9 @@ def get_member(tenant_id, user_id):
return u
def get_members(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_members(tenant_id):
async with pg_client.cursor() as cur:
await cur.execute(
f"""SELECT
users.user_id,
users.email,
@ -379,7 +379,7 @@ def get_members(tenant_id):
WHERE users.deleted_at IS NULL
ORDER BY name, user_id"""
)
r = cur.fetchall()
r = await cur.fetchall()
if len(r):
r = helper.list_to_camel_case(r)
for u in r:
@ -393,7 +393,7 @@ def get_members(tenant_id):
return []
def delete_member(user_id, tenant_id, id_to_delete):
async def delete_member(user_id, tenant_id, id_to_delete):
if user_id == id_to_delete:
return {"errors": ["unauthorized, cannot delete self"]}
@ -408,15 +408,15 @@ def delete_member(user_id, tenant_id, id_to_delete):
if to_delete["superAdmin"]:
return {"errors": ["cannot delete super admin"]}
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""UPDATE public.users
SET deleted_at = timezone('utc'::text, now()),
jwt_iat= NULL, jwt_refresh_jti= NULL,
jwt_refresh_iat= NULL
WHERE user_id=%(user_id)s;""",
{"user_id": id_to_delete}))
cur.execute(
await cur.execute(
cur.mogrify(f"""UPDATE public.basic_authentication
SET password= NULL, invitation_token= NULL,
invited_at= NULL, changed_at= NULL,
@ -426,7 +426,7 @@ def delete_member(user_id, tenant_id, id_to_delete):
return {"data": get_members(tenant_id=tenant_id)}
def change_password(tenant_id, user_id, email, old_password, new_password):
async def change_password(tenant_id, user_id, email, old_password, new_password):
item = get(tenant_id=tenant_id, user_id=user_id)
if item is None:
return {"errors": ["access denied"]}
@ -444,7 +444,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
}
def set_password_invitation(user_id, new_password):
async def set_password_invitation(user_id, new_password):
changes = {"password": new_password}
user = await update(tenant_id=-1, user_id=user_id, changes=changes)
r = await authenticate(user['email'], new_password)
@ -469,9 +469,9 @@ def set_password_invitation(user_id, new_password):
}
def email_exists(email):
with pg_client.PostgresClient() as cur:
cur.execute(
async def email_exists(email):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
count(user_id)
@ -482,13 +482,13 @@ def email_exists(email):
LIMIT 1;""",
{"email": email})
)
r = cur.fetchone()
r = await cur.fetchone()
return r["count"] > 0
def get_deleted_user_by_email(email):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_deleted_user_by_email(email):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
*
@ -499,13 +499,13 @@ def get_deleted_user_by_email(email):
LIMIT 1;""",
{"email": email})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def get_by_invitation_token(token, pass_token=None):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_by_invitation_token(token, pass_token=None):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
*,
@ -518,13 +518,13 @@ def get_by_invitation_token(token, pass_token=None):
LIMIT 1;""",
{"token": token, "pass_token": pass_token})
)
r = cur.fetchone()
r = await cur.fetchone()
return helper.dict_to_camel_case(r)
def auth_exists(user_id, jwt_iat):
with pg_client.PostgresClient() as cur:
cur.execute(
async def auth_exists(user_id, jwt_iat):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""SELECT user_id, EXTRACT(epoch FROM jwt_iat)::BIGINT AS jwt_iat
FROM public.users
WHERE user_id = %(userId)s
@ -532,15 +532,15 @@ def auth_exists(user_id, jwt_iat):
LIMIT 1;""",
{"userId": user_id})
)
r = cur.fetchone()
r = await cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and abs(jwt_iat - r["jwt_iat"]) <= 1
def refresh_auth_exists(user_id, jwt_jti=None):
with pg_client.PostgresClient() as cur:
cur.execute(
async def refresh_auth_exists(user_id, jwt_jti=None):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(f"""SELECT user_id
FROM public.users
WHERE user_id = %(userId)s
@ -549,12 +549,12 @@ def refresh_auth_exists(user_id, jwt_jti=None):
LIMIT 1;""",
{"userId": user_id, "jwt_jti": jwt_jti})
)
r = cur.fetchone()
r = await cur.fetchone()
return r is not None
def change_jwt_iat_jti(user_id):
with pg_client.PostgresClient() as cur:
async def change_jwt_iat_jti(user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'),
jwt_refresh_jti = 0,
@ -564,13 +564,13 @@ def change_jwt_iat_jti(user_id):
jwt_refresh_jti,
EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
{"user_id": user_id})
cur.execute(query)
await cur.execute(query)
row = await cur.fetchone()
return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
def refresh_jwt_iat_jti(user_id):
with pg_client.PostgresClient() as cur:
async def refresh_jwt_iat_jti(user_id):
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now()-INTERVAL '10s'),
jwt_refresh_jti = jwt_refresh_jti + 1
@ -579,13 +579,13 @@ def refresh_jwt_iat_jti(user_id):
jwt_refresh_jti,
EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
{"user_id": user_id})
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
def authenticate(email, password, for_change_password=False) -> dict | bool | None:
with pg_client.PostgresClient() as cur:
async def authenticate(email, password, for_change_password=False) -> dict | bool | None:
async with pg_client.cursor() as cur:
query = cur.mogrify(
f"""SELECT
users.user_id,
@ -602,8 +602,8 @@ def authenticate(email, password, for_change_password=False) -> dict | bool | No
LIMIT 1;""",
{"email": email, "password": password})
cur.execute(query)
r = cur.fetchone()
await cur.execute(query)
r = await cur.fetchone()
if r is not None:
if for_change_password:
@ -623,14 +623,14 @@ def authenticate(email, password, for_change_password=False) -> dict | bool | No
return None
def logout(user_id: int):
with pg_client.PostgresClient() as cur:
async def logout(user_id: int):
async with pg_client.cursor() as cur:
query = cur.mogrify(
"""UPDATE public.users
SET jwt_iat = NULL, jwt_refresh_jti = NULL, jwt_refresh_iat = NULL
WHERE user_id = %(user_id)s;""",
{"user_id": user_id})
cur.execute(query)
await cur.execute(query)
def refresh(user_id: int, tenant_id: int = -1) -> dict:
@ -645,9 +645,9 @@ def refresh(user_id: int, tenant_id: int = -1) -> dict:
}
def get_user_role(tenant_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_user_role(tenant_id, user_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
users.user_id,
@ -664,13 +664,13 @@ def get_user_role(tenant_id, user_id):
LIMIT 1""",
{"user_id": user_id})
)
return helper.dict_to_camel_case(cur.fetchone())
return helper.dict_to_camel_case(await cur.fetchone())
def get_user_settings(user_id):
async def get_user_settings(user_id):
# read user settings from users.settings:jsonb column
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""SELECT
settings
@ -680,10 +680,10 @@ def get_user_settings(user_id):
LIMIT 1""",
{"user_id": user_id})
)
return helper.dict_to_camel_case(cur.fetchone())
return helper.dict_to_camel_case(await cur.fetchone())
def update_user_module(user_id, data: schemas.ModuleStatus):
async def update_user_module(user_id, data: schemas.ModuleStatus):
# example data = {"settings": {"modules": ['ASSIST', 'METADATA']}
# update user settings from users.settings:jsonb column only update settings.modules
# if module property is not exists, it will be created
@ -705,10 +705,10 @@ def update_user_module(user_id, data: schemas.ModuleStatus):
return update_user_settings(user_id, settings)
def update_user_settings(user_id, settings):
async def update_user_settings(user_id, settings):
# update user settings from users.settings:jsonb column
with pg_client.PostgresClient() as cur:
cur.execute(
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify(
f"""UPDATE public.users
SET settings = %(settings)s
@ -717,4 +717,4 @@ def update_user_settings(user_id, settings):
RETURNING settings;""",
{"user_id": user_id, "settings": json.dumps(settings)})
)
return helper.dict_to_camel_case(cur.fetchone())
return helper.dict_to_camel_case(await cur.fetchone())

View file

@ -1,7 +1,7 @@
import logging
from typing import Optional
import requests
import httpx
from fastapi import HTTPException, status
import schemas
@ -9,66 +9,66 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def get_by_id(webhook_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_by_id(webhook_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
SELECT w.*
FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if w:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return w
def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""SELECT w.*
FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s
AND deleted_at ISNULL AND type=%(webhook_type)s;""",
{"webhook_id": webhook_id, "webhook_type": webhook_type})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if w:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return w
def get_by_type(tenant_id, webhook_type):
with pg_client.PostgresClient() as cur:
cur.execute(
async def get_by_type(tenant_id, webhook_type):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
{"type": webhook_type})
)
webhooks = helper.list_to_camel_case(cur.fetchall())
webhooks = helper.list_to_camel_case(await cur.fetchall())
for w in webhooks:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return webhooks
def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur:
cur.execute("""SELECT w.*
async def get_by_tenant(tenant_id, replace_none=False):
async with pg_client.cursor() as cur:
await cur.execute("""SELECT w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;""")
all = helper.list_to_camel_case(cur.fetchall())
all = helper.list_to_camel_case(await cur.fetchall())
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return all
def update(tenant_id, webhook_id, changes, replace_none=False):
async def update(tenant_id, webhook_id, changes, replace_none=False):
allow_update = ["name", "index", "authHeader", "endpoint"]
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update]
cur.execute(
await cur.execute(
cur.mogrify(f"""\
UPDATE public.webhooks
SET {','.join(sub_query)}
@ -76,7 +76,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
RETURNING *;""",
{"id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
if w is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.")
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
@ -87,18 +87,18 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
return w
def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", replace_none=False):
with pg_client.PostgresClient() as cur:
async def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", replace_none=False):
async with pg_client.cursor() as cur:
query = cur.mogrify("""\
INSERT INTO public.webhooks(endpoint,auth_header,type,name)
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
RETURNING *;""",
{"endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(
await cur.execute(
query
)
w = helper.dict_to_camel_case(cur.fetchone())
w = helper.dict_to_camel_case(await cur.fetchone())
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
if replace_none:
for k in w.keys():
@ -107,9 +107,9 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
async def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
tenant_id: Optional[int] = None) -> bool:
with pg_client.PostgresClient() as cur:
async with pg_client.cursor() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.webhooks
WHERE name ILIKE %(name)s
@ -117,12 +117,12 @@ def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = sch
AND type=%(webhook_type)s
{"AND webhook_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id, "webhook_type": webhook_type})
cur.execute(query)
row = cur.fetchone()
await cur.execute(query)
row = await cur.fetchone()
return row["exists"]
def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
async def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
if len(data.name) > 0 \
and await exists_by_name(name=data.name, exclude_id=data.webhook_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
@ -140,9 +140,9 @@ def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
replace_none=replace_none)
def delete(tenant_id, webhook_id):
with pg_client.PostgresClient() as cur:
cur.execute(
async def delete(tenant_id, webhook_id):
async with pg_client.cursor() as cur:
await cur.execute(
cur.mogrify("""\
UPDATE public.webhooks
SET deleted_at = (now() at time zone 'utc')
@ -153,7 +153,7 @@ def delete(tenant_id, webhook_id):
return {"data": {"state": "success"}}
def trigger_batch(data_list):
async def trigger_batch(data_list):
webhooks_map = {}
for w in data_list:
if w["destination"] not in webhooks_map:
@ -164,13 +164,14 @@ def trigger_batch(data_list):
await __trigger(hook=webhooks_map[w["destination"]], data=w["data"])
def __trigger(hook, data):
async def __trigger(hook, data):
if hook is not None and hook["type"] == 'webhook':
headers = {}
if hook["authHeader"] is not None and len(hook["authHeader"]) > 0:
headers = {"Authorization": hook["authHeader"]}
r = requests.post(url=hook["endpoint"], json=data, headers=headers)
async with httpx.AsyncClient() as client:
r = await client.post(url=hook["endpoint"], json=data, headers=headers)
if r.status_code != 200:
logging.error("=======> webhook: something went wrong for:")
logging.error(hook)

View file

@ -5,41 +5,41 @@ from chalicelib.utils.helper import get_issue_title
LOWEST_BAR_VALUE = 3
def get_config(user_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def get_config(user_id):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
SELECT users.weekly_report
FROM public.users
WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s
LIMIT 1;""", {"user_id": user_id}))
result = cur.fetchone()
result = await cur.fetchone()
return helper.dict_to_camel_case(result)
def edit_config(user_id, weekly_report):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
async def edit_config(user_id, weekly_report):
async with pg_client.cursor() as cur:
await cur.execute(cur.mogrify("""\
UPDATE public.users
SET weekly_report= %(weekly_report)s
WHERE users.deleted_at ISNULL
AND users.user_id=%(user_id)s
RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report}))
result = cur.fetchone()
result = await cur.fetchone()
return helper.dict_to_camel_case(result)
def cron():
async def cron():
if not smtp.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
return
_now = TimeUTC.now()
with pg_client.PostgresClient(unlimited_query=True) as cur:
async with pg_client.cursor(unlimited_query=True) as cur:
params = {"tomorrow": TimeUTC.midnight(delta_days=1),
"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)}
cur.execute(cur.mogrify("""\
await cur.execute(cur.mogrify("""\
SELECT project_id,
name AS project_name,
users.emails AS emails,
@ -86,7 +86,7 @@ def cron():
AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= %(5_week_ago)s
) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall()
projects_data = await cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report query: {_now2 - _now} ms")
_now = _now2
@ -103,7 +103,7 @@ def cron():
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
p["past_month_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1)
cur.execute(cur.mogrify("""
await cur.execute(cur.mogrify("""
SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
(
@ -119,7 +119,7 @@ def cron():
'1 day'::INTERVAL
) AS timestamp_i
ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall()
days_partition = await cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
@ -130,7 +130,7 @@ def cron():
else:
d["value"] = d["issues_count"] * 100 / max_days_partition
d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify("""\
await cur.execute(cur.mogrify("""\
SELECT type, COUNT(*) AS count
FROM events_common.issues INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s
@ -138,7 +138,7 @@ def cron():
GROUP BY type
ORDER BY count DESC, type
LIMIT 4;""", params))
issues_by_type = cur.fetchall()
issues_by_type = await cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
@ -149,7 +149,7 @@ def cron():
i["value"] = LOWEST_BAR_VALUE
else:
i["value"] = i["count"] * 100 / max_issues_by_type
cur.execute(cur.mogrify("""\
await cur.execute(cur.mogrify("""\
SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short,
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
COALESCE((SELECT JSONB_AGG(sub)
@ -170,7 +170,7 @@ def cron():
) AS timestamp_i
GROUP BY timestamp_i
ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall()
issues_breakdown_by_day = await cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
@ -186,7 +186,7 @@ def cron():
else:
j["value"] = j["count"] * 100 / max_days_partition
j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
cur.execute(cur.mogrify("""
await cur.execute(cur.mogrify("""
SELECT type,
COUNT(*) AS issue_count,
COUNT(DISTINCT session_id) AS sessions_count,
@ -219,7 +219,7 @@ def cron():
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
GROUP BY type
ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall()
issues_breakdown_list = await cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2

View file

@ -1,8 +1,7 @@
import logging
import requests
import httpx
from decouple import config
from chalicelib.utils import helper
logger = logging.getLogger(__name__)
@ -17,7 +16,8 @@ def is_valid(response):
logger.info("!! Captcha is disabled")
return True
url, secret = __get_captcha_config()
r = requests.post(url=url, data={"secret": secret, "response": response})
async with httpx.AsyncClient() as client:
r = await client.post(url=url, data={"secret": secret, "response": response})
if r.status_code != 200:
logger.warning("something went wrong")
logger.error(r)

View file

@ -1,7 +1,7 @@
import logging
from datetime import datetime
import requests
import httpx
from fastapi import HTTPException, status
logger = logging.getLogger(__name__)
@ -116,12 +116,13 @@ class githubV3Request:
def __get_request_header(self):
return {"Accept": "application/vnd.github.v3+json", 'Authorization': f'token {self.__token}'}
def get(self, url, params={}):
async def get(self, url, params={}):
results = []
params = {"per_page": 100, **params}
pages = {"next": f"{self.__base}{url}", "last": ""}
while len(pages.keys()) > 0 and pages["next"] != pages["last"]:
response = requests.get(pages["next"], headers=self.__get_request_header(), params=params)
async with httpx.AsyncClient() as client:
response = await client.get(pages["next"], headers=self.__get_request_header(), params=params)
pages = get_response_links(response)
result = response.json()
if response.status_code != 200:
@ -133,6 +134,7 @@ class githubV3Request:
results += result
return results
async def post(self, url, body):
    """Send an authenticated POST to the GitHub v3 REST API.

    Diff residue fix: the stale synchronous (pre-commit) lines that were
    interleaved here are removed, keeping only the async version.

    :param url: path appended to the API base (e.g. ``/repos/...``).
    :param body: JSON-serialisable request payload.
    :return: decoded JSON response body (no status-code check is done
        here, mirroring the previous behaviour).
    """
    async with httpx.AsyncClient() as client:
        response = await client.post(f"{self.__base}{url}",
                                     headers=self.__get_request_header(),
                                     json=body)
    # the response body is fully read before the client closes,
    # so calling .json() after the context manager exits is safe
    return response.json()

View file

@ -2,11 +2,9 @@ import logging
import time
from datetime import datetime

import httpx
import requests
from fastapi import HTTPException, status
from jira import JIRA
from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
logger = logging.getLogger(__name__)
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
@ -91,11 +89,12 @@ class JiraManager:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_issue_info(issue)
def get_issue_v3(self, issue_id: str):
async def get_issue_v3(self, issue_id: str):
try:
url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}?fields={fields}"
auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
issue = requests.get(
auth = (self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
async with httpx.AsyncClient() as client:
issue = await client.get(
url,
headers={
"Accept": "application/json"
@ -159,11 +158,12 @@ class JiraManager:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_comment_info(comment)
def add_comment_v3(self, issue_id: str, comment: str):
async def add_comment_v3(self, issue_id: str, comment: str):
try:
url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}/comment"
auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
comment_response = requests.post(
auth = (self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
async with httpx.AsyncClient() as client:
comment_response = await client.post(
url,
headers={
"Accept": "application/json"

View file

@ -6,6 +6,8 @@ import psycopg2
import psycopg2.extras
from decouple import config
from psycopg2 import pool
import contextlib
logger = logging.getLogger(__name__)
@ -179,3 +181,11 @@ async def terminate():
logging.info("Closed all connexions to PostgreSQL")
except (Exception, psycopg2.DatabaseError) as error:
logging.error("Error while closing all connexions to PostgreSQL", error)
@contextlib.asynccontextmanager
async def cursor(unlimited_query=False):
    """Yield an async PostgreSQL cursor from the application's pool.

    Async replacement for the old synchronous ``PostgresClient`` context
    manager; usage: ``async with pg_client.cursor() as cur: ...``.

    :param unlimited_query: accepted for backward compatibility -- at
        least one caller (the weekly-report cron) invokes
        ``pg_client.cursor(unlimited_query=True)``, which raised TypeError
        with the previous zero-argument signature. The statement-timeout
        behaviour it used to control is not implemented here.
        TODO: disable the statement timeout when True.
    """
    # local import to avoid a circular dependency at module load time
    from app import app
    async with app.state.postgresql.connection() as cnx:
        # NOTE(review): every call site does ``await cur.execute(...)``, so
        # the cursor must be an async one; async cursors (psycopg3-style)
        # are async context managers, and a plain ``with`` would fail --
        # confirm against the pool implementation behind app.state.
        async with cnx.cursor() as cur:
            yield cur

View file

@ -20,7 +20,7 @@ public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
async def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType, str] = None,
@ -29,7 +29,7 @@ def events_search(projectId: int, q: str,
if len(q) == 0:
return {"data": []}
if live:
return assist.autocomplete(project_id=projectId, q=q,
return await assist.autocomplete(project_id=projectId, q=q,
key=key if key is not None else type)
if type in [schemas.FetchFilterType._url]:
type = schemas.EventType.request