feat(chalice): refactored

feat(chalice): fixes
This commit is contained in:
Taha Yassine Kraiem 2022-11-07 16:49:51 +01:00
parent 197997e3d4
commit 62d9cbad23
7 changed files with 70 additions and 71 deletions

View file

@ -2,7 +2,7 @@ import json
import schemas
from chalicelib.core import users
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -48,7 +48,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
if gdpr:
extra_projection += ',s.gdpr'
if recorded:
extra_projection += """, COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT,
extra_projection += """,COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT,
(SELECT MIN(sessions.start_ts)
FROM public.sessions
WHERE sessions.project_id = s.project_id

View file

@ -11,11 +11,7 @@ def reset(data: schemas.ForgetPasswordPayloadSchema):
if not helper.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_users = users.get_by_email_only(data.email)
if len(a_users) > 1:
print(f"multiple users found for [{data.email}] please contact our support")
return {"errors": ["multiple users, please contact our support"]}
elif len(a_users) == 1:
a_users = a_users[0]
if a_users:
invitation_link = users.generate_new_invitation(user_id=a_users["id"])
email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link)
else:

View file

@ -89,9 +89,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if
# limit the number of errors to reduce the response-body size
e['source'] == "js_exception"][:500]
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)

View file

@ -343,11 +343,12 @@ def get_by_email_only(email):
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.email = %(email)s
AND users.deleted_at IS NULL;""",
AND users.deleted_at IS NULL
LIMIT 1;""",
{"email": email})
)
r = cur.fetchall()
return helper.list_to_camel_case(r)
r = cur.fetchone()
return helper.dict_to_camel_case(r)
def get_by_email_reset(email, reset_token):

View file

@ -150,36 +150,6 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
return helper.dict_to_camel_case(row)
def is_authorized(project_id, tenant_id, user_id=None):
    """Check that *project_id* belongs to *tenant_id* and, when *user_id*
    is provided, that the user's role grants access to that project.

    Returns True when an authorizing row exists, False otherwise.
    """
    # Reject missing/non-numeric ids early to avoid a useless DB round-trip.
    if project_id is None or not str(project_id).isdigit():
        return False
    with pg_client.PostgresClient() as cur:
        # Role filter is joined in only when a specific user is being checked;
        # roles.all_projects bypasses per-project role assignments.
        role_query = """INNER JOIN LATERAL (SELECT 1
                                            FROM users
                                                INNER JOIN roles USING (role_id)
                                                LEFT JOIN roles_projects USING (role_id)
                                            WHERE users.user_id = %(user_id)s
                                              AND users.deleted_at ISNULL
                                              AND users.tenant_id = %(tenant_id)s
                                              AND (roles.all_projects OR roles_projects.project_id = %(project_id)s)
                                            ) AS role_project ON (TRUE)"""
        query = cur.mogrify(f"""\
                SELECT project_id
                FROM public.projects AS s
                    {role_query if user_id is not None else ""}
                where s.tenant_id =%(tenant_id)s
                  AND s.project_id =%(project_id)s
                  AND s.deleted_at IS NULL
                LIMIT 1;""",
                            {"tenant_id": tenant_id, "project_id": project_id, "user_id": user_id})
        cur.execute(
            query=query
        )
        row = cur.fetchone()
    # LIMIT 1 guarantees at most one row; existence alone answers the question.
    return row is not None
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id)
@ -198,17 +168,6 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
changes={"name": data.name})}
def count_by_tenant(tenant_id):
    """Return the number of non-deleted projects owned by *tenant_id*."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
                    SELECT
                           count(s.project_id)
                    FROM public.projects AS s
                    WHERE s.deleted_at IS NULL
                      AND tenant_id= %(tenant_id)s;""", {"tenant_id": tenant_id}))
        # count(...) always yields exactly one row with a "count" column.
        return cur.fetchone()["count"]
def delete(tenant_id, user_id, project_id):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
@ -227,6 +186,17 @@ def delete(tenant_id, user_id, project_id):
return {"data": {"state": "success"}}
def count_by_tenant(tenant_id):
    """Return the number of non-deleted projects owned by *tenant_id*."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
                    SELECT
                           count(s.project_id)
                    FROM public.projects AS s
                    WHERE s.deleted_at IS NULL
                      AND tenant_id= %(tenant_id)s;""", {"tenant_id": tenant_id}))
        # count(...) always yields exactly one row with a "count" column.
        return cur.fetchone()["count"]
def get_gdpr(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -318,6 +288,16 @@ def update_capture_status(project_id, changes):
return changes
def get_projects_ids(tenant_id):
    """Return the ids of all non-deleted projects of *tenant_id*, ascending."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""SELECT s.project_id
                                   FROM public.projects AS s
                                   WHERE tenant_id =%(tenant_id)s AND s.deleted_at IS NULL
                                   ORDER BY s.project_id;""", {"tenant_id": tenant_id}))
        rows = cur.fetchall()
    # Flatten the single-column rows into a plain list of ids.
    return [r["project_id"] for r in rows]
def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
@ -340,6 +320,36 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu
return helper.dict_to_camel_case(row)
def is_authorized(project_id, tenant_id, user_id=None):
    """Check that *project_id* belongs to *tenant_id* and, when *user_id*
    is provided, that the user's role grants access to that project.

    Returns True when an authorizing row exists, False otherwise.
    """
    # Reject missing/non-numeric ids early to avoid a useless DB round-trip.
    if project_id is None or not str(project_id).isdigit():
        return False
    with pg_client.PostgresClient() as cur:
        # Role filter is joined in only when a specific user is being checked;
        # roles.all_projects bypasses per-project role assignments.
        role_query = """INNER JOIN LATERAL (SELECT 1
                                            FROM users
                                                INNER JOIN roles USING (role_id)
                                                LEFT JOIN roles_projects USING (role_id)
                                            WHERE users.user_id = %(user_id)s
                                              AND users.deleted_at ISNULL
                                              AND users.tenant_id = %(tenant_id)s
                                              AND (roles.all_projects OR roles_projects.project_id = %(project_id)s)
                                            ) AS role_project ON (TRUE)"""
        query = cur.mogrify(f"""\
                SELECT project_id
                FROM public.projects AS s
                    {role_query if user_id is not None else ""}
                where s.tenant_id =%(tenant_id)s
                  AND s.project_id =%(project_id)s
                  AND s.deleted_at IS NULL
                LIMIT 1;""",
                            {"tenant_id": tenant_id, "project_id": project_id, "user_id": user_id})
        cur.execute(
            query=query
        )
        row = cur.fetchone()
    # LIMIT 1 guarantees at most one row; existence alone answers the question.
    return row is not None
def is_authorized_batch(project_ids, tenant_id):
if project_ids is None or not len(project_ids):
return False
@ -357,13 +367,3 @@ def is_authorized_batch(project_ids, tenant_id):
)
rows = cur.fetchall()
return [r["project_id"] for r in rows]
def get_projects_ids(tenant_id):
    """Return the ids of all non-deleted projects of *tenant_id*, ascending."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""SELECT s.project_id
                                   FROM public.projects AS s
                                   WHERE tenant_id =%(tenant_id)s AND s.deleted_at IS NULL
                                   ORDER BY s.project_id;""", {"tenant_id": tenant_id}))
        rows = cur.fetchall()
    # Flatten the single-column rows into a plain list of ids.
    return [r["project_id"] for r in rows]

View file

@ -91,9 +91,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if
# limit the number of errors to reduce the response-body size
e['source'] == "js_exception"][:500]
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)

View file

@ -59,6 +59,7 @@ def __group_metadata(session, project_metadata):
return meta
# This function should not use Clickhouse because it doesn't have `file_key`
def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True):
with pg_client.PostgresClient() as cur:
@ -77,7 +78,8 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
SELECT
s.*,
s.session_id::text AS session_id,
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key,
encode(file_key,'hex') AS file_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
@ -108,9 +110,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if
e['source'] == "js_exception"][
:500] # limit the number of errors to reduce the response-body size
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)