Merge remote-tracking branch 'origin/api-v1.10.0' into dev

Taha Yassine Kraiem 2023-02-06 16:15:16 +01:00
commit 47551c8dda
18 changed files with 551 additions and 464 deletions


@ -4,7 +4,8 @@ from os.path import exists as path_exists, getsize
import jwt
import requests
from decouple import config
from starlette.exceptions import HTTPException
from starlette import status
from fastapi import HTTPException
import schemas
from chalicelib.core import projects
@ -194,10 +195,11 @@ def get_ice_servers():
def __get_efs_path():
efs_path = config("FS_DIR")
if not path_exists(efs_path):
raise HTTPException(400, f"EFS not found in path: {efs_path}")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"EFS not found in path: {efs_path}")
if not access(efs_path, R_OK):
raise HTTPException(400, f"EFS found under: {efs_path}; but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"EFS found under: {efs_path}; but it is not readable, please check permissions")
return efs_path
@ -211,11 +213,12 @@ def get_raw_mob_by_id(project_id, session_id):
path_to_file = efs_path + "/" + __get_mob_path(project_id=project_id, session_id=session_id)
if path_exists(path_to_file):
if not access(path_to_file, R_OK):
raise HTTPException(400, f"Replay file found under: {efs_path};" +
f" but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Replay file found under: {efs_path};" +
" but it is not readable, please check permissions")
# getsize returns the size in bytes, UNPROCESSED_MAX_SIZE is in KB
if (getsize(path_to_file) / 1000) >= config("UNPROCESSED_MAX_SIZE", cast=int, default=200 * 1000):
raise HTTPException(413, "Replay file too large")
raise HTTPException(status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, detail="Replay file too large")
return path_to_file
return None
@ -231,8 +234,9 @@ def get_raw_devtools_by_id(project_id, session_id):
path_to_file = efs_path + "/" + __get_devtools_path(project_id=project_id, session_id=session_id)
if path_exists(path_to_file):
if not access(path_to_file, R_OK):
raise HTTPException(400, f"Devtools file found under: {efs_path};"
f" but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Devtools file found under: {efs_path};"
" but it is not readable, please check permissions")
return path_to_file


@ -2,6 +2,8 @@ import json
import requests
from decouple import config
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import webhook
@ -11,10 +13,13 @@ from chalicelib.core.collaboration_base import BaseCollaboration
class MSTeams(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.msteams):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url,
webhook_type="msteams",
webhook_type=schemas.WebhookType.msteams,
name=data.name)
return None


@ -2,6 +2,9 @@ import requests
from decouple import config
from datetime import datetime
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
@ -10,10 +13,13 @@ from chalicelib.core.collaboration_base import BaseCollaboration
class Slack(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.slack):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url,
webhook_type="slack",
webhook_type=schemas.WebhookType.slack,
name=data.name)
return None


@ -572,7 +572,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema, ignore_click_map=False):
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
@ -580,9 +580,6 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
elif __is_click_map(metric):
# TODO: remove this when UI is able to stop this endpoint calls for clickMap
if ignore_click_map:
return None
if raw_metric["data"]:
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])


@ -1,4 +1,8 @@
import re
from typing import Optional
from fastapi import HTTPException
from starlette import status
from chalicelib.core import projects
from chalicelib.utils import pg_client
@ -10,17 +14,33 @@ def column_names():
return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]
def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
constraints = column_names()
if exclude_index:
del constraints[exclude_index - 1]
constraints = [f"{c} ILIKE %(name)s" for c in constraints]
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
AND ({" OR ".join(constraints)})) AS exists;""",
{"project_id": project_id, "name": name})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def get(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
{",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query)
metas = cur.fetchone()
results = []
if metas is not None:
@ -34,15 +54,12 @@ def get_batch(project_ids):
if project_ids is None or len(project_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
project_id, {",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", {"project_ids": tuple(project_ids)})
)
query = cur.mogrify(f"""SELECT project_id, {",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query=query)
full_metas = cur.fetchall()
results = {}
if full_metas is not None and len(full_metas) > 0:
@ -84,17 +101,21 @@ def __edit(project_id, col_index, colname, new_name):
with pg_client.PostgresClient() as cur:
if old_metas[col_index]["key"] != new_name:
cur.execute(cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s
WHERE project_id = %(project_id)s AND deleted_at ISNULL
RETURNING {colname};""",
{"project_id": project_id, "value": new_name}))
query = cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING {colname};""",
{"project_id": project_id, "value": new_name})
cur.execute(query=query)
new_name = cur.fetchone()[colname]
old_metas[col_index]["key"] = new_name
return {"data": old_metas[col_index]}
def edit(tenant_id, project_id, index: int, new_name: str):
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=index):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
return __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)
@ -127,12 +148,16 @@ def add(tenant_id, project_id, new_name):
index = __get_available_index(project_id=project_id)
if index < 1:
return {"errors": ["maximum allowed metadata reached"]}
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
with pg_client.PostgresClient() as cur:
colname = index_to_colname(index)
cur.execute(
cur.mogrify(
f"""UPDATE public.projects SET {colname}= %(key)s WHERE project_id =%(project_id)s RETURNING {colname};""",
{"key": new_name, "project_id": project_id}))
query = cur.mogrify(f"""UPDATE public.projects
SET {colname}= %(key)s
WHERE project_id =%(project_id)s
RETURNING {colname};""",
{"key": new_name, "project_id": project_id})
cur.execute(query=query)
col_val = cur.fetchone()[colname]
return {"data": {"key": col_val, "index": index}}
@ -144,17 +169,13 @@ def search(tenant_id, project_id, key, value):
s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
{",".join(s_query)}
FROM public.projects
WHERE
project_id = %(project_id)s AND deleted_at ISNULL
LIMIT 1;""",
{"key": key, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(s_query)}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""",
{"key": key, "project_id": project_id})
cur.execute(query=query)
all_metas = cur.fetchone()
key = None
for c in all_metas:
@ -163,17 +184,13 @@ def search(tenant_id, project_id, key, value):
break
if key is None:
return {"errors": ["key does not exist"]}
cur.execute(
cur.mogrify(
f"""\
SELECT
DISTINCT "{key}" AS "{key}"
FROM public.sessions
{f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
ORDER BY "{key}"
LIMIT 20;""",
{"value": value, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT DISTINCT "{key}" AS "{key}"
FROM public.sessions
{f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
ORDER BY "{key}"
LIMIT 20;""",
{"value": value, "project_id": project_id})
cur.execute(query=query)
value = cur.fetchall()
return {"data": [k[key] for k in value]}
@ -189,14 +206,12 @@ def get_by_session_id(project_id, session_id):
return []
keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
select {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s
AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
cur.execute(query=query)
session_metas = cur.fetchall()
results = []
for m in session_metas:
@ -211,14 +226,11 @@ def get_keys_by_projects(project_ids):
if project_ids is None or len(project_ids) == 0:
return {}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""\
SELECT
project_id,
{",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
query = cur.mogrify(f"""SELECT project_id,{",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query)
rows = cur.fetchall()


@ -1,4 +1,8 @@
import json
from typing import Optional
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import users
@ -6,6 +10,20 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE deleted_at IS NULL
AND name ILIKE %(name)s
{"AND project_id!=%(exclude_id))s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def __update(tenant_id, project_id, changes):
if len(changes.keys()) == 0:
return None
@ -14,29 +32,23 @@ def __update(tenant_id, project_id, changes):
for key in changes.keys():
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
UPDATE public.projects
SET
{" ,".join(sub_query)}
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
)
query = cur.mogrify(f"""UPDATE public.projects
SET {" ,".join(sub_query)}
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def __create(tenant_id, name):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.projects (name, active)
VALUES (%(name)s,TRUE)
RETURNING project_id;""",
{"name": name})
)
query = cur.mogrify(f"""INSERT INTO public.projects (name, active)
VALUES (%(name)s,TRUE)
RETURNING project_id;""",
{"name": name})
cur.execute(query=query)
project_id = cur.fetchone()["project_id"]
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@ -121,49 +133,53 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{extra_select}
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_id": project_id})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_key,
s.name
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
WHERE s.project_key =%(project_key)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_key,
s.name
{extra_select}
FROM public.projects AS s
WHERE s.project_key =%(project_key)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_key": project_key})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if __exists_by_name(name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
@ -172,6 +188,8 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
if __exists_by_name(name=data.name, exclude_id=project_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -185,40 +203,32 @@ def delete(tenant_id, user_id, project_id):
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET
deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE
project_id = %(project_id)s;""",
{"project_id": project_id})
)
query = cur.mogrify("""UPDATE public.projects
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE project_id = %(project_id)s;""",
{"project_id": project_id})
cur.execute(query=query)
return {"data": {"state": "success"}}
def count_by_tenant(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute("""\
SELECT
count(s.project_id)
FROM public.projects AS s
WHERE s.deleted_at IS NULL;""")
query = """SELECT count(1) AS count
FROM public.projects AS s
WHERE s.deleted_at IS NULL;"""
cur.execute(query=query)
return cur.fetchone()["count"]
def get_gdpr(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()["gdpr"]
row["projectId"] = project_id
return row
@ -226,17 +236,13 @@ def get_gdpr(project_id):
def edit_gdpr(project_id, gdpr):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET
gdpr = gdpr|| %(gdpr)s
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
)
query = cur.mogrify("""UPDATE public.projects
SET gdpr = gdpr|| %(gdpr)s
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
cur.execute(query=query)
row = cur.fetchone()
if not row:
return {"errors": ["something went wrong"]}
@ -247,40 +253,36 @@ def edit_gdpr(project_id, gdpr):
def get_internal_project_id(project_key):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_id
FROM public.projects
WHERE project_key =%(project_key)s AND deleted_at ISNULL;""",
{"project_key": project_key})
)
query = cur.mogrify("""SELECT project_id
FROM public.projects
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",
{"project_key": project_key})
cur.execute(query=query)
row = cur.fetchone()
return row["project_id"] if row else None
def get_project_key(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_key
FROM public.projects
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT project_key
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
project = cur.fetchone()
return project["project_key"] if project is not None else None
def get_capture_status(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
@ -295,22 +297,22 @@ def update_capture_status(project_id, changes):
if changes.get("captureAll"):
sample_rate = 100
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
)
query = cur.mogrify("""UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
cur.execute(query=query)
return changes
def get_projects_ids(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(f"""SELECT s.project_id
FROM public.projects AS s
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;""")
query = f"""SELECT s.project_id
FROM public.projects AS s
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;"""
cur.execute(query=query)
rows = cur.fetchall()
return [r["project_id"] for r in rows]


@ -1,7 +1,11 @@
import logging
from typing import Optional
import requests
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -102,7 +106,25 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
tenant_id: Optional[int] = None) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.webhooks
WHERE name ILIKE %(name)s
AND deleted_at ISNULL
AND type=%(webhook_type)s
{"AND webhook_id!=%(exclude_id))s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id, "webhook_type": webhook_type})
cur.execute(query)
row = cur.fetchone()
return row["exists"]
def add_edit(tenant_id, data, replace_none=None):
if "name" in data and len(data["name"]) > 0 \
and exists_by_name(name=data["name"], exclude_id=data.get("webhookId")):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if data.get("webhookId") is not None:
return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
changes={"endpoint": data["endpoint"],


@ -6,7 +6,7 @@ from jira import JIRA
from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
from starlette import status
from starlette.exceptions import HTTPException
from fastapi import HTTPException
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"


@ -3,7 +3,7 @@ import smtplib
from smtplib import SMTPAuthenticationError
from decouple import config
from starlette.exceptions import HTTPException
from fastapi import HTTPException
class EmptySMTP:


@ -62,7 +62,6 @@ def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR
@app.post('/{projectId}/sessions/search', tags=["sessions"])
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
@ -70,7 +69,6 @@ def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchem
@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
@app.post('/{projectId}/sessions/search2/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)


@ -230,13 +230,8 @@ def get_custom_metric_errors_list(projectId: int, metric_id: int,
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
# TODO: remove this when UI is able to stop this endpoint calls for clickMap
import re
ignore_click_map = re.match(r".*\/[0-9]+\/dashboard\/[0-9]+$", request.headers.get('referer')) is not None \
or re.match(r".*\/[0-9]+\/metrics$", request.headers.get('referer')) is not None \
if request.headers.get('referer') else False
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data, ignore_click_map=ignore_click_map)
data=data)
return {"data": data}


@ -15,6 +15,10 @@ def transform_email(email: str) -> str:
return email.lower().strip() if isinstance(email, str) else email
def remove_whitespace(value: str) -> str:
return " ".join(value.split()) if isinstance(value, str) else value
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@ -64,7 +68,8 @@ class UpdateTenantSchema(BaseModel):
class CreateProjectSchema(BaseModel):
name: str = Field("my first project")
name: str = Field(default="my first project")
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class CurrentAPIContext(BaseModel):
@ -81,6 +86,8 @@ class CurrentContext(CurrentAPIContext):
class AddCollaborationSchema(BaseModel):
name: str = Field(...)
url: HttpUrl = Field(...)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
_transform_url = validator('url', pre=True, allow_reuse=True)(remove_whitespace)
class EditCollaborationSchema(AddCollaborationSchema):
@ -128,6 +135,7 @@ class CreateEditWebhookSchema(BaseModel):
endpoint: str = Field(...)
authHeader: Optional[str] = Field(None)
name: Optional[str] = Field(...)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class CreateMemberSchema(BaseModel):
@ -137,12 +145,15 @@ class CreateMemberSchema(BaseModel):
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class EditMemberSchema(EditUserSchema):
name: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
@ -156,6 +167,7 @@ class AssignmentSchema(BaseModel):
description: str = Field(...)
title: str = Field(...)
issue_type: str = Field(...)
_transform_title = validator('title', pre=True, allow_reuse=True)(remove_whitespace)
class Config:
alias_generator = attribute_to_camel_case
@ -246,6 +258,7 @@ class SumologicSchema(BaseModel):
class MetadataBasicSchema(BaseModel):
index: Optional[int] = Field(None)
key: str = Field(...)
_transform_key = validator('key', pre=True, allow_reuse=True)(remove_whitespace)
class MetadataListSchema(BaseModel):


@ -628,7 +628,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema, ignore_click_map=False):
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
@ -636,9 +636,6 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
elif __is_click_map(metric):
# TODO: remove this when UI is able to stop this endpoint calls for clickMap
if ignore_click_map:
return None
if raw_metric["data"]:
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])


@ -1,4 +1,8 @@
import json
from typing import Optional
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import users
@ -6,6 +10,21 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE deleted_at IS NULL
AND name ILIKE %(name)s
AND tenant_id = %(tenant_id)s
{"AND project_id!=%(exclude_id))s" if exclude_id else ""}) AS exists;""",
{"tenant_id": tenant_id, "name": name, "exclude_id": exclude_id})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def __update(tenant_id, project_id, changes):
if len(changes.keys()) == 0:
return None
@ -14,29 +33,23 @@ def __update(tenant_id, project_id, changes):
for key in changes.keys():
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
UPDATE public.projects
SET
{" ,".join(sub_query)}
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
)
query = cur.mogrify(f"""UPDATE public.projects
SET {" ,".join(sub_query)}
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def __create(tenant_id, name):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.projects (tenant_id, name, active)
VALUES (%(tenant_id)s,%(name)s,TRUE)
RETURNING project_id;""",
{"tenant_id": tenant_id, "name": name})
)
query = cur.mogrify(f"""INSERT INTO public.projects (tenant_id, name, active)
VALUES (%(tenant_id)s,%(name)s,TRUE)
RETURNING project_id;""",
{"tenant_id": tenant_id, "name": name})
cur.execute(query=query)
project_id = cur.fetchone()["project_id"]
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@ -44,15 +57,14 @@ def __create(tenant_id, name):
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, user_id=None):
with pg_client.PostgresClient() as cur:
role_query = """INNER JOIN LATERAL (SELECT 1
FROM users
INNER JOIN roles USING (role_id)
LEFT JOIN roles_projects USING (role_id)
WHERE users.user_id = %(user_id)s
AND users.deleted_at ISNULL
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
LIMIT 1
) AS role_project ON (TRUE)"""
FROM users
INNER JOIN roles USING (role_id)
LEFT JOIN roles_projects USING (role_id)
WHERE users.user_id = %(user_id)s
AND users.deleted_at ISNULL
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
LIMIT 1) AS role_project ON (TRUE)"""
extra_projection = ""
extra_join = ""
if gdpr:
@ -71,9 +83,9 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
if stack_integrations:
extra_join = """LEFT JOIN LATERAL (SELECT COUNT(*) AS count
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
@ -134,29 +146,33 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.tenant_id =%(tenant_id)s
AND s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{extra_select}
FROM public.projects AS s
WHERE s.tenant_id =%(tenant_id)s
AND s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"tenant_id": tenant_id, "project_id": project_id})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if __exists_by_name(name=data.name, exclude_id=None, tenant_id=tenant_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
@ -167,6 +183,8 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
if __exists_by_name(name=data.name, exclude_id=project_id, tenant_id=tenant_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -180,40 +198,34 @@ def delete(tenant_id, user_id, project_id):
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""UPDATE public.projects
SET
deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE
project_id = %(project_id)s;""",
{"project_id": project_id})
)
query = cur.mogrify("""UPDATE public.projects
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE project_id = %(project_id)s;""",
{"project_id": project_id})
cur.execute(query=query)
return {"data": {"state": "success"}}
def count_by_tenant(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
count(s.project_id)
FROM public.projects AS s
WHERE s.deleted_at IS NULL
AND tenant_id= %(tenant_id)s;""", {"tenant_id": tenant_id}))
query = cur.mogrify("""SELECT count(1) AS count
FROM public.projects AS s
WHERE s.deleted_at IS NULL
AND tenant_id= %(tenant_id)s;""",
{"tenant_id": tenant_id})
cur.execute(query=query)
return cur.fetchone()["count"]
def get_gdpr(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()["gdpr"]
row["projectId"] = project_id
return row
@ -221,17 +233,13 @@ def get_gdpr(project_id):
def edit_gdpr(project_id, gdpr):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET
gdpr = gdpr|| %(gdpr)s
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
)
query = cur.mogrify("""UPDATE public.projects
SET gdpr = gdpr|| %(gdpr)s
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
cur.execute(query=query)
row = cur.fetchone()
if not row:
return {"errors": ["something went wrong"]}
@ -242,40 +250,36 @@ def edit_gdpr(project_id, gdpr):
def get_internal_project_id(project_key):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_id
FROM public.projects
where project_key =%(project_key)s AND deleted_at ISNULL;""",
{"project_key": project_key})
)
query = cur.mogrify("""SELECT project_id
FROM public.projects
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",
{"project_key": project_key})
cur.execute(query=query)
row = cur.fetchone()
return row["project_id"] if row else None
def get_project_key(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_key
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT project_key
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
project = cur.fetchone()
return project["project_key"] if project is not None else None
def get_capture_status(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
@ -290,45 +294,48 @@ def update_capture_status(project_id, changes):
if changes.get("captureAll"):
sample_rate = 100
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
)
query = cur.mogrify("""UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
cur.execute(query=query)
return changes
def get_projects_ids(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT s.project_id
FROM public.projects AS s
WHERE tenant_id =%(tenant_id)s AND s.deleted_at IS NULL
ORDER BY s.project_id;""", {"tenant_id": tenant_id}))
query = cur.mogrify("""SELECT s.project_id
FROM public.projects AS s
WHERE tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
ORDER BY s.project_id;""", {"tenant_id": tenant_id})
cur.execute(query=query)
rows = cur.fetchall()
return [r["project_id"] for r in rows]
def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_key,
s.name
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.project_key =%(project_key)s
AND s.tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_key,
s.name
{extra_select}
FROM public.projects AS s
WHERE s.project_key =%(project_key)s
AND s.tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_key": project_key, "tenant_id": tenant_id})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
@ -338,27 +345,24 @@ def is_authorized(project_id, tenant_id, user_id=None):
return False
with pg_client.PostgresClient() as cur:
role_query = """INNER JOIN LATERAL (SELECT 1
FROM users
INNER JOIN roles USING (role_id)
LEFT JOIN roles_projects USING (role_id)
WHERE users.user_id = %(user_id)s
AND users.deleted_at ISNULL
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = %(project_id)s)
) AS role_project ON (TRUE)"""
FROM users
INNER JOIN roles USING (role_id)
LEFT JOIN roles_projects USING (role_id)
WHERE users.user_id = %(user_id)s
AND users.deleted_at ISNULL
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = %(project_id)s)
) AS role_project ON (TRUE)"""
query = cur.mogrify(f"""\
SELECT project_id
FROM public.projects AS s
{role_query if user_id is not None else ""}
where s.tenant_id =%(tenant_id)s
AND s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
query = cur.mogrify(f"""SELECT project_id
FROM public.projects AS s
{role_query if user_id is not None else ""}
WHERE s.tenant_id =%(tenant_id)s
AND s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"tenant_id": tenant_id, "project_id": project_id, "user_id": user_id})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return row is not None
@ -367,16 +371,13 @@ def is_authorized_batch(project_ids, tenant_id):
if project_ids is None or not len(project_ids):
return False
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
SELECT project_id
FROM public.projects
WHERE tenant_id =%(tenant_id)s
AND project_id IN %(project_ids)s
AND deleted_at IS NULL;""",
query = cur.mogrify("""SELECT project_id
FROM public.projects
WHERE tenant_id =%(tenant_id)s
AND project_id IN %(project_ids)s
AND deleted_at IS NULL;""",
{"tenant_id": tenant_id, "project_ids": tuple(project_ids)})
cur.execute(
query=query
)
cur.execute(query=query)
rows = cur.fetchall()
return [r["project_id"] for r in rows]


@ -1,64 +1,81 @@
from typing import Optional
from fastapi import HTTPException
from starlette import status
import schemas_ee
from chalicelib.core import users, projects
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT count(1) AS count
FROM public.roles
WHERE tenant_id = %(tenant_id)s
AND name ILIKE %(name)s
AND deleted_at ISNULL
{"role_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"tenant_id": tenant_id, "name": name, "exclude_id": exclude_id})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
if __exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=role_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not data.all_projects and (data.projects is None or len(data.projects) == 0):
return {"errors": ["must specify a project or all projects"]}
if data.projects is not None and len(data.projects) > 0 and not data.all_projects:
data.projects = projects.is_authorized_batch(project_ids=data.projects, tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""SELECT 1
FROM public.roles
WHERE role_id = %(role_id)s
query = cur.mogrify("""SELECT 1
FROM public.roles
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND protected = TRUE
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
)
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
cur.execute(query=query)
if cur.fetchone() is not None:
return {"errors": ["this role is protected"]}
cur.execute(
cur.mogrify("""\
UPDATE public.roles
SET name= %(name)s,
description= %(description)s,
permissions= %(permissions)s,
all_projects= %(all_projects)s
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND deleted_at ISNULL
AND protected = FALSE
RETURNING *, COALESCE((SELECT ARRAY_AGG(project_id)
FROM roles_projects WHERE roles_projects.role_id=%(role_id)s),'{}') AS projects;""",
{"tenant_id": tenant_id, "role_id": role_id, **data.dict()})
)
query = cur.mogrify("""UPDATE public.roles
SET name= %(name)s,
description= %(description)s,
permissions= %(permissions)s,
all_projects= %(all_projects)s
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND deleted_at ISNULL
AND protected = FALSE
RETURNING *, COALESCE((SELECT ARRAY_AGG(project_id)
FROM roles_projects
WHERE roles_projects.role_id=%(role_id)s),'{}') AS projects;""",
{"tenant_id": tenant_id, "role_id": role_id, **data.dict()})
cur.execute(query=query)
row = cur.fetchone()
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
if not data.all_projects:
d_projects = [i for i in row["projects"] if i not in data.projects]
if len(d_projects) > 0:
cur.execute(
cur.mogrify(
"DELETE FROM roles_projects WHERE role_id=%(role_id)s AND project_id IN %(project_ids)s",
{"role_id": role_id, "project_ids": tuple(d_projects)})
)
query = cur.mogrify("""DELETE FROM roles_projects
WHERE role_id=%(role_id)s
AND project_id IN %(project_ids)s""",
{"role_id": role_id, "project_ids": tuple(d_projects)})
cur.execute(query=query)
n_projects = [i for i in data.projects if i not in row["projects"]]
if len(n_projects) > 0:
cur.execute(
cur.mogrify(
f"""INSERT INTO roles_projects(role_id, project_id)
VALUES {",".join([f"(%(role_id)s,%(project_id_{i})s)" for i in range(len(n_projects))])}""",
{"role_id": role_id, **{f"project_id_{i}": p for i, p in enumerate(n_projects)}})
)
query = cur.mogrify(f"""INSERT INTO roles_projects(role_id, project_id)
VALUES {",".join([f"(%(role_id)s,%(project_id_{i})s)" for i in range(len(n_projects))])}""",
{"role_id": role_id, **{f"project_id_{i}": p for i, p in enumerate(n_projects)}})
cur.execute(query=query)
row["projects"] = data.projects
return helper.dict_to_camel_case(row)
@ -69,45 +86,46 @@ def create(tenant_id, user_id, data: schemas_ee.RolePayloadSchema):
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
if __exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if not data.all_projects and (data.projects is None or len(data.projects) == 0):
return {"errors": ["must specify a project or all projects"]}
if data.projects is not None and len(data.projects) > 0 and not data.all_projects:
data.projects = projects.is_authorized_batch(project_ids=data.projects, tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions, all_projects)
VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[], %(all_projects)s)
RETURNING *;""",
{"tenant_id": tenant_id, "name": data.name, "description": data.description,
"permissions": data.permissions, "all_projects": data.all_projects})
)
query = cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions, all_projects)
VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[], %(all_projects)s)
RETURNING *;""",
{"tenant_id": tenant_id, "name": data.name, "description": data.description,
"permissions": data.permissions, "all_projects": data.all_projects})
cur.execute(query=query)
row = cur.fetchone()
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
if not data.all_projects:
role_id = row["role_id"]
cur.execute(
cur.mogrify(f"""INSERT INTO roles_projects(role_id, project_id)
VALUES {",".join(f"(%(role_id)s,%(project_id_{i})s)" for i in range(len(data.projects)))};""",
{"role_id": role_id, **{f"project_id_{i}": p for i, p in enumerate(data.projects)}})
)
query = cur.mogrify(f"""INSERT INTO roles_projects(role_id, project_id)
VALUES {",".join(f"(%(role_id)s,%(project_id_{i})s)" for i in range(len(data.projects)))};""",
{"role_id": role_id, **{f"project_id_{i}": p for i, p in enumerate(data.projects)}})
cur.execute(query=query)
return helper.dict_to_camel_case(row)
def get_roles(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""SELECT roles.*, COALESCE(projects, '{}') AS projects
FROM public.roles
LEFT JOIN LATERAL (SELECT array_agg(project_id) AS projects
FROM roles_projects
INNER JOIN projects USING (project_id)
WHERE roles_projects.role_id = roles.role_id
AND projects.deleted_at ISNULL ) AS role_projects ON (TRUE)
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
ORDER BY role_id;""",
{"tenant_id": tenant_id})
)
query = cur.mogrify("""SELECT roles.*, COALESCE(projects, '{}') AS projects
FROM public.roles
LEFT JOIN LATERAL (SELECT array_agg(project_id) AS projects
FROM roles_projects
INNER JOIN projects USING (project_id)
WHERE roles_projects.role_id = roles.role_id
AND projects.deleted_at ISNULL ) AS role_projects ON (TRUE)
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
ORDER BY role_id;""",
{"tenant_id": tenant_id})
cur.execute(query=query)
rows = cur.fetchall()
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
@ -116,14 +134,13 @@ def get_roles(tenant_id):
def get_role_by_name(tenant_id, name):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""SELECT *
FROM public.roles
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
AND name ILIKE %(name)s;""",
{"tenant_id": tenant_id, "name": name})
)
query = cur.mogrify("""SELECT *
FROM public.roles
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
AND name ILIKE %(name)s;""",
{"tenant_id": tenant_id, "name": name})
cur.execute(query=query)
row = cur.fetchone()
if row is not None:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
@ -136,33 +153,30 @@ def delete(tenant_id, user_id, role_id):
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""SELECT 1
FROM public.roles
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND protected = TRUE
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
)
query = cur.mogrify("""SELECT 1
FROM public.roles
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND protected = TRUE
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
cur.execute(query=query)
if cur.fetchone() is not None:
return {"errors": ["this role is protected"]}
cur.execute(
cur.mogrify("""SELECT 1
FROM public.users
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
)
query = cur.mogrify("""SELECT 1
FROM public.users
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
LIMIT 1;""",
{"tenant_id": tenant_id, "role_id": role_id})
cur.execute(query=query)
if cur.fetchone() is not None:
return {"errors": ["this role is already attached to other user(s)"]}
cur.execute(
cur.mogrify("""UPDATE public.roles
SET deleted_at = timezone('utc'::text, now())
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND protected = FALSE;""",
{"tenant_id": tenant_id, "role_id": role_id})
)
query = cur.mogrify("""UPDATE public.roles
SET deleted_at = timezone('utc'::text, now())
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND protected = FALSE;""",
{"tenant_id": tenant_id, "role_id": role_id})
cur.execute(query=query)
return get_roles(tenant_id=tenant_id)


@ -1,7 +1,11 @@
import logging
from typing import Optional
import requests
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -108,7 +112,27 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w
def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int],
webhook_type: str = schemas.WebhookType.webhook) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT count(1) AS count
FROM public.webhooks
WHERE name ILIKE %(name)s
AND deleted_at ISNULL
AND tenant_id=%(tenant_id)s
AND type=%(webhook_type)s
{"AND webhook_id!=%(exclude_id))s" if exclude_id else ""}) AS exists;""",
{"tenant_id": tenant_id, "name": name, "exclude_id": exclude_id,
"webhook_type": webhook_type})
cur.execute(query)
row = cur.fetchone()
return row["exists"]
def add_edit(tenant_id, data, replace_none=None):
if "name" in data and len(data["name"]) > 0 \
and exists_by_name(name=data["name"], exclude_id=data.get("webhookId"), tenant_id=tenant_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if data.get("webhookId") is not None:
return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
changes={"endpoint": data["endpoint"],


@ -232,13 +232,8 @@ def get_custom_metric_errors_list(projectId: int, metric_id: int,
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
# TODO: remove this when UI is able to stop this endpoint calls for clickMap
import re
ignore_click_map = re.match(r".*\/[0-9]+\/dashboard\/[0-9]+$", request.headers.get('referer')) is not None \
or re.match(r".*\/[0-9]+\/metrics$", request.headers.get('referer')) is not None \
if request.headers.get('referer') else False
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data, ignore_click_map=ignore_click_map)
data=data)
return {"data": data}


@ -2,7 +2,7 @@ from enum import Enum
from typing import Optional, List, Union, Literal
from pydantic import BaseModel, Field, EmailStr
from pydantic import root_validator
from pydantic import root_validator, validator
import schemas
from chalicelib.utils.TimeUTC import TimeUTC
@ -27,6 +27,7 @@ class RolePayloadSchema(BaseModel):
permissions: List[Permissions] = Field(...)
all_projects: bool = Field(True)
projects: List[int] = Field([])
_transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace)
class Config:
alias_generator = schemas.attribute_to_camel_case
@ -119,6 +120,7 @@ class SessionModel(BaseModel):
class AssistRecordUpdatePayloadSchema(BaseModel):
name: str = Field(..., min_length=1)
_transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace)
class AssistRecordPayloadSchema(AssistRecordUpdatePayloadSchema):