openreplay/api/chalicelib/core/metadata.py
Kraiem Taha Yassine a29c02b43a
Api FastApi (#252)
* feat(api): dynamic-api 1/2

* feat(api): dynamic-api 2/2
feat(api): core-api 1/2

* feat(api): changed schemas
feat(api): apikey authorizer
feat(api): jwt authorizer payload
feat(api): core-api 2/3

* feat(api): apikey authorizer
feat(api): shared context
feat(api): response editor
feat(api): middleware
feat(api): custom router
feat(api): fix auth double call

* feat(api): dashboard
feat(api): insights
feat(api): public api v1

* feat(api): allow full CORS

* feat(api): use decouple-config instead of env
feat(api): fixed conflict slack endpoint
feat(api): fixed favorite errors param

* feat(api): migration fixes

* feat(api): changes

* feat(api): crons

* feat(api): changes and fixes

* feat(api): added new endpoints
feat(api): applied new changes
feat(api): Docker image

* feat(api): EE 1/4

* feat(api): EE core_dynamic

* feat(api): global routers generator

* feat(api): project authorizer
feat(api): docker image
feat(api): crons

* feat(api): EE trace activity

* feat(api): changed ORRouter

* feat(api): EE trace activity parameters&payload

* feat(api): EE trace activity action name & path_format

* feat(db): user trace

* feat(api): EE trace activity ignore routes and hide attribute
feat(api): fix funnel payload schema

* feat(api): mobile support

* feat(api): changed build script

* feat(api): changed mobile sign endpoint
feat(api): changed requirements.txt

* feat(api): changed dockerfile

* feat(api): changed mobile-env-var

* feat(api): removed insights

* feat(api): changed EE Dockerfile

* feat(api): cast session_id to str for signing

* feat(api): fixed error_id type

* feat(api): fixed /errors priority conflict

* feat(api): fixed /errors/{errorId} default params

* feat(api): fixed change password after invitation

* feat(api): use background task for emails instead of low-timeout-api
feat(api): EE fixed missing required params

* feat(api): funnel-insights payload change

* feat(api): funnel-insights payload change

* feat(api): changed edit user payload schema

* feat(api): changed metrics payload schema

* feat(api): changed metrics payload schema

* feat(api): changed edit user default values
feat(api): fixed change error status route

* feat(api): changed edit user

* feat(api): stop user from changing his own role

* feat(api): changed add slack

* feat(api): changed get funnel

* feat(api): changed get funnel on the fly payload
feat(api): changed update payload

* feat(api): changed get funnel on the fly payload

* feat(api): changed update funnel payload

* feat(api): changed get funnel-sessions/issues on the fly payload

* feat(api): fixed funnel missing rangeValue

* feat(api): fixes

* feat(api): iceServers configuration

* feat(api): fix issueId casting

* feat(api): changed issues-sessions endpoint payload-schema

* feat(api): EE changed traces-ignored-routes

* feat(api): EE include core sessions.py

* feat(api): EE check licence on every request if expired

* feat(api): move general stats to dynamic

* feat(api): code cleanup
feat(api): removed sentry

* feat(api): changed traces-ignore-routes

* feat(api): changed dependencies

* feat(api): changed jwt-auth-response code

* feat(api): changed traces-ignore-routes

* feat(api): changed traces-ignore-routes

* feat(api): removed PyTZ
feat(api): migrated time-helper to zoneinfo

* feat(api): EE added missing dependency
feat(api): changed base docker image

* feat(api): merge after roles

* feat(api): EE roles fastapi

* feat(db): handle HTTPExceptions

* feat(db): changed payload schema

* feat(db): changed payload schema

* feat(api): included insights

* feat(api): removed unused helper

* feat(api): merge from dev to fastapi

* feat(api): merge fixes
feat(api): SAML migration

* feat(api): changed GET /signup response
feat(api): changed EE Dockerfile

* feat(api): changed edition detection

* feat(api): include ee endpoints

* feat(api): add/edit member changes

* feat(api): saml changed redirect

* feat(api): track session's replay
feat(api): track error's details

* feat(api): ignore tracking for read roles

* feat(api): define global queue
feat(api): define global scheduler
feat(api): traces use queue
feat(api): traces batch insert
feat(DB): changed traces schema

* feat(api): fix signup captcha

* feat(api): fix signup captcha

* feat(api): optional roleId
feat(api): set roleId to member if None

* feat(api): fixed edit role

* feat(api): return role details when creating a new member

* feat(api): trace: use BackgroundTasks instead of BackgroundTask to not override previous tasks

* feat(api): trace: use BackgroundTask if no other background task is defined

* feat(api): optimised delete metadata

* feat(api): Notification optional message

* feat(api): fix background-task reference

* feat(api): fix trace-background-task

* feat(api): fixed g-captcha for reset password

* feat(api): fix edit self-user

* feat(api): fixed create github-issue

* feat(api): set misfire_grace_time for crons

* feat(api): removed chalice
feat(api): freeze dependencies

* feat(api): refactored blueprints

* feat(api): /metadata/session_search allow projectId=None

* feat(api): public API, changed userId type

* feat(api): fix upload sourcemaps

* feat(api): user-trace support ApiKey endpoints

* feat(api): fixed user-trace foreign key type

* feat(api): fixed trace schema

* feat(api): trace save auth-method

* feat(api): trace fixed auth-method

* feat(api): trace changed schema
2021-12-16 19:10:12 +01:00

264 lines
9 KiB
Python

from chalicelib.utils import pg_client, helper, dev
from chalicelib.core import projects
import re
# Maximum number of custom metadata slots per project; slot i is stored in the
# metadata_i column of public.projects (and mirrored on public.sessions).
MAX_INDEXES = 10
def _get_column_names():
    """Return the names of all metadata columns: metadata_1 .. metadata_MAX_INDEXES."""
    names = []
    for slot in range(1, MAX_INDEXES + 1):
        names.append(f"metadata_{slot}")
    return names
def get(project_id):
    """Fetch a project's defined metadata keys as [{"key": name, "index": slot}, ...]."""
    selected_cols = ",".join(_get_column_names())
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                    SELECT
                        {selected_cols}
                    FROM public.projects
                    WHERE project_id = %(project_id)s AND deleted_at ISNULL
                    LIMIT 1;""",
                {"project_id": project_id}))
        row = cur.fetchone()
    if row is None:
        return []
    # column order follows _get_column_names(), so position i maps to slot index i+1
    return [{"key": row[col], "index": position + 1}
            for position, col in enumerate(row.keys())
            if row[col] is not None]
# Allowed format for metadata key names: letters, digits, '_' and '-' (case-insensitive).
regex = re.compile(r'^[a-z0-9_-]+$', re.IGNORECASE)
def index_to_colname(index):
    """Map a 1-based metadata slot index to its database column name.

    :param index: slot number, must be in 1..MAX_INDEXES
    :return: the column name, e.g. "metadata_3"
    :raises ValueError: if index is out of range
    """
    if index <= 0 or index > MAX_INDEXES:
        # ValueError is more precise than a bare Exception and is still caught
        # by any caller handling Exception; message typo ("out or") fixed too.
        raise ValueError("metadata index out of bound")
    return f"metadata_{index}"
def __get_available_index(project_id):
    """Return the smallest unused metadata slot for the project, or -1 if all taken."""
    taken = {meta["index"] for meta in get(project_id)}
    if len(taken) >= MAX_INDEXES:
        return -1
    candidate = 1
    while candidate in taken:
        candidate += 1
    return candidate
def __edit(project_id, col_index, colname, new_name):
    """Rename the metadata key stored in `colname` for the project.

    Returns {"data": {...}} with the updated entry, or {"errors": [...]}.
    """
    if not new_name:
        return {"errors": ["key value invalid"]}
    current = {meta["index"]: meta for meta in get(project_id)}
    if col_index not in current:
        return {"errors": ["custom field not found"]}
    with pg_client.PostgresClient() as cur:
        # skip the UPDATE when nothing changes; note only the stored key is
        # lower-cased in the comparison (inherited behavior)
        if current[col_index]["key"].lower() != new_name:
            cur.execute(cur.mogrify(f"""UPDATE public.projects
                                        SET {colname} = %(value)s
                                        WHERE project_id = %(project_id)s AND deleted_at ISNULL
                                        RETURNING {colname};""",
                                    {"project_id": project_id, "value": new_name}))
            new_name = cur.fetchone()[colname]
    current[col_index]["key"] = new_name
    return {"data": current[col_index]}
def edit(tenant_id, project_id, index: int, new_name: str):
    """Public entry point: rename the metadata key stored at slot `index`."""
    colname = index_to_colname(index)
    return __edit(project_id=project_id, col_index=index, colname=colname,
                  new_name=new_name)
def delete(tenant_id, project_id, index: int):
    """Remove a metadata key: NULL-out its column on the project and its sessions."""
    index = int(index)
    existing = {meta["index"] for meta in get(project_id)}
    if index not in existing:
        return {"errors": ["custom field not found"]}
    colname = index_to_colname(index)
    with pg_client.PostgresClient() as cur:
        # clear the key definition on the project, then its values on sessions
        statements = (
            (f"""UPDATE public.projects
                 SET {colname}= NULL
                 WHERE project_id = %(project_id)s AND deleted_at ISNULL;""",
             {"project_id": project_id}),
            (f"""UPDATE public.sessions
                 SET {colname}= NULL
                 WHERE project_id = %(project_id)s
                   AND {colname} IS NOT NULL
                 """,
             {"project_id": project_id}))
        for sql, params in statements:
            cur.execute(query=cur.mogrify(sql, params))
    return {"data": get(project_id)}
def add(tenant_id, project_id, new_name):
    """Create a new metadata key in the first free slot; errors if all slots used."""
    index = __get_available_index(project_id=project_id)
    if index < 1:
        return {"errors": ["maximum allowed metadata reached"]}
    colname = index_to_colname(index)
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""UPDATE public.projects SET {colname}= %(key)s WHERE project_id =%(project_id)s RETURNING {colname};""",
            {"key": new_name, "project_id": project_id})
        cur.execute(query)
        stored_key = cur.fetchone()[colname]
    return {"data": {"key": stored_key, "index": index}}
def search(tenant_id, project_id, key, value):
    """Find the metadata column whose key equals `key`, then return up to 20
    distinct session values for that column, optionally filtered by prefix `value`.

    Fix: the '%' wildcard is now only appended when `value` is not None — the
    original unconditionally did `value + "%"`, raising TypeError for None,
    even though the query below explicitly supports a missing value.

    :return: {"data": [...]} or {"errors": ["key does not exist"]}
    """
    if value is not None:
        value = value + "%"
    # one boolean column per metadata slot: TRUE where the stored key matches
    s_query = [f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}"
               for f in _get_column_names()]
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                    SELECT
                        {",".join(s_query)}
                    FROM public.projects
                    WHERE
                        project_id = %(project_id)s AND deleted_at ISNULL
                    LIMIT 1;""",
                {"key": key, "project_id": project_id}))
        all_metas = cur.fetchone()
        key = None
        for c in all_metas:
            if all_metas[c]:
                key = c
                break
        if key is None:
            return {"errors": ["key does not exist"]}
        cur.execute(
            cur.mogrify(
                f"""\
                    SELECT
                        DISTINCT "{key}" AS "{key}"
                    FROM public.sessions
                    {f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
                    ORDER BY "{key}"
                    LIMIT 20;""",
                {"value": value, "project_id": project_id}))
        rows = cur.fetchall()
    return {"data": [r[key] for r in rows]}
def get_available_keys(project_id):
    """Return only the key names of the project's defined metadata."""
    return [meta["key"] for meta in get(project_id=project_id)]
def get_by_session_id(project_id, session_id):
    """Return the metadata of one session as a list of {key_name: value} dicts."""
    defined = get(project_id=project_id)
    if not defined:
        return []
    # map each storage column to the human-readable key it holds
    col_to_key = {index_to_colname(meta["index"]): meta["key"] for meta in defined}
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                    select {",".join(col_to_key.keys())}
                    FROM public.sessions
                    WHERE project_id= %(project_id)s AND session_id=%(session_id)s;""",
                {"session_id": session_id, "project_id": project_id}))
        rows = cur.fetchall()
    return [{col_to_key[col]: row[col] for col in row.keys()} for row in rows]
def get_keys_by_projects(project_ids):
    """Return {project_id: {column_name: key_name}} for the given projects.

    Only non-NULL (i.e. defined) metadata columns appear in each inner dict.
    """
    if not project_ids:
        return {}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(
            f"""\
                SELECT
                    project_id,
                    {",".join(_get_column_names())}
                FROM public.projects
                WHERE project_id IN %(project_ids)s AND deleted_at ISNULL;""",
            {"project_ids": tuple(project_ids)}))
        rows = cur.fetchall()
    results = {}
    for row in rows:
        pid = row.pop("project_id")
        results[pid] = {col: row[col] for col in row if row[col] is not None}
    return results
def add_edit_delete(tenant_id, project_id, new_metas):
    """Synchronize a project's metadata with the client-supplied list `new_metas`.

    Entries carrying an "index" are edits of existing slots; entries without one
    are additions; existing indexes absent from `new_metas` are deletions.
    Returns {"data": [...]} (the refreshed metadata) or {"errors": [...]}.
    """
    old_metas = get(project_id)
    old_indexes = [k["index"] for k in old_metas]
    new_indexes = [k["index"] for k in new_metas if "index" in k]
    new_keys = [k["key"] for k in new_metas]
    # keys to create: the client sent no slot index for them
    add_metas = [k["key"] for k in new_metas
                 if "index" not in k]
    new_metas = {k["index"]: {"key": k["key"]} for
                 k in new_metas if
                 "index" in k}
    old_metas = {k["index"]: {"key": k["key"]} for k in old_metas}
    if len(new_keys) > 20:
        return {"errors": ["you cannot add more than 20 key"]}
    # every key (edited or added) must match the allowed key-name format
    for k in new_metas.keys():
        if re.match(regex, new_metas[k]["key"]) is None:
            return {"errors": [f"invalid key {k}"]}
    for k in add_metas:
        if re.match(regex, k) is None:
            return {"errors": [f"invalid key {k}"]}
    if len(new_indexes) > len(set(new_indexes)):
        return {"errors": ["duplicate indexes"]}
    if len(new_keys) > len(set(new_keys)):
        return {"errors": ["duplicate keys"]}
    # slots present before but missing from the new payload get deleted
    to_delete = list(set(old_indexes) - set(new_indexes))
    # NOTE(review): `cur` is never used here — delete/add/edit each open their
    # own connection; confirm whether this outer context is intentional.
    with pg_client.PostgresClient() as cur:
        for d in to_delete:
            delete(tenant_id=tenant_id, project_id=project_id, index=d)
        for k in add_metas:
            add(tenant_id=tenant_id, project_id=project_id, new_name=k)
        for k in new_metas.keys():
            # NOTE(review): lower-cases only the old key when comparing, and
            # assumes every client-sent index exists in old_metas (KeyError
            # otherwise) — confirm upstream validation covers this.
            if new_metas[k]["key"].lower() != old_metas[k]["key"]:
                edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"])
    return {"data": get(project_id)}
@dev.timed
def get_remaining_metadata_with_count(tenant_id):
    """For every project of the tenant, report metadata usage and remaining slots."""
    report = []
    for project in projects.get_projects(tenant_id=tenant_id):
        used = get(project["projectId"])
        # a negative MAX_INDEXES means "unlimited": report -1 remaining
        remaining = -1 if MAX_INDEXES < 0 else MAX_INDEXES - len(used)
        report.append({**project,
                       "limit": MAX_INDEXES,
                       "remaining": remaining,
                       "count": len(used)})
    return report