* ci(deployment): injecting secrets Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * fix: typo * feat(installation): Enterprise license check * fix(install): reset ee cli args Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * Fix typo * Update README.md * feat (tracker-axios): init plugin * fix (tracker-axios): version patch * Fixed alert's unknown metrics handler * fix (tracker-mobx): dev-dependencies and updated package-lock * feat: APIs for user session data deletion - wip * fix: alert metric value of performance.speed_index * Build and deploy scripts for enterprise edition (#13) * feat(installation): enterprise installation * chore(install): enabling ansible gather_facts Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): quotes for enterprise key Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(installation): enterprise install dbs Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): rename yaml * chore(install): change image tag Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): License key variable added * chore(deployment): Injecting enterprise license key in workers. * chore(install): remove deprecated files * chore(install): make domain_name mandatory in vars.yaml Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(actions): ee workers Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * feat(install): use local docker instead of crictl You can use the images built in the local machine, in installation, without putting that in any external registry. 
Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * feat: APIs for user session data deletion * feat: prefix deleted mobs with DEL_ * feat: schedules to delete mobs * chore(ci): fix ee build Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * feat(build): passing build args to internal scripts Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): moving kafka topic creation at the end Kafka pods usually take time to be active. Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): removing auth service. * chore(install): Adding rancher for cluster management * chore(install): proper name for alerts template * separate requirements and clean up * feat (frontend): typescript support * feat (tracker): 3.0.4: maintain baseURL & connAttempt options * feat(api): changed license validation * feat(api): ee-license fix for unprovided value * feat(api): fixed ee-signup cursor * feat(api): FOS fix replay-mob issue * feat(api): ee log ch-resources query * chore(ci): change openreplay-cli with kube-install.sh Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * ci(actions): change ee naming * feat(api): removed ch-logs * feat(install): injecting ee variables only on ee installation. Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * chore(install): remove license key from ee Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * fix(install): ch values for chalice * feat(clickhouse): moved creation scripts to EE folder * fix (backend-ee): disable ios tables so far * chore(install): remove deprecated mandatory variables. 
Signed-off-by: Rajesh Rajendran <rjshrjndrn@gmail.com> * feat(api): remove duplicate files & changed signup * fix(backend-ee): ch prepare after commit * fix(backend-ee): syntax * feat(api): added missing EE tenant column * fix(scripts-ee): correct default clickhouse host * feat(api): changed version_number location * feat(api): ee log ch-errors query * feat(api): ee fix ch-errors query * feat: skip to issue button (#23) * feat(api): 🐛 ee fix ambiguous ch-error query & accounts endpoint * Feature: Autoplay Sessions (#22) * feat: autoplay sessions * change: removed unused import * auto play filter by tab * feat(api): changed JWT authorizer & API_KEY authorizer & fix undefined project_key * feat (backend-devops): Dockerfile for all services in one image * feat(sourcemap-uploader): --verbose argument use instead of --log * feat(api): log middleware * Feature - dom inspector (#28) * feat (frontend): typescript support * feat(frontend): DOM Inspector init * fix(frontend): use tailwind bg * feat(frontend dom-inspector): add element selection & deletion * fix(frontend): todo comment * di - styling wip * feature(di) - editor theme * feat(frontend): parse attributes with RE (+ability to add) * feature(di) - input width * fix(ui): di - review changes Co-authored-by: ShiKhu <alex.kaminsky.11@gmail.com> * chore(install): remove deprecated init_dbs * feat(api): ee override multi-tenant-core * fix(frontend-build): gen css types before build * fix(ui) - checking for the license (#30) Co-authored-by: Rajesh Rajendran <rjshrjndrn@gmail.com> Co-authored-by: Mehdi Osman <estradino@users.noreply.github.com> Co-authored-by: ShiKhu <alex.kaminsky.11@gmail.com> Co-authored-by: KRAIEM Taha Yassine <tahayk2@gmail.com> Co-authored-by: Rajesh Rajendran <rjshrjndrn@users.noreply.github.com> Co-authored-by: ourvakan <hi-psi@yandex.com> Co-authored-by: tahayk2@gmail.com <enissay4ever4github>
230 lines
8.7 KiB
Python
230 lines
8.7 KiB
Python
import json
|
|
|
|
from chalicelib.core import users
|
|
from chalicelib.utils import pg_client, helper, dev
|
|
from chalicelib.utils.TimeUTC import TimeUTC
|
|
|
|
|
|
def __update(tenant_id, project_id, changes):
|
|
if len(changes.keys()) == 0:
|
|
return None
|
|
|
|
sub_query = []
|
|
for key in changes.keys():
|
|
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
|
|
with pg_client.PostgresClient() as cur:
|
|
cur.execute(
|
|
cur.mogrify(f"""\
|
|
UPDATE public.projects
|
|
SET
|
|
{" ,".join(sub_query)}
|
|
WHERE
|
|
project_id = %(project_id)s
|
|
AND deleted_at ISNULL
|
|
RETURNING project_id,name,gdpr;""",
|
|
{"project_id": project_id, **changes})
|
|
)
|
|
return helper.dict_to_camel_case(cur.fetchone())
|
|
|
|
|
|
def __create(tenant_id, name):
    """Insert a new active project named *name* and return its full record."""
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    INSERT INTO public.projects (name, active)
                    VALUES (%(name)s,TRUE)
                    RETURNING project_id;""",
                            {"name": name})
        cur.execute(query)
        new_project_id = cur.fetchone()["project_id"]
        # Re-read through get_project so the caller gets the canonical shape.
        return get_project(tenant_id=tenant_id, project_id=new_project_id, include_gdpr=True)
|
|
|
|
|
|
@dev.timed
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False):
    """Return all non-deleted projects, ordered by project_id, camelCased.

    Optional flags add columns / post-processing:
      gdpr               -- include the gdpr column.
      recorded           -- include a boolean: project has at least one session.
      stack_integrations -- include a boolean: project has any row in
                            public.integrations.
      recording_state    -- annotate each row with a "status" traffic light
                            (red/yellow/green) based on its newest session.

    NOTE(review): tenant_id is unused in this query — presumably filtering
    happens upstream in this edition; confirm.
    """
    with pg_client.PostgresClient() as cur:
        # The SELECT list and joins are assembled conditionally from the flags.
        cur.execute(f"""\
                    SELECT
                           s.project_id, s.name, s.project_key
                           {',s.gdpr' if gdpr else ''}
                           {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
                           {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
                    FROM public.projects AS s
                    {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
                    where s.deleted_at IS NULL
                    ORDER BY s.project_id;"""
                    )
        rows = cur.fetchall()
        if recording_state:
            # N+1 queries: one extra round-trip per project to fetch the
            # newest session start timestamp (0 when no sessions exist).
            for r in rows:
                query = cur.mogrify(
                    "select COALESCE(MAX(start_ts),0) AS last from public.sessions where project_id=%(project_id)s;",
                    {"project_id": r["project_id"]})
                cur.execute(
                    query=query
                )
                status = cur.fetchone()
                # TimeUTC.now(-2)/now(-1) are offsets from "now"; the unit
                # (hours? days?) is not visible here — TODO confirm.
                if status["last"] < TimeUTC.now(-2):
                    r["status"] = "red"
                elif status["last"] < TimeUTC.now(-1):
                    r["status"] = "yellow"
                else:
                    r["status"] = "green"

    return helper.list_to_camel_case(rows)
|
|
|
|
|
|
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
    """Fetch one non-deleted project by id.

    Optionally adds the latest recorded-session timestamp and/or the gdpr
    column. Returns a camelCased dict, or None when not found.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
                    SELECT
                           s.project_id,
                           s.project_key,
                           s.name
                           {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
                           {',s.gdpr' if include_gdpr else ''}
                    FROM public.projects AS s
                    where s.project_id =%(project_id)s
                        AND s.deleted_at IS NULL
                    LIMIT 1;""",
                            {"project_id": project_id})
        cur.execute(query)
        return helper.dict_to_camel_case(cur.fetchone())
|
|
|
|
|
|
def create(tenant_id, user_id, data):
    """Create a new project; only admins / super-admins are allowed."""
    requester = users.get(user_id=user_id, tenant_id=tenant_id)
    # Reject anyone who is neither admin nor owner.
    if not (requester["admin"] or requester["superAdmin"]):
        return {"errors": ["unauthorized"]}
    project_name = data.get("name", "my first project")
    return {"data": __create(tenant_id=tenant_id, name=project_name)}
|
|
|
|
|
|
def edit(tenant_id, user_id, project_id, data):
    """Rename a project; only admins / super-admins are allowed.

    NOTE(review): when "name" is missing from *data*, the project is renamed
    to the default "my first project" — confirm this is intended.
    """
    requester = users.get(user_id=user_id, tenant_id=tenant_id)
    if not (requester["admin"] or requester["superAdmin"]):
        return {"errors": ["unauthorized"]}
    new_name = data.get("name", "my first project")
    return {"data": __update(tenant_id=tenant_id, project_id=project_id,
                             changes={"name": new_name})}
|
|
|
|
|
|
def delete(tenant_id, user_id, project_id):
    """Soft-delete a project (stamp deleted_at, deactivate); admins only."""
    requester = users.get(user_id=user_id, tenant_id=tenant_id)

    if not (requester["admin"] or requester["superAdmin"]):
        return {"errors": ["unauthorized"]}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    UPDATE public.projects
                    SET
                      deleted_at = timezone('utc'::text, now()),
                      active = FALSE
                    WHERE
                        project_id = %(project_id)s;""",
                            {"project_id": project_id})
        cur.execute(query)
    return {"data": {"state": "success"}}
|
|
|
|
|
|
def count_by_tenant(tenant_id):
    """Count non-deleted projects.

    NOTE(review): tenant_id is unused in this query — presumably this
    edition is single-tenant; confirm.
    """
    with pg_client.PostgresClient() as cur:
        cur.execute("""\
                    SELECT
                           count(s.project_id)
                    FROM public.projects AS s
                    where s.deleted_at IS NULL;""")
        result = cur.fetchone()
    return result["count"]
|
|
|
|
|
|
def get_gdpr(project_id):
    """Return the gdpr configuration of a non-deleted project.

    Raises TypeError if the project does not exist (fetchone() is None).
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    SELECT
                           gdpr
                    FROM public.projects AS s
                    where s.project_id =%(project_id)s
                        AND s.deleted_at IS NULL;""",
                            {"project_id": project_id})
        cur.execute(query)
        return cur.fetchone()["gdpr"]
|
|
|
|
|
|
def edit_gdpr(project_id, gdpr):
    """Merge *gdpr* into the project's gdpr column and return the merged value.

    The merge uses the SQL || concatenation on the stored gdpr value with the
    JSON-serialized argument.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    UPDATE public.projects
                    SET
                        gdpr = gdpr|| %(gdpr)s
                    WHERE
                        project_id = %(project_id)s
                        AND deleted_at ISNULL
                    RETURNING gdpr;""",
                            {"project_id": project_id, "gdpr": json.dumps(gdpr)})
        cur.execute(query)
        return cur.fetchone()["gdpr"]
|
|
|
|
|
|
def get_internal_project_id(project_key):
    """Resolve a public project_key to the internal project_id.

    Returns None when no live (non-deleted) project carries that key.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    SELECT project_id
                    FROM public.projects
                    where project_key =%(project_key)s AND deleted_at ISNULL;""",
                            {"project_key": project_key})
        cur.execute(query)
        match = cur.fetchone()
    return match["project_id"] if match is not None else None
|
|
|
|
|
|
def get_project_key(project_id):
    """Return the public project_key for *project_id*.

    Returns None when the project does not exist or is soft-deleted.
    (Previously this raised TypeError by subscripting a None fetchone();
    guarded now for consistency with get_internal_project_id.)
    """
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                    SELECT project_key
                    FROM public.projects
                    where project_id =%(project_id)s AND deleted_at ISNULL;""",
                        {"project_id": project_id})
        )
        row = cur.fetchone()
    return row["project_key"] if row else None
|
|
|
|
|
|
def get_capture_status(project_id):
    """Return the capture sampling config of a non-deleted project.

    camelCased dict: {"rate": <sample_rate>, "captureAll": <rate == 100>},
    or None when the project is not found.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                    SELECT
                           sample_rate AS rate, sample_rate=100 AS capture_all
                    FROM public.projects
                    where project_id =%(project_id)s AND deleted_at ISNULL;""",
                            {"project_id": project_id})
        cur.execute(query)
        return helper.dict_to_camel_case(cur.fetchone())
|
|
|
|
|
|
def update_capture_status(project_id, changes):
    """Update a project's session-capture sampling rate.

    *changes* may contain:
      rate       -- int in 0..100, percentage of sessions to capture.
      captureAll -- truthy value forces the rate to 100.

    Returns *changes* unchanged on success, or {"errors": [...]} when
    neither attribute is provided or "rate" is out of range.
    """
    if "rate" not in changes and "captureAll" not in changes:
        return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]}
    sample_rate = 0
    if "rate" in changes:
        # Bug fix: validate "rate" only when it is actually supplied; the old
        # code read changes["rate"] unconditionally and raised KeyError when
        # the caller sent only "captureAll".
        if int(changes["rate"]) < 0 or int(changes["rate"]) > 100:
            return {"errors": ["'rate' must be between 0..100."]}
        sample_rate = int(changes["rate"])
    if changes.get("captureAll"):
        sample_rate = 100
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                    UPDATE public.projects
                    SET sample_rate= %(sample_rate)s
                    WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                        {"project_id": project_id, "sample_rate": sample_rate})
        )
    return changes
|