feat(api): custom metrics CRUD

feat(api): EE fixed add/edit project
feat(api): refactored sessions search
Taha Yassine Kraiem 2022-01-05 19:21:31 +01:00
parent e94de67738
commit 7aafd8bc83
5 changed files with 788 additions and 527 deletions


@@ -0,0 +1,200 @@
import json
import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
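# CRUD for user-defined ("custom") metrics: each metric is a row in "metrics"
# plus one or more "metric_series" rows, and every series stores a
# sessions-search filter as JSONB.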
def try_live(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
    with pg_client.PostgresClient() as cur:
        _data = {}
        for i, s in enumerate(data.series):
            for k in s.dict().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        data.series = None
        params = {"project_id": project_id, "user_id": user_id, **data.dict(), **_data}
        query = cur.mogrify(f"""\
            WITH m AS (INSERT INTO metrics (project_id, user_id, title)
                           VALUES (%(project_id)s, %(user_id)s, %(title)s)
                           RETURNING *)
            INSERT
            INTO metric_series(metric_id, index, title, filter)
            VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(title_{i})s, %(filter_{i})s::jsonb)"
                              for i in range(series_len)])}
            RETURNING metric_id;""", params)
        cur.execute(query)
        r = cur.fetchone()
        r = helper.dict_to_camel_case(r)
        return {"data": r}
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
    with pg_client.PostgresClient() as cur:
        _data = {}
        for i, s in enumerate(data.series):
            for k in s.dict().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        data.series = None
        params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
        query = cur.mogrify(f"""\
            WITH m AS (INSERT INTO metrics (project_id, user_id, title)
                           VALUES (%(project_id)s, %(user_id)s, %(title)s)
                           RETURNING *)
            INSERT
            INTO metric_series(metric_id, index, title, filter)
            VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(title_{i})s, %(filter_{i})s::jsonb)"
                              for i in range(series_len)])}
            RETURNING metric_id;""", params)
        cur.execute(query)
        r = cur.fetchone()
        r = helper.dict_to_camel_case(r)
        return {"data": r}
def __get_series_id(metric_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT series_id
                   FROM metric_series
                   WHERE metric_series.metric_id = %(metric_id)s
                     AND metric_series.deleted_at ISNULL;""",
                {"metric_id": metric_id}
            )
        )
        rows = cur.fetchall()
        return [r["series_id"] for r in rows]
def update(metric_id, user_id, data: schemas.UpdateCustomMetricsSchema):
    series_ids = __get_series_id(metric_id)
    n_series = []
    d_series_ids = []
    u_series = []
    u_series_ids = []
    params = {"metric_id": metric_id, "is_public": data.is_public, "title": data.title}
    for i, s in enumerate(data.series):
        prefix = "u_"
        if s.series_id is None:
            n_series.append({"i": i, "s": s})
            prefix = "n_"
        else:
            u_series.append({"i": i, "s": s})
            u_series_ids.append(s.series_id)
        ns = s.dict()
        for k in ns.keys():
            if k == "filter":
                ns[k] = json.dumps(ns[k])
            params[f"{prefix}{k}_{i}"] = ns[k]
    for i in series_ids:
        if i not in u_series_ids:
            d_series_ids.append(i)
    params["d_series_ids"] = tuple(d_series_ids)
    with pg_client.PostgresClient() as cur:
        sub_queries = []
        if len(n_series) > 0:
            sub_queries.append(f"""\
                n AS (INSERT INTO metric_series (metric_id, index, title, filter)
                          VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_title_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
                                            for s in n_series])}
                          RETURNING 1)""")
        if len(u_series) > 0:
            sub_queries.append(f"""\
                u AS (UPDATE metric_series
                      SET title=series.title,
                          filter=series.filter,
                          index=series.index
                      FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_title_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
                                              for s in u_series])}) AS series(series_id, index, title, filter)
                      WHERE metric_id = %(metric_id)s AND series_id = series.series_id
                      RETURNING 1)""")
        if len(d_series_ids) > 0:
            sub_queries.append("""\
                d AS (DELETE FROM metric_series
                      WHERE metric_id = %(metric_id)s AND series_id IN %(d_series_ids)s
                      RETURNING 1)""")
        query = cur.mogrify(f"""\
            {"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
            UPDATE metrics
            SET title = %(title)s, is_public = %(is_public)s
            WHERE metric_id = %(metric_id)s
            RETURNING metric_id;""", params)
        cur.execute(query)
        r = cur.fetchone()
        r = helper.dict_to_camel_case(r)
        return r
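# get_all()/get() return each metric with its series aggregated into a JSON array
# through a LATERAL jsonb_agg sub-query; results are limited to the owner or to
# public metrics.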
def get_all(project_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT *
                   FROM metrics
                            LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
                                               FROM metric_series
                                               WHERE metric_series.metric_id = metrics.metric_id
                                                 AND metric_series.deleted_at ISNULL
                            ) AS metric_series ON (TRUE)
                   WHERE metrics.project_id = %(project_id)s
                     AND metrics.deleted_at ISNULL
                     AND (user_id = %(user_id)s OR is_public)
                   ORDER BY created_at;""",
                {"project_id": project_id, "user_id": user_id}
            )
        )
        rows = cur.fetchall()
        for r in rows:
            r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
        rows = helper.list_to_camel_case(rows)
        return rows
def delete(project_id, metric_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.metrics
                SET deleted_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND metric_id = %(metric_id)s;""",
                        {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
        )
        return {"state": "success"}
def get(metric_id, project_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT *
                   FROM metrics
                            LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
                                               FROM metric_series
                                               WHERE metric_series.metric_id = metrics.metric_id
                                                 AND metric_series.deleted_at ISNULL
                            ) AS metric_series ON (TRUE)
                   WHERE metrics.project_id = %(project_id)s
                     AND metrics.deleted_at ISNULL
                     AND (metrics.user_id = %(user_id)s OR metrics.is_public)
                     AND metrics.metric_id = %(metric_id)s
                   ORDER BY created_at;""",
                {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
            )
        )
        row = cur.fetchone()
        row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
        return helper.dict_to_camel_case(row)
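For reference, create() and try_live() flatten every series into indexed parameters (title_0, filter_0, index_0, ...) so one mogrify() call can bind all of them. A minimal standalone sketch of that convention, with invented payload values and no database connection:

import json

# Hypothetical two-series payload shaped like CreateCustomMetricsSchema.
payload = {"title": "signup funnel",
           "series": [{"title": "visited signup", "filter": {"events": []}},
                      {"title": "completed signup", "filter": {"events": []}}]}
params = {"project_id": 1, "user_id": 1, "title": payload["title"]}
for i, s in enumerate(payload["series"]):
    params[f"title_{i}"] = s["title"]
    params[f"index_{i}"] = i
    params[f"filter_{i}"] = json.dumps(s["filter"])
values = ",".join(f"((SELECT metric_id FROM m), %(index_{i})s, %(title_{i})s, %(filter_{i})s::jsonb)"
                  for i in range(len(payload["series"])))
print(values)  # two placeholder tuples, one per series, bound against params by mogrify()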

File diff suppressed because it is too large.


@@ -10,7 +10,8 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
    log_tool_stackdriver, reset_password, sessions_favorite_viewed, \
    log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
    log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
-    assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, slack, users
+    assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, slack, users, \
+    custom_metrics
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -1086,3 +1087,38 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
    return users.change_password(email=context.email, old_password=data.old_password,
                                 new_password=data.new_password, tenant_id=context.tenant_id,
                                 user_id=context.user_id)
@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return custom_metrics.try_live(project_id=projectId, user_id=context.user_id, data=data)
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
                         context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.update(user_id=context.user_id, metric_id=metric_id, data=data)}
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}


@@ -576,3 +576,33 @@ class SentrySchema(BaseModel):
class MobileSignPayloadSchema(BaseModel):
    keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema):
    startDate: Optional[int] = Field(None)
    endDate: Optional[int] = Field(None)
    sort: Optional[str] = Field(None)
    order: Optional[str] = Field(None)
class CustomMetricCreateSeriesSchema(BaseModel):
    title: Optional[str] = Field(None)
    index: Optional[int] = Field(None)
    filter: Optional[CustomMetricSeriesFilterSchema] = Field([])
class CreateCustomMetricsSchema(BaseModel):
    title: str = Field(...)
    series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
    is_public: Optional[bool] = Field(False)
    class Config:
        alias_generator = attribute_to_camel_case
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
    series_id: Optional[int] = Field(None)
class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
    series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
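UpdateCustomMetricsSchema only adds series_id per series, which is what update() in custom_metrics.py uses to choose between insert, update, and delete. A small sketch of the parsing behaviour (run inside the API codebase; series filters are omitted because SessionsSearchPayloadSchema is not shown here). Note that only the top-level schema carries the camelCase alias_generator, so isPublic is camelCased while series_id stays snake_case:

import schemas

payload = {"title": "errors per day", "isPublic": False,
           "series": [{"series_id": 12, "title": "all errors"},  # existing series -> UPDATE branch
                      {"title": "4xx only"}]}                     # no series_id -> INSERT branch
metric = schemas.UpdateCustomMetricsSchema.parse_obj(payload)
print([s.series_id for s in metric.series])  # [12, None]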


@@ -1,5 +1,6 @@
import json
+import schemas
from chalicelib.core import users
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -104,8 +105,8 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
        query = cur.mogrify(f"""\
                SELECT
                       s.project_id,
-                      s.name,
-                      s.project_key
+                      s.project_key,
+                      s.name
                       {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
                       {',s.gdpr' if include_gdpr else ''}
                       {tracker_query}
@@ -129,20 +130,20 @@ def is_authorized(project_id, tenant_id):
    return get_project(tenant_id=tenant_id, project_id=project_id) is not None
-def create(tenant_id, user_id, data, skip_authorization=False):
+def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
    if not skip_authorization:
        admin = users.get(user_id=user_id, tenant_id=tenant_id)
        if not admin["admin"] and not admin["superAdmin"]:
            return {"errors": ["unauthorized"]}
-    return {"data": __create(tenant_id=tenant_id, name=data.get("name", "my first project"))}
+    return {"data": __create(tenant_id=tenant_id, name=data.name)}
-def edit(tenant_id, user_id, project_id, data):
+def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
    admin = users.get(user_id=user_id, tenant_id=tenant_id)
    if not admin["admin"] and not admin["superAdmin"]:
        return {"errors": ["unauthorized"]}
    return {"data": __update(tenant_id=tenant_id, project_id=project_id,
-                            changes={"name": data.get("name", "my first project")})}
+                            changes={"name": data.name})}
def delete(tenant_id, user_id, project_id):
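With the typed signatures, callers pass a CreateProjectSchema instead of a raw dict. A minimal sketch, assuming the module lives at chalicelib.core.projects and that CreateProjectSchema (defined outside this diff) exposes at least a name field:

import schemas
from chalicelib.core import projects  # assumed module path

data = schemas.CreateProjectSchema(name="My first project")
projects.create(tenant_id=1, user_id=1, data=data)
projects.edit(tenant_id=1, user_id=1, project_id=42, data=data)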