feat(api): custom metrics preview

Taha Yassine Kraiem 2022-01-06 14:42:48 +01:00
parent a2d9366c9f
commit f7cc837944
4 changed files with 57 additions and 39 deletions

View file

@@ -1,37 +1,19 @@
import json
import schemas
from chalicelib.core import sessions
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def try_live(project_id,data: schemas.CreateCustomMetricsSchema):
    with pg_client.PostgresClient() as cur:
        _data = {}
        for i, s in enumerate(data.series):
            for k in s.dict().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        data.series = None
        params = { "project_id": project_id, **data.dict(), **_data}
        query = cur.mogrify(f"""\
            WITH m AS (INSERT INTO metrics (project_id, user_id, title)
                       VALUES (%(project_id)s, %(user_id)s, %(title)s)
                       RETURNING *)
            INSERT
            INTO metric_series(metric_id, index, title, filter)
            VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(title_{i})s, %(filter_{i})s::jsonb)"
                              for i in range(series_len)])}
            RETURNING metric_id;""", params)
def try_live(project_id, data: schemas.TryCustomMetricsSchema):
    results = []
    for s in data.series:
        s.filter.startDate = data.startDate
        s.filter.endDate = data.endDate
        results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density))
    return results
        cur.execute(
            query
        )
        r = cur.fetchone()
        r = helper.dict_to_camel_case(r)
        return {"data": r}
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
    with pg_client.PostgresClient() as cur:
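A minimal, self-contained sketch of the preview flow introduced in this file: each series' filter is stamped with the preview window and run through sessions.search2_series, and nothing is written to metrics/metric_series. The dataclass stand-ins and the stub search function below are illustration-only assumptions, not part of the commit.

from dataclasses import dataclass, field
from typing import List


@dataclass
class _Filter:  # stand-in for the nested sessions filter (SessionsSearchPayloadSchema)
    startDate: int = 0
    endDate: int = 0


@dataclass
class _Series:  # stand-in for one entry of TryCustomMetricsSchema.series
    title: str
    filter: _Filter = field(default_factory=_Filter)


def _search2_series_stub(data, project_id, density):
    # stand-in for sessions.search2_series: one {timestamp, count} row per bucket
    return [{"timestamp": data.startDate, "count": 0} for _ in range(density)]


def try_live_preview(project_id: int, series: List[_Series], start: int, end: int, density: int):
    results = []
    for s in series:
        s.filter.startDate = start  # the preview window overrides each series' own dates
        s.filter.endDate = end
        results.append(_search2_series_stub(data=s.filter, project_id=project_id, density=density))
    return results  # one time series per metric series, never persisted


print(try_live_preview(1, [_Series("all sessions")], 1640995200000, 1641600000000, density=7))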

View file

@@ -1,7 +1,7 @@
import schemas
from chalicelib.core import events, metadata, events_ios, \
    sessions_mobs, issues, projects, errors, resources, assist, performance_event
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils import pg_client, helper, dev, metrics_helper
SESSION_PROJECTION_COLS = """s.project_id,
                             s.session_id::text AS session_id,
@@ -199,11 +199,11 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
        # ORDER BY favorite DESC, issue_score DESC, {sort} {order};""",
        # full_args)
        print("--------------------")
        print(main_query)
        # print("--------------------")
        # print(main_query)
        cur.execute(main_query)
        print("--------------------")
        # print("--------------------")
        if count_only:
            return helper.dict_to_camel_case(cur.fetchone())
        sessions = cur.fetchone()
@@ -230,6 +230,35 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
}
@dev.timed
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int):
    step_size = metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
                                               density=density, factor=1)
    full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
                                                     favorite_only=False, issue=None, project_id=project_id,
                                                     user_id=None)
    full_args["step_size"] = step_size
    with pg_client.PostgresClient() as cur:
        main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
                                                            {query_part})
                                     SELECT generated_timestamp AS timestamp,
                                            COUNT(s) AS count
                                     FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                          LEFT JOIN LATERAL ( SELECT 1 AS s
                                                              FROM full_sessions
                                                              WHERE start_ts >= generated_timestamp
                                                                AND start_ts < generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
                                     GROUP BY generated_timestamp
                                     ORDER BY generated_timestamp;""", full_args)
        # print("--------------------")
        # print(main_query)
        cur.execute(main_query)
        # print("--------------------")
        sessions = cur.fetchall()
    return sessions
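What the generate_series/LATERAL query above computes, in plain Python: sessions are counted per bucket of width step_size over [startDate, endDate]. The step formula below assumes metrics_helper.__get_step_size is roughly (endDate - startDate) // density when factor=1; that helper is not shown in this diff.

def bucket_counts(session_start_ts, start_date, end_date, density):
    # assumed step size: evenly split the window into `density` buckets
    step_size = max((end_date - start_date) // density, 1)
    buckets = []
    generated_timestamp = start_date
    while generated_timestamp <= end_date:  # generate_series is inclusive of the end bound
        count = sum(1 for ts in session_start_ts
                    if generated_timestamp <= ts < generated_timestamp + step_size)
        buckets.append({"timestamp": generated_timestamp, "count": count})
        generated_timestamp += step_size
    return buckets


# 3 sessions in a 70-second window, density 7 -> 10-second buckets
print(bucket_counts([5_000, 12_000, 13_000], 0, 70_000, density=7))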
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id):
    ss_constraints = []
    full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
@@ -707,11 +736,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(user_id)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
if not favorite_only and not errors_only:
if not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)"""
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)"""
extra_join = ""
if issue is not None:
extra_join = """
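The user_id guard added above matters for the new series path: search2_series calls search_query_parts with user_id=None, and the favorite_sessions join binds %(userId)s, so it has to be skipped when there is no user. A condensed sketch of that conditional (the helper name is illustrative only):

def favorite_sessions_join(favorite_only: bool, errors_only: bool, user_id):
    extra_from = ""
    if not favorite_only and not errors_only and user_id is not None:
        extra_from += """LEFT JOIN (SELECT user_id, session_id
                                    FROM public.user_favorite_sessions
                                    WHERE user_id = %(userId)s) AS favorite_sessions
                                    USING (session_id)"""
    return extra_from


assert favorite_sessions_join(False, False, user_id=None) == ""  # preview/series queries
assert "favorite_sessions" in favorite_sessions_join(False, False, user_id=42)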

View file

@@ -1091,14 +1091,15 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return custom_metrics.try_live(project_id=projectId, data=data)
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.try_live(project_id=projectId, data=data)}
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
                      context: schemas.CurrentContext = Depends(OR_context)):
    return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
@@ -1116,7 +1117,7 @@ def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentCo
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
                         context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.update(user_id=context.user_id, metric_id=metric_id,data=data)}
    return {"data": custom_metrics.update(user_id=context.user_id, metric_id=metric_id, data=data)}
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
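A hypothetical client call against the preview route wired up above; the host, token, and the exact series/filter payload are assumptions, with the metric and series field names following the title/filter columns used elsewhere in this commit.

import requests

payload = {
    "title": "Sessions last week",
    "series": [{"title": "all sessions", "filter": {"events": [], "filters": []}}],
    "startDate": 1640995200000,  # optional: TryCustomMetricsSchema provides defaults
    "endDate": 1641600000000,
    "density": 7,
}
r = requests.post("http://localhost:8000/1/custom_metrics/try",
                  json=payload,
                  headers={"Authorization": "Bearer <token>"})
print(r.json())  # {"data": [[{"timestamp": ..., "count": ...}, ...]]}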

View file

@@ -600,6 +600,12 @@ class CreateCustomMetricsSchema(BaseModel):
        alias_generator = attribute_to_camel_case
class TryCustomMetricsSchema(CreateCustomMetricsSchema):
    startDate: int = Field(TimeUTC.now(-7))
    endDate: int = Field(TimeUTC.now())
    density: int = Field(7)
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
    series_id: Optional[int] = Field(None)
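A minimal sketch of parsing the new schema, assuming pydantic v1 and that TimeUTC.now takes a day offset and returns epoch milliseconds; only the three fields added in this diff are reproduced, and the stand-in _now helper is not part of the codebase.

import time

from pydantic import BaseModel, Field


def _now(delta_days: int = 0) -> int:  # stand-in for TimeUTC.now(delta_days)
    return int((time.time() + delta_days * 86400) * 1000)


class TryCustomMetricsSketch(BaseModel):
    startDate: int = Field(_now(-7))  # defaults to a 7-day window, mirroring the diff
    endDate: int = Field(_now())
    density: int = Field(7)


m = TryCustomMetricsSketch.parse_obj({"density": 14})
print(m.endDate - m.startDate, m.density)  # ~7 days in ms, 14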