feat(chalice): refactored

feat(chalice): sessions-insight changes
This commit is contained in:
Taha Yassine Kraiem 2023-01-05 17:51:28 +01:00
parent fbcdee8ea1
commit c060fe9c18
7 changed files with 52 additions and 272 deletions

View file

@@ -1,7 +1,7 @@
import json
import schemas
from chalicelib.core import custom_metrics, metrics
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC

1
ee/api/.gitignore vendored
View file

@@ -189,6 +189,7 @@ Pipfile.lock
/chalicelib/core/collaboration_msteams.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/countries.py
/chalicelib/core/dashboards.py
#exp /chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
#exp /chalicelib/core/events.py

View file

@@ -1,229 +0,0 @@
import json
import schemas
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
if config("EXP_METRICS", cast=bool, default=False):
from . import metrics_exp as metrics
else:
from . import metrics as metrics
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
    """Create a dashboard for a project and, when ``data.metrics`` is supplied,
    attach those metrics as widgets in the same round-trip.

    Returns {"data": dashboard} on success or {"errors": [...]} on failure.
    """
    with pg_client.PostgresClient() as cur:
        query = """INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
                   VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
                   RETURNING *"""
        values = {"userId": user_id, "projectId": project_id, **data.dict()}
        if data.metrics:
            # One VALUES tuple per metric: widget config starts from the metric's
            # default_config merged with its position on the dashboard.
            widget_rows = ",".join(
                f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, "
                f"(SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)"
                for i in range(len(data.metrics)))
            query = f"""WITH dash AS ({query})
                        INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
                        VALUES {widget_rows}
                        RETURNING (SELECT dashboard_id FROM dash)"""
            for position, metric_id in enumerate(data.metrics):
                values[f"metric_id_{position}"] = metric_id
                values[f"config_{position}"] = json.dumps({"position": position})
        cur.execute(cur.mogrify(query, values))
        row = cur.fetchone()
    if row is None:
        return {"errors": ["something went wrong while creating the dashboard"]}
    return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
def get_dashboards(project_id, user_id):
    """List every non-deleted dashboard of the project that is visible to the
    user (their own plus public ones), camelCased for the API layer."""
    query = """SELECT *
               FROM dashboards
               WHERE deleted_at ISNULL
                 AND project_id = %(projectId)s
                 AND (user_id = %(userId)s OR is_public);"""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, {"userId": user_id, "projectId": project_id}))
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)
def get_dashboard(project_id, user_id, dashboard_id):
    """Fetch one dashboard together with its widgets (and each widget's series)
    aggregated as JSON, converting DB datetimes to epoch timestamps."""
    query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
               FROM dashboards
                        LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
                                           FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
                                                 FROM metrics
                                                          INNER JOIN dashboard_widgets USING (metric_id)
                                                          LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
                                                                             FROM metric_series
                                                                             WHERE metric_series.metric_id = metrics.metric_id
                                                                               AND metric_series.deleted_at ISNULL
                                                              ) AS metric_series ON (TRUE)
                                                 WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
                                                   AND metrics.deleted_at ISNULL
                                                   AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
                   ) AS all_metric_widgets ON (TRUE)
               WHERE dashboards.deleted_at ISNULL
                 AND dashboards.project_id = %(projectId)s
                 AND dashboard_id = %(dashboard_id)s
                 AND (dashboards.user_id = %(userId)s OR is_public);"""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, {"userId": user_id, "projectId": project_id,
                                        "dashboard_id": dashboard_id}))
        row = cur.fetchone()
    if row is not None:
        row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
        for widget in row["widgets"]:
            widget["created_at"] = TimeUTC.datetime_to_timestamp(widget["created_at"])
            widget["edited_at"] = TimeUTC.datetime_to_timestamp(widget["edited_at"])
            # grid placement is always taken from the metric's default_config
            widget["config"]["col"] = widget["default_config"]["col"]
            widget["config"]["row"] = widget["default_config"]["row"]
            for series in widget["series"]:
                series["created_at"] = TimeUTC.datetime_to_timestamp(series["created_at"])
    return helper.dict_to_camel_case(row)
def delete_dashboard(project_id, user_id, dashboard_id):
    """Soft-delete a dashboard (sets deleted_at); the user must own it or it
    must be public. Always reports success."""
    query = """UPDATE dashboards
               SET deleted_at = timezone('utc'::text, now())
               WHERE dashboards.project_id = %(projectId)s
                 AND dashboard_id = %(dashboard_id)s
                 AND (dashboards.user_id = %(userId)s OR is_public);"""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, {"userId": user_id, "projectId": project_id,
                                        "dashboard_id": dashboard_id}))
    return {"data": {"success": True}}
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
    """Update a dashboard's name/description (and optionally visibility and
    pinned state), appending any new metrics as widgets positioned after the
    existing ones. Returns the refreshed dashboard."""
    with pg_client.PostgresClient() as cur:
        # Count current widgets so new ones continue the position numbering.
        count_query = """SELECT COALESCE(COUNT(*),0) AS count
                         FROM dashboard_widgets
                         WHERE dashboard_id = %(dashboard_id)s;"""
        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
        cur.execute(cur.mogrify(count_query, params))
        offset = cur.fetchone()["count"]

        # Optional SET fragments: only touch flags the caller actually sent.
        public_sql = ", is_public = %(is_public)s" if data.is_public is not None else ""
        pinned_sql = ", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""
        query = f"""UPDATE dashboards
                    SET name = %(name)s,
                        description= %(description)s
                        {public_sql}
                        {pinned_sql}
                    WHERE dashboards.project_id = %(projectId)s
                      AND dashboard_id = %(dashboard_id)s
                      AND (dashboards.user_id = %(userId)s OR is_public)"""
        if data.metrics is not None and len(data.metrics) > 0:
            widget_rows = ",".join(
                f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, "
                f"(SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)"
                for i in range(len(data.metrics)))
            query = f"""WITH dash AS ({query})
                        INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
                        VALUES {widget_rows};"""
            for position, metric_id in enumerate(data.metrics):
                params[f"metric_id_{position}"] = metric_id
                params[f"config_{position}"] = json.dumps({"position": position + offset})
        cur.execute(cur.mogrify(query, params))
    return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
def get_widget(project_id, user_id, dashboard_id, widget_id):
    """Fetch the metric behind one dashboard widget, with its series aggregated
    as JSON. Visibility is enforced on both the dashboard and the metric."""
    query = """SELECT metrics.*, metric_series.series
               FROM dashboard_widgets
                        INNER JOIN dashboards USING (dashboard_id)
                        INNER JOIN metrics USING (metric_id)
                        LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
                                           FROM metric_series
                                           WHERE metric_series.metric_id = metrics.metric_id
                                             AND metric_series.deleted_at ISNULL
                   ) AS metric_series ON (TRUE)
               WHERE dashboard_id = %(dashboard_id)s
                 AND widget_id = %(widget_id)s
                 AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
                 AND dashboards.deleted_at IS NULL
                 AND metrics.deleted_at ISNULL
                 AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
                 AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
    values = {"userId": user_id, "projectId": project_id,
              "dashboard_id": dashboard_id, "widget_id": widget_id}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, values))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
    """Attach an existing metric to a dashboard as a widget. The INSERT only
    fires when the dashboard exists, is not deleted, and is visible to the
    user; the widget config is the metric's default_config merged with the
    caller-supplied config."""
    query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
               SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
                      %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
               WHERE EXISTS(SELECT 1 FROM dashboards
                            WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
                              AND dashboard_id = %(dashboard_id)s
                              AND (dashboards.user_id = %(userId)s OR is_public))
               RETURNING *;"""
    values = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
    # overwrite the dict from data.dict() with a JSON string for the ::jsonb cast
    values["config"] = json.dumps(data.config)
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, values))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
    """Replace a widget's config wholesale and return the updated row."""
    query = """UPDATE dashboard_widgets
               SET config= %(config)s
               WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
               RETURNING *;"""
    values = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
              "widget_id": widget_id, **data.dict()}
    # serialize config so the driver writes it as JSON text
    values["config"] = json.dumps(data.config)
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, values))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
def remove_widget(project_id, user_id, dashboard_id, widget_id):
    """Hard-delete a widget from a dashboard. Always reports success."""
    query = """DELETE FROM dashboard_widgets
               WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
    values = {"userId": user_id, "projectId": project_id,
              "dashboard_id": dashboard_id, "widget_id": widget_id}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, values))
    return {"data": {"success": True}}
def pin_dashboard(project_id, user_id, dashboard_id):
    """Pin one dashboard: first unpin every dashboard of the project, then set
    is_pinned on the target (if it exists and is not deleted). Both statements
    run in one round-trip."""
    query = """UPDATE dashboards
               SET is_pinned = FALSE
               WHERE project_id=%(project_id)s;
               UPDATE dashboards
               SET is_pinned = True
               WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
               RETURNING *;"""
    values = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, values))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCardSchema):
    """Create a new metric (card) and immediately attach it to the dashboard."""
    new_metric_id = custom_metrics.create(project_id=project_id, user_id=user_id,
                                          data=data, dashboard=True)
    payload = schemas.AddWidgetToDashboardPayloadSchema(metricId=new_metric_id)
    return add_widget(project_id=project_id, user_id=user_id,
                      dashboard_id=dashboard_id, data=payload)
# def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema):
# raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
# if raw_metric is None:
# return None
# metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate(**raw_metric)
# if metric.is_template:
# return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
# else:
# return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
# data=data, metric=raw_metric)

View file

@@ -1,6 +1,7 @@
import schemas_ee
from chalicelib.utils import ch_client
from datetime import datetime, timedelta
from chalicelib.utils.TimeUTC import TimeUTC
def _table_slice(table, index):
col = list()
for row in table:
@@ -137,12 +138,16 @@ SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.success) as success_rate,
def query_most_errors_by_period(project_id, start_time=(datetime.now()-timedelta(days=1)).strftime('%Y-%m-%d'),
end_time=datetime.now().strftime('%Y-%m-%d'), time_step=3600, conn=None):
function, steps = __handle_timestep(time_step)
query = f"""WITH
{function.format(f"toDateTime64('{start_time}', 0)")} as start,
{function.format(f"toDateTime64('{end_time}', 0)")} as end
SELECT T1.hh, count(T2.session_id) as sessions, T2.name as names, groupUniqArray(T2.source) as sources FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(toUInt32(start), toUInt32(end), {steps}))) as hh) AS T1
LEFT JOIN (SELECT session_id, name, source, message, {function.format('datetime')} as dtime FROM experimental.events WHERE project_id = {project_id} AND event_type = 'ERROR') AS T2 ON T2.dtime = T1.hh GROUP BY T1.hh, T2.name ORDER BY T1.hh DESC;
"""
query = f"""WITH {function.format(f"toDateTime64('{start_time}', 0)")} as start,
{function.format(f"toDateTime64('{end_time}', 0)")} as end
SELECT T1.hh, count(T2.session_id) as sessions, T2.name as names,
groupUniqArray(T2.source) as sources
FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(toUInt32(start), toUInt32(end), {steps}))) as hh) AS T1
LEFT JOIN (SELECT session_id, name, source, message, {function.format('datetime')} as dtime
FROM experimental.events
WHERE project_id = {project_id} AND event_type = 'ERROR') AS T2 ON T2.dtime = T1.hh
GROUP BY T1.hh, T2.name
ORDER BY T1.hh DESC;"""
if conn is None:
with ch_client.ClickHouseClient() as conn:
res = conn.execute(query=query)
@@ -242,40 +247,39 @@ def query_click_rage_by_period(project_id, start_time=(datetime.now()-timedelta(
}
def fetch_selected(project_id, data: schemas_ee.GetInsightsPayloadSchema, time_step=3600):
    """Run the insight queries the client selected and collect the results.

    :param project_id: project to query in ClickHouse.
    :param data: validated payload; ``selected_events`` (non-empty, enforced by
                 the schema) chooses among 'errors', 'network', 'rage' and
                 'resources'; ``startDate``/``endDate`` bound the time range.
    :param time_step: bucket size passed through to each query.
    :return: dict keyed by selected event type with each query's result.
    """
    # Bug fix: after the refactor to a payload object, the body still referenced
    # the old `start_time`/`end_time` parameters, which no longer exist — every
    # branch (except half of 'errors') raised NameError. Read them from `data`.
    output = {}
    with ch_client.ClickHouseClient() as conn:
        if 'errors' in data.selected_events:
            output['errors'] = query_most_errors_by_period(project_id, data.startDate, data.endDate,
                                                           time_step, conn=conn)
        if 'network' in data.selected_events:
            output['network'] = query_requests_by_period(project_id, data.startDate, data.endDate,
                                                         time_step, conn=conn)
        if 'rage' in data.selected_events:
            output['rage'] = query_click_rage_by_period(project_id, data.startDate, data.endDate,
                                                        time_step, conn=conn)
        if 'resources' in data.selected_events:
            output['resources'] = query_cpu_memory_by_period(project_id, data.startDate, data.endDate,
                                                             time_step, conn=conn)
    return output
if __name__ == '__main__':
# configs
start = '2022-04-19'
end = '2022-04-21'
projectId = 1307
time_step = 'hour'
# Errors widget
print('Errors example')
res = query_most_errors_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
print(res)
# Resources widgets
print('resources example')
res = query_cpu_memory_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
# Network widgets
print('Network example')
res = query_requests_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
print(res)
# if __name__ == '__main__':
# # configs
# start = '2022-04-19'
# end = '2022-04-21'
# projectId = 1307
# time_step = 'hour'
#
# # Errors widget
# print('Errors example')
# res = query_most_errors_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
# print(res)
#
# # Resources widgets
# print('resources example')
# res = query_cpu_memory_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
#
# # Network widgets
# print('Network example')
# res = query_requests_by_period(projectId, start_time=start, end_time=end, time_step=time_step)
# print(res)

View file

@@ -13,7 +13,7 @@ rm -rf ./chalicelib/core/countries.py
rm -rf ./chalicelib/core/errors_favorite.py
#exp rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
#exp rm -rf ./chalicelib/core/dashboards.py
rm -rf ./chalicelib/core/dashboards.py
#exp rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/integration_base.py
rm -rf ./chalicelib/core/integration_base_issue.py

View file

@@ -130,7 +130,7 @@ def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...)
@app.post('/{projectId}/dashboard/insights', tags=["insights"])
def sessions_search(projectId: int, data: schemas_ee.GetInsightsPayloadSchema = Body(...),
                    context: schemas_ee.CurrentContext = Depends(OR_context)):
    """Return session insights for the project, limited to the event types
    selected in the payload.

    Bug fix: the refactored ``fetch_selected(project_id, data, time_step)`` no
    longer accepts ``start_time``/``end_time`` keywords (it reads the range
    from ``data``); passing them raised TypeError on every request.
    """
    return {'data': sessions_insights.fetch_selected(project_id=projectId, data=data,
                                                     time_step=data.timestep)}

View file

@@ -43,7 +43,11 @@ class GetInsightsPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-1))
endDate: int = Field(TimeUTC.now())
timestep: str = Field(...)
selectedEvents: List[str] = Field(...)
# list of selected events must be non empty. Available events are 'errors', 'network', 'rage' and 'resources'
selected_events: List[str] = Field(..., min_items=1)
class Config:
alias_generator = schemas.attribute_to_camel_case
class CreateMemberSchema(schemas.CreateMemberSchema):