feat(chalice): fixed same-package-shadow
This commit is contained in:
parent
ed0eb2939f
commit
9967f2ec9c
9 changed files with 3348 additions and 14 deletions
14
ee/api/.gitignore
vendored
14
ee/api/.gitignore
vendored
|
|
@ -180,16 +180,16 @@ Pipfile
|
||||||
.local/*
|
.local/*
|
||||||
|
|
||||||
/chalicelib/core/alerts.py
|
/chalicelib/core/alerts.py
|
||||||
/chalicelib/core/alerts_processor.py
|
#exp /chalicelib/core/alerts_processor.py
|
||||||
/chalicelib/core/announcements.py
|
/chalicelib/core/announcements.py
|
||||||
/chalicelib/core/autocomplete.py
|
/chalicelib/core/autocomplete.py
|
||||||
/chalicelib/core/collaboration_slack.py
|
/chalicelib/core/collaboration_slack.py
|
||||||
/chalicelib/core/countries.py
|
/chalicelib/core/countries.py
|
||||||
/chalicelib/core/errors.py
|
#exp /chalicelib/core/errors.py
|
||||||
/chalicelib/core/errors_favorite.py
|
/chalicelib/core/errors_favorite.py
|
||||||
/chalicelib/core/events.py
|
#exp /chalicelib/core/events.py
|
||||||
/chalicelib/core/events_ios.py
|
/chalicelib/core/events_ios.py
|
||||||
/chalicelib/core/funnels.py
|
#exp /chalicelib/core/funnels.py
|
||||||
/chalicelib/core/integration_base.py
|
/chalicelib/core/integration_base.py
|
||||||
/chalicelib/core/integration_base_issue.py
|
/chalicelib/core/integration_base_issue.py
|
||||||
/chalicelib/core/integration_github.py
|
/chalicelib/core/integration_github.py
|
||||||
|
|
@ -214,7 +214,7 @@ Pipfile
|
||||||
/chalicelib/core/sessions_assignments.py
|
/chalicelib/core/sessions_assignments.py
|
||||||
/chalicelib/core/sessions_metas.py
|
/chalicelib/core/sessions_metas.py
|
||||||
/chalicelib/core/sessions_mobs.py
|
/chalicelib/core/sessions_mobs.py
|
||||||
/chalicelib/core/significance.py
|
#exp /chalicelib/core/significance.py
|
||||||
/chalicelib/core/slack.py
|
/chalicelib/core/slack.py
|
||||||
/chalicelib/core/socket_ios.py
|
/chalicelib/core/socket_ios.py
|
||||||
/chalicelib/core/sourcemaps.py
|
/chalicelib/core/sourcemaps.py
|
||||||
|
|
@ -255,11 +255,11 @@ Pipfile
|
||||||
/chalicelib/core/heatmaps.py
|
/chalicelib/core/heatmaps.py
|
||||||
/routers/subs/insights.py
|
/routers/subs/insights.py
|
||||||
/schemas.py
|
/schemas.py
|
||||||
/chalicelib/core/custom_metrics.py
|
#exp /chalicelib/core/custom_metrics.py
|
||||||
/chalicelib/core/performance_event.py
|
/chalicelib/core/performance_event.py
|
||||||
/chalicelib/core/saved_search.py
|
/chalicelib/core/saved_search.py
|
||||||
/app_alerts.py
|
/app_alerts.py
|
||||||
/build_alerts.sh
|
/build_alerts.sh
|
||||||
/routers/subs/metrics.py
|
/routers/subs/metrics.py
|
||||||
/routers/subs/v1_api.py
|
/routers/subs/v1_api.py
|
||||||
/chalicelib/core/dashboards.py
|
#exp /chalicelib/core/dashboards.py
|
||||||
|
|
|
||||||
241
ee/api/chalicelib/core/alerts_processor.py
Normal file
241
ee/api/chalicelib/core/alerts_processor.py
Normal file
|
|
@ -0,0 +1,241 @@
|
||||||
|
import decimal
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import alerts_listener
|
||||||
|
from chalicelib.core import alerts
|
||||||
|
from chalicelib.utils import pg_client
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
|
||||||
|
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||||
|
from chalicelib.core import sessions_legacy as sessions
|
||||||
|
else:
|
||||||
|
from chalicelib.core import sessions
|
||||||
|
|
||||||
|
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||||
|
|
||||||
|
# Maps each alert metric column (schemas.AlertColumn) to the SQL fragments
# Build() uses to compute the metric's current value in PostgreSQL:
#   "table":        FROM clause (event table, usually joined with public.sessions)
#   "formula":      aggregate expression producing the value
#   "condition":    optional extra WHERE predicate, ANDed in by Build()
#   "joinSessions": optional flag (Build() defaults it to True) — whether the
#                   sessions.start_ts time filters apply to this table
LeftToDb = {
    schemas.AlertColumn.performance__dom_content_loaded__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
    # NOTE(review): "first meaningful paint" is computed from
    # first_contentful_paint_time — confirm this mapping is intentional.
    schemas.AlertColumn.performance__first_meaningful_paint__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
    schemas.AlertColumn.performance__page_load_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
    schemas.AlertColumn.performance__dom_build_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(dom_building_time,0))"},
    schemas.AlertColumn.performance__speed_index__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
    schemas.AlertColumn.performance__page_response_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(response_time,0))"},
    # NOTE(review): TTFB is computed from first_paint_time — confirm intended.
    schemas.AlertColumn.performance__ttfb__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(first_paint_time,0))"},
    schemas.AlertColumn.performance__time_to_render__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(visually_complete,0))"},
    schemas.AlertColumn.performance__image_load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
    schemas.AlertColumn.performance__request_load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
    schemas.AlertColumn.resources__load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))"},
    schemas.AlertColumn.resources__missing__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
    schemas.AlertColumn.errors__4xx_5xx__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
        "condition": "status/100!=2"},
    schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
                                             "formula": "COUNT(session_id)", "condition": "status/100=4"},
    schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
                                             "formula": "COUNT(session_id)", "condition": "status/100=5"},
    schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
    schemas.AlertColumn.performance__crashes__count: {
        "table": "public.sessions",
        "formula": "COUNT(DISTINCT session_id)",
        "condition": "errors_count > 0 AND duration>0"},
    # The error tables already carry the session reference, so the
    # sessions.start_ts filters must NOT be applied ("joinSessions": False).
    schemas.AlertColumn.errors__javascript__count: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
    schemas.AlertColumn.errors__backend__count: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}
|
||||||
|
|
||||||
|
# This is the frequency of execution for each threshold.
# Keys are alert periods in minutes; values are the evaluation cadence in
# minutes — can_check() only lets an alert run when the current minute lands
# on this cadence relative to the alert's creation time.
TimeInterval = {
    15: 3,
    30: 5,
    60: 10,
    120: 20,
    240: 30,
    1440: 60,
}
|
||||||
|
|
||||||
|
|
||||||
|
def can_check(a) -> bool:
    """Decide whether alert *a* is due for evaluation right now.

    True when the re-notification window has elapsed (or was never set)
    AND the current minute falls on the alert's evaluation cadence
    (see TimeInterval). Unknown periods are logged and rejected.
    """
    current_ts = TimeUTC.now()
    opts = a["options"]

    # "change" detection with a longer current period drives the cadence
    # off currentPeriod; every other case uses previousPeriod.
    if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
            and opts["currentPeriod"] > opts["previousPeriod"]:
        repetition_base = opts["currentPeriod"]
    else:
        repetition_base = opts["previousPeriod"]

    if TimeInterval.get(repetition_base) is None:
        logging.error(f"repetitionBase: {repetition_base} NOT FOUND")
        return False

    renotify_ok = (opts["renotifyInterval"] <= 0
                   or opts.get("lastNotification") is None
                   or opts["lastNotification"] <= 0
                   or (current_ts - opts["lastNotification"]) > opts["renotifyInterval"] * 60 * 1000)
    # Due when we are within one minute of a cadence multiple since creation.
    on_schedule = ((current_ts - a["createdAt"]) % (TimeInterval[repetition_base] * 60 * 1000)) < 60 * 1000
    return renotify_ok and on_schedule
|
||||||
|
|
||||||
|
|
||||||
|
def Build(a):
    """Build the (SQL, params) pair that evaluates alert *a* in PostgreSQL.

    The alert monitors either a saved series (a["seriesId"] is set; the
    session-search filter is compiled into a COUNT sub-query) or one of the
    predefined metric columns described by LeftToDb.

    Depending on a["detectionMethod"]:
      - threshold: metric computed over the current period only;
      - change:    value = current period - previous period;
      - % change:  value = (current / previous - 1) * 100.

    Returns:
        (query, params) ready to be passed to cursor.mogrify().
    """
    now = TimeUTC.now()
    params = {"project_id": a["projectId"], "now": now}
    full_args = {}
    j_s = True  # whether sessions.start_ts time filters apply to the sub-query
    if a["seriesId"] is not None:
        # Saved series: reuse the session-search query builder.
        a["filter"]["sort"] = "session_id"
        a["filter"]["order"] = schemas.SortOrderType.desc
        a["filter"]["startDate"] = -1
        a["filter"]["endDate"] = TimeUTC.now()
        full_args, query_part = sessions.search_query_parts(
            data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
            issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
        subQ = f"""SELECT COUNT(session_id) AS value
                {query_part}"""
    else:
        # Predefined metric: table/formula/condition come from LeftToDb.
        colDef = LeftToDb[a["query"]["left"]]
        subQ = f"""SELECT {colDef["formula"]} AS value
                   FROM {colDef["table"]}
                   WHERE project_id = %(project_id)s
                         {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
        j_s = colDef.get("joinSessions", True)

    q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""

    if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
        if a["seriesId"] is not None:
            q += f""" FROM ({subQ}) AS stat"""
        else:
            q += f""" FROM ({subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
                      {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
                      {"AND sessions.start_ts <= %(now)s" if j_s else ""}) AS stat"""
        params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
    else:
        if a["change"] == schemas.AlertDetectionType.change:
            # Absolute change: current period minus the preceding period.
            if a["seriesId"] is not None:
                sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
                sub1 = f"SELECT (({subQ})-({sub2})) AS value"
                q += f" FROM ( {sub1} ) AS stat"
                params = {**params, **full_args,
                          "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
                          "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
            else:
                # BUGFIX: this branch previously bounded the current period
                # with ClickHouse syntax ("datetime<=toDateTime(%(now)s/1000)"),
                # which is invalid for this PostgreSQL (pg_client) path; use
                # the same timestamp predicate as every sibling branch.
                sub1 = f"""{subQ} AND timestamp>=%(startDate)s
                           AND timestamp<=%(now)s
                           {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
                           {"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
                params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
                sub2 = f"""{subQ} AND timestamp<%(startDate)s
                           AND timestamp>=%(timestamp_sub2)s
                           {"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
                params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
                sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
                q += f" FROM ( {sub1} ) AS stat"

        else:
            # Percentage change: (current / previous - 1) * 100; NULLIF guards
            # against division by zero when the previous period is empty.
            if a["seriesId"] is not None:
                sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
                sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
                q += f" FROM ({sub1}) AS stat"
                params = {**params, **full_args,
                          "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
                          "timestamp_sub2": TimeUTC.now()
                                            - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"])
                                            * 60 * 1000}
            else:
                sub1 = f"""{subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
                           {"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
                           {"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
                params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
                sub2 = f"""{subQ} AND timestamp<%(startDate)s
                           AND timestamp>=%(timestamp_sub2)s
                           {"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
                params["timestamp_sub2"] = TimeUTC.now() \
                                           - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
                sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
                q += f" FROM ({sub1}) AS stat"

    return q, params
|
||||||
|
|
||||||
|
|
||||||
|
def process():
    """Evaluate every saved alert once and dispatch triggered notifications.

    Alerts that are due (can_check) have their query built and executed;
    failures are logged per alert and skipped. Triggered alerts get their
    lastNotification timestamp persisted before notifications go out.
    """
    notifications = []
    all_alerts = alerts_listener.get_all_alerts()
    with pg_client.PostgresClient() as cur:
        for alert in all_alerts:
            if not can_check(alert):
                continue
            logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
            query, params = Build(alert)
            query = cur.mogrify(query, params)
            logging.debug(alert)
            logging.debug(query)
            try:
                cur.execute(query)
                row = cur.fetchone()
                if row["valid"]:
                    logging.info("Valid alert, notifying users")
                    notifications.append(generate_notification(alert, row))
            except Exception as e:
                # One broken alert must not stop the whole batch.
                logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
                logging.error(str(e))
                logging.error(query)
        if len(notifications) > 0:
            triggered_ids = tuple([n["alertId"] for n in notifications])
            # Stamp lastNotification so can_check() honors renotifyInterval.
            cur.execute(
                cur.mogrify(f"""UPDATE public.Alerts
                                SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
                                WHERE alert_id IN %(ids)s;""", {"ids": triggered_ids}))
    if len(notifications) > 0:
        alerts.process_notifications(notifications)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_notification(alert, result):
    """Shape a triggered *alert* and its query *result* into a notification payload."""
    trigger = alert["query"]["left"]
    operator = alert["query"]["operator"]
    limit_value = alert["query"]["right"]
    value = result["value"]
    # psycopg returns NUMERIC columns as Decimal; normalize for JSON payloads.
    actual_value = float(value) if isinstance(value, decimal.Decimal) else value
    return {
        "alertId": alert["alertId"],
        "tenantId": alert["tenantId"],
        "title": alert["name"],
        "description": f"has been triggered, {trigger} = {round(value, 2)} ({operator} {limit_value}).",
        "buttonText": "Check metrics for more details",
        "buttonUrl": f"/{alert['projectId']}/metrics",
        "imageUrl": None,
        "options": {
            "source": "ALERT",
            "sourceId": alert["alertId"],
            "sourceMeta": alert["detectionMethod"],
            "message": alert["options"]["message"],
            "projectId": alert["projectId"],
            "data": {
                "title": alert["name"],
                "limitValue": limit_value,
                "actualValue": actual_value,
                "operator": operator,
                "trigger": trigger,
                "alertId": alert["alertId"],
                "detectionMethod": alert["detectionMethod"],
                "currentPeriod": alert["options"]["currentPeriod"],
                "previousPeriod": alert["options"]["previousPeriod"],
                "createdAt": TimeUTC.now(),
            },
        },
    }
|
||||||
551
ee/api/chalicelib/core/custom_metrics.py
Normal file
551
ee/api/chalicelib/core/custom_metrics.py
Normal file
|
|
@ -0,0 +1,551 @@
|
||||||
|
import json
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import funnels, issues
|
||||||
|
from chalicelib.utils import helper, pg_client
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||||
|
print(">>> Using experimental error search")
|
||||||
|
from . import errors_exp as errors
|
||||||
|
else:
|
||||||
|
from . import errors as errors
|
||||||
|
|
||||||
|
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||||
|
from chalicelib.core import sessions_legacy as sessions
|
||||||
|
else:
|
||||||
|
from chalicelib.core import sessions
|
||||||
|
|
||||||
|
PIE_CHART_GROUP = 5
|
||||||
|
|
||||||
|
|
||||||
|
def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
    """Run each series of an (unsaved) metric against live session data.

    The payload's global time range overrides every series' own filter
    before sessions.search2_series() is executed.

    Post-processing by view type:
      - progress:  the raw count is wrapped together with the count of the
                   preceding period of equal length and the relative progress;
      - pie_chart: only the top PIE_CHART_GROUP values are kept, the rest
                   collapse into a single "Others" bucket.
    """
    results = []
    for i, s in enumerate(data.series):
        # Global range wins over the per-series range.
        s.filter.startDate = data.startTimestamp
        s.filter.endDate = data.endTimestamp
        results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                               view_type=data.view_type, metric_type=data.metric_type,
                                               metric_of=data.metric_of, metric_value=data.metric_value))
        if data.view_type == schemas.MetricTimeseriesViewType.progress:
            r = {"count": results[-1]}
            # Shift the filter one period back (in place) to fetch the
            # comparison count for the progress computation.
            diff = s.filter.endDate - s.filter.startDate
            s.filter.endDate = s.filter.startDate
            s.filter.startDate = s.filter.endDate - diff
            r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                         view_type=data.view_type, metric_type=data.metric_type,
                                                         metric_of=data.metric_of, metric_value=data.metric_value)
            r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
            # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \
            #     if r["previousCount"] > 0 else 0
            r["seriesName"] = s.name if s.name else i + 1
            r["seriesId"] = s.series_id if s.series_id else None
            results[-1] = r
        elif data.view_type == schemas.MetricTableViewType.pie_chart:
            if len(results[i].get("values", [])) > PIE_CHART_GROUP:
                # Keep the largest PIE_CHART_GROUP slices; merge the tail.
                results[i]["values"] = results[i]["values"][:PIE_CHART_GROUP] \
                                       + [{
                    "name": "Others", "group": True,
                    "sessionCount": sum(r["sessionCount"] for r in results[i]["values"][PIE_CHART_GROUP:])
                }]

    return results
|
||||||
|
|
||||||
|
|
||||||
|
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
    """True when the payload describes a funnel metric."""
    is_funnel = data.metric_type == schemas.MetricType.funnel
    return is_funnel
|
||||||
|
|
||||||
|
|
||||||
|
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
    """Compute funnel insights for the FIRST series of the payload.

    Returns an empty, well-formed funnel when the payload has no series.
    """
    if len(data.series) == 0:
        return {"stages": [],
                "totalDropDueToIssues": 0}
    first_filter = data.series[0].filter
    # Global time range overrides the series' own range.
    first_filter.startDate = data.startTimestamp
    first_filter.endDate = data.endTimestamp
    return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=first_filter)
|
||||||
|
|
||||||
|
|
||||||
|
def __is_errors_list(data):
    """True for a table metric whose rows are errors."""
    return (data.metric_type == schemas.MetricType.table
            and data.metric_of == schemas.TableMetricOfType.errors)
|
||||||
|
|
||||||
|
|
||||||
|
def __get_errors_list(project_id, user_id, data):
    """Run the paginated error search for the FIRST series of the payload.

    Returns an empty, well-formed result when the payload has no series.
    """
    if len(data.series) == 0:
        return {"total": 0,
                "errors": []}
    first_filter = data.series[0].filter
    # Global time range and pagination override the series' own filter.
    first_filter.startDate = data.startTimestamp
    first_filter.endDate = data.endTimestamp
    first_filter.page = data.page
    first_filter.limit = data.limit
    return errors.search(first_filter, project_id=project_id, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def __is_sessions_list(data):
    """True for a table metric whose rows are sessions."""
    return (data.metric_type == schemas.MetricType.table
            and data.metric_of == schemas.TableMetricOfType.sessions)
|
||||||
|
|
||||||
|
|
||||||
|
def __get_sessions_list(project_id, user_id, data):
    """Run the paginated session search for the FIRST series of the payload.

    The payload's global time range and pagination override the series'
    own filter before searching. Returns an empty, well-formed result when
    the payload has no series.
    """
    if len(data.series) == 0:
        # Fixed: removed a leftover debug print("empty series") to stdout.
        return {
            "total": 0,
            "sessions": []
        }
    data.series[0].filter.startDate = data.startTimestamp
    data.series[0].filter.endDate = data.endTimestamp
    data.series[0].filter.page = data.page
    data.series[0].filter.limit = data.limit
    return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
    """Evaluate an (unsaved) metric and merge its series into chart rows.

    Funnel, errors-table and sessions-table metrics are delegated to their
    dedicated helpers. Progress and table views return the per-series
    results unmerged; timeseries views are merged into one dict per
    timestamp, keyed by each series' name (or its 1-based index).
    """
    if __is_funnel_chart(data):
        return __get_funnel_chart(project_id=project_id, data=data)
    if __is_errors_list(data):
        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
    if __is_sessions_list(data):
        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)

    series_charts = __try_live(project_id=project_id, data=data)
    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
        return series_charts
    merged_rows = []
    for idx in range(len(series_charts[0])):
        row = {}
        for j, chart in enumerate(series_charts):
            label = data.series[j].name if data.series[j].name else j + 1
            row = {**row, "timestamp": chart[idx]["timestamp"], label: chart[idx]["count"]}
        merged_rows.append(row)
    return merged_rows
|
||||||
|
|
||||||
|
|
||||||
|
def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
                                                 schemas.CustomMetricSessionsPayloadSchema]) \
        -> Union[schemas.CreateCustomMetricsSchema, None]:
    """Overlay a request payload onto a stored metric definition.

    Payload series (when present) replace the stored ones; payload-level
    filters/events are appended to every resulting series' filter. The
    merge is validated through CreateCustomMetricsSchema.
    """
    if data.series is not None and len(data.series) > 0:
        metric["series"] = data.series
    merged: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
    has_filters = len(data.filters) > 0
    has_events = len(data.events) > 0
    if has_filters or has_events:
        for series in merged.series:
            if has_filters:
                series.filter.filters += data.filters
            if has_events:
                series.filter.events += data.events
    return merged
|
||||||
|
|
||||||
|
|
||||||
|
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
    """Build chart data for a stored metric.

    Loads the metric when not supplied, overlays the request payload on it,
    then delegates to merged_live(). Returns None when the metric does not
    exist or is not accessible to this user.

    Cleanup: removed a large block of commented-out dead code that
    duplicated merged_live()'s dispatch/merge logic.
    """
    if metric is None:
        metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
        if metric is None:
            return None
    metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)

    return merged_live(project_id=project_id, data=metric, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
    """Search sessions for EVERY series of a stored metric.

    Returns a list with one entry per series (series id/name plus the
    search result), or None when the metric is not accessible.
    """
    metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if metric is None:
        return None
    metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
    if metric is None:
        return None
    found = []
    for series in metric.series:
        # Request-level time range and pagination override the series filter.
        series.filter.startDate = data.startTimestamp
        series.filter.endDate = data.endTimestamp
        series.filter.limit = data.limit
        series.filter.page = data.page
        hits = sessions.search_sessions(data=series.filter, project_id=project_id, user_id=user_id)
        found.append({"seriesId": series.series_id, "seriesName": series.name, **hits})

    return found
|
||||||
|
|
||||||
|
|
||||||
|
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
    """Return funnel issues for a stored funnel metric.

    Returns None when the metric is not accessible; otherwise the issues
    widget result for the metric's series.
    """
    metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if metric is None:
        return None
    metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
    if metric is None:
        return None
    for s in metric.series:
        # Request-level time range and pagination override the series filter.
        s.filter.startDate = data.startTimestamp
        s.filter.endDate = data.endTimestamp
        s.filter.limit = data.limit
        s.filter.page = data.page
        # NOTE(review): returning inside the loop means only the FIRST series
        # is processed (and an implicit None is returned when there are no
        # series) — confirm this is intentional.
        return {"seriesId": s.series_id, "seriesName": s.name,
                **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
|
||||||
|
|
||||||
|
|
||||||
|
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
    """Return the error-search result for a stored errors-table metric.

    Returns None when the metric is not accessible.
    """
    metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if metric is None:
        return None
    metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
    if metric is None:
        return None
    for s in metric.series:
        # Request-level time range and pagination override the series filter.
        s.filter.startDate = data.startTimestamp
        s.filter.endDate = data.endTimestamp
        s.filter.limit = data.limit
        s.filter.page = data.page
        # NOTE(review): returning inside the loop means only the FIRST series
        # is processed (and an implicit None is returned when there are no
        # series) — confirm this is intentional (same pattern as
        # get_funnel_issues).
        return {"seriesId": s.series_id, "seriesName": s.name,
                **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
|
||||||
|
|
||||||
|
|
||||||
|
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
    """Search sessions for each ad-hoc (unsaved) series in the payload.

    Returns one entry per series (seriesId is always None since nothing is
    persisted yet); an empty list when the payload carries no series.
    """
    found = []
    if data.series is None:
        return found
    for series in data.series:
        # Request-level time range and pagination override the series filter.
        series.filter.startDate = data.startTimestamp
        series.filter.endDate = data.endTimestamp
        series.filter.limit = data.limit
        series.filter.page = data.page
        hits = sessions.search_sessions(data=series.filter, project_id=project_id, user_id=user_id)
        found.append({"seriesId": None, "seriesName": series.name, **hits})

    return found
|
||||||
|
|
||||||
|
|
||||||
|
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
    """Insert a new metric plus all its series in one statement.

    Series rows are inserted through a CTE so they can reference the freshly
    created metric_id. When *dashboard* is True only the new metric_id is
    returned (for attaching to a dashboard); otherwise the full metric is
    re-read and returned under "data".
    """
    with pg_client.PostgresClient() as cur:
        # Flatten every series' attributes into uniquely suffixed query
        # parameters: name_0, filter_0, index_0, name_1, ...
        _data = {}
        for i, s in enumerate(data.series):
            for k in s.dict().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()  # stored as jsonb
        series_len = len(data.series)
        # Series are inserted separately below; drop them from the metric params.
        data.series = None
        params = {"user_id": user_id, "project_id": project_id,
                  "default_config": json.dumps(data.config.dict()),
                  **data.dict(), **_data}
        query = cur.mogrify(f"""\
            WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
                                            view_type, metric_type, metric_of, metric_value,
                                            metric_format, default_config)
                       VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                               %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
                               %(metric_format)s, %(default_config)s)
                       RETURNING *)
            INSERT
            INTO metric_series(metric_id, index, name, filter)
            VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
                              for i in range(series_len)])}
            RETURNING metric_id;""", params)

        cur.execute(
            query
        )
        r = cur.fetchone()
        if dashboard:
            return r["metric_id"]
        return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
|
||||||
|
|
||||||
|
|
||||||
|
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
    """Replace a metric's definition and reconcile its series.

    Incoming series are split into three buckets:
      - new ("n_" params): no series_id, or an id not currently attached;
      - updated ("u_" params): an id that is currently attached;
      - deleted: currently-attached ids absent from the payload.
    All three operations plus the metrics-row update run in one statement
    via CTEs. Returns the freshly re-read metric (None when inaccessible).
    """
    metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if metric is None:
        return None
    series_ids = [r["seriesId"] for r in metric["series"]]
    n_series = []
    d_series_ids = []
    u_series = []
    u_series_ids = []
    params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
              "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
              "metric_type": data.metric_type, "metric_of": data.metric_of,
              "metric_value": data.metric_value, "metric_format": data.metric_format}
    for i, s in enumerate(data.series):
        prefix = "u_"
        if s.index is None:
            s.index = i
        if s.series_id is None or s.series_id not in series_ids:
            n_series.append({"i": i, "s": s})
            prefix = "n_"
        else:
            u_series.append({"i": i, "s": s})
            u_series_ids.append(s.series_id)
        # Flatten the series attributes into prefixed, suffixed query params
        # (e.g. n_name_0, u_filter_2); filters are serialized for jsonb.
        ns = s.dict()
        for k in ns.keys():
            if k == "filter":
                ns[k] = json.dumps(ns[k])
            params[f"{prefix}{k}_{i}"] = ns[k]
    # Any attached series not referenced by the payload is deleted.
    for i in series_ids:
        if i not in u_series_ids:
            d_series_ids.append(i)
    params["d_series_ids"] = tuple(d_series_ids)

    with pg_client.PostgresClient() as cur:
        sub_queries = []
        if len(n_series) > 0:
            sub_queries.append(f"""\
            n AS (INSERT INTO metric_series (metric_id, index, name, filter)
                  VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
                                    for s in n_series])}
                  RETURNING 1)""")
        if len(u_series) > 0:
            sub_queries.append(f"""\
            u AS (UPDATE metric_series
                  SET name=series.name,
                      filter=series.filter,
                      index=series.index
                  FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
                                          for s in u_series])}) AS series(series_id, index, name, filter)
                  WHERE metric_series.metric_id =%(metric_id)s AND metric_series.series_id=series.series_id
                  RETURNING 1)""")
        if len(d_series_ids) > 0:
            sub_queries.append("""\
            d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s
                  RETURNING 1)""")
        query = cur.mogrify(f"""\
            {"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
            UPDATE metrics
            SET name = %(name)s, is_public= %(is_public)s,
                view_type= %(view_type)s, metric_type= %(metric_type)s,
                metric_of= %(metric_of)s, metric_value= %(metric_value)s,
                metric_format= %(metric_format)s,
                edited_at = timezone('utc'::text, now())
            WHERE metric_id = %(metric_id)s
            AND project_id = %(project_id)s
            AND (user_id = %(user_id)s OR is_public)
            RETURNING metric_id;""", params)
        cur.execute(query)
    return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_all(project_id, user_id, include_series=False):
    """List all metrics of a project visible to *user_id* (own + public).

    Each row is joined with its connected dashboards and the owner's email.
    When include_series is True, the metric series are attached and each
    series' filter is flattened; otherwise created_at/edited_at are converted
    to epoch timestamps.
    """
    with pg_client.PostgresClient() as cur:
        series_join = ""
        if include_series:
            series_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                                FROM metric_series
                                                WHERE metric_series.metric_id = metrics.metric_id
                                                  AND metric_series.deleted_at ISNULL
                                ) AS metric_series ON (TRUE)"""
        query = cur.mogrify(
            f"""SELECT *
                FROM metrics
                         {series_join}
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                            FROM (SELECT DISTINCT dashboard_id, name, is_public
                                                  FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
                                                  WHERE deleted_at ISNULL
                                                    AND dashboard_widgets.metric_id = metrics.metric_id
                                                    AND project_id = %(project_id)s
                                                    AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
                         ) AS connected_dashboards ON (TRUE)
                         LEFT JOIN LATERAL (SELECT email AS owner_email
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id
                         ) AS owner ON (TRUE)
                WHERE metrics.project_id = %(project_id)s
                  AND metrics.deleted_at ISNULL
                  AND (user_id = %(user_id)s OR metrics.is_public)
                ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
            {"project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        rows = cur.fetchall()
        if include_series:
            # flatten each series' stored search payload for the client
            for row in rows:
                for series in row["series"]:
                    series["filter"] = helper.old_search_payload_to_flat(series["filter"])
        else:
            # no series requested: expose timestamps as epoch millis instead
            for row in rows:
                row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
                row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
        rows = helper.list_to_camel_case(rows)
    return rows
|
||||||
|
|
||||||
|
|
||||||
|
def delete(project_id, metric_id, user_id):
    """Soft-delete a metric the user owns (or a public one) by stamping deleted_at.

    Always returns {"state": "success"}, even when nothing matched.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
            UPDATE public.metrics
            SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
            WHERE project_id = %(project_id)s
              AND metric_id = %(metric_id)s
              AND (user_id = %(user_id)s OR is_public);""",
                            {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
        cur.execute(query)

    return {"state": "success"}
|
||||||
|
|
||||||
|
|
||||||
|
def get(metric_id, project_id, user_id, flatten=True):
    """Fetch one metric with its series, connected dashboards and owner email.

    Returns None when the metric does not exist or is not visible to the user.
    When flatten is True, each series' filter is converted to the flat
    search-payload format.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """SELECT *
               FROM metrics
                        LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                           FROM metric_series
                                           WHERE metric_series.metric_id = metrics.metric_id
                                             AND metric_series.deleted_at ISNULL
                        ) AS metric_series ON (TRUE)
                        LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                           FROM (SELECT dashboard_id, name, is_public
                                                 FROM dashboards
                                                 WHERE deleted_at ISNULL
                                                   AND project_id = %(project_id)s
                                                   AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
                        ) AS connected_dashboards ON (TRUE)
                        LEFT JOIN LATERAL (SELECT email AS owner_email
                                           FROM users
                                           WHERE deleted_at ISNULL
                                             AND users.user_id = metrics.user_id
                        ) AS owner ON (TRUE)
               WHERE metrics.project_id = %(project_id)s
                 AND metrics.deleted_at ISNULL
                 AND (metrics.user_id = %(user_id)s OR metrics.is_public)
                 AND metrics.metric_id = %(metric_id)s
               ORDER BY created_at;""",
            {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        row = cur.fetchone()
        if row is None:
            return None
        # expose timestamps as epoch millis
        row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
        row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
        if flatten:
            for series in row["series"]:
                series["filter"] = helper.old_search_payload_to_flat(series["filter"])
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
    """Fetch a metric by id, also matching template metrics (project_id IS NULL).

    Unlike get(), template rows are included and timestamps/filters are
    returned untouched. Returns the camelCased row or None.
    """
    with pg_client.PostgresClient() as cur:
        dashboards_join = ""
        if include_dashboard:
            dashboards_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                                    FROM (SELECT dashboard_id, name, is_public
                                                          FROM dashboards
                                                          WHERE deleted_at ISNULL
                                                            AND project_id = %(project_id)s
                                                            AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
                                 ) AS connected_dashboards ON (TRUE)"""
        query = cur.mogrify(
            f"""SELECT *
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
                                            WHERE metric_series.metric_id = metrics.metric_id
                                              AND metric_series.deleted_at ISNULL
                         ) AS metric_series ON (TRUE)
                         {dashboards_join}
                WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL)
                  AND metrics.deleted_at ISNULL
                  AND (metrics.user_id = %(user_id)s OR metrics.is_public)
                  AND metrics.metric_id = %(metric_id)s
                ORDER BY created_at;""",
            {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def get_series_for_alert(project_id, user_id):
    """List timeseries metric series usable as alert sources.

    Each row exposes the series as a selectable option: value=series_id,
    a composed display name, unit='count', predefined=FALSE.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """SELECT series_id AS value,
                      metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
                      'count' AS unit,
                      FALSE AS predefined,
                      metric_id,
                      series_id
               FROM metric_series
                        INNER JOIN metrics USING (metric_id)
               WHERE metrics.deleted_at ISNULL
                 AND metrics.project_id = %(project_id)s
                 AND metrics.metric_type = 'timeseries'
                 AND (user_id = %(user_id)s OR is_public)
               ORDER BY name;""",
            {"project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)
|
||||||
|
|
||||||
|
|
||||||
|
def change_state(project_id, metric_id, user_id, status):
    """Set a metric's active flag and return the refreshed metric.

    :param project_id: project owning the metric (used for tenant scoping)
    :param metric_id: metric to toggle
    :param user_id: caller; must own the metric unless it is public
    :param status: new value for metrics.active
    """
    with pg_client.PostgresClient() as cur:
        # Fix: the UPDATE previously filtered only on metric_id/user_id,
        # ignoring the project_id that every sibling query enforces — a
        # metric in another project could be toggled. Scope it to the project.
        query = cur.mogrify("""\
            UPDATE public.metrics
            SET active = %(status)s
            WHERE metric_id = %(metric_id)s
              AND project_id = %(project_id)s
              AND (user_id = %(user_id)s OR is_public);""",
                            {"metric_id": metric_id, "status": status,
                             "user_id": user_id, "project_id": project_id})
        cur.execute(query)
    return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
                                 data: schemas.CustomMetricSessionsPayloadSchema
                                 # , range_value=None, start_date=None, end_date=None
                                 ):
    """Return the sessions affected by *issue_id* for a funnel metric's series.

    Loads the metric, merges it with the request payload, then resolves the
    issue either from the funnel's on-the-fly issues or, as a fallback, from
    the issues store (with zeroed impact counters). Returns a dict with
    seriesId/seriesName, the matching sessions, and the issue itself; returns
    None when the metric is missing or the merge fails.

    NOTE(review): the return sits inside the `for s in metric.series` loop, so
    only the FIRST series is ever processed — presumably funnel metrics have a
    single series; confirm before relying on multi-series behavior.
    """
    metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if metric is None:
        return None
    metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
    if metric is None:
        return None
    for s in metric.series:
        # constrain each series' filter to the requested window/pagination
        s.filter.startDate = data.startTimestamp
        s.filter.endDate = data.endTimestamp
        s.filter.limit = data.limit
        s.filter.page = data.page
        issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
        # significant + insignificant issues are searched alike
        issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
        issue = None
        for i in issues_list:
            if i.get("issueId", "") == issue_id:
                issue = i
                break
        if issue is None:
            # issue not part of this funnel's current results: fetch it
            # directly and report zero impact on the funnel
            issue = issues.get(project_id=project_id, issue_id=issue_id)
            if issue is not None:
                issue = {**issue,
                         "affectedSessions": 0,
                         "affectedUsers": 0,
                         "conversionImpact": 0,
                         "lostConversions": 0,
                         "unaffectedSessions": 0}
        return {"seriesId": s.series_id, "seriesName": s.name,
                "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
                                                     issue=issue, data=s.filter)
                if issue is not None else {"total": 0, "sessions": []},
                "issue": issue}
|
||||||
333
ee/api/chalicelib/core/dashboards.py
Normal file
333
ee/api/chalicelib/core/dashboards.py
Normal file
|
|
@ -0,0 +1,333 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import custom_metrics
|
||||||
|
from chalicelib.utils import helper
|
||||||
|
from chalicelib.utils import pg_client
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_METRICS", cast=bool, default=False):
|
||||||
|
from . import metrics_exp as metrics
|
||||||
|
else:
|
||||||
|
from . import metrics as metrics
|
||||||
|
|
||||||
|
# category name should be lower cased
# Maps a metric-template category (as stored in metrics.category) to the
# description shown in the templates listing; lookups are done with
# category.lower(), so keys here must stay lower-case.
CATEGORY_DESCRIPTION = {
    'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
    'custom': 'Previously created custom metrics by me and my team.',
    'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
    'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
    'resources': 'Find out which resources are missing and those that may be slowing your web app.'
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_templates(project_id, user_id):
    """Return metric templates grouped by category, each with its series.

    Includes global templates (project_id IS NULL) plus the project's own
    metrics visible to the user; categories come back in a fixed order and
    get a human-readable description from CATEGORY_DESCRIPTION.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
                                FROM (SELECT * , default_config AS config
                                      FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
                                                                      FROM metric_series
                                                                      WHERE metric_series.metric_id = metrics.metric_id
                                                                        AND metric_series.deleted_at ISNULL
                                          ) AS metric_series ON (TRUE)
                                      WHERE deleted_at IS NULL
                                        AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
                                     ) AS metrics
                                GROUP BY category
                                ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
                            {"project_id": project_id, "userId": user_id})
        cur.execute(query)
        rows = cur.fetchall()
        for category_row in rows:
            category_row["description"] = CATEGORY_DESCRIPTION.get(category_row["category"].lower(), "")
            for widget in category_row["widgets"]:
                widget["created_at"] = TimeUTC.datetime_to_timestamp(widget["created_at"])
                widget["edited_at"] = TimeUTC.datetime_to_timestamp(widget["edited_at"])
                for series in widget["series"]:
                    series["filter"] = helper.old_search_payload_to_flat(series["filter"])

    return helper.list_to_camel_case(rows)
|
||||||
|
|
||||||
|
|
||||||
|
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
    """Create a dashboard, optionally attaching initial widgets in one query.

    When data.metrics is non-empty, the INSERT is wrapped in a CTE so the
    widgets can reference the freshly created dashboard_id; each widget's
    config is the metric's default_config merged with its layout position.
    """
    with pg_client.PostgresClient() as cur:
        pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
                       VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
                       RETURNING *"""
        params = {"userId": user_id, "projectId": project_id, **data.dict()}
        if data.metrics is not None and len(data.metrics) > 0:
            widget_values = ",".join(
                [f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)"
                 for i in range(len(data.metrics))])
            pg_query = f"""WITH dash AS ({pg_query})
                           INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
                           VALUES {widget_values}
                           RETURNING (SELECT dashboard_id FROM dash)"""
            for position, metric_id in enumerate(data.metrics):
                params[f"metric_id_{position}"] = metric_id
                # widget layout: place widgets in the order they were given
                params[f"config_{position}"] = json.dumps({"position": position})
        cur.execute(cur.mogrify(pg_query, params))
        row = cur.fetchone()
    if row is None:
        return {"errors": ["something went wrong while creating the dashboard"]}
    return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
|
||||||
|
|
||||||
|
|
||||||
|
def get_dashboards(project_id, user_id):
    """List the project's dashboards visible to the user (own + public)."""
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT *
                                FROM dashboards
                                WHERE deleted_at ISNULL
                                  AND project_id = %(projectId)s
                                  AND (user_id = %(userId)s OR is_public);""",
                            {"userId": user_id, "projectId": project_id})
        cur.execute(query)
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)
|
||||||
|
|
||||||
|
|
||||||
|
def get_dashboard(project_id, user_id, dashboard_id):
    """Fetch one dashboard with its widgets (each widget carrying its metric
    definition and series). Returns the camelCased row or None."""
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""SELECT dashboards.*, all_metric_widgets.widgets AS widgets
                               FROM dashboards
                                        LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
                                                           FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
                                                                 FROM metrics
                                                                          INNER JOIN dashboard_widgets USING (metric_id)
                                                                          LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
                                                                                             FROM metric_series
                                                                                             WHERE metric_series.metric_id = metrics.metric_id
                                                                                               AND metric_series.deleted_at ISNULL
                                                                     ) AS metric_series ON (TRUE)
                                                                 WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
                                                                   AND metrics.deleted_at ISNULL
                                                                   AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
                                        ) AS all_metric_widgets ON (TRUE)
                               WHERE dashboards.deleted_at ISNULL
                                 AND dashboards.project_id = %(projectId)s
                                 AND dashboard_id = %(dashboard_id)s
                                 AND (dashboards.user_id = %(userId)s OR is_public);""",
                            {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id})
        cur.execute(query)
        row = cur.fetchone()
        if row is not None:
            # convert all nested datetimes to epoch millis for the client
            row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
            for widget in row["widgets"]:
                widget["created_at"] = TimeUTC.datetime_to_timestamp(widget["created_at"])
                widget["edited_at"] = TimeUTC.datetime_to_timestamp(widget["edited_at"])
                for series in widget["series"]:
                    series["created_at"] = TimeUTC.datetime_to_timestamp(series["created_at"])
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def delete_dashboard(project_id, user_id, dashboard_id):
    """Soft-delete a dashboard the user owns (or a public one).

    Always reports success, even when no row matched.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""UPDATE dashboards
                               SET deleted_at = timezone('utc'::text, now())
                               WHERE dashboards.project_id = %(projectId)s
                                 AND dashboard_id = %(dashboard_id)s
                                 AND (dashboards.user_id = %(userId)s OR is_public);""",
                            {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id})
        cur.execute(query)
    return {"data": {"success": True}}
|
||||||
|
|
||||||
|
|
||||||
|
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
    """Update a dashboard's attributes and optionally append new widgets.

    New widgets are positioned after the dashboard's existing ones (the
    current widget count is used as the position offset). is_public/is_pinned
    are only updated when explicitly provided.
    """
    with pg_client.PostgresClient() as cur:
        # current widget count -> layout offset for any appended widgets
        count_query = """SELECT COALESCE(COUNT(*),0) AS count
                         FROM dashboard_widgets
                         WHERE dashboard_id = %(dashboard_id)s;"""
        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
        cur.execute(cur.mogrify(count_query, params))
        offset = cur.fetchone()["count"]

        pg_query = f"""UPDATE dashboards
                       SET name = %(name)s,
                           description= %(description)s
                           {", is_public = %(is_public)s" if data.is_public is not None else ""}
                           {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
                       WHERE dashboards.project_id = %(projectId)s
                         AND dashboard_id = %(dashboard_id)s
                         AND (dashboards.user_id = %(userId)s OR is_public)"""
        if data.metrics is not None and len(data.metrics) > 0:
            widget_values = ",".join(
                [f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)"
                 for i in range(len(data.metrics))])
            pg_query = f"""WITH dash AS ({pg_query})
                           INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
                           VALUES {widget_values};"""
            for i, metric_id in enumerate(data.metrics):
                params[f"metric_id_{i}"] = metric_id
                params[f"config_{i}"] = json.dumps({"position": i + offset})

        cur.execute(cur.mogrify(pg_query, params))

    return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_widget(project_id, user_id, dashboard_id, widget_id):
    """Fetch one widget's metric (with series), enforcing visibility of both
    the dashboard and the metric. Returns the camelCased row or None."""
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""SELECT metrics.*, metric_series.series
                               FROM dashboard_widgets
                                        INNER JOIN dashboards USING (dashboard_id)
                                        INNER JOIN metrics USING (metric_id)
                                        LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
                                                           FROM metric_series
                                                           WHERE metric_series.metric_id = metrics.metric_id
                                                             AND metric_series.deleted_at ISNULL
                                        ) AS metric_series ON (TRUE)
                               WHERE dashboard_id = %(dashboard_id)s
                                 AND widget_id = %(widget_id)s
                                 AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
                                 AND dashboards.deleted_at IS NULL
                                 AND metrics.deleted_at ISNULL
                                 AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
                                 AND (metrics.is_public OR metrics.user_id = %(userId)s);""",
                            {"userId": user_id, "projectId": project_id,
                             "dashboard_id": dashboard_id, "widget_id": widget_id})
        cur.execute(query)
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
    """Attach a metric to a dashboard as a widget.

    The INSERT..SELECT only fires when the target dashboard exists, belongs
    to the project, and is visible to the user; the widget's config is the
    metric's default_config merged with the supplied config.
    """
    with pg_client.PostgresClient() as cur:
        query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
                   SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
                          %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
                   WHERE EXISTS(SELECT 1 FROM dashboards
                                WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
                                  AND dashboard_id = %(dashboard_id)s
                                  AND (dashboards.user_id = %(userId)s OR is_public))
                   RETURNING *;"""
        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
                  **data.dict(), "config": json.dumps(data.config)}
        cur.execute(cur.mogrify(query, params))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
    """Replace a widget's config and return the updated row (camelCased)."""
    with pg_client.PostgresClient() as cur:
        query = """UPDATE dashboard_widgets
                   SET config= %(config)s
                   WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
                   RETURNING *;"""
        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
                  "widget_id": widget_id, **data.dict(), "config": json.dumps(data.config)}
        cur.execute(cur.mogrify(query, params))
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_widget(project_id, user_id, dashboard_id, widget_id):
    """Detach a widget from a dashboard (hard delete of the link row).

    Always reports success, even when no row matched.
    """
    with pg_client.PostgresClient() as cur:
        query = """DELETE FROM dashboard_widgets
                   WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
        params = {"userId": user_id, "projectId": project_id,
                  "dashboard_id": dashboard_id, "widget_id": widget_id}
        cur.execute(cur.mogrify(query, params))
    return {"data": {"success": True}}
|
||||||
|
|
||||||
|
|
||||||
|
def pin_dashboard(project_id, user_id, dashboard_id):
    """Make *dashboard_id* the single pinned dashboard of the project.

    Two statements run in one round-trip: unpin every dashboard of the
    project, then pin the target (if it exists and is not deleted).
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""UPDATE dashboards
                               SET is_pinned = FALSE
                               WHERE project_id=%(project_id)s;
                               UPDATE dashboards
                               SET is_pinned = True
                               WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
                               RETURNING *;""",
                            {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id})
        cur.execute(query)
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)
|
||||||
|
|
||||||
|
|
||||||
|
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
    """Create a custom metric, then immediately attach it to the dashboard."""
    new_metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
    payload = schemas.AddWidgetToDashboardPayloadSchema(metricId=new_metric_id)
    return add_widget(project_id=project_id, user_id=user_id,
                      dashboard_id=dashboard_id, data=payload)
|
||||||
|
|
||||||
|
|
||||||
|
# Dispatch table: template key -> metrics function that computes its chart.
# Used by get_predefined_metric(); unknown keys fall back to a no-op there.
# NOTE: several metrics functions are intentionally reachable under more than
# one key (e.g. get_time_to_render, get_memory_consumption, get_avg_cpu,
# get_avg_fps serve both an "avg_*" key and a plain key).
PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
              schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
              schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
              schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
              schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
              schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
              schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
              schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
              schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
              schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
              schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
              schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
              schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
              schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
              schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
              schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
              schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
              schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
              schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
              schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
              schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
              schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
              schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
              schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
              schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
              schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
              schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
              schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
              schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
              schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
              schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
              schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
              schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
              schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
              schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
              schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
              schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
              schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
              schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
              schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
              schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
              schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
              schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
              schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
              schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
              }
|
||||||
|
|
||||||
|
|
||||||
|
def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
    """Run the predefined metric registered under *key*.

    Unknown keys resolve to a stub that returns None instead of raising.
    """
    handler = PREDEFINED.get(key, lambda *args: None)
    return handler(project_id=project_id, **data)
|
||||||
|
|
||||||
|
|
||||||
|
def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
    """Compute chart data for a metric by id.

    Template metrics are dispatched to their predefined handler; custom
    metrics go through custom_metrics.make_chart. Returns None when the
    metric is missing or is a template without a predefined key.
    """
    raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
                                                  include_dashboard=False)
    if raw_metric is None:
        return None
    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
    # a template with no predefined key has no handler to dispatch to
    if metric.is_template and metric.predefined_key is None:
        return None
    if metric.is_template:
        return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
    return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id,
                                     data=data, metric=raw_metric)
|
||||||
|
|
||||||
|
|
||||||
|
def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
    """Compute chart data for a dashboard widget.

    Resolves the widget's metric, then dispatches template metrics to their
    predefined handler and custom metrics to custom_metrics.make_chart.
    Returns None when the widget/metric is not visible to the user.
    """
    raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
    if raw_metric is None:
        return None
    # Fix: was `metric = schemas.CustomMetricAndTemplate = ...parse_obj(...)`,
    # a chained assignment that rebound schemas.CustomMetricAndTemplate (the
    # class itself) to a parsed instance for the whole process. Use an
    # annotation, as make_chart_metrics does.
    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
    if metric.is_template:
        return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
    else:
        return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
                                         data=data, metric=raw_metric)
|
||||||
786
ee/api/chalicelib/core/errors.py
Normal file
786
ee/api/chalicelib/core/errors.py
Normal file
|
|
@ -0,0 +1,786 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import sourcemaps
|
||||||
|
from chalicelib.utils import pg_client, helper
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
from chalicelib.utils.metrics_helper import __get_step_size
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||||
|
from chalicelib.core import sessions_legacy as sessions
|
||||||
|
else:
|
||||||
|
from chalicelib.core import sessions
|
||||||
|
|
||||||
|
|
||||||
|
def get(error_id, family=False):
    """Fetch one error (event + metadata joined on error_id).

    With family=True, delegates to get_batch() to return the whole
    parent/child error family instead of a single row.
    """
    if family:
        return get_batch([error_id])
    with pg_client.PostgresClient() as cur:
        cur.execute(query=cur.mogrify(
            "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
            {"error_id": error_id}))
        result = cur.fetchone()
        if result is not None:
            # expose the parse timestamp as epoch millis
            result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
    return helper.dict_to_camel_case(result)
|
||||||
|
|
||||||
|
|
||||||
|
def get_batch(error_ids):
    """Fetch the given errors together with their parent/child families.

    Walks the parent_error_id relation in both directions with a recursive
    CTE. Returns [] for an empty input without touching the database.
    """
    if len(error_ids) == 0:
        return []
    with pg_client.PostgresClient() as cur:
        cur.execute(query=cur.mogrify(
            """
            WITH RECURSIVE error_family AS (
                SELECT *
                FROM public.errors
                WHERE error_id IN %(error_ids)s
                UNION
                SELECT child_errors.*
                FROM public.errors AS child_errors
                         INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id
            )
            SELECT *
            FROM error_family;""",
            {"error_ids": tuple(error_ids)}))
        errors = cur.fetchall()
        for err in errors:
            err["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(err["stacktrace_parsed_at"])
    return helper.list_to_camel_case(errors)
|
||||||
|
|
||||||
|
|
||||||
|
def __flatten_sort_key_count_version(data, merge_nested=False):
    """Normalize a partition list into ``[{"name", "count"}, ...]``.

    With ``merge_nested=False`` each top-level entry is projected to its
    name/count pair, preserving order.  With ``merge_nested=True`` every
    nested ``partition`` entry becomes ``"<name>@<version>"`` and the
    flattened list is sorted by count, descending.  ``None`` yields ``[]``.
    """
    if data is None:
        return []
    if not merge_nested:
        return [{"name": entry["name"], "count": entry["count"]} for entry in data]
    flattened = [
        {"name": f'{entry["name"]}@{version["version"]}', "count": version["count"]}
        for entry in data
        for version in entry["partition"]
    ]
    flattened.sort(key=lambda item: item["count"], reverse=True)
    return flattened
|
||||||
|
|
||||||
|
|
||||||
|
def __process_tags(row):
    """Build the UI "tags" facet list from the raw ``*_partition`` columns.

    For browser/OS/device each facet is emitted twice: once grouped by
    family name, once merged with versions.  NOTE: the ``*_partition``
    keys are popped off ``row`` as a side effect.
    """
    tags = []
    facets = (("browser", "browser.ver", "browsers_partition"),
              ("OS", "OS.ver", "os_partition"),
              ("device.family", "device", "device_partition"))
    for plain_name, versioned_name, column in facets:
        # read first (get), then consume (pop) for the versioned variant
        tags.append({"name": plain_name,
                     "partitions": __flatten_sort_key_count_version(data=row.get(column))})
        tags.append({"name": versioned_name,
                     "partitions": __flatten_sort_key_count_version(data=row.pop(column), merge_nested=True)})
    tags.append({"name": "country", "partitions": row.pop("country_partition")})
    return tags
|
||||||
|
|
||||||
|
|
||||||
|
def get_details(project_id, error_id, user_id, **data):
    """Return the full detail view of one error: aggregate counts, last
    session info, browser/OS/device/country partitions, 24h and 30d charts,
    plus the resolution status and first stack frame of its latest session.

    ``data`` may carry ``density24`` / ``density30`` chart densities.
    """
    # WHERE-fragment lists for the two chart windows and the partition subqueries
    pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
    pg_sub_query24.append("error_id = %(error_id)s")
    pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
    pg_sub_query30.append("error_id = %(error_id)s")
    pg_basic_query = __get_basic_constraints(time_constraint=False)
    pg_basic_query.append("error_id = %(error_id)s")
    with pg_client.PostgresClient() as cur:
        # fixed windows: last 24 hours and last 30 days
        data["startDate24"] = TimeUTC.now(-1)
        data["endDate24"] = TimeUTC.now()
        data["startDate30"] = TimeUTC.now(-30)
        data["endDate30"] = TimeUTC.now()
        density24 = int(data.get("density24", 24))
        step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
        density30 = int(data.get("density30", 30))
        step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
        params = {
            "startDate24": data['startDate24'],
            "endDate24": data['endDate24'],
            "startDate30": data['startDate30'],
            "endDate30": data['endDate30'],
            "project_id": project_id,
            "userId": user_id,
            "step_size24": step_size24,
            "step_size30": step_size30,
            "error_id": error_id}

        # single query: base counts joined with per-dimension LATERAL partitions
        # and the two time-series charts
        main_pg_query = f"""\
        SELECT error_id,
               name,
               message,
               users,
               sessions,
               last_occurrence,
               first_occurrence,
               last_session_id,
               browsers_partition,
               os_partition,
               device_partition,
               country_partition,
               chart24,
               chart30
        FROM (SELECT error_id,
                     name,
                     message,
                     COUNT(DISTINCT user_uuid) AS users,
                     COUNT(DISTINCT session_id) AS sessions
              FROM public.errors
                       INNER JOIN events.errors AS s_errors USING (error_id)
                       INNER JOIN public.sessions USING (session_id)
              WHERE error_id = %(error_id)s
              GROUP BY error_id, name, message) AS details
                 INNER JOIN (SELECT error_id,
                                    MAX(timestamp) AS last_occurrence,
                                    MIN(timestamp) AS first_occurrence
                             FROM events.errors
                             WHERE error_id = %(error_id)s
                             GROUP BY error_id) AS time_details USING (error_id)
                 INNER JOIN (SELECT error_id,
                                    session_id AS last_session_id,
                                    user_os,
                                    user_os_version,
                                    user_browser,
                                    user_browser_version,
                                    user_device,
                                    user_device_type,
                                    user_uuid
                             FROM events.errors INNER JOIN public.sessions USING (session_id)
                             WHERE error_id = %(error_id)s
                             ORDER BY errors.timestamp DESC
                             LIMIT 1) AS last_session_details USING (error_id)
                 INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
                             FROM (SELECT *
                                   FROM (SELECT user_browser AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors
                                                  INNER JOIN sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_browser
                                         ORDER BY count DESC) AS count_per_browser_query
                                            INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
                                                                FROM (SELECT user_browser_version AS version,
                                                                             COUNT(session_id) AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_browser = count_per_browser_query.name
                                                                      GROUP BY user_browser_version
                                                                      ORDER BY count DESC) AS version_details
                                       ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
                             FROM (SELECT *
                                   FROM (SELECT user_os AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_os
                                         ORDER BY count DESC) AS count_per_os_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                                FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_os = count_per_os_details.name
                                                                      GROUP BY user_os_version
                                                                      ORDER BY count DESC) AS count_per_version_details
                                                                GROUP BY count_per_os_details.name ) AS os_version_details
                                       ON (TRUE)) AS os_details) AS os_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
                             FROM (SELECT *
                                   FROM (SELECT user_device_type AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_device_type
                                         ORDER BY count DESC) AS count_per_device_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
                                                                FROM (SELECT CASE
                                                                                 WHEN user_device = '' OR user_device ISNULL
                                                                                     THEN 'unknown'
                                                                                 ELSE user_device END AS version,
                                                                             COUNT(session_id) AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_device_type = count_per_device_details.name
                                                                      GROUP BY user_device
                                                                      ORDER BY count DESC) AS count_per_device_v_details
                                                                GROUP BY count_per_device_details.name ) AS device_version_details
                                       ON (TRUE)) AS device_details) AS device_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
                             FROM (SELECT user_country AS name,
                                          COUNT(session_id) AS count
                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
                                   WHERE {" AND ".join(pg_basic_query)}
                                   GROUP BY user_country
                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id) AS count
                                   FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors
                                                                        INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query24)}
                                       ) AS chart_details ON (TRUE)
                                   GROUP BY generated_timestamp
                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id) AS count
                                   FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
                                       ON (TRUE)
                                   GROUP BY timestamp
                                   ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
        """

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")
        cur.execute(cur.mogrify(main_pg_query, params))
        row = cur.fetchone()
        if row is None:
            return {"errors": ["error not found"]}
        # __process_tags pops the *_partition keys off row
        row["tags"] = __process_tags(row)

        # second query: latest session of this error, for status + stack frame
        query = cur.mogrify(
            f"""SELECT error_id, status, session_id, start_ts,
                       parent_error_id,session_id, user_anonymous_id,
                       user_id, user_uuid, user_browser, user_browser_version,
                       user_os, user_os_version, user_device, payload,
                       FALSE AS favorite,
                       True AS viewed
                FROM public.errors AS pe
                         INNER JOIN events.errors AS ee USING (error_id)
                         INNER JOIN public.sessions USING (session_id)
                WHERE pe.project_id = %(project_id)s
                  AND error_id = %(error_id)s
                ORDER BY start_ts DESC
                LIMIT 1;""",
            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
        cur.execute(query=query)
        status = cur.fetchone()

    if status is not None:
        row["stack"] = format_first_stack_frame(status).pop("stack")
        row["status"] = status.pop("status")
        row["parent_error_id"] = status.pop("parent_error_id")
        row["favorite"] = status.pop("favorite")
        row["viewed"] = status.pop("viewed")
        row["last_hydrated_session"] = status
    else:
        # error exists but has no session rows anymore: report as untracked
        row["stack"] = []
        row["last_hydrated_session"] = None
        row["status"] = "untracked"
        row["parent_error_id"] = None
        row["favorite"] = False
        row["viewed"] = False
    return {"data": helper.dict_to_camel_case(row)}
|
||||||
|
|
||||||
|
|
||||||
|
def get_details_chart(project_id, error_id, user_id, **data):
    """Return the partitions (browser/OS/device/country) and the time-series
    chart for one error over a caller-chosen window.

    ``data`` may carry ``startDate``/``endDate`` (epoch ms; defaults to the
    last 7 days) and ``density`` (number of chart buckets, default 7).
    """
    pg_sub_query = __get_basic_constraints()
    pg_sub_query.append("error_id = %(error_id)s")
    pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
    pg_sub_query_chart.append("error_id = %(error_id)s")
    with pg_client.PostgresClient() as cur:
        if data.get("startDate") is None:
            data["startDate"] = TimeUTC.now(-7)
        else:
            data["startDate"] = int(data["startDate"])
        if data.get("endDate") is None:
            data["endDate"] = TimeUTC.now()
        else:
            data["endDate"] = int(data["endDate"])
        density = int(data.get("density", 7))
        step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
        params = {
            "startDate": data['startDate'],
            "endDate": data['endDate'],
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size,
            "error_id": error_id}

        # one query: each dimension's partition is an independent aggregate
        # subquery cross-joined (INNER JOIN ... ON TRUE) onto the result row
        main_pg_query = f"""\
        SELECT %(error_id)s AS error_id,
               browsers_partition,
               os_partition,
               device_partition,
               country_partition,
               chart
        FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
              FROM (SELECT *
                    FROM (SELECT user_browser AS name,
                                 COUNT(session_id) AS count
                          FROM events.errors INNER JOIN public.sessions USING (session_id)
                          WHERE {" AND ".join(pg_sub_query)}
                          GROUP BY user_browser
                          ORDER BY count DESC) AS count_per_browser_query
                             INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                 FROM (SELECT user_browser_version AS version,
                                                              COUNT(session_id) AS count
                                                       FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                       WHERE {" AND ".join(pg_sub_query)}
                                                         AND user_browser = count_per_browser_query.name
                                                       GROUP BY user_browser_version
                                                       ORDER BY count DESC) AS count_per_version_details) AS browesr_version_details
                        ON (TRUE)) AS browser_details) AS browser_details
                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
                             FROM (SELECT *
                                   FROM (SELECT user_os AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_sub_query)}
                                         GROUP BY user_os
                                         ORDER BY count DESC) AS count_per_os_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
                                                                FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
                                                                             COUNT(session_id) AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_sub_query)}
                                                                        AND user_os = count_per_os_details.name
                                                                      GROUP BY user_os_version
                                                                      ORDER BY count DESC) AS count_per_version_query
                                       ) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
                             FROM (SELECT *
                                   FROM (SELECT user_device_type AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_sub_query)}
                                         GROUP BY user_device_type
                                         ORDER BY count DESC) AS count_per_device_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
                                                                FROM (SELECT CASE
                                                                                 WHEN user_device = '' OR user_device ISNULL
                                                                                     THEN 'unknown'
                                                                                 ELSE user_device END AS version,
                                                                             COUNT(session_id) AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_sub_query)}
                                                                        AND user_device_type = count_per_device_details.name
                                                                      GROUP BY user_device_type, user_device
                                                                      ORDER BY count DESC) AS count_per_device_details
                                       ) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
                             FROM (SELECT user_country AS name,
                                          COUNT(session_id) AS count
                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
                                   WHERE {" AND ".join(pg_sub_query)}
                                   GROUP BY user_country
                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id) AS count
                                   FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors
                                                                        INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query_chart)}
                                       ) AS chart_details ON (TRUE)
                                   GROUP BY generated_timestamp
                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        cur.execute(cur.mogrify(main_pg_query, params))
        row = cur.fetchone()
        if row is None:
            return {"errors": ["error not found"]}
        # __process_tags pops the *_partition keys off row
        row["tags"] = __process_tags(row)
    return {"data": helper.dict_to_camel_case(row)}
|
||||||
|
|
||||||
|
|
||||||
|
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                            project_key="project_id"):
    """Assemble the shared WHERE-clause fragments used by the error queries.

    Returns a list of SQL conditions referencing ``%(...)s`` placeholders;
    ``project_key=None`` drops the project filter, ``chart=True`` adds the
    generate_series bucket bounds, and ``platform`` adds a device-type filter.
    """
    if project_key is None:
        constraints = []
    else:
        constraints = [f"{project_key} =%(project_id)s"]
    if time_constraint:
        constraints.append(f"timestamp >= %({startTime_arg_name})s")
        constraints.append(f"timestamp < %({endTime_arg_name})s")
    if chart:
        # bucket bounds relative to the generate_series timestamp column
        constraints.append("timestamp >= generated_timestamp")
        constraints.append(f"timestamp < generated_timestamp + %({step_size_name})s")
    if platform == schemas.PlatformType.mobile:
        constraints.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.desktop:
        constraints.append("user_device_type = 'desktop'")
    return constraints
|
||||||
|
|
||||||
|
|
||||||
|
def __get_sort_key(key):
    """Map an API sort field to its SQL column; defaults to last occurrence."""
    sort_columns = {
        schemas.ErrorSort.occurrence: "max_datetime",
        schemas.ErrorSort.users_count: "users",
        schemas.ErrorSort.sessions_count: "sessions",
    }
    return sort_columns.get(key, 'max_datetime')
|
||||||
|
|
||||||
|
|
||||||
|
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
    """Search JS-exception errors of a project, with optional session-level
    filters, pagination, sorting and a per-error occurrence chart.

    Returns ``{"total": int, "errors": [...]}`` (camelCase rows).
    NOTE: mutates ``data`` (fills default start/end dates).
    """
    empty_response = {
        'total': 0,
        'errors': []
    }

    # platform filter (if any) is applied to the session join
    platform = None
    for f in data.filters:
        if f.type == schemas.FilterType.platform and len(f.value) > 0:
            platform = f.value[0]
    pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
    pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                     "pe.project_id=%(project_id)s"]
    # To ignore Script error
    pg_sub_query.append("pe.message!='Script error.'")
    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
    # pg_sub_query_chart.append("source ='js_exception'")
    pg_sub_query_chart.append("errors.error_id =details.error_id")
    statuses = []
    error_ids = None
    if data.startDate is None:
        data.startDate = TimeUTC.now(-30)
    if data.endDate is None:
        data.endDate = TimeUTC.now(1)
    # when event/filter criteria exist, pre-filter by matching sessions first;
    # the returned statuses also carry the viewed flag used below
    if len(data.events) > 0 or len(data.filters) > 0:
        print("-- searching for sessions before errors")
        statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                            error_status=data.status)
        if len(statuses) == 0:
            return empty_response
        error_ids = [e["errorId"] for e in statuses]
    with pg_client.PostgresClient() as cur:
        step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
        sort = __get_sort_key('datetime')
        if data.sort is not None:
            sort = __get_sort_key(data.sort)
        order = schemas.SortOrderType.desc
        if data.order is not None:
            order = data.order
        extra_join = ""

        params = {
            "startDate": data.startDate,
            "endDate": data.endDate,
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size}
        if data.status != schemas.ErrorStatus.all:
            pg_sub_query.append("status = %(error_status)s")
            params["error_status"] = data.status
        if data.limit is not None and data.page is not None:
            params["errors_offset"] = (data.page - 1) * data.limit
            params["errors_limit"] = data.limit
        else:
            params["errors_offset"] = 0
            params["errors_limit"] = 200

        if error_ids is not None:
            params["error_ids"] = tuple(error_ids)
            pg_sub_query.append("error_id IN %(error_ids)s")
        # if data.bookmarked:
        #     pg_sub_query.append("ufe.user_id = %(userId)s")
        #     extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
        if data.query is not None and len(data.query) > 0:
            pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
            params["error_query"] = helper.values_for_operator(value=data.query,
                                                               op=schemas.SearchEventOperator._contains)

        # full_count window function provides the total before LIMIT/OFFSET
        main_pg_query = f"""SELECT full_count,
                                   error_id,
                                   name,
                                   message,
                                   users,
                                   sessions,
                                   last_occurrence,
                                   first_occurrence,
                                   chart
                            FROM (SELECT COUNT(details) OVER () AS full_count, details.*
                                  FROM (SELECT error_id,
                                               name,
                                               message,
                                               COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users,
                                               COUNT(DISTINCT session_id) AS sessions,
                                               MAX(timestamp) AS max_datetime,
                                               MIN(timestamp) AS min_datetime
                                        FROM events.errors
                                                 INNER JOIN public.errors AS pe USING (error_id)
                                                 INNER JOIN public.sessions USING (session_id)
                                                 {extra_join}
                                        WHERE {" AND ".join(pg_sub_query)}
                                        GROUP BY error_id, name, message
                                        ORDER BY {sort} {order}) AS details
                                  LIMIT %(errors_limit)s OFFSET %(errors_offset)s
                                 ) AS details
                                     INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
                                                                MIN(timestamp) AS first_occurrence
                                                         FROM events.errors
                                                         WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
                                     INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
                                                         FROM (SELECT generated_timestamp AS timestamp,
                                                                      COUNT(session_id) AS count
                                                               FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                                                        LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                                           FROM events.errors
                                                                                           WHERE {" AND ".join(pg_sub_query_chart)}
                                                                                          ) AS sessions ON (TRUE)
                                                               GROUP BY timestamp
                                                               ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")

        cur.execute(cur.mogrify(main_pg_query, params))
        rows = cur.fetchall()
        total = 0 if len(rows) == 0 else rows[0]["full_count"]

        if total == 0:
            rows = []
        else:
            # no pre-filter pass: fetch viewed flags for the returned page only
            if len(statuses) == 0:
                query = cur.mogrify(
                    """SELECT error_id,
                              COALESCE((SELECT TRUE
                                        FROM public.user_viewed_errors AS ve
                                        WHERE errors.error_id = ve.error_id
                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
                       FROM public.errors
                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                     "user_id": user_id})
                cur.execute(query=query)
                statuses = helper.list_to_camel_case(cur.fetchall())
            # index by errorId for O(1) lookups below
            statuses = {
                s["errorId"]: s for s in statuses
            }

            for r in rows:
                r.pop("full_count")
                if r["error_id"] in statuses:
                    r["viewed"] = statuses[r["error_id"]]["viewed"]
                else:
                    r["viewed"] = False

    return {
        'total': total,
        'errors': helper.list_to_camel_case(rows)
    }
|
||||||
|
|
||||||
|
|
||||||
|
def __save_stacktrace(error_id, data):
    """Persist a parsed stacktrace for an error and stamp the parse time."""
    with pg_client.PostgresClient() as cur:
        cur.execute(
            query=cur.mogrify(
                """UPDATE public.errors
                   SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
                   WHERE error_id = %(error_id)s;""",
                {"error_id": error_id, "data": json.dumps(data)}))
|
||||||
|
|
||||||
|
|
||||||
|
def get_trace(project_id, error_id):
    """Resolve the sourcemapped stacktrace of a JS exception.

    Serves the cached (pre-parsed) trace when present; otherwise parses the
    payload through the sourcemaps service and caches the result only when
    every sourcemap was available.
    """
    error = get(error_id=error_id, family=False)
    if error is None:
        return {"errors": ["error not found"]}
    if error.get("source", "") != "js_exception":
        return {"errors": ["this source of errors doesn't have a sourcemap"]}
    if error.get("payload") is None:
        return {"errors": ["null payload"]}
    cached_trace = error.get("stacktrace")
    if cached_trace is not None:
        # already parsed once: serve the stored trace
        return {"sourcemapUploaded": True,
                "trace": cached_trace,
                "preparsed": True}
    trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
    if all_exists:
        # cache only complete parses, so partial results get retried later
        __save_stacktrace(error_id=error_id, data=trace)
    return {"sourcemapUploaded": all_exists,
            "trace": trace,
            "preparsed": False}
|
||||||
|
|
||||||
|
|
||||||
|
def get_sessions(start_date, end_date, project_id, user_id, error_id):
    """List sessions in which the given error occurred, newest first.

    ``total`` is the full match count; at most 100 sessions are returned.
    Defaults: last 7 days when start/end dates are not provided.
    """
    extra_constraints = ["s.project_id = %(project_id)s",
                         "s.start_ts >= %(startDate)s",
                         "s.start_ts <= %(endDate)s",
                         "e.error_id = %(error_id)s"]
    if start_date is None:
        start_date = TimeUTC.now(-7)
    if end_date is None:
        end_date = TimeUTC.now()

    params = {
        "startDate": start_date,
        "endDate": end_date,
        "project_id": project_id,
        "userId": user_id,
        "error_id": error_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT s.project_id,
                       s.session_id::text AS session_id,
                       s.user_uuid,
                       s.user_id,
                       s.user_agent,
                       s.user_os,
                       s.user_browser,
                       s.user_device,
                       s.user_country,
                       s.start_ts,
                       s.duration,
                       s.events_count,
                       s.pages_count,
                       s.errors_count,
                       s.issue_types,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
                       COALESCE((SELECT TRUE
                                 FROM public.user_viewed_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
                FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
                WHERE {" AND ".join(extra_constraints)}
                ORDER BY s.start_ts DESC;""",
            params)
        cur.execute(query=query)
        sessions_list = []
        # rowcount reflects all matches even though we only fetch 100 below
        total = cur.rowcount
        row = cur.fetchone()
        # fetch row-by-row and stop at 100 instead of materializing everything
        while row is not None and len(sessions_list) < 100:
            sessions_list.append(row)
            row = cur.fetchone()

    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions_list)
    }
|
||||||
|
|
||||||
|
|
||||||
|
# Maps public state-change action verbs (as received by the API) to the
# persisted error status values; used by change_state().
ACTION_STATE = {
    "unsolve": 'unresolved',
    "solve": 'resolved',
    "ignore": 'ignored'
}
|
||||||
|
|
||||||
|
|
||||||
|
def change_state(project_id, user_id, error_id, action):
    """Apply a resolution action to an error and its whole merge-family.

    ``action`` is one of the ACTION_STATE keys ("unsolve"/"solve"/"ignore").
    Rejects no-op transitions and the resolved -> ignored transition.
    Returns the updated family under "data", or an "errors" payload.
    """
    # family=True so the whole parent/child group transitions together
    errors = get(error_id, family=True)
    status = ACTION_STATE.get(action)
    # fix: the old debug print(len(errors)) ran BEFORE this None-check
    # (TypeError on None) and polluted stdout; removed.
    if errors is None or len(errors) == 0:
        return {"errors": ["error not found"]}
    if errors[0]["status"] == status:
        return {"errors": [f"error is already {status}"]}

    if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
        return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}

    params = {
        "userId": user_id,
        "error_ids": tuple([e["errorId"] for e in errors]),
        "status": status}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET status = %(status)s
               WHERE error_id IN %(error_ids)s
               RETURNING status""",
            params)
        cur.execute(query=query)
        row = cur.fetchone()
    if row is not None:
        # reflect the persisted status back onto the returned family
        for e in errors:
            e["status"] = row["status"]
    return {"data": errors}
|
||||||
|
|
||||||
|
|
||||||
|
# Highest (most final) status rank; merge() short-circuits once reached.
MAX_RANK = 2


def __status_rank(status):
    """Order error statuses by precedence: unresolved < ignored < resolved."""
    ranks = {
        'unresolved': MAX_RANK - 2,
        'ignored': MAX_RANK - 1,
        'resolved': MAX_RANK,
    }
    # returns None for unknown statuses, matching the original lookup
    return ranks.get(status)
|
||||||
|
|
||||||
|
|
||||||
|
def merge(error_ids):
    """Merge several errors into one family.

    The first error becomes the parent; the family inherits the highest-
    ranked status among its members (see __status_rank).  Requires at least
    two distinct, existing error ids.
    """
    error_ids = list(set(error_ids))
    errors = get_batch(error_ids)
    # reject singletons and requests referencing unknown ids
    if len(error_ids) <= 1 or len(error_ids) > len(errors):
        return {"errors": ["invalid list of ids"]}
    error_ids = [e["errorId"] for e in errors]
    parent_error_id = error_ids[0]
    status = "unresolved"
    for error in errors:
        if __status_rank(status) < __status_rank(error["status"]):
            status = error["status"]
        if __status_rank(status) == MAX_RANK:
            break  # nothing outranks resolved
    params = {
        "error_ids": tuple(error_ids),
        "parent_error_id": parent_error_id,
        "status": status
    }
    with pg_client.PostgresClient() as cur:
        cur.execute(
            query=cur.mogrify(
                """UPDATE public.errors
                   SET parent_error_id = %(parent_error_id)s, status = %(status)s
                   WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""",
                params))
    return {"data": "success"}
|
||||||
|
|
||||||
|
|
||||||
|
def format_first_stack_frame(error):
    """Replace ``error["payload"]`` with a truncated first stack frame.

    Context lines are capped at 1000 chars and filenames decoded from
    bytes, so the frame is safe to serialize.  Mutates and returns
    ``error``.
    """
    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
    for frame in error["stack"]:
        for context in frame.get("context", []):
            for idx, line in enumerate(context):
                # cap pathological context lines
                if isinstance(line, str) and len(line) > 1000:
                    context[idx] = line[:1000]
        # convert bytes to string:
        if isinstance(frame["filename"], bytes):
            frame["filename"] = frame["filename"].decode("utf-8")
    return error
|
||||||
|
|
||||||
|
|
||||||
|
def stats(project_id, user_id, startTimestamp=None, endTimestamp=None):
    """Count unresolved JS-exception errors the user has not viewed yet.

    :param project_id: project to scan.
    :param user_id: user whose "viewed" marks are consulted.
    :param startTimestamp: window start (ms); defaults to 7 days ago.
    :param endTimestamp: window end (ms); defaults to now.
    :return: {"data": {"unresolvedAndUnviewed": <0 or 1>}} — the inner query
             has LIMIT 1, so this is effectively an "any unviewed?" flag
             rather than a full count (presumably intentional — TODO confirm).
    """
    # Bug fix: the defaults were previously `TimeUTC.now(...)` expressions in
    # the signature, which Python evaluates ONCE at import time — freezing the
    # 7-day window at process start. Compute them per call instead.
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-7)
    if endTimestamp is None:
        endTimestamp = TimeUTC.now()
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
               SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
               FROM (SELECT root_error.error_id
                     FROM events.errors
                              INNER JOIN public.errors AS root_error USING (error_id)
                              LEFT JOIN user_viewed USING (error_id)
                     WHERE project_id = %(project_id)s
                       AND timestamp >= %(startTimestamp)s
                       AND timestamp <= %(endTimestamp)s
                       AND source = 'js_exception'
                       AND root_error.status = 'unresolved'
                       AND user_viewed.error_id ISNULL
                     LIMIT 1
                    ) AS timed_errors;""",
            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
             "endTimestamp": endTimestamp})
        cur.execute(query=query)
        row = cur.fetchone()

    return {
        "data": helper.dict_to_camel_case(row)
    }
|
||||||
399
ee/api/chalicelib/core/events.py
Normal file
399
ee/api/chalicelib/core/events.py
Normal file
|
|
@ -0,0 +1,399 @@
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import issues
|
||||||
|
from chalicelib.core import metadata
|
||||||
|
from chalicelib.core import sessions_metas
|
||||||
|
|
||||||
|
from chalicelib.utils import pg_client, helper
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||||
|
from . import autocomplete_exp as autocomplete
|
||||||
|
else:
|
||||||
|
from . import autocomplete as autocomplete
|
||||||
|
|
||||||
|
|
||||||
|
def get_customs_by_sessionId2_pg(session_id, project_id):
    """Return all custom events of a session, tagged with type 'CUSTOM'.

    NOTE(review): project_id is bound into the query params but never used in
    the SQL — the lookup is by session_id only; confirm whether a project
    filter was intended.
    """
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
                SELECT
                    c.*,
                    'CUSTOM' AS type
                FROM events_common.customs AS c
                WHERE
                    c.session_id = %(session_id)s
                ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows = cur.fetchall()
        # Convert snake_case DB columns to camelCase for the API response.
        return helper.dict_to_camel_case(rows)
|
||||||
|
|
||||||
|
|
||||||
|
def __merge_cells(rows, start, count, replacement):
    """Collapse ``count`` consecutive cells starting at ``start`` into one.

    The cell at ``start`` is overwritten with *replacement* (this also mutates
    the caller's list at that index), and a new list is returned with the
    following ``count - 1`` cells removed.
    """
    rows[start] = replacement
    kept_head = rows[:start + 1]
    kept_tail = rows[start + count:]
    return kept_head + kept_tail
|
||||||
|
|
||||||
|
|
||||||
|
def __get_grouped_clickrage(rows, session_id, project_id):
    """Collapse runs of click events that belong to a click-rage issue.

    For each click_rage issue of the session, the click row whose timestamp
    matches the issue's timestamp is replaced by a single CLICKRAGE cell
    carrying the merged count; the following (count - 1) rows are dropped.
    """
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
    if len(click_rage_issues) == 0:
        return rows

    for c in click_rage_issues:
        # payload may be missing or lack "count"; default to 3 merged clicks.
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            # Match the issue to its click row by exact timestamp.
            if rows[i]["timestamp"] == c["timestamp"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows
|
||||||
|
|
||||||
|
|
||||||
|
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
    """Return all CLICK, INPUT and LOCATION events of a session, merged and
    sorted by (timestamp, messageId).

    :param group_clickrage: when True, consecutive rage clicks are collapsed
        into single CLICKRAGE cells via __get_grouped_clickrage.

    NOTE(review): project_id is bound into every query's params but the SQL
    filters by session_id only — confirm whether a project check was intended.
    """
    with pg_client.PostgresClient() as cur:
        # 1) clicks
        cur.execute(cur.mogrify("""\
                SELECT
                    c.*,
                    'CLICK' AS type
                FROM events.clicks AS c
                WHERE
                    c.session_id = %(session_id)s
                ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows = cur.fetchall()
        if group_clickrage:
            # Must happen before other event kinds are appended, so that the
            # click rows collapsed here are still consecutive.
            rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)

        # 2) inputs
        cur.execute(cur.mogrify("""
                SELECT
                    i.*,
                    'INPUT' AS type
                FROM events.inputs AS i
                WHERE
                    i.session_id = %(session_id)s
                ORDER BY i.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows += cur.fetchall()
        # 3) page visits; path is exposed under both "value" and "url".
        cur.execute(cur.mogrify("""\
                SELECT
                    l.*,
                    l.path AS value,
                    l.path AS url,
                    'LOCATION' AS type
                FROM events.pages AS l
                WHERE
                    l.session_id = %(session_id)s
                ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        # Interleave the three event kinds chronologically; messageId breaks ties.
        rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
        return rows
|
||||||
|
|
||||||
|
|
||||||
|
def __pg_errors_query(source=None, value_length=None):
    """Build the autocomplete SQL for JS errors, matching either the error
    message or the error name.

    Two variants:
    - long input (value_length is None or > 2): four UNIONed subqueries —
      prefix match (%(svalue)s) and substring match (%(value)s) on both
      errors.message and errors.name, 5 candidates each;
    - short input: prefix match only, to keep the query cheap.

    The caller must bind %(project_id)s, %(value)s, %(svalue)s and, when
    *source* is given, %(source)s.
    """
    if value_length is None or value_length > 2:
        return f"""((SELECT DISTINCT ON(lg.message)
                        lg.message AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.message ILIKE %(svalue)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.message)
                        lg.message AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.message ILIKE %(value)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5)
                    UNION DISTINCT
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        source,
                        '{event_type.ERROR.ui_type}' AS type
                    FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.name ILIKE %(value)s
                        AND lg.project_id = %(project_id)s
                        {"AND source = %(source)s" if source is not None else ""}
                    LIMIT 5));"""
    # Short search term: prefix match only (message then name).
    return f"""((SELECT DISTINCT ON(lg.message)
                    lg.message AS value,
                    source,
                    '{event_type.ERROR.ui_type}' AS type
                FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                WHERE
                    s.project_id = %(project_id)s
                    AND lg.message ILIKE %(svalue)s
                    AND lg.project_id = %(project_id)s
                    {"AND source = %(source)s" if source is not None else ""}
                LIMIT 5)
                UNION DISTINCT
                (SELECT DISTINCT ON(lg.name)
                    lg.name AS value,
                    source,
                    '{event_type.ERROR.ui_type}' AS type
                FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                WHERE
                    s.project_id = %(project_id)s
                    AND lg.name ILIKE %(svalue)s
                    AND lg.project_id = %(project_id)s
                    {"AND source = %(source)s" if source is not None else ""}
                LIMIT 5));"""
|
||||||
|
|
||||||
|
|
||||||
|
def __search_pg_errors(project_id, value, key=None, source=None):
    """Autocomplete JS errors by message/name for a project.

    *key* is accepted for interface parity with the other search helpers in
    SUPPORTED_TYPES but is unused here.
    """
    now = TimeUTC.now()

    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(__pg_errors_query(source,
                                          value_length=len(value)),
                        # value: substring pattern; svalue: prefix pattern.
                        {"project_id": project_id, "value": helper.string_to_sql_like(value),
                         "svalue": helper.string_to_sql_like("^" + value),
                         "source": source}))
        results = helper.list_to_camel_case(cur.fetchall())
    # Debug timing output — TODO: switch to the logging module.
    print(f"{TimeUTC.now() - now} : errors")
    return results
|
||||||
|
|
||||||
|
|
||||||
|
def __search_pg_errors_ios(project_id, value, key=None, source=None):
    """Autocomplete iOS crashes by reason/name for a project.

    Long inputs (> 2 chars) get prefix AND substring matching; short inputs
    get prefix matching only. *key* and *source* are accepted for interface
    parity with the other search helpers but are unused here.
    """
    now = TimeUTC.now()
    if len(value) > 2:
        query = f"""(SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(value)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(value)s
                    LIMIT 5);"""
    else:
        query = f"""(SELECT DISTINCT ON(lg.reason)
                        lg.reason AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.reason ILIKE %(svalue)s
                    LIMIT 5)
                    UNION ALL
                    (SELECT DISTINCT ON(lg.name)
                        lg.name AS value,
                        '{event_type.ERROR_IOS.ui_type}' AS type
                    FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                    WHERE
                        s.project_id = %(project_id)s
                        AND lg.project_id = %(project_id)s
                        AND lg.name ILIKE %(svalue)s
                    LIMIT 5);"""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
                                        "svalue": helper.string_to_sql_like("^" + value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    # Debug timing output — TODO: switch to the logging module.
    print(f"{TimeUTC.now() - now} : errors")
    return results
|
||||||
|
|
||||||
|
|
||||||
|
def __search_pg_metadata(project_id, value, key=None, source=None):
    """Autocomplete session metadata values, optionally restricted to one key.

    Builds one subquery per metadata column of the project (prefix match, plus
    substring match for inputs longer than 2 chars), UNIONs them and returns
    at most 5 {key, value, type: 'METADATA'} candidates. *source* is accepted
    for interface parity but unused.
    """
    meta_keys = metadata.get(project_id=project_id)
    meta_keys = {m["key"]: m["index"] for m in meta_keys}
    # Precedence note: reads as `empty OR (key given AND key unknown)`.
    if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
        return []
    sub_from = []
    if key is not None:
        # Restrict the search to the single requested metadata key.
        meta_keys = {key: meta_keys[key]}

    for k in meta_keys.keys():
        # colname is derived from the project's metadata index, not from user
        # input, so interpolating it into the SQL is safe here.
        colname = metadata.index_to_colname(meta_keys[k])
        if len(value) > 2:
            sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                 FROM public.sessions
                                 WHERE project_id = %(project_id)s
                                    AND {colname} ILIKE %(svalue)s LIMIT 5)
                                 UNION
                                 (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                 FROM public.sessions
                                 WHERE project_id = %(project_id)s
                                    AND {colname} ILIKE %(value)s LIMIT 5))
                                 """)
        else:
            sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
                                 FROM public.sessions
                                 WHERE project_id = %(project_id)s
                                    AND {colname} ILIKE %(svalue)s LIMIT 5)""")
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
                SELECT key, value, 'METADATA' AS TYPE
                FROM({" UNION ALL ".join(sub_from)}) AS all_metas
                LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
                              "svalue": helper.string_to_sql_like("^" + value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    return results
|
||||||
|
|
||||||
|
|
||||||
|
class event_type:
    """Registry of searchable event kinds: maps each UI type to its backing
    table and the column used for autocomplete (None when the kind is
    searched by a dedicated query instead of a single column)."""
    CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
    INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
    LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
    CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
    REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
    GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
    STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
    ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
                  column=None)  # column=None because errors are searched by name or message
    METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
    # IOS
    CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
    INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
    VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name")
    CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
    REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url")
    ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
                      column=None)  # column=None because errors are searched by name or message
|
||||||
|
|
||||||
|
|
||||||
|
# Dispatch table used by search(): maps each supported UI event type to the
# callable that fetches autocomplete candidates (`get`) and the SQL snippet
# builder used in session search (`query`, None for the special-cased kinds).
# NOTE(review): this reaches into the autocomplete module's double-underscore
# ("private") helpers — works because name mangling only applies inside class
# bodies, but it couples tightly to that module's internals.
SUPPORTED_TYPES = {
    event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
                                              query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
    event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
                                              query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
    event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
                                                 query=autocomplete.__generic_query(
                                                     typename=event_type.LOCATION.ui_type)),
    event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
                                               query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
    event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
                                                query=autocomplete.__generic_query(
                                                    typename=event_type.REQUEST.ui_type)),
    event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
                                                query=autocomplete.__generic_query(
                                                    typename=event_type.GRAPHQL.ui_type)),
    event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
                                                    query=autocomplete.__generic_query(
                                                        typename=event_type.STATEACTION.ui_type)),
    # Errors and metadata have dedicated search functions instead of the
    # generic single-column autocomplete.
    event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
                                              query=None),
    event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
                                                 query=None),
    # IOS
    event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
                                                  query=autocomplete.__generic_query(
                                                      typename=event_type.CLICK_IOS.ui_type)),
    event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
                                                  query=autocomplete.__generic_query(
                                                      typename=event_type.INPUT_IOS.ui_type)),
    event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
                                                 query=autocomplete.__generic_query(
                                                     typename=event_type.VIEW_IOS.ui_type)),
    event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
                                                   query=autocomplete.__generic_query(
                                                       typename=event_type.CUSTOM_IOS.ui_type)),
    event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
                                                    query=autocomplete.__generic_query(
                                                        typename=event_type.REQUEST_IOS.ui_type)),
    event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
                                                  query=None),
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_errors_by_session_id(session_id, project_id):
    """Return all errors of a session joined with their root-error details,
    ordered chronologically; `time` is the offset from session start (ms)."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
                SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
                FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
                WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
                ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
        errors = cur.fetchall()
        for e in errors:
            # DB returns a datetime; the API contract uses epoch milliseconds.
            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
        return helper.list_to_camel_case(errors)
|
||||||
|
|
||||||
|
|
||||||
|
def search(text, event_type, project_id, source, key):
    """Autocomplete entry point: dispatch *text* to the right search helper.

    Resolution order: no type -> generic autocomplete table; known event type
    (web, then its _IOS twin); session-meta type (web, then _IOS); otherwise
    an "unsupported event" error.

    NOTE(review): the *event_type* parameter shadows the module-level
    `event_type` class within this function; the name is part of the public
    keyword interface, so it is kept as-is.
    """
    if not event_type:
        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}

    if event_type in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
    elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
                                                        source=source)
    elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.endswith("_IOS") \
            and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    else:
        return {"errors": ["unsupported event"]}

    return {"data": rows}
|
||||||
380
ee/api/chalicelib/core/funnels.py
Normal file
380
ee/api/chalicelib/core/funnels.py
Normal file
|
|
@ -0,0 +1,380 @@
|
||||||
|
import json
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
import chalicelib.utils.helper
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import significance
|
||||||
|
from chalicelib.utils import dev
|
||||||
|
from chalicelib.utils import helper, pg_client
|
||||||
|
from chalicelib.utils.TimeUTC import TimeUTC
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||||
|
from chalicelib.core import sessions_legacy as sessions
|
||||||
|
else:
|
||||||
|
from chalicelib.core import sessions
|
||||||
|
|
||||||
|
# Keys stripped from funnel filters before persisting them (time-window keys
# are recomputed on read, never stored).
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]

# Funnel columns that may be modified through the update endpoint.
ALLOW_UPDATE_FOR = ["name", "filter"]
|
||||||
|
|
||||||
|
|
||||||
|
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
    """Keep only the event kinds a funnel stage can be built from, dropping
    any stage without a value."""
    allowed_types = {schemas.EventType.click, schemas.EventType.input,
                     schemas.EventType.location, schemas.EventType.custom,
                     schemas.EventType.click_ios, schemas.EventType.input_ios,
                     schemas.EventType.view_ios, schemas.EventType.custom_ios}
    return [stage for stage in stages
            if stage.type in allowed_types and stage.value is not None]
|
||||||
|
|
||||||
|
|
||||||
|
def __parse_events(f_events: List[dict]):
    """Deserialize raw stored event dicts into search-event schema objects."""
    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]


def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
    """Serialize search-event schema objects back into plain dicts."""
    return [e.dict() for e in f_events]
|
||||||
|
|
||||||
|
|
||||||
|
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
    """Normalize funnel stages in place: default missing operators to `is`,
    wrap scalar values into lists, and drop stages whose value list is empty
    (unless the operator is an "any" operator). Returns None when given None.
    """
    if f_events is None:
        return
    events = []
    for e in f_events:
        if e.operator is None:
            e.operator = schemas.SearchEventOperator._is

        if not isinstance(e.value, list):
            e.value = [e.value]
        # NOTE: "_isAny_opreator" spelling comes from the sessions module API.
        is_any = sessions._isAny_opreator(e.operator)
        # An empty value list only makes sense for "any"-style operators.
        if not is_any and isinstance(e.value, list) and len(e.value) == 0:
            continue
        events.append(e)
    return events
|
||||||
|
|
||||||
|
|
||||||
|
def __transform_old_funnels(events):
    """Upgrade legacy funnel events in place: ensure every event's "value" is
    a list (old funnels stored scalars). Returns the same list.
    """
    for e in events:
        value = e.get("value")
        if not isinstance(value, list):
            # Bug fix: the original re-read e["value"] after a .get() guard,
            # which raised KeyError for events with no "value" key at all;
            # such events now get [None], consistent with a stored None value.
            e["value"] = [value]
    return events
|
||||||
|
|
||||||
|
|
||||||
|
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
    """Persist a new funnel and return it with a resolved time window.

    The transient keys (REMOVE_KEYS) are stripped and the stages are reduced
    to the supported event kinds before the filter is stored as jsonb.
    NOTE(review): the parameter name `filter` shadows the builtin; kept
    because it is part of the caller-visible keyword interface.
    """
    helper.delete_keys_from_dict(filter, REMOVE_KEYS)
    filter.events = filter_stages(stages=filter.events)
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
                VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
                RETURNING *;""",
                            {"user_id": user_id, "project_id": project_id, "name": name,
                             "filter": json.dumps(filter.dict()),
                             "is_public": is_public})

        cur.execute(
            query
        )
        r = cur.fetchone()
        # DB datetime -> epoch ms, per the API contract.
        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
        r = helper.dict_to_camel_case(r)
        # startDate/endDate are never stored; derive them from rangeValue.
        r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
        return {"data": r}
|
||||||
|
|
||||||
|
|
||||||
|
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
    """Update a funnel's name, filter and/or visibility.

    Only the funnel's owner or anyone (for public funnels) may update it.
    Returns the updated funnel, {"errors": ["Nothing to update"]} when no
    field was provided, or {"errors": ["funnel not found"]} when the WHERE
    clause matched nothing.
    """
    s_query = []
    if filter is not None:
        helper.delete_keys_from_dict(filter, REMOVE_KEYS)
        s_query.append("filter = %(filter)s::jsonb")
    if name is not None and len(name) > 0:
        s_query.append("name = %(name)s")
    if is_public is not None:
        s_query.append("is_public = %(is_public)s")
    if len(s_query) == 0:
        return {"errors": ["Nothing to update"]}
    with pg_client.PostgresClient() as cur:
        # Only the SET fragments are f-string-built (from the fixed list
        # above); all values go through mogrify parameters.
        query = cur.mogrify(f"""\
                UPDATE public.funnels
                SET {" , ".join(s_query)}
                WHERE funnel_id=%(funnel_id)s
                    AND project_id = %(project_id)s
                    AND (user_id = %(user_id)s OR is_public)
                RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
                                  "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
                                  "project_id": project_id})
        cur.execute(
            query
        )
        r = cur.fetchone()
        if r is None:
            return {"errors": ["funnel not found"]}
        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
        r = helper.dict_to_camel_case(r)
        # Derive the transient time window, then flatten to the new filter shape.
        r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
        r["filter"] = helper.old_search_payload_to_flat(r["filter"])
        return {"data": r}
|
||||||
|
|
||||||
|
|
||||||
|
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
    """List the funnels visible to a user (their own plus public ones).

    With details=True each funnel is enriched in place with session/user
    counts, its significance stages and issue counts; the stored filter is
    replaced by its flattened form. Note the mutation order below is
    deliberate: `filter` is cloned before significance.get_overview (which
    may mutate it), popped, and re-attached flattened at the end.
    """
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
                    {",filter" if details else ""}
                FROM public.funnels
                WHERE project_id = %(project_id)s
                    AND funnels.deleted_at IS NULL
                    AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
                {"project_id": project_id, "user_id": user_id}
            )
        )

        rows = cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        for row in rows:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
            if details:
                # Normalize stored stages to the supported event kinds.
                row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
                if row.get("filter") is not None and row["filter"].get("events") is not None:
                    # Legacy funnels stored scalar stage values; wrap in lists.
                    row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))

                get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
                                   end_date=end_date)
                counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
                                                  project_id=project_id, user_id=None, count_only=True)
                row["sessionsCount"] = counts["countSessions"]
                row["usersCount"] = counts["countUsers"]
                # Shallow clone before get_overview, which may mutate filter_d.
                filter_clone = dict(row["filter"])
                overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
                row["stages"] = overview["stages"]
                row.pop("filter")
                row["stagesCount"] = len(row["stages"])
                # TODO: ask david to count it alone
                row["criticalIssuesCount"] = overview["criticalIssuesCount"]
                row["missedConversions"] = 0 if len(row["stages"]) < 2 \
                    else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
                row["filter"] = helper.old_search_payload_to_flat(filter_clone)
        return rows
|
||||||
|
|
||||||
|
|
||||||
|
def get_possible_issue_types(project_id):
    """Return the issue types a funnel can surface, with display titles.

    *project_id* is currently unused but kept for endpoint-interface parity.
    """
    issue_types = ['click_rage', 'dead_click', 'excessive_scrolling',
                   'bad_request', 'missing_resource', 'memory', 'cpu',
                   'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
                   'js_error']
    # Consistency fix: use the module's `helper` alias (from chalicelib.utils)
    # like the rest of this file, instead of the fully-qualified
    # chalicelib.utils.helper path — both name the same module.
    return [{"type": t, "title": helper.get_issue_title(t)} for t in issue_types]
|
||||||
|
|
||||||
|
|
||||||
|
def get_start_end_time(filter_d, range_value, start_date, end_date):
    """Resolve the time window of *filter_d* in place.

    Precedence: explicit start/end dates, then a provided range value (also
    stored back into the filter), then the filter's own stored rangeValue.
    """
    if start_date is not None and end_date is not None:
        window = (start_date, end_date)
    elif range_value is not None and len(range_value) > 0:
        filter_d["rangeValue"] = range_value
        window = TimeUTC.get_start_end_from_range(range_value)
    else:
        window = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
    filter_d["startDate"], filter_d["endDate"] = window
|
||||||
|
|
||||||
|
|
||||||
|
def delete(project_id, funnel_id, user_id):
    """Soft-delete a funnel by stamping deleted_at (UTC).

    Only the owner, or anyone for a public funnel, can delete it; a
    non-matching id is silently ignored and success is still reported.
    """
    params = {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                UPDATE public.funnels
                SET deleted_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                    AND funnel_id = %(funnel_id)s
                    AND (user_id = %(user_id)s OR is_public);""", params)
        cur.execute(query)

    return {"data": {"state": "success"}}
|
||||||
|
|
||||||
|
|
||||||
|
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
    """Run a stored funnel's filter as a session search over the requested
    time window. `get` is defined elsewhere in this module."""
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
                                    project_id=project_id,
                                    user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
    """Search sessions with an ad-hoc funnel filter, falling back to the
    stored funnel's filter (with the request's time window) when the ad-hoc
    payload has no usable stages."""
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        # Keep the caller's time window while using the stored stages.
        get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
                           start_date=data.startDate, end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
    return sessions.search_sessions(data=data, project_id=project_id,
                                    user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    """Compute a stored funnel's significance stages and total issue-caused
    drop for the requested time window."""
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: this fix for huge drop count — the drop can never exceed the
        # first stage's session count, so clamp it.
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            total_drop_due_to_issues = insights[0]["sessionsCount"]
        # end fix
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"data": {"stages": insights,
                     "totalDropDueToIssues": total_drop_due_to_issues}}
|
||||||
|
|
||||||
|
|
||||||
|
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
    """Stage-by-stage insights for an ad-hoc funnel payload.

    Falls back to the stored funnel's filter when the payload has no usable
    stages. Stage fixing is applied only after the fallback resolution, then
    the computation is delegated to significance.get_top_insights.
    """
    data.events = filter_stages(__parse_events(data.events))
    if not data.events:
        stored = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if stored is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=stored["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelInsightsPayloadSchema.parse_obj(stored["filter"])
    data.events = __fix_stages(data.events)
    stages, drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
    stages = helper.list_to_camel_case(stages)
    if stages:
        # the drop can never exceed the sessions that entered the funnel
        drop_due_to_issues = min(drop_due_to_issues, stages[0]["sessionsCount"])
        stages[-1]["dropDueToIssues"] = drop_due_to_issues
    return {"data": {"stages": stages,
                     "totalDropDueToIssues": drop_due_to_issues}}
||||||
|
|
||||||
|
|
||||||
|
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
|
||||||
|
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
    """Widget variant of the on-the-fly insights: no stored-funnel fallback.

    Returns the raw payload shape ({"stages": ..., "totalDropDueToIssues": ...})
    rather than wrapping it in {"data": ...}.
    """
    data.events = filter_stages(__parse_events(data.events))
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        return {"stages": [], "totalDropDueToIssues": 0}
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: removed an unreachable `if len(insights) == 0:` branch that was
        # nested inside this `len(insights) > 0` guard — behavior unchanged.
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            # cap the drop to the sessions lost between the entry and exit stages
            total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"stages": insights,
            "totalDropDueToIssues": total_drop_due_to_issues}
||||||
|
|
||||||
|
|
||||||
|
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    """List the issues detected over a stored funnel's session range.

    Loads the saved funnel, normalizes its time range in place, and wraps
    significance.get_issues_list output in camelCase.
    """
    funnel = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if funnel is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=funnel["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    issues = significance.get_issues_list(filter_d=funnel["filter"], project_id=project_id)
    return {"data": {"issues": helper.dict_to_camel_case(issues)}}
||||||
|
|
||||||
|
|
||||||
|
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
    """List issues for an ad-hoc funnel payload, with stored-funnel fallback.

    A funnel needs at least two stages to analyze drops between them; with
    fewer, an empty issues list is returned.
    """
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if not data.events:
        stored = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if stored is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=stored["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(stored["filter"])
    if len(data.events) < 2:
        return {"issues": []}
    issues = significance.get_issues_list(filter_d=data.dict(), project_id=project_id,
                                          first_stage=1, last_stage=len(data.events))
    return {"issues": helper.dict_to_camel_case(issues)}
||||||
|
|
||||||
|
|
||||||
|
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
|
||||||
|
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
    """Widget variant of get_issues_on_the_fly: no stored-funnel fallback.

    Returns {"issues": {...}} with the camelCased significance payload.
    """
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    # fix: the guard used to be `len(data.events) < 0`, which can never be
    # true; aligned with get_issues_on_the_fly — fewer than 2 stages means
    # there is no between-stage drop to analyze.
    if len(data.events) < 2:
        return {"issues": []}

    return {
        "issues": helper.dict_to_camel_case(
            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
                                         last_stage=len(data.events)))}
||||||
|
|
||||||
|
|
||||||
|
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
    """Fetch a single funnel owned by (or public to) the given user.

    :param funnel_id: id of the funnel to load
    :param project_id: project the funnel must belong to
    :param user_id: requesting user; private funnels of other users are excluded
    :param flatten: when True, flatten the stored filter payload
                    (helper.old_search_payload_to_flat)
    :param fix_stages: when True, run __fix_stages over the parsed events
    :return: camelCased funnel dict, or None when not found
    """
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT
                       *
                FROM public.funnels
                WHERE project_id = %(project_id)s
                    AND deleted_at IS NULL
                    AND funnel_id = %(funnel_id)s
                    AND (user_id = %(user_id)s OR is_public);""",
                {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
            )
        )

        f = helper.dict_to_camel_case(cur.fetchone())
        if f is None:
            return None
        if f.get("filter") is not None and f["filter"].get("events") is not None:
            # legacy funnels stored events in an older shape; upgrade in place
            f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
        f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
        # parse -> filter -> (optionally) fix, then back to plain dicts;
        # the order of these transformations matters
        f["filter"]["events"] = __parse_events(f["filter"]["events"])
        f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
        if fix_stages:
            f["filter"]["events"] = __fix_stages(f["filter"]["events"])
        f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
        if flatten:
            f["filter"] = helper.old_search_payload_to_flat(f["filter"])
        return f
||||||
|
|
||||||
|
|
||||||
|
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
                    start_date=None, end_date=None):
    """Find sessions of a funnel that are affected by one specific issue.

    Recomputes the funnel's issues on the fly, locates the one matching
    issue_id, and searches the sessions tied to it. When the issue is not
    found, an empty sessions payload is returned together with issue=None.
    """
    if len(data.events) == 0:
        # no ad-hoc stages: fall back to the stored funnel's filter
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        # explicit payload dates win over the keyword-argument fallbacks
        data.startDate = data.startDate if data.startDate is not None else start_date
        data.endDate = data.endDate if data.endDate is not None else end_date
        get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])

    # issues payload holds "significant"/"insignificant" buckets; merge them
    issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
        .get("issues", {})
    issues = issues.get("significant", []) + issues.get("insignificant", [])
    issue = None
    for i in issues:
        if i.get("issueId", "") == issue_id:
            issue = i
            break
    return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
                                                 data=data) if issue is not None else {"total": 0, "sessions": []},
            # "stages": helper.list_to_camel_case(insights),
            # "totalDropDueToIssues": total_drop_due_to_issues,
            "issue": issue}
||||||
644
ee/api/chalicelib/core/significance.py
Normal file
644
ee/api/chalicelib/core/significance.py
Normal file
|
|
@ -0,0 +1,644 @@
|
||||||
|
__author__ = "AZNAUROV David"
|
||||||
|
__maintainer__ = "KRAIEM Taha Yassine"
|
||||||
|
|
||||||
|
import schemas
|
||||||
|
from chalicelib.core import events, metadata
|
||||||
|
from chalicelib.utils import dev
|
||||||
|
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||||
|
from chalicelib.core import sessions_legacy as sessions
|
||||||
|
else:
|
||||||
|
from chalicelib.core import sessions
|
||||||
|
|
||||||
|
"""
|
||||||
|
todo: remove LIMIT from the query
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import List
|
||||||
|
import math
|
||||||
|
import warnings
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from psycopg2.extras import RealDictRow
|
||||||
|
from chalicelib.utils import pg_client, helper
|
||||||
|
|
||||||
|
# Minimum confidence for an issue/drop correlation to be reported as significant.
SIGNIFICANCE_THRSH = 0.4

# Critical values of Student's t distribution (two-tailed, 95% confidence),
# keyed by small sample sizes; pearson_corr falls back to 2.02 / 2 above 30.
# fix: the table skipped key 24 (23 jumped straight to 25) and the remaining
# values were shifted by one (2.048 was missing), so T_VALUES[24] raised
# KeyError in pearson_corr for n == 24.
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
            11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.131, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093,
            20: 2.086, 21: 2.080, 22: 2.074, 23: 2.069, 24: 2.064, 25: 2.060, 26: 2.056, 27: 2.052, 28: 2.048,
            29: 2.045, 30: 2.042}
|
||||||
|
|
||||||
|
|
||||||
|
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
    """Build and run the multi-stage funnel query.

    One sub-query per funnel stage selects the sessions that reached that
    stage (with its timestamp), chained via LEFT JOIN LATERAL; the result is
    then joined with the issues that occurred between the first and last
    stage timestamps, and with public.sessions for user_uuid.

    :param filter_d: dict with "events" (stages), "filters", "issueTypes",
                     "startDate" and "endDate"
    :param project_id: project to scope the query to
    :return: list of rows, one per session x issue occurrence
    """
    stages: [dict] = filter_d.get("events", [])
    filters: [dict] = filter_d.get("filters", [])
    filter_issues = filter_d.get("issueTypes")
    if filter_issues is None or len(filter_issues) == 0:
        filter_issues = []
    # constraints applied to every stage sub-query vs. only to the first stage
    stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
    first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
                                     "s.start_ts <= %(endTimestamp)s"]
    filter_extra_from = []
    n_stages_query = []
    values = {}
    if len(filters) > 0:
        meta_keys = None
        for i, f in enumerate(filters):
            # fix: None must be rejected before len() is called on the value
            if f["value"] is None:
                continue
            if not isinstance(f["value"], list):
                # fix: `f` is a dict — this used to be `f.value = [...]`,
                # which raises AttributeError on a dict
                f["value"] = [f["value"]]
            if len(f["value"]) == 0:
                continue
            f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
            op = sessions.__get_sql_operator(f["operator"])

            filter_type = f["type"]
            f_k = f"f_value{i}"
            values = {**values,
                      **sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
                                                  value_key=f_k)}
            if filter_type == schemas.FilterType.user_browser:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type == schemas.FilterType.duration:
                if len(f["value"]) > 0 and f["value"][0] is not None:
                    first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
                    values["minDuration"] = f["value"][0]
                if len(f["value"]) > 1 and f["value"][1] is not None and int(f["value"][1]) > 0:
                    first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
                    values["maxDuration"] = f["value"][1]
            elif filter_type == schemas.FilterType.referrer:
                # referrer lives in the pages/location events table
                filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
            elif filter_type == events.event_type.METADATA.ui_type:
                # metadata keys map to numbered session columns; fetch the map once
                if meta_keys is None:
                    meta_keys = metadata.get(project_id=project_id)
                    meta_keys = {m["key"]: m["index"] for m in meta_keys}
                if f.get("key") in meta_keys.keys():
                    first_stage_extra_constraints.append(
                        sessions._multiple_conditions(
                            f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
                            value_key=f_k))
            elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type in [schemas.FilterType.user_anonymous_id,
                                 schemas.FilterType.user_anonymous_id_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
            elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
                first_stage_extra_constraints.append(
                    sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
    i = -1
    for s in stages:

        if s.get("operator") is None:
            s["operator"] = "is"

        if not isinstance(s["value"], list):
            s["value"] = [s["value"]]
        is_any = sessions._isAny_opreator(s["operator"])
        if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
            continue
        i += 1
        if i == 0:
            # only the first stage joins public.sessions (plus filter-driven joins)
            extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
        else:
            extra_from = []
        op = sessions.__get_sql_operator(s["operator"])
        event_type = s["type"].upper()
        if event_type == events.event_type.CLICK.ui_type:
            next_table = events.event_type.CLICK.table
            next_col_name = events.event_type.CLICK.column
        elif event_type == events.event_type.INPUT.ui_type:
            next_table = events.event_type.INPUT.table
            next_col_name = events.event_type.INPUT.column
        elif event_type == events.event_type.LOCATION.ui_type:
            next_table = events.event_type.LOCATION.table
            next_col_name = events.event_type.LOCATION.column
        elif event_type == events.event_type.CUSTOM.ui_type:
            next_table = events.event_type.CUSTOM.table
            next_col_name = events.event_type.CUSTOM.column
        # IOS --------------
        elif event_type == events.event_type.CLICK_IOS.ui_type:
            next_table = events.event_type.CLICK_IOS.table
            next_col_name = events.event_type.CLICK_IOS.column
        elif event_type == events.event_type.INPUT_IOS.ui_type:
            next_table = events.event_type.INPUT_IOS.table
            next_col_name = events.event_type.INPUT_IOS.column
        elif event_type == events.event_type.VIEW_IOS.ui_type:
            next_table = events.event_type.VIEW_IOS.table
            next_col_name = events.event_type.VIEW_IOS.column
        elif event_type == events.event_type.CUSTOM_IOS.ui_type:
            next_table = events.event_type.CUSTOM_IOS.table
            next_col_name = events.event_type.CUSTOM_IOS.column
        else:
            print("=================UNDEFINED")
            continue

        values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
                                                        value_key=f"value{i + 1}")}
        if sessions.__is_negation_operator(op) and i > 0:
            # negations after the first stage become an anti-join on the lateral
            op = sessions.__reverse_sql_operator(op)
            main_condition = "left_not.session_id ISNULL"
            extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
                                                     FROM {next_table} AS s_main
                                                     WHERE s_main.{next_col_name} {op} %(value{i + 1})s
                                                       AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                                       AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
        else:
            if is_any:
                main_condition = "TRUE"
            else:
                main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
                                                               values=s["value"], value_key=f"value{i + 1}")
        n_stages_query.append(f"""
        (SELECT main.session_id,
                {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
                '{event_type}' AS type,
                '{s["operator"]}' AS operator
         FROM {next_table} AS main {" ".join(extra_from)}
         WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
               {f"AND main.session_id=T1.session_id" if i > 0 else ""}
               AND {main_condition}
               {(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
               {(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
         GROUP BY main.session_id)
        AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
        """)
    if len(n_stages_query) == 0:
        return []
    n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
    # the trailing ")" closes the "SELECT * FROM (" opened in the wrapper below
    n_stages_query += ") AS stages_t"

    n_stages_query = f"""
    SELECT stages_and_issues_t.*,sessions.session_id, sessions.user_uuid FROM (
        SELECT * FROM (
            SELECT * FROM
            {n_stages_query}
            LEFT JOIN LATERAL
            (
                SELECT * FROM
                (SELECT ISE.session_id,
                        ISS.type as issue_type,
                        ISE.timestamp AS issue_timestamp,
                        ISS.context_string as issue_context,
                        ISS.issue_id as issue_id
                 FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
                 WHERE ISE.timestamp >= stages_t.stage1_timestamp
                   AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
                   AND ISS.project_id=%(project_id)s
                   {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
            ) AS issues_t
            USING (session_id)) AS stages_and_issues_t
        inner join sessions USING(session_id);
    """

    params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
              "issueTypes": tuple(filter_issues), **values}
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(n_stages_query, params))
        rows = cur.fetchall()
    return rows
||||||
|
|
||||||
|
|
||||||
|
def pearson_corr(x: list, y: list):
    """Pearson correlation restricted to positive correlations.

    Returns a 3-tuple (r, confidence, is_significant):
      - r: correlation clamped to [0, 1] for n > 2 (negative correlations are
        not of interest here), or None when it is undefined (too few points,
        constant input)
      - confidence: approximated confidence, or None when r is None
      - is_significant: True when confidence exceeds SIGNIFICANCE_THRSH

    :raises ValueError: when x and y have different lengths
    """
    n = len(x)
    if n != len(y):
        raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')

    if n < 2:
        warnings.warn(f'x and y must have length at least 2. Got {n} instead')
        return None, None, False

    # If an input is constant, the correlation coefficient is not defined.
    if all(t == x[0] for t in x) or all(t == y[0] for t in y):
        warnings.warn("An input array is constant; the correlation coefficent is not defined.")
        return None, None, False

    if n == 2:
        # fix: this branch used to return a 2-tuple, which broke every caller
        # that unpacks (r, confidence, is_significant). With two points the
        # correlation is exactly +/-1 and confidence 1.0 > SIGNIFICANCE_THRSH.
        return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True

    xmean = sum(x) / len(x)
    ymean = sum(y) / len(y)

    xm = [el - xmean for el in x]
    ym = [el - ymean for el in y]

    normxm = math.sqrt((sum([xm[i] * xm[i] for i in range(len(xm))])))
    normym = math.sqrt((sum([ym[i] * ym[i] for i in range(len(ym))])))

    threshold = 1e-8
    if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
        # If all the values in x (likewise y) are very close to the mean,
        # the loss of precision that occurs in the subtraction xm = x - xmean
        # might result in large errors in r.
        warnings.warn("An input array is constant; the correlation coefficent is not defined.")

    r = sum(
        i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))

    # Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
    # However, if r < 0, we don't care, as our problem is to find only positive correlations
    r = max(min(r, 1.0), 0.0)

    # approximated confidence
    # NOTE(review): t_c is looked up but never used below, and the table is
    # indexed by sample size n rather than degrees of freedom n-2 — confirm
    # whether this is intentional before touching it.
    if n < 31:
        t_c = T_VALUES[n]
    elif n < 50:
        t_c = 2.02
    else:
        t_c = 2
    if r >= 0.999:
        confidence = 1
    else:
        confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)

    if confidence > SIGNIFICANCE_THRSH:
        return r, confidence, True
    else:
        return r, confidence, False
||||||
|
|
||||||
|
|
||||||
|
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
    """
    Returns two lists with binary values 0/1:

    transitions ::: if transited from the first stage to the last - 1
                    else - 0
    errors ::: a dictionary where the keys are all unique issues (currently context-wise)
               the values are lists
               if an issue happened between the first stage to the last - 1
               else - 0

    For a small task of calculating a total drop due to issues,
    we need to disregard the issue type when creating the `errors`-like array.
    The `all_errors` array can be obtained by logical OR statement applied to all errors by issue
    The `transitions` array stays the same
    """
    transitions = []
    n_sess_affected = 0
    errors = {}
    # issue keys are "<type>__^__<context>"; pre-split them once
    for issue in all_issues_with_context:
        split = issue.split('__^__')
        errors[issue] = {
            "errors": [],
            "issue_type": split[0],
            "context": split[1]}

    for row in rows:
        t = 0
        first_ts = row[f'stage{first_stage}_timestamp']
        last_ts = row[f'stage{last_stage}_timestamp']
        # sessions that never reached the first stage are excluded entirely,
        # keeping transitions and every errors[...] list index-aligned per row
        if first_ts is None:
            continue
        elif first_ts is not None and last_ts is not None:
            t = 1
        transitions.append(t)

        ic_present = False
        for issue_type_with_context in errors:
            ic = 0
            issue_type = errors[issue_type_with_context]["issue_type"]
            context = errors[issue_type_with_context]["context"]
            if row['issue_type'] is not None:
                # an issue counts when the session dropped (last_ts is None)
                # or the issue happened strictly between the two stage timestamps
                if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
                    context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
                    if issue_type == row['issue_type'] and context == context_in_row:
                        ic = 1
                        ic_present = True
            errors[issue_type_with_context]["errors"].append(ic)

        if ic_present and t:
            n_sess_affected += 1

    def tuple_or(t: tuple):
        # logical OR across one row's per-issue flags
        for el in t:
            if el > 0:
                return 1
        return 0

    # flatten to issue -> 0/1 list, then OR element-wise across all issues
    errors = {key: errors[key]["errors"] for key in errors}
    all_errors = [tuple_or(t) for t in zip(*errors.values())]

    return transitions, errors, all_errors, n_sess_affected
||||||
|
|
||||||
|
|
||||||
|
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
    """Aggregate per-issue counters over the funnel query rows.

    Only issues that occurred between the first and last stage timestamps
    (or in dropped sessions that never reached the last stage) are counted.
    Issues are keyed by "<type>__^__<context>".

    :param rows: rows produced by get_stages_and_events
    :param first_stage: 1-based index of the sub-funnel's first stage
    :param last_stage: 1-based index of the sub-funnel's last stage
    :return: (all_issues_with_context, n_issues_dict, n_affected_users_dict,
              n_affected_sessions_dict, contexts)
    """
    affected_users = defaultdict(lambda: set())
    affected_sessions = defaultdict(lambda: set())
    contexts = defaultdict(lambda: None)
    n_affected_users_dict = defaultdict(lambda: None)
    n_affected_sessions_dict = defaultdict(lambda: None)
    all_issues_with_context = set()
    n_issues_dict = defaultdict(lambda: 0)
    issues_by_session = defaultdict(lambda: 0)

    for row in rows:
        # check that the session has reached the first stage of subfunnel:
        if row[f'stage{first_stage}_timestamp'] is None:
            continue

        iss = row['issue_type']
        iss_ts = row['issue_timestamp']

        # check that the issue exists and belongs to subfunnel:
        if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
                                (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
            context_string = row['issue_context'] if row['issue_context'] is not None else ''
            issue_with_context = iss + '__^__' + context_string
            contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
            all_issues_with_context.add(issue_with_context)
            n_issues_dict[issue_with_context] += 1
            if row['user_uuid'] is not None:
                affected_users[issue_with_context].add(row['user_uuid'])

            affected_sessions[issue_with_context].add(row['session_id'])
            issues_by_session[row[f'session_id']] += 1

    # collapse the sets to counts; missing keys default to None via defaultdict
    if len(affected_users) > 0:
        n_affected_users_dict.update({
            iss: len(affected_users[iss]) for iss in affected_users
        })
    if len(affected_sessions) > 0:
        n_affected_sessions_dict.update({
            iss: len(affected_sessions[iss]) for iss in affected_sessions
        })
    return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
||||||
|
|
||||||
|
|
||||||
|
def count_sessions(rows, n_stages):
    """Count the distinct session_ids that reached each stage.

    :param rows: funnel query rows carrying stage{i}_timestamp and session_id
    :param n_stages: number of stages in the funnel
    :return: dict {stage (1-based): distinct session count}, keys 1..n_stages
    """
    reached = defaultdict(set)
    for row in rows:
        for stage in range(1, n_stages + 1):
            if row[f"stage{stage}_timestamp"] is not None:
                reached[stage].add(row["session_id"])
    return {stage: len(reached[stage]) for stage in range(1, n_stages + 1)}
||||||
|
|
||||||
|
|
||||||
|
def count_users(rows, n_stages):
    """Count the distinct user_uuids observed at each stage.

    :param rows: funnel query rows carrying stage{i}_timestamp and user_uuid
    :param n_stages: number of stages in the funnel
    :return: dict {stage (1-based): distinct user count}, keys 1..n_stages
    """
    seen = {stage: set() for stage in range(1, n_stages + 1)}
    for row in rows:
        for stage in seen:
            if row[f"stage{stage}_timestamp"] is not None:
                seen[stage].add(row["user_uuid"])
    return {stage: len(users) for stage, users in seen.items()}
||||||
|
|
||||||
|
|
||||||
|
def get_stages(stages, rows):
    """Build the per-stage output list (session/user counts and drop pct).

    drop_pct for stage k (k > 1) is the percentage of sessions lost between
    stage k-1 and stage k; it is None for the first stage.
    """
    total = len(stages)
    reached_sessions = count_sessions(rows, total)
    reached_users = count_users(rows, total)

    result = []
    for idx, stage in enumerate(stages):
        stage_no = idx + 1
        drop = None
        if idx != 0:
            prev = reached_sessions[idx]
            drop = 0 if prev == 0 else int(100 * (prev - reached_sessions[stage_no]) / prev)
        result.append({
            "value": stage["value"],
            "type": stage["type"],
            "operator": stage["operator"],
            "sessionsCount": reached_sessions[stage_no],
            "drop_pct": drop,
            "usersCount": reached_users[stage_no],
            "dropDueToIssues": 0,
        })
    return result
||||||
|
|
||||||
|
|
||||||
|
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
    """Correlate issues with funnel drop and classify them by significance.

    :param stages: funnel stage definitions (only their count is used)
    :param rows: rows produced by get_stages_and_events
    :param first_stage: If it's a part of the initial funnel, provide a number of the first stage (starting from 1)
    :param last_stage: If it's a part of the initial funnel, provide a number of the last stage (starting from 1)
    :param drop_only: when True, return only the total drop attributed to issues
    :return: (n_critical_issues, issues_dict, total_drop_due_to_issues), or
             total_drop_due_to_issues alone when drop_only is True
    """
    n_stages = len(stages)

    # default to the whole funnel; clamp an out-of-range last stage
    if first_stage is None:
        first_stage = 1
    if last_stage is None:
        last_stage = n_stages
    if last_stage > n_stages:
        print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
        last_stage = n_stages

    n_critical_issues = 0
    issues_dict = dict({"significant": [],
                        "insignificant": []})
    session_counts = count_sessions(rows, n_stages)
    drop = session_counts[first_stage] - session_counts[last_stage]

    # per-issue counters; `affected_sessions`/`affected_users_dict` hold counts
    all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
        rows, first_stage, last_stage)
    transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
                                                                                               all_issues_with_context,
                                                                                               first_stage, last_stage)

    print("len(transitions) =", len(transitions))

    # overall drop attributed to issues: correlation of "transited" vs "had any issue"
    if any(all_errors):
        total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
        if total_drop_corr is not None and drop is not None:
            total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
        else:
            total_drop_due_to_issues = 0
    else:
        total_drop_due_to_issues = 0

    if drop_only:
        return total_drop_due_to_issues
    # per-issue correlation: classify each issue as significant/insignificant
    for issue in all_issues_with_context:

        if not any(errors[issue]):
            continue
        r, confidence, is_sign = pearson_corr(transitions, errors[issue])

        if r is not None and drop is not None and is_sign:
            lost_conversions = int(r * affected_sessions[issue])
        else:
            lost_conversions = None
        if r is None:
            r = 0
        split = issue.split('__^__')
        issues_dict['significant' if is_sign else 'insignificant'].append({
            "type": split[0],
            "title": helper.get_issue_title(split[0]),
            "affected_sessions": affected_sessions[issue],
            "unaffected_sessions": session_counts[1] - affected_sessions[issue],
            "lost_conversions": lost_conversions,
            "affected_users": affected_users_dict[issue],
            "conversion_impact": round(r * 100),
            "context_string": contexts[issue]["context"],
            "issue_id": contexts[issue]["id"]
        })

        if is_sign:
            n_critical_issues += n_issues_dict[issue]

    return n_critical_issues, issues_dict, total_drop_due_to_issues
||||||
|
|
||||||
|
|
||||||
|
def get_top_insights(filter_d, project_id):
    """Build the funnel stage list plus the total conversion drop attributable to issues.

    Returns a ``(stages, total_drop_due_to_issues)`` tuple; the drop is 0 whenever
    fewer than two stages are defined or no matching session rows exist.
    """
    stages = filter_d.get("events", [])
    # TODO: handle 1 stage alone
    if not stages:
        print("no stages found")
        return [], 0
    if len(stages) == 1:
        # TODO: count sessions, and users for single stage
        first = stages[0]
        single = {
            "type": first["type"],
            "value": first["value"],
            "dropPercentage": None,
            "operator": first["operator"],
            "sessionsCount": 0,
            "dropPct": 0,
            "usersCount": 0,
            "dropDueToIssues": 0
        }
        counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
                                          project_id=project_id,
                                          user_id=None, count_only=True)
        single["sessionsCount"] = counts["countSessions"]
        single["usersCount"] = counts["countUsers"]
        return [single], 0
    # Multi-stage: fetch every session row matching the funnel definition.
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if not rows:
        return get_stages(stages, []), 0
    # First half of the result: per-stage counts / drop percentages.
    stages_list = get_stages(stages, rows)
    # Second half: how much of the total drop correlates with detected issues.
    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
                                          last_stage=filter_d.get("lastStage"), drop_only=True)
    return stages_list, total_drop_due_to_issues
|
||||||
|
|
||||||
|
|
||||||
|
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
    """Return the significant/insignificant issue breakdown for a funnel.

    Produces a dict with ``total_drop_due_to_issues``, ``critical_issues_count``
    and the two issue lists; the empty skeleton comes back when no sessions match.
    """
    result = {"total_drop_due_to_issues": 0,
              "critical_issues_count": 0,
              "significant": [],
              "insignificant": []}
    funnel_stages = filter_d.get("events", [])
    # All session rows that match the multi-stage funnel definition.
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if not rows:
        return result
    # Correlate issues with the drop between the selected stages.
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(funnel_stages, rows,
                                                                          first_stage=first_stage,
                                                                          last_stage=last_stage)
    result['total_drop_due_to_issues'] = total_drop_due_to_issues
    # NOTE: critical_issues_count deliberately stays 0 — the original kept the
    # assignment from n_critical_issues commented out.
    return {**result, **issues_dict}
|
||||||
|
|
||||||
|
|
||||||
|
def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
    """Return the funnel overview: the stage list and the critical-issue count.

    :param filter_d: funnel definition; must contain an "events" list of stages.
    :param project_id: project whose sessions are queried.
    :param first_stage: optional stage bound forwarded to get_issues.
    :param last_stage: optional stage bound forwarded to get_issues.
    :return: dict with "stages" and "criticalIssuesCount" (always both keys).
    """

    def _placeholder_stage(stage):
        # Stage entry with no counts, used when there is nothing to aggregate.
        return {"type": stage["type"],
                "value": stage["value"],
                "sessionsCount": None,
                "dropPercentage": None,
                "usersCount": None}

    stages = filter_d["events"]
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        return {"stages": [],
                "criticalIssuesCount": 0}
    if len(stages) == 1:
        # TODO: count sessions, and users for single stage
        # Fix: include criticalIssuesCount here too, so every branch returns the
        # same response shape (the original omitted it only on this path).
        return {"stages": [_placeholder_stage(stages[0])],
                "criticalIssuesCount": 0}
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        # PS: not sure what to return if rows are empty
        return {"stages": [_placeholder_stage(stages[0])],
                "criticalIssuesCount": 0}
    # Obtain the first part of the output
    stages_list = get_stages(stages, rows)
    # Obtain the second part of the output
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
                                                                          last_stage=last_stage)
    return {"stages": stages_list,
            "criticalIssuesCount": n_critical_issues}
|
||||||
|
|
@ -1,17 +1,17 @@
|
||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
rm -rf ./chalicelib/core/alerts.py
|
rm -rf ./chalicelib/core/alerts.py
|
||||||
rm -rf ./chalicelib/core/alerts_processor.py
|
#exp rm -rf ./chalicelib/core/alerts_processor.py
|
||||||
rm -rf ./chalicelib/core/announcements.py
|
rm -rf ./chalicelib/core/announcements.py
|
||||||
rm -rf ./chalicelib/core/autocomplete.py
|
rm -rf ./chalicelib/core/autocomplete.py
|
||||||
rm -rf ./chalicelib/core/collaboration_slack.py
|
rm -rf ./chalicelib/core/collaboration_slack.py
|
||||||
rm -rf ./chalicelib/core/countries.py
|
rm -rf ./chalicelib/core/countries.py
|
||||||
rm -rf ./chalicelib/core/errors.py
|
#exp rm -rf ./chalicelib/core/errors.py
|
||||||
rm -rf ./chalicelib/core/errors_favorite.py
|
rm -rf ./chalicelib/core/errors_favorite.py
|
||||||
rm -rf ./chalicelib/core/events.py
|
#exp rm -rf ./chalicelib/core/events.py
|
||||||
rm -rf ./chalicelib/core/events_ios.py
|
rm -rf ./chalicelib/core/events_ios.py
|
||||||
rm -rf ./chalicelib/core/dashboards.py
|
#exp rm -rf ./chalicelib/core/dashboards.py
|
||||||
rm -rf ./chalicelib/core/funnels.py
|
#exp rm -rf ./chalicelib/core/funnels.py
|
||||||
rm -rf ./chalicelib/core/integration_base.py
|
rm -rf ./chalicelib/core/integration_base.py
|
||||||
rm -rf ./chalicelib/core/integration_base_issue.py
|
rm -rf ./chalicelib/core/integration_base_issue.py
|
||||||
rm -rf ./chalicelib/core/integration_github.py
|
rm -rf ./chalicelib/core/integration_github.py
|
||||||
|
|
@ -36,7 +36,7 @@ rm -rf ./chalicelib/core/sessions.py
|
||||||
rm -rf ./chalicelib/core/sessions_assignments.py
|
rm -rf ./chalicelib/core/sessions_assignments.py
|
||||||
rm -rf ./chalicelib/core/sessions_metas.py
|
rm -rf ./chalicelib/core/sessions_metas.py
|
||||||
rm -rf ./chalicelib/core/sessions_mobs.py
|
rm -rf ./chalicelib/core/sessions_mobs.py
|
||||||
rm -rf ./chalicelib/core/significance.py
|
#exp rm -rf ./chalicelib/core/significance.py
|
||||||
rm -rf ./chalicelib/core/slack.py
|
rm -rf ./chalicelib/core/slack.py
|
||||||
rm -rf ./chalicelib/core/socket_ios.py
|
rm -rf ./chalicelib/core/socket_ios.py
|
||||||
rm -rf ./chalicelib/core/sourcemaps.py
|
rm -rf ./chalicelib/core/sourcemaps.py
|
||||||
|
|
@ -78,7 +78,7 @@ rm -rf ./routers/subs/insights.py
|
||||||
rm -rf ./schemas.py
|
rm -rf ./schemas.py
|
||||||
rm -rf ./routers/subs/v1_api.py
|
rm -rf ./routers/subs/v1_api.py
|
||||||
rm -rf ./routers/subs/metrics.py
|
rm -rf ./routers/subs/metrics.py
|
||||||
rm -rf ./chalicelib/core/custom_metrics.py
|
#exp rm -rf ./chalicelib/core/custom_metrics.py
|
||||||
rm -rf ./chalicelib/core/performance_event.py
|
rm -rf ./chalicelib/core/performance_event.py
|
||||||
rm -rf ./chalicelib/core/saved_search.py
|
rm -rf ./chalicelib/core/saved_search.py
|
||||||
rm -rf ./app_alerts.py
|
rm -rf ./app_alerts.py
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue