Merge remote-tracking branch 'origin/api-v1.8.0' into dev

commit edfdcd02ed
Author: Taha Yassine Kraiem
Date:   2022-08-26 19:43:46 +01:00

22 changed files with 581 additions and 74 deletions

View file

@ -52,8 +52,8 @@ def create(project_id, data: schemas.AlertSchema):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
-INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id)
-VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s)
+INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id, change)
+VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s, %(change)s)
RETURNING *;""",
{"project_id": project_id, **data})
)
@ -75,7 +75,8 @@ def update(id, data: schemas.AlertSchema):
detection_method = %(detection_method)s,
query = %(query)s,
options = %(options)s,
-series_id = %(series_id)s
+series_id = %(series_id)s,
+change = %(change)s
WHERE alert_id =%(id)s AND deleted_at ISNULL
RETURNING *;""",
{"id": id, **data})

View file

@ -12,7 +12,8 @@ def get_all_alerts():
(EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at,
alerts.name,
alerts.series_id,
-filter
+filter,
+change
FROM public.alerts
LEFT JOIN metric_series USING (series_id)
INNER JOIN projects USING (project_id)
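
Rows from get_all_alerts() now expose the change mode next to the series filter, which is what lets the processors below read alert["change"] directly. A hedged sketch of the consuming side:

    # Hedged sketch: the new column surfaces as a "change" key on each row.
    for alert in get_all_alerts():
        mode = alert["change"]  # "percent" or "change"
        ...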

View file

@ -1,12 +1,16 @@
import decimal
import logging
from decouple import config
import schemas
from chalicelib.core import alerts_listener
from chalicelib.core import sessions, alerts
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
@ -41,7 +45,7 @@ LeftToDb = {
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.resources__missing__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE"},
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
schemas.AlertColumn.errors__4xx_5xx__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
@ -53,8 +57,9 @@ LeftToDb = {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.performance__crashes__count: {
"table": "(SELECT *, start_ts AS timestamp FROM public.sessions WHERE errors_count > 0) AS sessions",
"formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0"},
"table": "public.sessions",
"formula": "COUNT(DISTINCT session_id)",
"condition": "errors_count > 0 AND duration>0"},
schemas.AlertColumn.errors__javascript__count: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
@ -94,7 +99,8 @@ def can_check(a) -> bool:
def Build(a):
params = {"project_id": a["projectId"]}
now = TimeUTC.now()
params = {"project_id": a["projectId"], "now": now}
full_args = {}
j_s = True
if a["seriesId"] is not None:
@ -121,11 +127,12 @@ def Build(a):
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
q += f""" FROM ({subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}) AS stat"""
q += f""" FROM ({subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
{"AND sessions.start_ts <= %(now)s" if j_s else ""}) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["options"]["change"] == schemas.AlertDetectionChangeType.change:
if a["change"] == schemas.AlertDetectionType.change:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
@ -135,7 +142,9 @@ def Build(a):
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
AND datetime<=toDateTime(%(now)s/1000)
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
{"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
@ -155,8 +164,9 @@ def Build(a):
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
* 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
sub1 = f"""{subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
{"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
@ -185,30 +195,7 @@ def process():
result = cur.fetchone()
if result["valid"]:
logging.info("Valid alert, notifying users")
-notifications.append({
-    "alertId": alert["alertId"],
-    "tenantId": alert["tenantId"],
-    "title": alert["name"],
-    "description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
-    "buttonText": "Check metrics for more details",
-    "buttonUrl": f"/{alert['projectId']}/metrics",
-    "imageUrl": None,
-    "options": {"source": "ALERT", "sourceId": alert["alertId"],
-                "sourceMeta": alert["detectionMethod"],
-                "message": alert["options"]["message"], "projectId": alert["projectId"],
-                "data": {"title": alert["name"],
-                         "limitValue": alert["query"]["right"],
-                         "actualValue": float(result["value"]) \
-                             if isinstance(result["value"], decimal.Decimal) \
-                             else result["value"],
-                         "operator": alert["query"]["operator"],
-                         "trigger": alert["query"]["left"],
-                         "alertId": alert["alertId"],
-                         "detectionMethod": alert["detectionMethod"],
-                         "currentPeriod": alert["options"]["currentPeriod"],
-                         "previousPeriod": alert["options"]["previousPeriod"],
-                         "createdAt": TimeUTC.now()}},
-})
+notifications.append(generate_notification(alert, result))
except Exception as e:
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
logging.error(str(e))
@ -220,3 +207,30 @@ def process():
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
if len(notifications) > 0:
alerts.process_notifications(notifications)
def generate_notification(alert, result):
return {
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
"title": alert["name"],
"description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
"buttonText": "Check metrics for more details",
"buttonUrl": f"/{alert['projectId']}/metrics",
"imageUrl": None,
"options": {"source": "ALERT", "sourceId": alert["alertId"],
"sourceMeta": alert["detectionMethod"],
"message": alert["options"]["message"], "projectId": alert["projectId"],
"data": {"title": alert["name"],
"limitValue": alert["query"]["right"],
"actualValue": float(result["value"]) \
if isinstance(result["value"], decimal.Decimal) \
else result["value"],
"operator": alert["query"]["operator"],
"trigger": alert["query"]["left"],
"alertId": alert["alertId"],
"detectionMethod": alert["detectionMethod"],
"currentPeriod": alert["options"]["currentPeriod"],
"previousPeriod": alert["options"]["previousPeriod"],
"createdAt": TimeUTC.now()}},
}
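
The payload formerly built inline in process() now comes from generate_notification(), so the experimental ClickHouse processor further down can reuse it as alerts_processor.generate_notification(...). A hedged sketch of its behaviour with made-up alert and result rows:

    # Hedged sketch; the row shapes mirror the dict accesses above.
    alert = {"alertId": 42, "tenantId": 1, "projectId": 7,
             "name": "High crash rate", "detectionMethod": "threshold",
             "query": {"left": "performance.crashes.count",
                       "operator": ">=", "right": 5},
             "options": {"message": [], "currentPeriod": 15,
                         "previousPeriod": 15}}
    result = {"value": 9, "valid": True}
    n = generate_notification(alert, result)
    assert n["buttonUrl"] == "/7/metrics"
    assert n["options"]["data"]["actualValue"] == 9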

View file

@ -279,7 +279,7 @@ class _AlertMessageSchema(BaseModel):
value: str = Field(...)
-class AlertDetectionChangeType(str, Enum):
+class AlertDetectionType(str, Enum):
percent = "percent"
change = "change"
@ -290,7 +290,6 @@ class _AlertOptionSchema(BaseModel):
previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(15)
lastNotification: Optional[int] = Field(None)
renotifyInterval: Optional[int] = Field(720)
-change: Optional[AlertDetectionChangeType] = Field(None)
class AlertColumn(str, Enum):
@ -339,6 +338,7 @@ class AlertDetectionMethod(str, Enum):
class AlertSchema(BaseModel):
name: str = Field(...)
detection_method: AlertDetectionMethod = Field(...)
change: Optional[AlertDetectionType] = Field(default=AlertDetectionType.change)
description: Optional[str] = Field(None)
options: _AlertOptionSchema = Field(...)
query: _AlertQuerySchema = Field(...)
@ -356,11 +356,6 @@ class AlertSchema(BaseModel):
def alert_validator(cls, values):
if values.get("query") is not None and values["query"].left == AlertColumn.custom:
assert values.get("series_id") is not None, "series_id should not be null for CUSTOM alert"
if values.get("detectionMethod") is not None \
and values["detectionMethod"] == AlertDetectionMethod.change \
and values.get("options") is not None:
assert values["options"].change is not None, \
"options.change should not be null for detection method 'change'"
return values
class Config:
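
Net effect of the schema changes: the detection type moved from options.change to a top-level change field with a default, which is why the old validator branch became redundant. A hedged example; the option and query values are illustrative only:

    # Hedged sketch: change now defaults at the schema level.
    a = AlertSchema.parse_obj({
        "name": "High crash rate",
        "detection_method": "change",
        "options": {"message": [], "currentPeriod": 15, "previousPeriod": 15},
        "query": {"left": "performance.crashes.count",
                  "operator": ">=", "right": 5},
    })
    assert a.change == AlertDetectionType.change  # default applied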

ee/api/.gitignore vendored
View file

@ -184,6 +184,7 @@ Pipfile
/chalicelib/core/announcements.py
/chalicelib/core/autocomplete.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/countries.py
/chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
/chalicelib/core/events.py

View file

@ -26,3 +26,9 @@ if config("EXP_METRICS", cast=bool, default=False):
from . import metrics_exp as metrics
else:
from . import metrics as metrics
if config("EXP_ALERTS", cast=bool, default=False):
print(">>> Using experimental alerts")
from . import alerts_processor_exp as alerts_processor
else:
from . import alerts_processor as alerts_processor
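
Same toggle pattern as EXP_METRICS above: the package exports a single alerts_processor name and swaps the implementation at import time. A hedged sketch of the consuming side, assuming this __init__ belongs to chalicelib.core:

    # Hedged sketch: callers stay implementation-agnostic.
    from chalicelib.core import alerts_processor  # exp or PG, per EXP_ALERTS

    def run_alerts_job():
        alerts_processor.process()  # same entry point either way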

View file

@ -12,7 +12,8 @@ def get_all_alerts():
(EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at,
alerts.name,
alerts.series_id,
-filter
+filter,
+change
FROM public.alerts
LEFT JOIN metric_series USING (series_id)
INNER JOIN projects USING (project_id)

View file

@ -0,0 +1,224 @@
import logging
from decouple import config
import schemas
from chalicelib.core import alerts_listener, alerts_processor
from chalicelib.core import sessions, alerts
from chalicelib.utils import pg_client, ch_client, exp_ch_helper
from chalicelib.utils.TimeUTC import TimeUTC
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_event_time ,0)),0)",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__first_meaningful_paint__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__page_load_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(load_event_time ,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__dom_build_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(dom_building_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__speed_index__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(speed_index,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__page_response_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(response_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__ttfb__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(first_contentful_paint_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__time_to_render__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(visually_complete,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__image_load_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='img'"
},
schemas.AlertColumn.performance__request_load_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='fetch'"
},
schemas.AlertColumn.resources__load_time__average: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))"
},
schemas.AlertColumn.resources__missing__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "COUNT(DISTINCT url_hostpath)",
"condition": "success= FALSE AND type='img'"
},
schemas.AlertColumn.errors__4xx_5xx__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)!=2"
},
schemas.AlertColumn.errors__4xx__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==4"
},
schemas.AlertColumn.errors__5xx__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==5"
},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source='js_exception'"
},
schemas.AlertColumn.performance__crashes__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_sessions_table(timestamp)} AS sessions",
"formula": "COUNT(DISTINCT session_id)",
"condition": "duration>0 AND errors_count>0"
},
schemas.AlertColumn.errors__javascript__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source='js_exception'"
},
schemas.AlertColumn.errors__backend__count: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source!='js_exception'"
},
}
def Build(a):
now = TimeUTC.now()
params = {"project_id": a["projectId"], "now": now}
full_args = {}
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part = sessions.search_query_parts_ch(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
colDef = LeftToDb[a["query"]["left"]]
params["event_type"] = LeftToDb[a["query"]["left"]].get("eventType")
subQ = f"""SELECT {colDef["formula"]} AS value
FROM {colDef["table"](now)}
WHERE project_id = %(project_id)s
{"AND event_type=%(event_type)s" if params["event_type"] else ""}
{"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
q += f""" FROM ({subQ}
AND datetime>=toDateTime(%(startDate)s/1000)
AND datetime<=toDateTime(%(now)s/1000) ) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["change"] == schemas.AlertDetectionType.change:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
q += f" FROM ( {sub1} ) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
else:
sub1 = f"""{subQ} AND datetime>=toDateTime(%(startDate)s/1000)
AND datetime<=toDateTime(%(now)s/1000)"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND datetime<toDateTime(%(startDate)s/1000)
AND datetime>=toDateTime(%(timestamp_sub2)s/1000)"""
params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
q += f" FROM ( {sub1} ) AS stat"
else:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
* 60 * 1000}
else:
sub1 = f"""{subQ} AND datetime>=toDateTime(%(startDate)s/1000)
AND datetime<=toDateTime(%(now)s/1000)"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND datetime<toDateTime(%(startDate)s/1000)
AND datetime>=toDateTime(%(timestamp_sub2)s/1000)"""
params["timestamp_sub2"] = TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
return q, params
def process():
notifications = []
all_alerts = alerts_listener.get_all_alerts()
with pg_client.PostgresClient() as cur, ch_client.ClickHouseClient() as ch_cur:
for alert in all_alerts:
if alert["query"]["left"] != "CUSTOM":
continue
if alerts_processor.can_check(alert):
logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
query, params = Build(alert)
query = ch_cur.format(query, params)
logging.debug(alert)
logging.debug(query)
try:
result = ch_cur.execute(query)
if len(result) > 0:
result = result[0]
if result["valid"]:
logging.info("Valid alert, notifying users")
notifications.append(alerts_processor.generate_notification(alert, result))
except Exception as e:
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
logging.error(str(e))
logging.error(query)
if len(notifications) > 0:
cur.execute(
cur.mogrify(f"""UPDATE public.alerts
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
if len(notifications) > 0:
alerts.process_notifications(notifications)
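
For a non-series threshold alert, Build() wraps the LeftToDb formula in a subquery bounded by %(startDate)s and %(now)s. A hedged trace with a made-up alert row, assuming a recent timestamp so the 7-day MV is selected:

    # Hedged sketch (made-up alert row) of what Build() assembles.
    alert = {"projectId": 7, "seriesId": None, "change": "change",
             "detectionMethod": "threshold",
             "query": {"left": schemas.AlertColumn.performance__crashes__count,
                       "operator": ">=", "right": 5},
             "options": {"currentPeriod": 15, "previousPeriod": 15}}
    q, params = Build(alert)
    # q is roughly:
    #   SELECT coalesce(value,0) AS value, coalesce(value,0) >= 5 AS valid
    #   FROM (SELECT COUNT(DISTINCT session_id) AS value
    #         FROM experimental.sessions_l7d_mv AS sessions
    #         WHERE project_id = %(project_id)s
    #           AND duration>0 AND errors_count>0
    #           AND datetime>=toDateTime(%(startDate)s/1000)
    #           AND datetime<=toDateTime(%(now)s/1000)) AS stat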

View file

@ -361,9 +361,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
print("--------------------")
main_query = cur.format(f"""SELECT DISTINCT er.error_id,
COALESCE((SELECT TRUE
-FROM final.user_viewed_errors AS ve
-WHERE er.error_id = ve.error_id
-AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
+FROM {exp_ch_helper.get_user_viewed_errors_table()} AS ve
+WHERE er.error_id = ve.error_id
+AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
{query_part};""", full_args)
elif count_only:
@ -1252,9 +1252,9 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
"isNotNull(s.duration)"
]
if favorite_only:
extra_constraints.append("""s.session_id IN (SELECT session_id
FROM final.user_favorite_sessions
WHERE user_id = %(userId)s)""")
extra_constraints.append(f"""s.session_id IN (SELECT session_id
FROM {exp_ch_helper.get_user_favorite_sessions_table()} AS user_favorite_sessions
WHERE user_id = %(userId)s)""")
extra_from = ""
events_query_part = ""
__events_where_basic = ["project_id = %(projectId)s",
@ -1526,7 +1526,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
"main.datetime >= toDateTime(%(startDate)s/1000)",
"main.datetime <= toDateTime(%(endDate)s/1000)"]
if favorite_only and not errors_only:
event_from += "INNER JOIN final.user_favorite_sessions AS fs USING(session_id)"
event_from += f"INNER JOIN {exp_ch_helper.get_user_favorite_sessions_table()} AS fs USING(session_id)"
event_where.append("fs.user_id = %(userId)s")
# else:
# event_from = "%s"
@ -1940,9 +1940,9 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
else:
events_extra_join = f"LEFT JOIN ({events_extra_join}) AS main1 USING(error_id)"
if favorite_only and user_id is not None:
events_conditions_where.append("""main.session_id IN (SELECT session_id
FROM final.user_favorite_sessions
WHERE user_id = %(userId)s)""")
events_conditions_where.append(f"""main.session_id IN (SELECT session_id
FROM {exp_ch_helper.get_user_favorite_sessions_table()} AS user_favorite_sessions
WHERE user_id = %(userId)s)""")
if data.events_order in [schemas.SearchEventOrder._then, schemas.SearchEventOrder._and]:
sequence_pattern = [f'(?{i + 1}){c.get("time", "")}' for i, c in enumerate(events_conditions)]
@ -2067,8 +2067,8 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
extra_from += """INNER JOIN (SELECT 1 AS session_id) AS favorite_sessions
ON (TRUE)"""
elif not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT session_id
FROM final.user_favorite_sessions
extra_from += f"""LEFT JOIN (SELECT session_id
FROM {exp_ch_helper.get_user_favorite_sessions_table()} AS user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
ON (s.session_id=favorite_sessions.session_id)"""
extra_join = ""

View file

@ -21,7 +21,7 @@ def get_main_sessions_table(timestamp):
def get_main_resources_table(timestamp):
return "experimental.resources_l7s_mv" \
return "experimental.resources_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources"
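
Besides fixing the l7s typo to l7d, this is the routing the alert and session queries above depend on: a recent timestamp selects the 7-day materialized view, an older one falls back to the base table (assuming EXP_7D_MV keeps its default of true):

    from chalicelib.utils import exp_ch_helper
    from chalicelib.utils.TimeUTC import TimeUTC

    exp_ch_helper.get_main_resources_table(TimeUTC.now())
    # -> "experimental.resources_l7d_mv"
    exp_ch_helper.get_main_resources_table(TimeUTC.now(delta_days=-30))
    # -> "experimental.resources"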

View file

@ -5,6 +5,7 @@ rm -rf ./chalicelib/core/alerts_processor.py
rm -rf ./chalicelib/core/announcements.py
rm -rf ./chalicelib/core/autocomplete.py
rm -rf ./chalicelib/core/collaboration_slack.py
rm -rf ./chalicelib/core/countries.py
rm -rf ./chalicelib/core/errors.py
rm -rf ./chalicelib/core/errors_favorite.py
rm -rf ./chalicelib/core/events.py

View file

@ -65,4 +65,5 @@ EXP_SESSIONS_SEARCH=true
EXP_AUTOCOMPLETE=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
-EXP_7D_MV=true
+EXP_7D_MV=true
+EXP_ALERTS=true

View file

@ -1,10 +1,67 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = MergeTree
-PARTITION BY toYYYYMM(datetime)
-ORDER BY (project_id, datetime, session_id)
-TTL datetime + INTERVAL 7 DAY
+PARTITION BY toYYYYMM(datetime)
+ORDER BY (project_id, datetime, event_type, session_id)
+TTL datetime + INTERVAL 7 DAY
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
event_type,
datetime,
label,
hesitation_time,
name,
payload,
level,
source,
message,
error_id,
duration,
context,
container_type,
container_id,
container_name,
container_src,
url,
url_host,
url_path,
url_hostpath,
request_start,
response_start,
response_end,
dom_content_loaded_event_start,
dom_content_loaded_event_end,
load_event_start,
load_event_end,
first_paint,
first_contentful_paint_time,
speed_index,
visually_complete,
time_to_interactive,
ttfb,
ttlb,
response_time,
dom_building_time,
dom_content_loaded_event_time,
load_event_time,
min_fps,
avg_fps,
max_fps,
min_cpu,
avg_cpu,
max_cpu,
min_total_js_heap_size,
avg_total_js_heap_size,
max_total_js_heap_size,
min_used_js_heap_size,
avg_used_js_heap_size,
max_used_js_heap_size,
method,
status,
success,
request_body,
response_body,
_timestamp
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -1,10 +1,26 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
-ORDER BY (project_id, datetime, session_id)
+ORDER BY (project_id, datetime, type, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
datetime,
url,
url_host,
url_path,
url_hostpath,
type,
name,
duration,
ttfb,
header_size,
encoded_body_size,
decoded_body_size,
compression_ratio,
success,
_timestamp
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -6,7 +6,41 @@ CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
SETTINGS index_granularity = 512
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
tracker_version,
rev_id,
user_uuid,
user_os,
user_os_version,
user_browser,
user_browser_version,
user_device,
user_device_type,
user_country,
datetime,
duration,
pages_count,
events_count,
errors_count,
utm_source,
utm_medium,
utm_campaign,
user_id,
metadata_1,
metadata_2,
metadata_3,
metadata_4,
metadata_5,
metadata_6,
metadata_7,
metadata_8,
metadata_9,
metadata_10,
issue_types,
referrer,
base_referrer,
_timestamp
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)
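
Across the three views, SELECT * becomes an explicit column list, pinning the MV schema even if the base table gains columns, and event_type (resp. type) joins the sort key to match how the experimental alert queries filter. A hedged sketch of a query shaped for that key:

    # Hedged sketch: the new sort key (project_id, datetime, event_type, ...)
    # lets this filter prune by event_type as well.
    query = """SELECT COUNT(1) AS value
               FROM experimental.events_l7d_mv AS requests
               WHERE project_id = %(project_id)s
                 AND event_type = %(event_type)s
                 AND datetime >= toDateTime(%(startDate)s/1000)"""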

View file

@ -1,10 +1,67 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = MergeTree
-PARTITION BY toYYYYMM(datetime)
-ORDER BY (project_id, datetime, session_id)
-TTL datetime + INTERVAL 7 DAY
+PARTITION BY toYYYYMM(datetime)
+ORDER BY (project_id, datetime, event_type, session_id)
+TTL datetime + INTERVAL 7 DAY
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
event_type,
datetime,
label,
hesitation_time,
name,
payload,
level,
source,
message,
error_id,
duration,
context,
container_type,
container_id,
container_name,
container_src,
url,
url_host,
url_path,
url_hostpath,
request_start,
response_start,
response_end,
dom_content_loaded_event_start,
dom_content_loaded_event_end,
load_event_start,
load_event_end,
first_paint,
first_contentful_paint_time,
speed_index,
visually_complete,
time_to_interactive,
ttfb,
ttlb,
response_time,
dom_building_time,
dom_content_loaded_event_time,
load_event_time,
min_fps,
avg_fps,
max_fps,
min_cpu,
avg_cpu,
max_cpu,
min_total_js_heap_size,
avg_total_js_heap_size,
max_total_js_heap_size,
min_used_js_heap_size,
avg_used_js_heap_size,
max_used_js_heap_size,
method,
status,
success,
request_body,
response_body,
_timestamp
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -1,10 +1,26 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
-ORDER BY (project_id, datetime, session_id)
+ORDER BY (project_id, datetime, type, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
datetime,
url,
url_host,
url_path,
url_hostpath,
type,
name,
duration,
ttfb,
header_size,
encoded_body_size,
decoded_body_size,
compression_ratio,
success,
_timestamp
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -6,7 +6,41 @@ CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
SETTINGS index_granularity = 512
POPULATE
AS
-SELECT *
+SELECT session_id,
project_id,
tracker_version,
rev_id,
user_uuid,
user_os,
user_os_version,
user_browser,
user_browser_version,
user_device,
user_device_type,
user_country,
datetime,
duration,
pages_count,
events_count,
errors_count,
utm_source,
utm_medium,
utm_campaign,
user_id,
metadata_1,
metadata_2,
metadata_3,
metadata_4,
metadata_5,
metadata_6,
metadata_7,
metadata_8,
metadata_9,
metadata_10,
issue_types,
referrer,
base_referrer,
_timestamp
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)

View file

@ -9,6 +9,25 @@ ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS sessions_last_check_at timestamp without time zone NULL DEFAULT NULL;
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'alert_change_type') THEN
CREATE TYPE alert_change_type AS ENUM ('percent', 'change');
END IF;
END;
$$
LANGUAGE plpgsql;
ALTER TABLE IF EXISTS alerts
ADD COLUMN IF NOT EXISTS change alert_change_type NOT NULL DEFAULT 'change';
COMMIT;
CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_unique_project_id_md5value_type_idx ON autocomplete (project_id, md5(value), type);

View file

@ -819,6 +819,13 @@ $$
WHERE typ.typname = 'alert_detection_method') THEN
CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
END IF;
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'alert_change_type') THEN
CREATE TYPE alert_change_type AS ENUM ('percent', 'change');
END IF;
CREATE TABLE IF NOT EXISTS alerts
(
alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
@ -828,6 +835,7 @@ $$
description text NULL DEFAULT NULL,
active boolean NOT NULL DEFAULT TRUE,
detection_method alert_detection_method NOT NULL,
change alert_change_type NOT NULL DEFAULT 'change',
query jsonb NOT NULL,
deleted_at timestamp NULL DEFAULT NULL,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),

View file

@ -9,6 +9,25 @@ ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS sessions_last_check_at timestamp without time zone NULL DEFAULT NULL;
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'alert_change_type') THEN
CREATE TYPE alert_change_type AS ENUM ('percent', 'change');
END IF;
END;
$$
LANGUAGE plpgsql;
ALTER TABLE IF EXISTS alerts
ADD COLUMN IF NOT EXISTS change alert_change_type NOT NULL DEFAULT 'change';
COMMIT;
CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_unique_project_id_md5value_type_idx ON autocomplete (project_id, md5(value), type);

View file

@ -967,6 +967,7 @@ $$
CREATE INDEX searches_project_id_idx ON public.searches (project_id);
CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
CREATE TYPE alert_change_type AS ENUM ('percent', 'change');
CREATE TABLE alerts
(
@ -977,6 +978,7 @@ $$
description text NULL DEFAULT NULL,
active boolean NOT NULL DEFAULT TRUE,
detection_method alert_detection_method NOT NULL,
change alert_change_type NOT NULL DEFAULT 'change',
query jsonb NOT NULL,
deleted_at timestamp NULL DEFAULT NULL,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),