feat(api): new alerts detection for threshold only
feat(api): new alerts detection for threshold only
feat(DB): alerts changes
feat(DB): alerts indexes
parent 6b58a93fd5
commit 55a86d76f2
9 changed files with 331 additions and 84 deletions
api/chalicelib/core/alerts.py

@@ -3,11 +3,13 @@ import time
 from fastapi import BackgroundTasks
 
+import schemas
 from chalicelib.core import notifications, slack, webhook
 from chalicelib.utils import pg_client, helper, email_helper
 from chalicelib.utils.TimeUTC import TimeUTC
 
-ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"]
+# ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"]
 
 
 def get(id):
@@ -38,32 +40,32 @@ def get_all(project_id):
     return all
 
 
-SUPPORTED_THRESHOLD = [15, 30, 60, 120, 240, 1440]
-
-
-def __transform_structure(data):
-    if data.get("options") is None:
-        return f"Missing 'options'", None
-    if data["options"].get("currentPeriod") not in SUPPORTED_THRESHOLD:
-        return f"Unsupported currentPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
-    if data["options"].get("previousPeriod", 15) not in SUPPORTED_THRESHOLD:
-        return f"Unsupported previousPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
-    if data["options"].get("renotifyInterval") is None:
-        data["options"]["renotifyInterval"] = 720
-    data["query"]["right"] = float(data["query"]["right"])
-    data["query"] = json.dumps(data["query"])
-    data["description"] = data["description"] if data.get("description") is not None and len(
-        data["description"]) > 0 else None
-    if data.get("options"):
-        messages = []
-        for m in data["options"].get("message", []):
-            if m.get("value") is None:
-                continue
-            m["value"] = str(m["value"])
-            messages.append(m)
-        data["options"]["message"] = messages
-        data["options"] = json.dumps(data["options"])
-    return None, data
+# SUPPORTED_THRESHOLD = [15, 30, 60, 120, 240, 1440]
+#
+#
+# def __transform_structure(data):
+#     if data.get("options") is None:
+#         return f"Missing 'options'", None
+#     if data["options"].get("currentPeriod") not in SUPPORTED_THRESHOLD:
+#         return f"Unsupported currentPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
+#     if data["options"].get("previousPeriod", 15) not in SUPPORTED_THRESHOLD:
+#         return f"Unsupported previousPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
+#     if data["options"].get("renotifyInterval") is None:
+#         data["options"]["renotifyInterval"] = 720
+#     data["query"]["right"] = float(data["query"]["right"])
+#     data["query"] = json.dumps(data["query"])
+#     data["description"] = data["description"] if data.get("description") is not None and len(
+#         data["description"]) > 0 else None
+#     if data.get("options"):
+#         messages = []
+#         for m in data["options"].get("message", []):
+#             if m.get("value") is None:
+#                 continue
+#             m["value"] = str(m["value"])
+#             messages.append(m)
+#         data["options"]["message"] = messages
+#         data["options"] = json.dumps(data["options"])
+#     return None, data
 
 
 def __process_circular(alert):
@@ -74,15 +76,16 @@ def __process_circular(alert):
     return alert
 
 
-def create(project_id, data):
-    err, data = __transform_structure(data)
-    if err is not None:
-        return {"errors": [err]}
+def create(project_id, data: schemas.AlertSchema):
+    data = data.dict()
+    data["query"] = json.dumps(data["query"])
+    data["options"] = json.dumps(data["options"])
+
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
-                INSERT INTO public.alerts(project_id, name, description, detection_method, query, options)
-                VALUES (%(project_id)s, %(name)s, %(description)s, %(detectionMethod)s, %(query)s, %(options)s::jsonb)
+                INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id)
+                VALUES (%(project_id)s, %(name)s, %(description)s, %(detectionMethod)s, %(query)s, %(options)s::jsonb, %(series_id)s)
                 RETURNING *;""",
                         {"project_id": project_id, **data})
         )
@@ -90,23 +93,33 @@ def create(project_id, data):
     return {"data": helper.dict_to_camel_case(__process_circular(a))}
 
 
-def update(id, changes):
-    changes = {k: changes[k] for k in changes.keys() if k in ALLOW_UPDATE}
-    err, changes = __transform_structure(changes)
-    if err is not None:
-        return {"errors": [err]}
-    updateq = []
-    for k in changes.keys():
-        updateq.append(f"{helper.key_to_snake_case(k)} = %({k})s")
-    if len(updateq) == 0:
-        return {"errors": ["nothing to update"]}
+def update(id, data: schemas.AlertSchema):
+    data = data.dict()
+    data["query"] = json.dumps(data["query"])
+    data["options"] = json.dumps(data["options"])
+
+    # changes = {k: changes[k] for k in changes.keys() if k in ALLOW_UPDATE}
+    # err, changes = __transform_structure(changes)
+    # if err is not None:
+    #     return {"errors": [err]}
+    # updateq = []
+    # for k in changes.keys():
+    #     updateq.append(f"{helper.key_to_snake_case(k)} = %({k})s")
+    # if len(updateq) == 0:
+    #     return {"errors": ["nothing to update"]}
     with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(f"""\
+        query = cur.mogrify("""\
                             UPDATE public.alerts
-                            SET {", ".join(updateq)}
+                            SET name = %(name)s,
+                                description = %(description)s,
+                                active = %(active)s,
+                                detection_method = %(detection_method)s,
+                                query = %(query)s,
+                                options = %(options)s,
+                                series_id = %(series_id)s
                             WHERE alert_id = %(id)s AND deleted_at ISNULL
                             RETURNING *;""",
-                            {"id": id, **changes})
+                            {"id": id, **data})
         cur.execute(query=query)
         a = helper.dict_to_camel_case(cur.fetchone())
         return {"data": __process_circular(a)}
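With validation now living in schemas.AlertSchema, create() and update() just serialize the parsed model and write it to the database. A minimal sketch of the new call path; the alert values below are illustrative, not from this commit, and assume MathOperator accepts a plain comparison string such as ">":

    import schemas
    from chalicelib.core import alerts

    payload = schemas.AlertSchema(
        name="High page load time",
        detectionMethod="threshold",
        options={"message": [], "currentPeriod": 15, "previousPeriod": 15},
        query={"left": "performance.page_load_time.average", "operator": ">", "right": 3000},
    )
    result = alerts.create(project_id=1, data=payload)  # returns {"data": {...}} with the inserted row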
208 api/chalicelib/core/alerts_processor.py (new file)

@@ -0,0 +1,208 @@
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.core import sessions

LeftToDb = {
    schemas.AlertColumn.performance__dom_content_loaded__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time,0)),0)"},
    schemas.AlertColumn.performance__first_meaningful_paint__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
    schemas.AlertColumn.performance__page_load_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time,0))"},
    schemas.AlertColumn.performance__dom_build_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(dom_building_time,0))"},
    schemas.AlertColumn.performance__speed_index__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
    schemas.AlertColumn.performance__page_response_time__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(response_time,0))"},
    schemas.AlertColumn.performance__ttfb__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(first_paint_time,0))"},
    schemas.AlertColumn.performance__time_to_render__average: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(visually_complete,0))"},
    schemas.AlertColumn.performance__image_load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
    schemas.AlertColumn.performance__request_load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
    schemas.AlertColumn.resources__load_time__average: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(resources.duration,0))"},
    schemas.AlertColumn.resources__missing__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success = FALSE"},
    schemas.AlertColumn.errors__4xx_5xx__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
        "condition": "status/100!=2"},
    schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
                                             "formula": "COUNT(session_id)", "condition": "status/100=4"},
    schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
                                             "formula": "COUNT(session_id)", "condition": "status/100=5"},
    schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "success = FALSE AND type='script'"},
    schemas.AlertColumn.performance__crashes__count: {
        "table": "(SELECT *, start_ts AS timestamp FROM public.sessions WHERE errors_count > 0) AS sessions",
        "formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0"},
    schemas.AlertColumn.errors__javascript__count: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'"},
    schemas.AlertColumn.errors__backend__count: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'"},
}
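Each entry bundles the FROM clause, the aggregate formula, and an optional WHERE fragment that Build() below stitches into a subquery. As a sketch of the expansion for errors.4xx.count (the surrounding WHERE scaffolding mirrors Build, not code from this file):

    colDef = LeftToDb[schemas.AlertColumn.errors__4xx__count]
    subQ = f"""SELECT {colDef["formula"]} AS value
               FROM {colDef["table"]}
               WHERE project_id = %(project_id)s
                     AND {colDef["condition"]}"""
    # SELECT COUNT(session_id) AS value
    # FROM events.resources INNER JOIN public.sessions USING(session_id)
    # WHERE project_id = %(project_id)s
    #       AND status/100=4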

# This is the frequency of execution for each threshold
TimeInterval = {
    15: 3,
    30: 5,
    60: 10,
    120: 20,
    240: 30,
    1440: 60,
}


def can_check(a) -> bool:
    now = TimeUTC.now()

    repetitionBase = a["options"]["currentPeriod"] \
        if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
           and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
        else a["options"]["previousPeriod"]

    if TimeInterval.get(repetitionBase) is None:
        print(f"repetitionBase: {repetitionBase} NOT FOUND")
        return False

    return (a["options"]["renotifyInterval"] <= 0 or
            a["options"].get("lastNotification") is None or
            a["options"]["lastNotification"] <= 0 or
            ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
           and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
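In other words, an alert is eligible when its renotify window has elapsed and the minutes since creation land on its execution tick. A worked example, assuming process() wakes up roughly once per minute (the alert dict is illustrative):

    # previousPeriod=60 maps to TimeInterval[60]=10: eligible every 10 minutes.
    alert = {
        "detectionMethod": "threshold",
        "createdAt": TimeUTC.now() - 10 * 60 * 1000,  # created exactly 10 minutes ago
        "options": {"currentPeriod": 15, "previousPeriod": 60,
                    "renotifyInterval": 720, "lastNotification": None},
    }
    print(can_check(alert))  # True: never notified, and (now - createdAt) % 10 min < 1 min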


def Build(a):
    params = {"project_id": a["projectId"]}
    full_args = {}
    if a["seriesId"] is not None:
        a["filter"]["sort"] = "session_id"
        a["filter"]["order"] = "DESC"
        a["filter"]["startDate"] = -1
        a["filter"]["endDate"] = TimeUTC.now()
        full_args, query_part, sort = sessions.search_query_parts(
            data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
            error_status=None, errors_only=False,
            favorite_only=False, issue=None, project_id=a["projectId"],
            user_id=None)
        subQ = f"""SELECT COUNT(session_id) AS value
                   {query_part}"""
    else:
        colDef = LeftToDb[a["query"]["left"]]
        subQ = f"""SELECT {colDef["formula"]} AS value
                   FROM {colDef["table"]}
                   WHERE project_id = %(project_id)s
                         {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
    # q = sq.Select(fmt.Sprint("value, coalesce(value,0)", a.Query.Operator, a.Query.Right, " AS valid"))
    q = f"""SELECT value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""

    # if len(colDef.group) > 0 {
    #     subQ = subQ.Column(colDef.group + " AS group_value")
    #     subQ = subQ.GroupBy(colDef.group)
    #     q = q.Column("group_value")
    # }

    if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
        if a["seriesId"] is not None:
            q += f""" FROM ({subQ}) AS stat"""
        else:
            q += f""" FROM ({subQ} AND timestamp>=%(startDate)s
                            AND sessions.start_ts>=%(startDate)s) AS stat"""
        params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
    else:
        pass
        # if a.Options.Change == "change":
        #     if len(colDef.group) == 0:
        #         pass
        #         # sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60)).ToSql()
        #         # sub2, args2, _ := subQ.Where(
        #         #     sq.And{
        #         #         sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
        #         #         sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
        #         #     }).ToSql()
        #         # sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")-(" + sub2 + ")) AS value").ToSql()
        #         # q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
        #     else:
        #         pass
        #         # subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
        #         # sub2, args2, _ := subQ.Where(
        #         #     sq.And{
        #         #         sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
        #         #         sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
        #         #     }).ToSql()
        #         # sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
        #         # q = q.FromSelect(sub1, "stat")
        #
        # elif a.Options.Change == "percent":
        #     # if len(colDef.group) == 0 {
        #     #     sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60)).ToSql()
        #     #     sub2, args2, _ := subQ.Where(
        #     #         sq.And{
        #     #             sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
        #     #             sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
        #     #         }).ToSql()
        #     #     sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")/(" + sub2 + ")-1)*100 AS value").ToSql()
        #     #     q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
        #     # } else {
        #     pass
        #     # subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
        #     # sub2, args2, _ := subQ.Where(
        #     #     sq.And{
        #     #         sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
        #     #         sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
        #     #     }).ToSql()
        #     # sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
        #     # q = q.FromSelect(sub1, "stat")
        # else:
        #     return q, errors.New("unsupported change method")

    return q, params
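For a plain threshold alert on a built-in column, the result is a one-row probe over the current window. A sketch of what Build returns for performance.page_load_time.average; the operator and threshold are illustrative:

    alert = {"projectId": 1, "seriesId": None, "detectionMethod": "threshold",
             "query": {"left": schemas.AlertColumn.performance__page_load_time__average,
                       "operator": ">", "right": 3000.0},
             "options": {"currentPeriod": 15}}
    q, params = Build(alert)
    # q, modulo whitespace:
    # SELECT value, coalesce(value,0) > 3000.0 AS valid
    # FROM (SELECT AVG(NULLIF(load_time,0)) AS value
    #       FROM events.pages INNER JOIN public.sessions USING(session_id)
    #       WHERE project_id = %(project_id)s
    #       AND timestamp>=%(startDate)s AND sessions.start_ts>=%(startDate)s) AS stat
    # params == {"project_id": 1, "startDate": <now minus 15 minutes, in ms>}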


def process():
    with pg_client.PostgresClient(long_query=True) as cur:
        query = """SELECT alert_id,
                          project_id,
                          detection_method,
                          query,
                          options,
                          (EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at,
                          alerts.name,
                          alerts.series_id,
                          filter
                   FROM public.alerts
                        LEFT JOIN metric_series USING (series_id)
                        INNER JOIN projects USING (project_id)
                   WHERE alerts.deleted_at ISNULL
                     AND alerts.active
                     AND projects.active
                     AND projects.deleted_at ISNULL
                     AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
                     AND alert_id=36
                   ORDER BY alerts.created_at;"""
        cur.execute(query=query)
        all_alerts = helper.list_to_camel_case(cur.fetchall())
        for alert in all_alerts:
            if True or can_check(alert):
                print(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
                query, params = Build(alert)
                print(cur.mogrify(query, params))
                print("----------------------")
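process() currently only prints the rendered statements, and still carries two debug leftovers (the forced if True or can_check(alert) and the hard-coded alert_id=36 filter). Presumably it is meant to run on a one-minute tick to match can_check; a minimal driver sketch under that assumption:

    import time

    from chalicelib.core import alerts_processor

    if __name__ == "__main__":
        while True:
            alerts_processor.process()  # scan active alerts, build and (eventually) run their probes
            time.sleep(60)              # can_check assumes roughly one wake-up per minute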
@@ -613,7 +613,7 @@ def errors_merge(context: schemas.CurrentContext = Depends(OR_context)):
 @app.put('/{projectId}/alerts', tags=["alerts"])
 def create_alert(projectId: int, data: schemas.AlertSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
-    return alerts.create(projectId, data.dict())
+    return alerts.create(projectId, data)
 
 
 @app.get('/{projectId}/alerts', tags=["alerts"])

@@ -630,7 +630,7 @@ def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
 @app.put('/{projectId}/alerts/{alertId}', tags=["alerts"])
 def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
-    return alerts.update(alertId, data.dict())
+    return alerts.update(alertId, data)
 
 
 @app.delete('/{projectId}/alerts/{alertId}', tags=["alerts"])
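Both routes now receive the parsed AlertSchema itself, so FastAPI rejects malformed bodies with a 422 before the handler runs. An illustrative client call; host, token, and project id are placeholders:

    import requests

    body = {"name": "High page load time",
            "detectionMethod": "threshold",
            "options": {"currentPeriod": 15, "previousPeriod": 15},
            "query": {"left": "performance.page_load_time.average",
                      "operator": ">", "right": 3000}}
    r = requests.put("https://openreplay.example.com/1/alerts",  # PUT /{projectId}/alerts
                     json=body, headers={"Authorization": "Bearer <token>"})
    print(r.status_code, r.json())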
api/schemas.py

@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Optional, List, Union
+from typing import Optional, List, Union, Literal
 
 from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator
@@ -278,8 +278,8 @@ class _AlertMessageSchema(BaseModel):
 
 class _AlertOptionSchema(BaseModel):
     message: List[_AlertMessageSchema] = Field([])
-    currentPeriod: int = Field(...)
-    previousPeriod: int = Field(...)
+    currentPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(...)
+    previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(15)
     lastNotification: Optional[int] = Field(None)
     renotifyInterval: Optional[int] = Field(720)
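Switching these fields to Literal folds the old SUPPORTED_THRESHOLD check from alerts.py into the schema itself: any period outside the six supported values now fails at parse time. A quick sketch, pydantic v1 style to match the root_validator import above:

    from pydantic import ValidationError

    _AlertOptionSchema(currentPeriod=60)       # ok; previousPeriod defaults to 15
    try:
        _AlertOptionSchema(currentPeriod=45)   # 45 is not a supported period
    except ValidationError as e:
        print(e)                               # unexpected value; permitted: 15, 30, 60, 120, 240, 1440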
@@ -304,6 +304,7 @@ class AlertColumn(str, Enum):
     performance__crashes__count = "performance.crashes.count"
     errors__javascript__count = "errors.javascript.count"
     errors__backend__count = "errors.backend.count"
+    custom = "CUSTOM"
 
 
 class MathOperator(str, Enum):
@@ -321,12 +322,24 @@ class _AlertQuerySchema(BaseModel):
     operator: MathOperator = Field(...)
 
 
+class AlertDetectionMethod(str, Enum):
+    threshold = "threshold"
+    change = "change"
+
+
 class AlertSchema(BaseModel):
     name: str = Field(...)
-    detectionMethod: str = Field(...)
+    detectionMethod: AlertDetectionMethod = Field(...)
     description: Optional[str] = Field(None)
     options: _AlertOptionSchema = Field(...)
     query: _AlertQuerySchema = Field(...)
+    series_id: Optional[int] = Field(None)
+
+    @root_validator
+    def alert_validator(cls, values):
+        if values.get("query") is not None and values["query"].left == AlertColumn.custom:
+            assert values.get("series_id") is not None, "series_id should not be null for CUSTOM alert"
+        return values
 
 
 class SourcemapUploadPayloadSchema(BaseModel):
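The new validator ties the CUSTOM column to a saved series: a CUSTOM alert without a series_id is rejected at parse time. A sketch of both sides of the rule, with illustrative field values and assuming query.left parses to AlertColumn:

    query = {"left": "CUSTOM", "operator": ">", "right": 10}
    options = {"currentPeriod": 15, "previousPeriod": 15}

    AlertSchema(name="custom metric", detectionMethod="threshold",
                options=options, query=query, series_id=42)  # ok
    AlertSchema(name="custom metric", detectionMethod="threshold",
                options=options, query=query)                # ValidationError: series_id should not be null for CUSTOM alert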
@@ -451,7 +464,7 @@ class _SessionSearchEventRaw(BaseModel):
     source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
 
     @root_validator
-    def check_card_number_omitted(cls, values):
+    def event_validator(cls, values):
         if isinstance(values.get("type"), PerformanceEventType):
             if values.get("type") == PerformanceEventType.fetch_failed:
                 return values
@@ -484,7 +497,7 @@ class _SessionSearchFilterSchema(BaseModel):
     source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
 
     @root_validator
-    def check_card_number_omitted(cls, values):
+    def filter_validator(cls, values):
         if values.get("type") == FilterType.issue:
             for v in values.get("value"):
                 assert isinstance(v, IssueType), f"value should be of type IssueType for {values.get('type')} filter"
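Same pattern for filters: every value of an issue filter must parse as an IssueType. Illustrative only, since the IssueType members are defined elsewhere in this file:

    _SessionSearchFilterSchema(type="issue", value=["click_rage"])    # ok, assuming "click_rage" is an IssueType value
    _SessionSearchFilterSchema(type="issue", value=["not_an_issue"])  # assertion fails -> ValidationError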
3 ee/api/.gitignore (vendored)

@@ -258,3 +258,6 @@ Pipfile
 /schemas.py
 /chalicelib/blueprints/app/v1_api.py
 /routers/app/v1_api.py
+/chalicelib/core/custom_metrics.py
+/chalicelib/core/performance_event.py
+/chalicelib/core/saved_search.py
@@ -119,5 +119,10 @@ CREATE TABLE searches
 
 CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
 CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
+CREATE INDEX IF NOT EXISTS alerts_project_id_idx ON alerts (project_id);
+
+ALTER TABLE alerts
+    ADD COLUMN series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE;
+
+CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
 COMMIT;
@@ -287,34 +287,6 @@ $$
     FOR EACH ROW
 EXECUTE PROCEDURE notify_project();
 
--- --- alerts.sql ---
-
-CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
-
-CREATE TABLE alerts
-(
-    alert_id         integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
-    project_id       integer                NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
-    name             text                   NOT NULL,
-    description      text                   NULL     DEFAULT NULL,
-    active           boolean                NOT NULL DEFAULT TRUE,
-    detection_method alert_detection_method NOT NULL,
-    query            jsonb                  NOT NULL,
-    deleted_at       timestamp              NULL     DEFAULT NULL,
-    created_at       timestamp              NOT NULL DEFAULT timezone('utc'::text, now()),
-    options          jsonb                  NOT NULL DEFAULT '{
-      "renotifyInterval": 1440
-    }'::jsonb
-);
-
-
-CREATE TRIGGER on_insert_or_update_or_delete
-    AFTER INSERT OR UPDATE OR DELETE
-    ON alerts
-    FOR EACH ROW
-EXECUTE PROCEDURE notify_alert();
-
 
 -- --- webhooks.sql ---
 
 create type webhook_type as enum ('webhook', 'slack', 'email');
@@ -1013,6 +985,32 @@ $$
 CREATE INDEX searches_user_id_is_public_idx ON public.searches (user_id, is_public);
 CREATE INDEX searches_project_id_idx ON public.searches (project_id);
 
+CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
+
+CREATE TABLE alerts
+(
+    alert_id         integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    project_id       integer                NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    series_id        integer                NULL REFERENCES metric_series (series_id) ON DELETE CASCADE,
+    name             text                   NOT NULL,
+    description      text                   NULL     DEFAULT NULL,
+    active           boolean                NOT NULL DEFAULT TRUE,
+    detection_method alert_detection_method NOT NULL,
+    query            jsonb                  NOT NULL,
+    deleted_at       timestamp              NULL     DEFAULT NULL,
+    created_at       timestamp              NOT NULL DEFAULT timezone('utc'::text, now()),
+    options          jsonb                  NOT NULL DEFAULT '{
+      "renotifyInterval": 1440
+    }'::jsonb
+);
+CREATE INDEX alerts_project_id_idx ON alerts (project_id);
+CREATE INDEX alerts_series_id_idx ON alerts (series_id);
+CREATE TRIGGER on_insert_or_update_or_delete
+    AFTER INSERT OR UPDATE OR DELETE
+    ON alerts
+    FOR EACH ROW
+EXECUTE PROCEDURE notify_alert();
+
 raise notice 'DB created';
 END IF;
 END;
@@ -71,5 +71,11 @@ CREATE TABLE searches
 
 CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
 CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
+CREATE INDEX IF NOT EXISTS alerts_project_id_idx ON alerts (project_id);
+
+ALTER TABLE alerts
+    ADD COLUMN series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE;
+
+CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
+
 COMMIT;
@@ -271,6 +271,7 @@ $$
 (
     alert_id         integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
     project_id       integer                NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    series_id        integer                NULL REFERENCES metric_series (series_id) ON DELETE CASCADE,
     name             text                   NOT NULL,
     description      text                   NULL     DEFAULT NULL,
     active           boolean                NOT NULL DEFAULT TRUE,
@@ -282,8 +283,8 @@ $$
       "renotifyInterval": 1440
     }'::jsonb
 );
-
-
+CREATE INDEX alerts_project_id_idx ON alerts (project_id);
+CREATE INDEX alerts_series_id_idx ON alerts (series_id);
 CREATE TRIGGER on_insert_or_update_or_delete
     AFTER INSERT OR UPDATE OR DELETE
     ON alerts