feat(api): new alerts detection for change (change & percent)

feat(api): old alerts detection for change (change & percent)
feat(api): new custom metrics endpoints
feat(api): custom metrics return series list for alerts
feat(DB): alerts-change-change fix
Taha Yassine Kraiem 2022-01-08 15:08:12 +01:00
parent 55a86d76f2
commit 6b14f13e53
7 changed files with 154 additions and 68 deletions

View file

@@ -1,7 +1,7 @@
import schemas
from chalicelib.core import sessions
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.core import sessions
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
@@ -53,10 +53,10 @@ LeftToDb = {
"formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0"},
schemas.AlertColumn.errors__javascript__count: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'"},
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
schemas.AlertColumn.errors__backend__count: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'"},
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}
# This is the frequency of execution for each threshold
@@ -91,17 +91,19 @@ def can_check(a) -> bool:
def Build(a):
params = {"project_id": a["projectId"]}
full_args={}
full_args = {}
j_s = True
if a["seriesId"] is not None:
a["filter"]["sort"]="session_id"
a["filter"]["order"]="DESC"
a["filter"]["startDate"]=-1
a["filter"]["endDate"]=TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
subQ=f"""SELECT COUNT(session_id) AS value
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
colDef = LeftToDb[a["query"]["left"]]
@@ -109,6 +111,9 @@ def Build(a):
FROM {colDef["table"]}
WHERE project_id = %(project_id)s
{"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
j_s = colDef.get("joinSessions", True)
print(">>>>>>>>>>>>>")
print(j_s)
# q = sq.Select(fmt.Sprint("value, coalesce(value,0)", a.Query.Operator, a.Query.Right, " AS valid"))
q = f"""SELECT value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
@@ -119,59 +124,73 @@ def Build(a):
# }
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["seriesId"]is not None:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
q += f""" FROM ({subQ} AND timestamp>=%(startDate)s
AND sessions.start_ts>=%(startDate)s) AS stat"""
params = {**params, **full_args,"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
pass
# if a.Options.Change == "change" :
# if len(colDef.group) == 0 :
# pass
# # sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60)).ToSql()
# # sub2, args2, _ := subQ.Where(
# # sq.And{
# # sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# # sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
# # }).ToSql()
# # sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")-(" + sub2 + ")) AS value").ToSql()
# # q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
# else:
# pass
# # subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# # sub2, args2, _ := subQ.Where(
# # sq.And{
# # sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# # sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
# # }).ToSql()
# # sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# # q = q.FromSelect(sub1, "stat")
#
# elif a.Options.Change == "percent":
# # if len(colDef.group) == 0 {
# # sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60)).ToSql()
# # sub2, args2, _ := subQ.Where(
# # sq.And{
# # sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# # sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
# # }).ToSql()
# # sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")/(" + sub2 + ")-1)*100 AS value").ToSql()
# # q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
# # } else {
#
# pass
# # subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# # sub2, args2, _ := subQ.Where(
# # sq.And{
# # sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# # sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
# # }).ToSql()
# # sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# # q = q.FromSelect(sub1, "stat")
# else:
# return q, errors.New("unsupported change method")
if a["options"]["change"] == schemas.AlertDetectionChangeType.change:
# if len(colDef.group) > 0:
# subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# sub2, args2, _ := subQ.Where(
# sq.And{
# sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
# }).ToSql()
# sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# q = q.FromSelect(sub1, "stat")
# else:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
q += f" FROM ( {sub1} ) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
q += f" FROM ( {sub1} ) AS stat"
else:
# if len(colDef.group) >0 {
# subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# sub2, args2, _ := subQ.Where(
# sq.And{
# sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
# }).ToSql()
# sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# q = q.FromSelect(sub1, "stat")
# } else {
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
* 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
params["timestamp_sub2"] = TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
return q, params
@@ -195,7 +214,6 @@ def process():
AND projects.active
AND projects.deleted_at ISNULL
AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
AND alert_id=36
ORDER BY alerts.created_at;"""
cur.execute(query=query)
all_alerts = helper.list_to_camel_case(cur.fetchall())
@@ -203,6 +221,10 @@ def process():
if True or can_check(alert):
print(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
query, params = Build(alert)
print(cur.mogrify(query, params))
print("----------------------")
query = cur.mogrify(query, params)
# print(alert)
# print(query)
cur.execute(query)
result = cur.fetchone()
if result["valid"]:
print("Valid alert, notifying users")

View file

@@ -201,3 +201,25 @@ def get(metric_id, project_id, user_id):
row = cur.fetchone()
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT metric_id,
series_id,
metrics.name AS metric_name,
metric_series.name AS series_name,
index AS series_index
FROM metric_series
INNER JOIN metrics USING (metric_id)
WHERE metrics.deleted_at ISNULL
AND metrics.project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
ORDER BY metric_name, series_index, series_name;""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
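
A minimal usage sketch of the new helper, assuming it lives in the custom_metrics module under chalicelib.core (the file name is not shown in this view); the project and user ids are placeholders.

# Hypothetical caller; ids are placeholders and the module path is assumed.
from chalicelib.core import custom_metrics

rows = custom_metrics.get_series_for_alert(project_id=1, user_id=42)
# each row is camel-cased by helper.list_to_camel_case, e.g.
# {"metricId": 3, "seriesId": 7, "metricName": "Errors", "seriesName": "JS exceptions", "seriesIndex": 0}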

View file

@@ -86,7 +86,8 @@ class PostgresClient:
else:
raise error
finally:
postgreSQL_pool.putconn(self.connection)
if not self.long_query:
postgreSQL_pool.putconn(self.connection)
def close():
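
A sketch of how a caller might rely on the new behaviour, assuming the PostgresClient constructor exposes a long_query flag (its signature is not part of this diff): when the flag is set, the finally block above skips putconn, so the pooled connection is not handed back while a long statement runs.

# Assumes PostgresClient accepts a long_query flag; the constructor is not shown in this diff.
from chalicelib.utils import pg_client

with pg_client.PostgresClient(long_query=True) as cur:
    cur.execute("SELECT pg_sleep(300);")  # connection is not returned to the pool in finally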

View file

@@ -1097,6 +1097,19 @@ def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsSchema = Bod
return {"data": custom_metrics.try_live(project_id=projectId, data=data)}
@app.post('/{projectId}/custom_metrics/chart', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, data: schemas.CustomMetricChartPayloadSchema2 = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=data.metric_id,
data=data)}
@app.get('/{projectId}/custom_metrics/series', tags=["customMetrics"])
def get_series_for_alert(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_series_for_alert(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
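
A hypothetical client call against the new series-listing route; the host, API prefix, project id and bearer token below are placeholders, and the auth scheme is not shown in this diff.

# Hypothetical HTTP call; host, prefix, project id and token are placeholders.
import requests

resp = requests.get(
    "https://openreplay.example.com/api/1/custom_metrics/series",
    headers={"Authorization": "Bearer <token>"},
)
series = resp.json()["data"]  # the rows returned by custom_metrics.get_series_for_alert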

View file

@@ -276,12 +276,18 @@ class _AlertMessageSchema(BaseModel):
value: str = Field(...)
class AlertDetectionChangeType(str, Enum):
percent = "percent"
change = "change"
class _AlertOptionSchema(BaseModel):
message: List[_AlertMessageSchema] = Field([])
currentPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(...)
previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(15)
lastNotification: Optional[int] = Field(None)
renotifyInterval: Optional[int] = Field(720)
change: Optional[AlertDetectionChangeType] = Field(None)
class AlertColumn(str, Enum):
@@ -339,8 +345,16 @@ class AlertSchema(BaseModel):
def alert_validator(cls, values):
if values.get("query") is not None and values["query"].left == AlertColumn.custom:
assert values.get("series_id") is not None, "series_id should not be null for CUSTOM alert"
if values.get("detectionMethod") is not None \
and values["detectionMethod"] == AlertDetectionMethod.change \
and values.get("options") is not None:
assert values["options"].change is not None, \
"options.change should not be null for detection method 'change'"
return values
class Config:
alias_generator = attribute_to_camel_case
class SourcemapUploadPayloadSchema(BaseModel):
urls: List[str] = Field(..., alias="URL")
@@ -628,6 +642,10 @@ class CustomMetricChartPayloadSchema(BaseModel):
alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema2(CustomMetricChartPayloadSchema):
metric_id: int = Field(...)
class TryCustomMetricsSchema(CreateCustomMetricsSchema, CustomMetricChartPayloadSchema):
name: Optional[str] = Field(None)
@@ -635,6 +653,9 @@ class TryCustomMetricsSchema(CreateCustomMetricsSchema, CustomMetricChartPayload
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
series_id: Optional[int] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
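
The new alert_validator rule can be illustrated with a self-contained sketch of the same root-validator pattern; the models below are stand-ins, not the project's schemas.

# Stand-in pydantic v1 models illustrating the added validation rule; not the project's schemas.
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field, root_validator

class DetectionChange(str, Enum):
    percent = "percent"
    change = "change"

class Options(BaseModel):
    change: Optional[DetectionChange] = Field(None)

class Alert(BaseModel):
    detection_method: str = Field(...)
    options: Options = Field(...)

    @root_validator
    def require_change_for_change_method(cls, values):
        if values.get("detection_method") == "change" and values.get("options") is not None:
            assert values["options"].change is not None, \
                "options.change should not be null for detection method 'change'"
        return values

Alert(detection_method="change", options=Options(change=DetectionChange.change))  # passes
# Alert(detection_method="change", options=Options())  # raises ValidationError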

View file

@@ -125,4 +125,8 @@ ALTER TABLE alerts
ADD COLUMN series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE;
CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
UPDATE alerts
SET options=jsonb_set(options, '{change}', '"change"')
WHERE detection_method = 'change'
AND options -> 'change' ISNULL;
COMMIT;

View file

@@ -77,5 +77,8 @@ ALTER TABLE alerts
ADD COLUMN series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE;
CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
UPDATE alerts
SET options=jsonb_set(options, '{change}', '"change"')
WHERE detection_method = 'change'
AND options -> 'change' ISNULL;
COMMIT;