* feat(api): dynamic-api 1/2
* feat(api): dynamic-api 2/2
* feat(api): core-api 1/2
* feat(api): changed schemas
* feat(api): apikey authorizer
* feat(api): jwt authorizer payload
* feat(api): core-api 2/3
* feat(api): apikey authorizer
* feat(api): shared context
* feat(api): response editor
* feat(api): middleware
* feat(api): custom router
* feat(api): fix auth double call
* feat(api): dashboard
* feat(api): insights
* feat(api): public api v1
* feat(api): allow full CORS
* feat(api): use decouple-config instead of env
* feat(api): fixed conflicting slack endpoint
* feat(api): fixed favorite errors param
* feat(api): migration fixes
* feat(api): changes
* feat(api): crons
* feat(api): changes and fixes
* feat(api): added new endpoints
* feat(api): applied new changes
* feat(api): Docker image
* feat(api): EE 1/4
* feat(api): EE core_dynamic
* feat(api): global routers generator
* feat(api): project authorizer
* feat(api): docker image
* feat(api): crons
* feat(api): EE trace activity
* feat(api): changed ORRouter
* feat(api): EE trace activity parameters&payload
* feat(api): EE trace activity action name & path_format
* feat(db): user trace
* feat(api): EE trace activity ignore routes and hide attribute
* feat(api): fix funnel payload schema
* feat(api): mobile support
* feat(api): changed build script
* feat(api): changed mobile sign endpoint
* feat(api): changed requirements.txt
* feat(api): changed dockerfile
* feat(api): changed mobile-env-var
* feat(api): removed insights
* feat(api): changed EE Dockerfile
* feat(api): cast session_id to str for signing
* feat(api): fixed error_id type
* feat(api): fixed /errors priority conflict
* feat(api): fixed /errors/{errorId} default params
* feat(api): fixed change password after invitation
* feat(api): use background task for emails instead of low-timeout-api
* feat(api): EE fixed missing required params
* feat(api): funnel-insights payload change
* feat(api): funnel-insights payload change
* feat(api): changed edit user payload schema
* feat(api): changed metrics payload schema
* feat(api): changed metrics payload schema
* feat(api): changed edit user default values
* feat(api): fixed change error status route
* feat(api): changed edit user
* feat(api): stop user from changing his own role
* feat(api): changed add slack
* feat(api): changed get funnel
* feat(api): changed get funnel on the fly payload
* feat(api): changed update payload
* feat(api): changed get funnel on the fly payload
* feat(api): changed update funnel payload
* feat(api): changed get funnel-sessions/issues on the fly payload
* feat(api): fixed funnel missing rangeValue
* feat(api): fixes
* feat(api): iceServers configuration
* feat(api): fix issueId casting
* feat(api): changed issues-sessions endpoint payload-schema
* feat(api): EE changed traces-ignored-routes
* feat(api): EE include core sessions.py
* feat(api): EE check licence on every request if expired
* feat(api): move general stats to dynamic
* feat(api): code cleanup
* feat(api): removed sentry
* feat(api): changed traces-ignore-routes
* feat(api): changed dependencies
* feat(api): changed jwt-auth-response code
* feat(api): changed traces-ignore-routes
* feat(api): changed traces-ignore-routes
* feat(api): removed PyTZ
* feat(api): migrated time-helper to zoneinfo
* feat(api): EE added missing dependency
* feat(api): changed base docker image
* feat(api): merge after roles
* feat(api): EE roles fastapi
* feat(db): handle HTTPExceptions
* feat(db): changed payload schema
* feat(db): changed payload schema
* feat(api): included insights
* feat(api): removed unused helper
* feat(api): merge from dev to fastapi
* feat(api): merge fixes
* feat(api): SAML migration
* feat(api): changed GET /signup response
* feat(api): changed EE Dockerfile
* feat(api): changed edition detection
* feat(api): include ee endpoints
* feat(api): add/edit member changes
* feat(api): saml changed redirect
* feat(api): track session's replay
* feat(api): track error's details
* feat(api): ignore tracking for read roles
* feat(api): define global queue
* feat(api): define global scheduler
* feat(api): traces use queue
* feat(api): traces batch insert
* feat(db): changed traces schema
* feat(api): fix signup captcha
* feat(api): fix signup captcha
* feat(api): optional roleId
* feat(api): set roleId to member if None
* feat(api): fixed edit role
* feat(api): return role details when creating a new member
* feat(api): trace: use BackgroundTasks instead of BackgroundTask to not override previous tasks
* feat(api): trace: use BackgroundTask if no other background task is defined
* feat(api): optimised delete metadata
* feat(api): Notification optional message
* feat(api): fix background-task reference
* feat(api): fix trace-background-task
* feat(api): fixed g-captcha for reset password
* feat(api): fix edit self-user
* feat(api): fixed create github-issue
* feat(api): set misfire_grace_time for crons
* feat(api): removed chalice
* feat(api): freeze dependencies
* feat(api): refactored blueprints
* feat(api): /metadata/session_search allow projectId=None
* feat(api): public API, changed userId type
* feat(api): fix upload sourcemaps
* feat(api): user-trace support ApiKey endpoints
* feat(api): fixed user-trace foreign key type
* feat(api): fixed trace schema
* feat(api): trace save auth-method
* feat(api): trace fixed auth-method
* feat(api): trace changed schema
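The "allow full CORS" and "use decouple-config instead of env" commits above boil down to a small amount of FastAPI setup. A minimal sketch, assuming python-decouple and Starlette's CORSMiddleware; the SITE_URL key is only an example, not a known project setting:

from decouple import config
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],      # "allow full CORS": any origin, method and header
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# python-decouple reads .env / environment variables with defaults and casting,
# instead of touching os.environ directly; SITE_URL is only an example key
site_url = config("SITE_URL", default="http://localhost:3000")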
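For "removed PyTZ" / "migrated time-helper to zoneinfo", the standard-library zoneinfo module (Python 3.9+) replaces pytz timezone objects directly. A minimal sketch; the helper below is illustrative, not the project's TimeUTC implementation:

from datetime import datetime, timezone
from zoneinfo import ZoneInfo  # stdlib since Python 3.9, replaces pytz


def to_timezone(ts_ms: int, tz_name: str = "UTC") -> datetime:
    # zoneinfo objects plug straight into datetime; no pytz.localize()/normalize() dance
    return datetime.fromtimestamp(ts_ms / 1000, tz=timezone.utc).astimezone(ZoneInfo(tz_name))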
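The "define global queue", "define global scheduler", "traces use queue", "traces batch insert" and "set misfire_grace_time for crons" commits describe buffering user traces and flushing them on a schedule. A rough sketch under the assumption that APScheduler drives the crons (misfire_grace_time is an APScheduler job argument); the interval and the insert_traces_batch helper are illustrative only:

from queue import Empty, Queue

from apscheduler.schedulers.asyncio import AsyncIOScheduler

traces_queue: Queue = Queue()    # global queue filled by the trace middleware
scheduler = AsyncIOScheduler()   # global scheduler, started on app startup


def insert_traces_batch(batch):
    ...  # stand-in for the real bulk INSERT into the traces table


def flush_traces(batch_size: int = 200):
    # drain the queue and write traces in one batch instead of row by row
    batch = []
    while len(batch) < batch_size:
        try:
            batch.append(traces_queue.get_nowait())
        except Empty:
            break
    if batch:
        insert_traces_batch(batch)


# misfire_grace_time lets a run that starts late still execute instead of being skipped
scheduler.add_job(flush_traces, "interval", seconds=30, misfire_grace_time=20)
# scheduler.start() would be called from the application's startup hook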
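"use background task for emails instead of low-timeout-api" and "trace: use BackgroundTasks instead of BackgroundTask to not override previous tasks" rely on a FastAPI detail: the injected BackgroundTasks object accumulates tasks, while attaching a single starlette BackgroundTask to the response replaces whatever was queued earlier. A minimal sketch with a placeholder route path and helpers:

from fastapi import BackgroundTasks, FastAPI

app = FastAPI()


def send_reset_email(to: str):
    ...  # stand-in for the real e-mail helper


def record_trace(action: str):
    ...  # stand-in for the trace middleware's background job


@app.post("/password/reset")   # illustrative path
async def reset_password(background_tasks: BackgroundTasks):
    # each add_task() call appends to the same task list, so the trace job queued
    # elsewhere is not overridden by the e-mail job added here
    background_tasks.add_task(send_reset_email, to="user@example.com")
    background_tasks.add_task(record_trace, action="reset_password")
    return {"data": {"state": "success"}}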
import json
import time

from fastapi import BackgroundTasks

from chalicelib.core import notifications, slack, webhook
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC

# alert fields that may be modified through update()
ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"]


def get(id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        SELECT *
                        FROM public.alerts
                        WHERE alert_id =%(id)s;""",
                        {"id": id})
        )
        a = helper.dict_to_camel_case(cur.fetchone())
    return __process_circular(a)


def get_all(project_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                            SELECT *
                            FROM public.alerts
                            WHERE project_id =%(project_id)s AND deleted_at ISNULL
                            ORDER BY created_at;""",
                            {"project_id": project_id})
        cur.execute(query=query)
        all = helper.list_to_camel_case(cur.fetchall())
        for a in all:
            a = __process_circular(a)
    return all


# accepted currentPeriod/previousPeriod values
SUPPORTED_THRESHOLD = [15, 30, 60, 120, 240, 1440]


def __transform_structure(data):
    # validate the payload and serialize query/options to JSON strings for storage
    if data.get("options") is None:
        return f"Missing 'options'", None
    if data["options"].get("currentPeriod") not in SUPPORTED_THRESHOLD:
        return f"Unsupported currentPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
    if data["options"].get("previousPeriod", 15) not in SUPPORTED_THRESHOLD:
        return f"Unsupported previousPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
    if data["options"].get("renotifyInterval") is None:
        data["options"]["renotifyInterval"] = 720
    data["query"]["right"] = float(data["query"]["right"])
    data["query"] = json.dumps(data["query"])
    data["description"] = data["description"] if data.get("description") is not None and len(
        data["description"]) > 0 else None
    if data.get("options"):
        messages = []
        for m in data["options"].get("message", []):
            if m.get("value") is None:
                continue
            m["value"] = str(m["value"])
            messages.append(m)
        data["options"]["message"] = messages
        data["options"] = json.dumps(data["options"])
    return None, data


def __process_circular(alert):
    # drop deletedAt and convert createdAt to a timestamp before returning an alert
    if alert is None:
        return None
    alert.pop("deletedAt")
    alert["createdAt"] = TimeUTC.datetime_to_timestamp(alert["createdAt"])
    return alert


def create(project_id, data):
    err, data = __transform_structure(data)
    if err is not None:
        return {"errors": [err]}
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        INSERT INTO public.alerts(project_id, name, description, detection_method, query, options)
                        VALUES (%(project_id)s, %(name)s, %(description)s, %(detectionMethod)s, %(query)s, %(options)s::jsonb)
                        RETURNING *;""",
                        {"project_id": project_id, **data})
        )
        a = helper.dict_to_camel_case(cur.fetchone())
    return {"data": helper.dict_to_camel_case(__process_circular(a))}


def update(id, changes):
    changes = {k: changes[k] for k in changes.keys() if k in ALLOW_UPDATE}
    err, changes = __transform_structure(changes)
    if err is not None:
        return {"errors": [err]}
    updateq = []
    for k in changes.keys():
        updateq.append(f"{helper.key_to_snake_case(k)} = %({k})s")
    if len(updateq) == 0:
        return {"errors": ["nothing to update"]}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
                            UPDATE public.alerts
                            SET {", ".join(updateq)}
                            WHERE alert_id =%(id)s AND deleted_at ISNULL
                            RETURNING *;""",
                            {"id": id, **changes})
        cur.execute(query=query)
        a = helper.dict_to_camel_case(cur.fetchone())
    return {"data": __process_circular(a)}


def process_notifications(data, background_tasks: BackgroundTasks):
    # group notifications by channel, persist them, then dispatch each channel
    # in batches as FastAPI background tasks
    full = {}
    for n in data:
        if "message" in n["options"]:
            webhook_data = {}
            if "data" in n["options"]:
                webhook_data = n["options"].pop("data")
            for c in n["options"].pop("message"):
                if c["type"] not in full:
                    full[c["type"]] = []
                if c["type"] in ["slack", "email"]:
                    full[c["type"]].append({
                        "notification": n,
                        "destination": c["value"]
                    })
                elif c["type"] in ["webhook"]:
                    full[c["type"]].append({"data": webhook_data, "destination": c["value"]})
    notifications.create(data)
    BATCH_SIZE = 200
    for t in full.keys():
        for i in range(0, len(full[t]), BATCH_SIZE):
            # helper.async_post(config('alert_ntf') % t, {"notifications": full[t][i:i + BATCH_SIZE]})
            notifications_list = full[t][i:i + BATCH_SIZE]

            if t == "slack":
                background_tasks.add_task(slack.send_batch, notifications_list=notifications_list)
            elif t == "email":
                background_tasks.add_task(send_by_email_batch, notifications_list=notifications_list)
            elif t == "webhook":
                background_tasks.add_task(webhook.trigger_batch, data_list=notifications_list)


def send_by_email(notification, destination):
    if notification is None:
        return
    email_helper.alert_email(recipients=destination,
                             subject=f'"{notification["title"]}" has been triggered',
                             data={
                                 "message": f'"{notification["title"]}" {notification["description"]}',
                                 "project_id": notification["options"]["projectId"]})


def send_by_email_batch(notifications_list):
    if notifications_list is None or len(notifications_list) == 0:
        return
    for n in notifications_list:
        send_by_email(notification=n.get("notification"), destination=n.get("destination"))
        time.sleep(1)


def delete(project_id, alert_id):
    # soft delete: mark the alert as deleted and deactivate it
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        UPDATE public.alerts
                        SET
                          deleted_at = timezone('utc'::text, now()),
                          active = FALSE
                        WHERE
                          alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
                        {"alert_id": alert_id, "project_id": project_id})
        )
    return {"data": {"state": "success"}}
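For reference, a hypothetical payload accepted by create() above, inferred from __transform_structure(); only the keys that function actually reads (options.currentPeriod, options.previousPeriod, options.renotifyInterval, options.message, query.right) are grounded in the code, the concrete values and remaining fields are assumptions:

# hypothetical example payload for create(); field names beyond those read in
# __transform_structure() are assumptions, not the project's documented schema
example_alert = {
    "name": "High error count",
    "description": "Fires when the error count jumps",
    "detectionMethod": "threshold",                                    # assumption
    "query": {"left": "errors_count", "operator": ">", "right": 50},  # only "right" is read explicitly
    "options": {
        "currentPeriod": 15,       # must be one of SUPPORTED_THRESHOLD
        "previousPeriod": 15,      # must be one of SUPPORTED_THRESHOLD
        "renotifyInterval": 720,   # defaulted to 720 when missing
        "message": [{"type": "email", "value": "alerts@example.com"}],
    },
}
# create(project_id=1, data=example_alert)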