feat(api): use logger

feat(alerts): use logger
This commit is contained in:
Taha Yassine Kraiem 2022-01-13 16:29:36 +01:00
parent aad6bceca6
commit 0267532081
6 changed files with 38 additions and 31 deletions

View file

@@ -1,3 +1,5 @@
import logging
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
@@ -60,5 +62,8 @@ Schedule.start()
for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
Schedule.add_job(id=job["func"].__name__, **job)
# for job in Schedule.get_jobs():
# print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
for job in Schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig(level=logging.INFO)
logging.getLogger('apscheduler').setLevel(logging.INFO)

View file

@@ -24,5 +24,5 @@ app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process,
for job in app.schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig()
logging.basicConfig(level=logging.INFO)
logging.getLogger('apscheduler').setLevel(logging.INFO)

View file

@@ -5,7 +5,7 @@ import schemas
from chalicelib.core import notifications, slack, webhook
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
import logging
def get(id):
with pg_client.PostgresClient() as cur:
@@ -110,20 +110,20 @@ def process_notifications(data):
try:
slack.send_batch(notifications_list=notifications_list)
except Exception as e:
print("!!!Error while sending slack notifications batch")
print(str(e))
logging.error("!!!Error while sending slack notifications batch")
logging.error(str(e))
elif t == "email":
try:
send_by_email_batch(notifications_list=notifications_list)
except Exception as e:
print("!!!Error while sending email notifications batch")
print(str(e))
logging.error("!!!Error while sending email notifications batch")
logging.error(str(e))
elif t == "webhook":
try:
webhook.trigger_batch(data_list=notifications_list)
except Exception as e:
print("!!!Error while sending webhook notifications batch")
print(str(e))
logging.error("!!!Error while sending webhook notifications batch")
logging.error(str(e))
def send_by_email(notification, destination):

View file

@@ -1,3 +1,5 @@
import logging
import schemas
from chalicelib.core import alerts_listener
from chalicelib.core import sessions, alerts
@@ -80,7 +82,7 @@ def can_check(a) -> bool:
else a["options"]["previousPeriod"]
if TimeInterval.get(repetitionBase) is None:
print(f"repetitionBase: {repetitionBase} NOT FOUND")
logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
return False
return (a["options"]["renotifyInterval"] <= 0 or
@@ -200,16 +202,16 @@ def process():
with pg_client.PostgresClient() as cur:
for alert in all_alerts:
if can_check(alert):
print(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
query, params = Build(alert)
query = cur.mogrify(query, params)
# print(alert)
# print(query)
logging.debug(alert)
logging.debug(query)
try:
cur.execute(query)
result = cur.fetchone()
if result["valid"]:
print("Valid alert, notifying users")
logging.info("Valid alert, notifying users")
notifications.append({
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
@@ -232,9 +234,9 @@ def process():
"createdAt": TimeUTC.now()}},
})
except Exception as e:
print(f"!!!Error while running alert query for alertId:{alert['alertId']}")
print(str(e))
print(query)
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
logging.error(str(e))
logging.error(query)
if len(notifications) > 0:
cur.execute(
cur.mogrify(f"""UPDATE public.Alerts

View file

@ -1,3 +1,5 @@
import logging
import requests
from chalicelib.utils import pg_client, helper
@@ -77,12 +79,6 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
allow_update = ["name", "index", "authHeader", "endpoint"]
with pg_client.PostgresClient() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update]
print(cur.mogrify(f"""\
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE webhook_id =%(id)s AND deleted_at ISNULL
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
{"id": webhook_id, **changes}))
cur.execute(
cur.mogrify(f"""\
UPDATE public.webhooks
@@ -152,7 +148,7 @@ def trigger_batch(data_list):
if w["destination"] not in webhooks_map:
webhooks_map[w["destination"]] = get_by_id(webhook_id=w["destination"])
if webhooks_map[w["destination"]] is None:
print(f"!!Error webhook not found: webhook_id={w['destination']}")
logging.error(f"!!Error webhook not found: webhook_id={w['destination']}")
else:
__trigger(hook=webhooks_map[w["destination"]], data=w["data"])
@@ -165,10 +161,10 @@ def __trigger(hook, data):
r = requests.post(url=hook["endpoint"], json=data, headers=headers)
if r.status_code != 200:
print("=======> webhook: something went wrong")
print(r)
print(r.status_code)
print(r.text)
logging.error("=======> webhook: something went wrong")
logging.error(r)
logging.error(r.status_code)
logging.error(r.text)
return
response = None
try:
@@ -177,5 +173,5 @@ def __trigger(hook, data):
try:
response = r.text
except:
print("no response found")
logging.info("no response found")
return response

View file

@@ -1,3 +1,4 @@
import logging
import queue
from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -75,7 +76,10 @@ for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
app.schedule.add_job(id=job["func"].__name__, **job)
from chalicelib.core import traces
app.schedule.add_job(id="trace_worker",**traces.cron_jobs[0])
app.schedule.add_job(id="trace_worker", **traces.cron_jobs[0])
for job in app.schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig(level=logging.INFO)
logging.getLogger('apscheduler').setLevel(logging.INFO)