Dev (#2430)
* refactor(chalice): upgraded dependencies
* refactor(chalice): upgraded dependencies
  feat(chalice): support heatmaps
* fix(chalice): fixed Math-operators validation
  refactor(chalice): search for sessions that have events for heatmaps
* refactor(chalice): search for sessions that have at least 1 location event for heatmaps
* refactor(chalice): upgraded dependencies
* refactor(chalice): upgraded dependencies
  feat(chalice): support heatmaps
* fix(chalice): fixed Math-operators validation
  refactor(chalice): search for sessions that have events for heatmaps
* refactor(chalice): search for sessions that have at least 1 location event for heatmaps
* refactor(chalice): upgraded dependencies
  refactor(crons): upgraded dependencies
  refactor(alerts): upgraded dependencies
* feat(chalice): get top 10 values for autocomplete CH
* refactor(chalice): cleaned code
  refactor(chalice): upgraded dependencies
  refactor(alerts): upgraded dependencies
  refactor(crons): upgraded dependencies
* feat(chalice): autocomplete return top 10 with stats
* refactor(chalice): refactored logger
* fix(chalice): fixed autocomplete top 10 meta-filters
parent: c8ad140298
commit: 97df9b2b7c
15 changed files with 209 additions and 114 deletions
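Most of the diff below applies a single pattern: each module gets a named logger and its direct logging.* calls become logger.* calls. A minimal sketch of that pattern, for illustration only (process_data and do_work are placeholders invented here, not functions from this commit):

import logging

logger = logging.getLogger(__name__)

def do_work(data):
    # placeholder for a module's real work; raises to show the error path
    raise ValueError(f"cannot process {data!r}")

def process_data(data):
    try:
        do_work(data)
    except Exception as e:
        # same pattern as the refactored modules: a named logger instead of logging.*
        logger.error("!!!Error while processing data")
        logger.error(str(e))

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    process_data({"example": True})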
@@ -124,26 +124,26 @@ def process_notifications(data):
try:
send_to_slack_batch(notifications_list=notifications_list)
except Exception as e:
- logging.error("!!!Error while sending slack notifications batch")
- logging.error(str(e))
+ logger.error("!!!Error while sending slack notifications batch")
+ logger.error(str(e))
elif t == "msteams":
try:
send_to_msteams_batch(notifications_list=notifications_list)
except Exception as e:
- logging.error("!!!Error while sending msteams notifications batch")
- logging.error(str(e))
+ logger.error("!!!Error while sending msteams notifications batch")
+ logger.error(str(e))
elif t == "email":
try:
send_by_email_batch(notifications_list=notifications_list)
except Exception as e:
- logging.error("!!!Error while sending email notifications batch")
- logging.error(str(e))
+ logger.error("!!!Error while sending email notifications batch")
+ logger.error(str(e))
elif t == "webhook":
try:
webhook.trigger_batch(data_list=notifications_list)
except Exception as e:
- logging.error("!!!Error while sending webhook notifications batch")
- logging.error(str(e))
+ logger.error("!!!Error while sending webhook notifications batch")
+ logger.error(str(e))
def send_by_email(notification, destination):

@@ -158,9 +158,9 @@ def send_by_email(notification, destination):
def send_by_email_batch(notifications_list):
if not smtp.has_smtp():
- logging.info("no SMTP configuration for email notifications")
+ logger.info("no SMTP configuration for email notifications")
if notifications_list is None or len(notifications_list) == 0:
- logging.info("no email notifications")
+ logger.info("no email notifications")
return
for n in notifications_list:
send_by_email(notification=n.get("notification"), destination=n.get("destination"))
@@ -11,7 +11,7 @@ from chalicelib.core import sessions
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
- logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+ logging.basicConfig(level=config("LOGLEVEL", default=logger.info))
LeftToDb = {
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {

@@ -92,7 +92,7 @@ def can_check(a) -> bool:
else a["options"]["previousPeriod"]
if TimeInterval.get(repetitionBase) is None:
- logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
+ logger.error(f"repetitionBase: {repetitionBase} NOT FOUND")
return False
return (a["options"]["renotifyInterval"] <= 0 or

@@ -116,8 +116,8 @@ def Build(a):
try:
data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
except ValidationError:
- logging.warning("Validation error for:")
- logging.warning(a["filter"])
+ logger.warning("Validation error for:")
+ logger.warning(a["filter"])
raise
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,

@@ -196,23 +196,23 @@ def process():
try:
query = cur.mogrify(query, params)
except Exception as e:
- logging.error(
+ logger.error(
f"!!!Error while building alert query for alertId:{alert['alertId']} name: {alert['name']}")
- logging.error(e)
+ logger.error(e)
continue
- logging.debug(alert)
- logging.debug(query)
+ logger.debug(alert)
+ logger.debug(query)
try:
cur.execute(query)
result = cur.fetchone()
if result["valid"]:
- logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
+ logger.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
notifications.append(generate_notification(alert, result))
except Exception as e:
- logging.error(
+ logger.error(
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
- logging.error(query)
- logging.error(e)
+ logger.error(query)
+ logger.error(e)
cur = cur.recreate(rollback=True)
if len(notifications) > 0:
cur.execute(
@@ -1,9 +1,11 @@
import logging
import schemas
from chalicelib.core import countries, events, metadata
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.event_filter_definition import Event
+ logger = logging.getLogger(__name__)
TABLE = "public.autocomplete"

@@ -329,3 +331,92 @@ def __search_metadata(project_id, value, key=None, source=None):
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
+ TYPE_TO_COLUMN = {
+ schemas.EventType.CLICK: "label",
+ schemas.EventType.INPUT: "label",
+ schemas.EventType.LOCATION: "url_path",
+ schemas.EventType.CUSTOM: "name",
+ schemas.EventType.REQUEST: "url_path",
+ schemas.EventType.GRAPHQL: "name",
+ schemas.EventType.STATE_ACTION: "name",
+ # For ERROR, sessions search is happening over name OR message,
+ # for simplicity top 10 is using name only
+ schemas.EventType.ERROR: "name",
+ schemas.FilterType.USER_COUNTRY: "user_country",
+ schemas.FilterType.USER_CITY: "user_city",
+ schemas.FilterType.USER_STATE: "user_state",
+ schemas.FilterType.USER_ID: "user_id",
+ schemas.FilterType.USER_ANONYMOUS_ID: "user_anonymous_id",
+ schemas.FilterType.USER_OS: "user_os",
+ schemas.FilterType.USER_BROWSER: "user_browser",
+ schemas.FilterType.USER_DEVICE: "user_device",
+ schemas.FilterType.PLATFORM: "platform",
+ schemas.FilterType.REV_ID: "rev_id",
+ schemas.FilterType.REFERRER: "referrer",
+ schemas.FilterType.UTM_SOURCE: "utm_source",
+ schemas.FilterType.UTM_MEDIUM: "utm_medium",
+ schemas.FilterType.UTM_CAMPAIGN: "utm_campaign",
+ }
+ TYPE_TO_TABLE = {
+ schemas.EventType.CLICK: "events.clicks",
+ schemas.EventType.INPUT: "events.inputs",
+ schemas.EventType.LOCATION: "events.pages",
+ schemas.EventType.CUSTOM: "events_common.customs",
+ schemas.EventType.REQUEST: "events_common.requests",
+ schemas.EventType.GRAPHQL: "events.graphql",
+ schemas.EventType.STATE_ACTION: "events.state_actions",
+ # For ERROR, sessions search is happening over name OR message,
+ # for simplicity top 10 is using name only
+ schemas.EventType.ERROR: "name"
+ }
+ def get_top_values(project_id, event_type, event_key=None):
+ with pg_client.PostgresClient() as cur:
+ if schemas.FilterType.has_value(event_type):
+ if event_type == schemas.FilterType.METADATA \
+ and (event_key is None \
+ or (colname := metadata.get_colname_by_key(project_id=project_id, key=event_key)) is None) \
+ or event_type != schemas.FilterType.METADATA \
+ and (colname := TYPE_TO_COLUMN.get(event_type)) is None:
+ return []
+ query = f"""WITH raw AS (SELECT DISTINCT {colname} AS c_value,
+ COUNT(1) OVER (PARTITION BY {colname}) AS row_count,
+ COUNT(1) OVER () AS total_count
+ FROM public.sessions
+ WHERE project_id = %(project_id)s
+ AND {colname} IS NOT NULL
+ AND sessions.duration IS NOT NULL
+ AND sessions.duration > 0
+ ORDER BY row_count DESC
+ LIMIT 10)
+ SELECT c_value AS value, row_count, trunc(row_count * 100 / total_count, 2) AS row_percentage
+ FROM raw;"""
+ else:
+ colname = TYPE_TO_COLUMN.get(event_type)
+ table = TYPE_TO_TABLE.get(event_type)
+ query = f"""WITH raw AS (SELECT DISTINCT {colname} AS c_value,
+ COUNT(1) OVER (PARTITION BY {colname}) AS row_count,
+ COUNT(1) OVER () AS total_count
+ FROM {table} INNER JOIN public.sessions USING(session_id)
+ WHERE project_id = %(project_id)s
+ AND {colname} IS NOT NULL
+ AND {colname} != ''
+ AND sessions.duration IS NOT NULL
+ AND sessions.duration > 0
+ ORDER BY row_count DESC
+ LIMIT 10)
+ SELECT c_value AS value, row_count, trunc(row_count * 100 / total_count,2) AS row_percentage
+ FROM raw;"""
+ params = {"project_id": project_id}
+ query = cur.mogrify(query, params)
+ logger.debug("--------------------")
+ logger.debug(query)
+ logger.debug("--------------------")
+ cur.execute(query=query)
+ results = cur.fetchall()
+ return helper.list_to_camel_case(results)
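A hypothetical usage sketch of the new get_top_values helper added above; the import path and the camel-cased result keys are assumptions inferred from the module contents and helper.list_to_camel_case, not shown in this diff:

import schemas
from chalicelib.core import autocomplete  # assumed module path for the file shown above

# top 10 click labels for a project, with per-value counts and percentages
rows = autocomplete.get_top_values(project_id=1, event_type=schemas.EventType.CLICK)
for row in rows:
    # expected keys after camel-case conversion: value, rowCount, rowPercentage
    print(row["value"], row["rowCount"], row["rowPercentage"])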
@@ -35,8 +35,8 @@ class MSTeams(BaseCollaboration):
"title": "Welcome to OpenReplay"
})
if r.status_code != 200:
- logging.warning("MSTeams integration failed")
- logging.warning(r.text)
+ logger.warning("MSTeams integration failed")
+ logger.warning(r.text)
return False
return True

@@ -51,15 +51,15 @@ class MSTeams(BaseCollaboration):
json=body,
timeout=5)
if r.status_code != 200:
- logging.warning(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
- logging.warning(r.text)
+ logger.warning(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
+ logger.warning(r.text)
return None
except requests.exceptions.Timeout:
- logging.warning(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
+ logger.warning(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
return None
except Exception as e:
- logging.warning(f"!! Issue sending msteams raw webhookId:{webhook_id}")
- logging.warning(e)
+ logger.warning(f"!! Issue sending msteams raw webhookId:{webhook_id}")
+ logger.warning(e)
return None
return {"data": r.text}

@@ -68,7 +68,7 @@ class MSTeams(BaseCollaboration):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["msteams integration not found"]}
- logging.debug(f"====> sending msteams batch notification: {len(attachments)}")
+ logger.debug(f"====> sending msteams batch notification: {len(attachments)}")
for i in range(0, len(attachments), 50):
part = attachments[i:i + 50]
for j in range(1, len(part), 2):

@@ -82,8 +82,8 @@ class MSTeams(BaseCollaboration):
"sections": part
})
if r.status_code != 200:
- logging.warning("!!!! something went wrong")
- logging.warning(r.text)
+ logger.warning("!!!! something went wrong")
+ logger.warning(r.text)
@classmethod
def __share(cls, tenant_id, integration_id, attachement, extra=None):
@@ -90,7 +90,7 @@ class DatabaseRequestHandler:
if additional_clauses:
query += " " + additional_clauses
- logging.debug(f"Query: {query}")
+ logger.debug(f"Query: {query}")
return query
def execute_query(self, query, data=None):
@@ -48,7 +48,7 @@ def update_feature_flag_status(project_id: int, feature_flag_id: int, is_active:
return {"is_active": cur.fetchone()["is_active"]}
except Exception as e:
- logging.error(f"Failed to update feature flag status: {e}")
+ logger.error(f"Failed to update feature flag status: {e}")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Failed to update feature flag status")
@@ -128,18 +128,18 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
full_args)
- logging.debug("--------------------")
- logging.debug(main_query)
- logging.debug("--------------------")
+ logger.debug("--------------------")
+ logger.debug(main_query)
+ logger.debug("--------------------")
try:
cur.execute(main_query)
sessions = cur.fetchone()
except Exception as err:
- logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
- logging.warning(main_query.decode('UTF-8'))
- logging.warning("--------- PAYLOAD -----------")
- logging.warning(data.model_dump_json())
- logging.warning("--------------------")
+ logger.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
+ logger.warning(main_query.decode('UTF-8'))
+ logger.warning("--------- PAYLOAD -----------")
+ logger.warning(data.model_dump_json())
+ logger.warning("--------------------")
raise err
if errors_only or ids_only:
return helper.list_to_camel_case(cur.fetchall())

@@ -222,17 +222,17 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count
{query_part};""", full_args)
- logging.debug("--------------------")
- logging.debug(main_query)
- logging.debug("--------------------")
+ logger.debug("--------------------")
+ logger.debug(main_query)
+ logger.debug("--------------------")
try:
cur.execute(main_query)
except Exception as err:
- logging.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
- logging.warning(main_query.decode('UTF-8'))
- logging.warning("--------- PAYLOAD -----------")
- logging.warning(data.model_dump_json())
- logging.warning("--------------------")
+ logger.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
+ logger.warning(main_query.decode('UTF-8'))
+ logger.warning("--------- PAYLOAD -----------")
+ logger.warning(data.model_dump_json())
+ logger.warning("--------------------")
raise err
if view_type == schemas.MetricTimeseriesViewType.LINE_CHART:
sessions = cur.fetchall()

@@ -284,9 +284,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
GROUP BY {main_col}
ORDER BY session_count DESC) AS users_sessions;""",
full_args)
- logging.debug("--------------------")
- logging.debug(main_query)
- logging.debug("--------------------")
+ logger.debug("--------------------")
+ logger.debug(main_query)
+ logger.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
for s in sessions["values"]:

@@ -399,9 +399,9 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
ORDER BY user_count DESC) AS users_sessions;"""
main_query = cur.mogrify(main_query, full_args)
- logging.debug("--------------------")
- logging.debug(main_query)
- logging.debug("--------------------")
+ logger.debug("--------------------")
+ logger.debug(main_query)
+ logger.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
for s in sessions["values"]:

@@ -436,9 +436,9 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema,
GROUP BY type, context_string
ORDER BY session_count DESC) AS filtered_issues
) AS ranked_issues;""", full_args)
- logging.debug("--------------------")
- logging.debug(main_query)
- logging.debug("--------------------")
+ logger.debug("--------------------")
+ logger.debug(main_query)
+ logger.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
for s in sessions["values"]:

@@ -988,7 +988,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
value_key=e_k_f))
apply = True
else:
- logging.warning(f"undefined FETCH filter: {f.type}")
+ logger.warning(f"undefined FETCH filter: {f.type}")
if not apply:
continue
elif event_type == schemas.EventType.GRAPHQL:

@@ -1015,7 +1015,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
event_where.append(
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
else:
- logging.warning(f"undefined GRAPHQL filter: {f.type}")
+ logger.warning(f"undefined GRAPHQL filter: {f.type}")
else:
continue
if event_index == 0 or or_events:

@@ -1165,7 +1165,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
c.value, value_key=e_k))
else:
- logging.warning(f"unsupported extra_event type:${c.type}")
+ logger.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
query_part = f"""\
@@ -152,7 +152,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
next_table = events.EventType.CUSTOM_MOBILE.table
next_col_name = events.EventType.CUSTOM_MOBILE.column
else:
- logging.warning(f"=================UNDEFINED:{event_type}")
+ logger.warning(f"=================UNDEFINED:{event_type}")
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),

@@ -219,18 +219,18 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(n_stages_query, params)
- logging.debug("---------------------------------------------------")
- logging.debug(query)
- logging.debug("---------------------------------------------------")
+ logger.debug("---------------------------------------------------")
+ logger.debug(query)
+ logger.debug("---------------------------------------------------")
try:
cur.execute(query)
rows = cur.fetchall()
except Exception as err:
- logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
- logging.warning(query.decode('UTF-8'))
- logging.warning("--------- PAYLOAD -----------")
- logging.warning(filter_d.model_dump_json())
- logging.warning("--------------------")
+ logger.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
+ logger.warning(query.decode('UTF-8'))
+ logger.warning("--------- PAYLOAD -----------")
+ logger.warning(filter_d.model_dump_json())
+ logger.warning("--------------------")
raise err
for r in rows:
if r["user_id"] == "":

@@ -481,7 +481,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if last_stage is None:
last_stage = n_stages
if last_stage > n_stages:
- logging.debug(
+ logger.debug(
"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
last_stage = n_stages

@@ -548,7 +548,7 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id, metri
stages = filter_d.events
if len(stages) == 0:
- logging.debug("no stages found")
+ logger.debug("no stages found")
return output, 0
# The result of the multi-stage query
@@ -157,7 +157,7 @@ def get_ut_test(project_id: int, test_id: int):
live_sessions = assist.__get_live_sessions_ws(project_id, body)
row['live_count'] = live_sessions['total']
except Exception as e:
- logging.error(f"Failed to get live sessions count: {e}")
+ logger.error(f"Failed to get live sessions count: {e}")
row['live_count'] = 0
row['created_at'] = TimeUTC.datetime_to_timestamp(row['created_at'])

@@ -403,7 +403,7 @@ def get_statistics(test_id: int):
except HTTPException as http_exc:
raise http_exc
except Exception as e:
- logging.error(f"Unexpected error occurred: {e}")
+ logger.error(f"Unexpected error occurred: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal server error")
@@ -159,7 +159,7 @@ def trigger_batch(data_list):
if w["destination"] not in webhooks_map:
webhooks_map[w["destination"]] = get_by_id(webhook_id=w["destination"])
if webhooks_map[w["destination"]] is None:
- logging.error(f"!!Error webhook not found: webhook_id={w['destination']}")
+ logger.error(f"!!Error webhook not found: webhook_id={w['destination']}")
else:
__trigger(hook=webhooks_map[w["destination"]], data=w["data"])

@@ -172,10 +172,10 @@ def __trigger(hook, data):
r = requests.post(url=hook["endpoint"], json=data, headers=headers)
if r.status_code != 200:
- logging.error("=======> webhook: something went wrong for:")
- logging.error(hook)
- logging.error(r.status_code)
- logging.error(r.text)
+ logger.error("=======> webhook: something went wrong for:")
+ logger.error(hook)
+ logger.error(r.status_code)
+ logger.error(r.text)
return
response = None
try:

@@ -184,5 +184,5 @@ def __trigger(hook, data):
try:
response = r.text
except:
- logging.info("no response found")
+ logger.info("no response found")
return response
@@ -1,10 +1,13 @@
- from . import smtp
import logging
from decouple import config
- logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+ from . import smtp
+ logger = logging.getLogger(__name__)
+ logging.basicConfig(level=config("LOGLEVEL", default=logging.info))
if smtp.has_smtp():
- logging.info("valid SMTP configuration found")
+ logger.info("valid SMTP configuration found")
else:
- logging.info("no SMTP configuration found or SMTP validation failed")
+ logger.info("no SMTP configuration found or SMTP validation failed")
@@ -16,7 +16,7 @@ def timed(f):
result = f(*args, **kwds)
elapsed = time() - start
if inspect.stack()[1][3] == "_view_func":
- logging.debug("%s: took %d s to finish" % (f.__name__, elapsed))
+ logger.debug("%s: took %d s to finish" % (f.__name__, elapsed))
else:
call_stack = [i[3] for i in inspect.stack()[1:] if i[3] != "wrapper"]
call_stack = [c for c in call_stack if
@@ -61,11 +61,11 @@ def send_html(BODY_HTML, SUBJECT, recipient):
for r in recipient:
msg["To"] = r
try:
- logging.info(f"Email sending to: {r}")
+ logger.info(f"Email sending to: {r}")
s.send_message(msg)
except Exception as e:
- logging.error("!!! Email error!")
- logging.error(e)
+ logger.error("!!! Email error!")
+ logger.error(e)
def send_text(recipients, text, subject):

@@ -79,8 +79,8 @@ def send_text(recipients, text, subject):
try:
s.send_message(msg)
except Exception as e:
- logging.error("!! Text-email failed: " + subject),
- logging.error(e)
+ logger.error("!! Text-email failed: " + subject),
+ logger.error(e)
def __escape_text_html(text):
@@ -62,18 +62,18 @@ def make_pool():
try:
postgreSQL_pool.closeall()
except (Exception, psycopg2.DatabaseError) as error:
- logging.error("Error while closing all connexions to PostgreSQL", error)
+ logger.error("Error while closing all connexions to PostgreSQL", error)
try:
postgreSQL_pool = ORThreadedConnectionPool(config("PG_MINCONN", cast=int, default=4),
config("PG_MAXCONN", cast=int, default=8),
**PG_CONFIG)
if (postgreSQL_pool):
- logging.info("Connection pool created successfully")
+ logger.info("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
- logging.error("Error while connecting to PostgreSQL", error)
+ logger.error("Error while connecting to PostgreSQL", error)
if RETRY < RETRY_MAX:
RETRY += 1
- logging.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+ logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
time.sleep(RETRY_INTERVAL)
make_pool()
else:

@@ -123,13 +123,13 @@ class PostgresClient:
if not self.use_pool or self.long_query or self.unlimited_query:
self.connection.close()
except Exception as error:
- logging.error("Error while committing/closing PG-connection", error)
+ logger.error("Error while committing/closing PG-connection", error)
if str(error) == "connection already closed" \
and self.use_pool \
and not self.long_query \
and not self.unlimited_query \
and config('PG_POOL', cast=bool, default=True):
- logging.info("Recreating the connexion pool")
+ logger.info("Recreating the connexion pool")
make_pool()
else:
raise error

@@ -144,9 +144,9 @@ class PostgresClient:
try:
result = self.cursor.cursor_execute(query=query, vars=vars)
except psycopg2.Error as error:
- logging.error(f"!!! Error of type:{type(error)} while executing query:")
- logging.error(query)
- logging.info("starting rollback to allow future execution")
+ logger.error(f"!!! Error of type:{type(error)} while executing query:")
+ logger.error(query)
+ logger.info("starting rollback to allow future execution")
self.connection.rollback()
raise error
return result

@@ -156,17 +156,17 @@ class PostgresClient:
try:
self.connection.rollback()
except Exception as error:
- logging.error("Error while rollbacking connection for recreation", error)
+ logger.error("Error while rollbacking connection for recreation", error)
try:
self.cursor.close()
except Exception as error:
- logging.error("Error while closing cursor for recreation", error)
+ logger.error("Error while closing cursor for recreation", error)
self.cursor = None
return self.__enter__()
async def init():
- logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
+ logger.info(f">PG_POOL:{config('PG_POOL', default=None)}")
if config('PG_POOL', cast=bool, default=True):
make_pool()

@@ -176,6 +176,6 @@ async def terminate():
if postgreSQL_pool is not None:
try:
postgreSQL_pool.closeall()
- logging.info("Closed all connexions to PostgreSQL")
+ logger.info("Closed all connexions to PostgreSQL")
except (Exception, psycopg2.DatabaseError) as error:
- logging.error("Error while closing all connexions to PostgreSQL", error)
+ logger.error("Error while closing all connexions to PostgreSQL", error)
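For context, a minimal sketch (not part of this commit) of the PostgresClient context-manager pattern that the modules above rely on, combining mogrify with a named logger; the function name and query are made up for illustration:

import logging

from chalicelib.utils import pg_client

logger = logging.getLogger(__name__)

def count_project_sessions(project_id):
    with pg_client.PostgresClient() as cur:
        # mogrify binds the parameters client-side so the final SQL can be logged before execution
        query = cur.mogrify("SELECT COUNT(*) AS count FROM public.sessions WHERE project_id = %(project_id)s",
                            {"project_id": project_id})
        logger.debug(query)
        cur.execute(query)
        return cur.fetchone()["count"]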
@@ -5,10 +5,11 @@ from smtplib import SMTPAuthenticationError
from decouple import config
from fastapi import HTTPException
+ logger = logging.getLogger(__name__)
class EmptySMTP:
def sendmail(self, from_addr, to_addrs, msg, mail_options=(), rcpt_options=()):
- logging.error("!! CANNOT SEND EMAIL, NO VALID SMTP CONFIGURATION FOUND")
+ logger.error("!! CANNOT SEND EMAIL, NO VALID SMTP CONFIGURATION FOUND")
def send_message(self, msg):
self.sendmail( msg["FROM"], msg["TO"], msg.as_string() )

@@ -55,9 +56,9 @@ class SMTPClient:
if not (status == 250):
raise Exception(f"SMTP connexion error, status:{status}")
except Exception as e: # smtplib.SMTPServerDisconnected
- logging.error(
+ logger.error(
f'!! SMTP connexion error to {config("EMAIL_HOST")}:{config("EMAIL_PORT", cast=int)}')
- logging.error(e)
+ logger.error(e)
return False, e
# check authentication

@@ -65,8 +66,8 @@ class SMTPClient:
self.__enter__()
self.__exit__()
except Exception as e:
- logging.error(f'!! SMTP authentication error to {config("EMAIL_HOST")}:{config("EMAIL_PORT", cast=int)}')
- logging.error(e)
+ logger.error(f'!! SMTP authentication error to {config("EMAIL_HOST")}:{config("EMAIL_PORT", cast=int)}')
+ logger.error(e)
return False, e
return True, None

@@ -80,8 +81,8 @@ SMTP_NOTIFIED = False
def has_smtp():
global VALID_SMTP, SMTP_ERROR, SMTP_NOTIFIED
if SMTP_ERROR is not None:
- logging.error("!!! SMTP error found, disabling SMTP configuration:")
- logging.error(SMTP_ERROR)
+ logger.error("!!! SMTP error found, disabling SMTP configuration:")
+ logger.error(SMTP_ERROR)
if VALID_SMTP is not None:
return VALID_SMTP

@@ -91,7 +92,7 @@ def has_smtp():
return VALID_SMTP
elif not SMTP_NOTIFIED:
SMTP_NOTIFIED = True
- logging.info("no SMTP configuration found")
+ logger.info("no SMTP configuration found")
return False

@@ -105,7 +106,7 @@ def check_connexion():
if not (result == 0):
error = f"""!! SMTP {config("EMAIL_HOST")}:{config("EMAIL_PORT", cast=int)} is unreachable
f'please make sure the host&port are correct, and the SMTP protocol is authorized on your server."""
- logging.error(error)
+ logger.error(error)
sock.close()
return False, error