API v1.15.0 (#1674)

* fix(chalice): fixed editing of the MSTeams integration

* fix(chalice): fixed wrong attribute access in funnels

* fix(chalice): fixed missing threshold value in funnels

* fix(chalice): fixed the alerts validation error

(cherry picked from commit b18e857667)
Authored by Kraiem Taha Yassine on 2023-11-17 12:08:29 +01:00; committed by rjshrjndrn
parent 0cd7f68452
commit 1865ec9095
7 changed files with 115 additions and 81 deletions

View file

@@ -2,6 +2,7 @@ import decimal
import logging
from decouple import config
+from pydantic_core._pydantic_core import ValidationError
import schemas
from chalicelib.core import alerts_listener
@@ -109,11 +110,18 @@ def Build(a):
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
-a["filter"]["startDate"] = -1
+a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
-full_args, query_part = sessions.search_query_parts(
-data=schemas.SessionsSearchPayloadSchema.model_validate(a["filter"]), error_status=None, errors_only=False,
-issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
+try:
+data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
+except ValidationError:
+logging.warning("Validation error for:")
+logging.warning(a["filter"])
+raise
+full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
+issue=None, project_id=a["projectId"], user_id=None,
+favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
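
The hunk above is the alerts validation fix: SessionsSearchPayloadSchema.model_validate is now wrapped so a pydantic ValidationError is logged with the offending filter before being re-raised, and startDate moves from -1 to 0, presumably because the v2 schema rejects negative timestamps (an inference, not stated by the commit). A minimal standalone sketch of the same pattern, using a hypothetical DemoFilterSchema in place of the real schema:

    import logging
    from pydantic import BaseModel, ValidationError

    class DemoFilterSchema(BaseModel):  # hypothetical stand-in
        startDate: int
        endDate: int

    def build_filter(raw: dict) -> DemoFilterSchema:
        try:
            return DemoFilterSchema.model_validate(raw)
        except ValidationError:
            # Log the offending payload before re-raising, as Build() now does.
            logging.warning("Validation error for:")
            logging.warning(raw)
            raise

    build_filter({"startDate": 0, "endDate": 1700000000000})  # validates cleanly
    # build_filter({"startDate": "x", "endDate": 0})  # logs the payload, then raises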

View file

@@ -1,9 +1,11 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"
-from chalicelib.utils import sql_helper as sh
+import logging
import schemas
from chalicelib.core import events, metadata, sessions
+from chalicelib.utils import sql_helper as sh
"""
todo: remove LIMIT from the query
@@ -17,11 +19,13 @@ from collections import defaultdict
from psycopg2.extras import RealDictRow
from chalicelib.utils import pg_client, helper
+logger = logging.getLogger(__name__)
SIGNIFICANCE_THRSH = 0.4
+# Taha: the value 24 was estimated in v1.15
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
-21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
+21: 2.080, 22: 2.074, 23: 2.069, 24: 2.067, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045,
+30: 2.042}
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
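
The re-keyed T_VALUES above is the "missing threshold" fix: the two-tailed t-table jumped from 23 to 25, so a lookup at 24 degrees of freedom found no threshold (24: 2.067 is the estimate the new comment mentions). A hypothetical guard that would have caught the gap; check_contiguous is illustrative, not part of the codebase:

    def check_contiguous(t_values: dict) -> None:
        # The table should cover every df between its smallest and largest keys.
        lo, hi = min(t_values), max(t_values)
        missing = [df for df in range(lo, hi + 1) if df not in t_values]
        if missing:
            raise ValueError(f"t-table is missing df entries: {missing}")

    # Before this commit, check_contiguous(T_VALUES) would have raised with [24].
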
@@ -89,7 +93,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
+sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@@ -98,24 +102,24 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
if f.source in meta_keys.keys():
first_stage_extra_constraints.append(
sh.multi_conditions(
-f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
+f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
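
The f["value"] -> f.value and meta_keys[f["key"]] -> meta_keys[f.source] rewrites above are the "wrong attributes" fix: the series filters are pydantic models, which are not subscriptable, and the schema exposes source rather than a "key" entry. A minimal sketch, with DemoFilter as a hypothetical stand-in for the filter schema:

    from pydantic import BaseModel

    class DemoFilter(BaseModel):  # hypothetical stand-in
        source: str
        value: list

    f = DemoFilter(source="userId", value=["u-1"])
    print(f.value)      # ['u-1'] -- attribute access works on a model
    # print(f["value"])  # TypeError: 'DemoFilter' object is not subscriptable
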
@@ -162,7 +166,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
-print(f"=================UNDEFINED:{event_type}")
+logging.warning(f"=================UNDEFINED:{event_type}")
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
@@ -230,18 +234,18 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(n_stages_query, params)
-# print("---------------------------------------------------")
-# print(query)
-# print("---------------------------------------------------")
+logging.debug("---------------------------------------------------")
+logging.debug(query)
+logging.debug("---------------------------------------------------")
try:
cur.execute(query)
rows = cur.fetchall()
except Exception as err:
print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(filter_d.model_dump_json())
print("--------------------")
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
logging.warning(query.decode('UTF-8'))
logging.warning("--------- PAYLOAD -----------")
logging.warning(filter_d.model_dump_json())
logging.warning("--------------------")
raise err
return rows
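
The print -> logging swaps above change observability rather than behavior: the mogrified query now goes through logging.debug, which stays silent unless the logger is set to DEBUG, while the exception path is promoted to logging.warning. A short sketch of the effect, assuming the LOGLEVEL-driven setup the alerts modules use:

    import logging
    logging.basicConfig(level=logging.INFO)  # the app derives this from LOGLEVEL

    logging.debug("mogrified funnel query ...")       # suppressed at INFO
    logging.warning("FUNNEL SEARCH QUERY EXCEPTION")  # emitted at INFO and above
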
@@ -484,7 +488,8 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if last_stage is None:
last_stage = n_stages
if last_stage > n_stages:
-print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
+logging.debug(
+"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
last_stage = n_stages
n_critical_issues = 0
@@ -550,7 +555,7 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
stages = filter_d.events
# TODO: handle 1 stage alone
if len(stages) == 0:
-print("no stages found")
+logging.debug("no stages found")
return output, 0
elif len(stages) == 1:
# TODO: count sessions, and users for single stage
@@ -596,8 +601,6 @@ def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_
stages = filter_d.events
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
-# print(json.dumps(rows[0],indent=4))
-# return
if len(rows) == 0:
return output
# Obtain the second part of the output

View file

@@ -814,12 +814,12 @@ def add_msteams_integration(data: schemas.AddCollaborationSchema,
@app.post('/integrations/msteams/{webhookId}', tags=['integrations'])
def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
-if len(data.url) > 0:
+if len(data.url.unicode_string()) > 0:
old = MSTeams.get_integration(tenant_id=context.tenant_id, integration_id=webhookId)
if not old:
return {"errors": ["MsTeams integration not found."]}
-if old["endpoint"] != data.url:
-if not MSTeams.say_hello(data.url):
+if old["endpoint"] != data.url.unicode_string():
+if not MSTeams.say_hello(data.url.unicode_string()):
return {
"errors": [
"We couldn't send you a test message on your Microsoft Teams channel. Please verify your webhook url."]

View file

@@ -2,10 +2,11 @@ import decimal
import logging
from decouple import config
+from pydantic_core._pydantic_core import ValidationError
import schemas
-from chalicelib.core import alerts_listener
from chalicelib.core import alerts
+from chalicelib.core import alerts_listener
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
@@ -114,11 +115,17 @@ def Build(a):
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
-a["filter"]["startDate"] = -1
+a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
-full_args, query_part = sessions.search_query_parts(
-data=schemas.SessionsSearchPayloadSchema.model_validate(a["filter"]), error_status=None, errors_only=False,
-issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
+try:
+data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
+except ValidationError:
+logging.warning("Validation error for:")
+logging.warning(a["filter"])
+raise
+full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
+issue=None, project_id=a["projectId"], user_id=None,
+favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:

View file

@@ -1,13 +1,14 @@
import logging
from decouple import config
+from pydantic_core._pydantic_core import ValidationError
import schemas
-from chalicelib.core import alerts_listener, alerts_processor
from chalicelib.core import alerts
+from chalicelib.core import alerts_listener, alerts_processor
-from chalicelib.core import sessions_exp as sessions
from chalicelib.utils import pg_client, ch_client, exp_ch_helper
from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.core import sessions_exp as sessions
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
@@ -122,11 +123,18 @@ def Build(a):
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
-a["filter"]["startDate"] = -1
+a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
-full_args, query_part = sessions.search_query_parts_ch(
-data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
-issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
+try:
+data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
+except ValidationError:
+logging.warning("Validation error for:")
+logging.warning(a["filter"])
+raise
+full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False,
+issue=None, project_id=a["projectId"], user_id=None,
+favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
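
This ClickHouse variant carries the same validation fix plus a pydantic v1 -> v2 rename: parse_obj becomes model_validate (parse_obj survives in v2 only as a deprecated alias). A one-line sketch with a hypothetical DemoSchema:

    from pydantic import BaseModel

    class DemoSchema(BaseModel):  # hypothetical stand-in
        sort: str

    data = DemoSchema.model_validate({"sort": "session_id"})  # v2 API, as above
    # DemoSchema.parse_obj({"sort": "session_id"})  # v1 spelling; deprecated in v2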

View file

@@ -1,10 +1,13 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"
+import logging
from decouple import config
-from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.core import events, metadata
+from chalicelib.utils import sql_helper as sh
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
@@ -23,11 +26,13 @@ from collections import defaultdict
from psycopg2.extras import RealDictRow
from chalicelib.utils import pg_client, helper
+logger = logging.getLogger(__name__)
SIGNIFICANCE_THRSH = 0.4
+# Taha: the value 24 was estimated in v1.15
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
-21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
+21: 2.080, 22: 2.074, 23: 2.069, 24: 2.067, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045,
+30: 2.042}
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
@@ -95,7 +100,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
+sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@@ -104,24 +109,24 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
if f.source in meta_keys.keys():
first_stage_extra_constraints.append(
sh.multi_conditions(
-f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
+f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
@@ -168,7 +173,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
-print(f"=================UNDEFINED:{event_type}")
+logging.warning(f"=================UNDEFINED:{event_type}")
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
@@ -236,18 +241,18 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(n_stages_query, params)
-# print("---------------------------------------------------")
-# print(query)
-# print("---------------------------------------------------")
+logging.debug("---------------------------------------------------")
+logging.debug(query)
+logging.debug("---------------------------------------------------")
try:
cur.execute(query)
rows = cur.fetchall()
except Exception as err:
print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(filter_d.model_dump_json())
print("--------------------")
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
logging.warning(query.decode('UTF-8'))
logging.warning("--------- PAYLOAD -----------")
logging.warning(filter_d.model_dump_json())
logging.warning("--------------------")
raise err
return rows
@@ -490,7 +495,8 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if last_stage is None:
last_stage = n_stages
if last_stage > n_stages:
-print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
+logging.debug(
+"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
last_stage = n_stages
n_critical_issues = 0
@@ -556,7 +562,7 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
stages = filter_d.events
# TODO: handle 1 stage alone
if len(stages) == 0:
-print("no stages found")
+logging.debug("no stages found")
return output, 0
elif len(stages) == 1:
# TODO: count sessions, and users for single stage
@@ -602,8 +608,6 @@ def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_
stages = filter_d.events
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
-# print(json.dumps(rows[0],indent=4))
-# return
if len(rows) == 0:
return output
# Obtain the second part of the output

View file

@@ -1,9 +1,12 @@
__maintainer__ = "KRAIEM Taha Yassine"
+import logging
from decouple import config
-from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.core import events, metadata
+from chalicelib.utils import sql_helper as sh
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
@@ -22,11 +25,13 @@ from collections import defaultdict
from psycopg2.extras import RealDictRow
from chalicelib.utils import pg_client, helper
+logger = logging.getLogger(__name__)
SIGNIFICANCE_THRSH = 0.4
+# Taha: the value 24 was estimated in v1.15
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
-21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
+21: 2.080, 22: 2.074, 23: 2.069, 24: 2.067, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045,
+30: 2.042}
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
@@ -94,7 +99,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
+sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@@ -103,24 +108,24 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
if f.source in meta_keys.keys():
first_stage_extra_constraints.append(
sh.multi_conditions(
-f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
+f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
-sh.multi_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
+sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
@@ -167,7 +172,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
-print(f"=================UNDEFINED:{event_type}")
+logging.warning(f"=================UNDEFINED:{event_type}")
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
@@ -235,18 +240,18 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(n_stages_query, params)
-# print("---------------------------------------------------")
-# print(query)
-# print("---------------------------------------------------")
+logging.debug("---------------------------------------------------")
+logging.debug(query)
+logging.debug("---------------------------------------------------")
try:
cur.execute(query)
rows = cur.fetchall()
except Exception as err:
print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(filter_d.model_dump_json())
print("--------------------")
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
logging.warning(query.decode('UTF-8'))
logging.warning("--------- PAYLOAD -----------")
logging.warning(filter_d.model_dump_json())
logging.warning("--------------------")
raise err
return rows
@@ -489,7 +494,8 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if last_stage is None:
last_stage = n_stages
if last_stage > n_stages:
-print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
+logging.debug(
+"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
last_stage = n_stages
n_critical_issues = 0
@@ -555,7 +561,7 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
stages = filter_d.events
# TODO: handle 1 stage alone
if len(stages) == 0:
-print("no stages found")
+logging.debug("no stages found")
return output, 0
elif len(stages) == 1:
# TODO: count sessions, and users for single stage
@@ -601,8 +607,6 @@ def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_
stages = filter_d.events
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
-# print(json.dumps(rows[0],indent=4))
-# return
if len(rows) == 0:
return output
# Obtain the second part of the output