Merge remote-tracking branch 'origin/api-v1.10.0' into dev

This commit is contained in:
Taha Yassine Kraiem 2023-02-28 13:01:15 +01:00
commit 20f83f47b9
3 changed files with 21 additions and 15 deletions

View file

@@ -57,7 +57,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
f_k = f"f_value{i}"
values = {**values,
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
@@ -166,7 +166,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
value_key=f"value{i + 1}")}
if sh.is_negation_operator(op) and i > 0:
op = sh.reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
@@ -180,7 +180,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
main_condition = "TRUE"
else:
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -574,8 +574,10 @@ def get_top_insights(filter_d, project_id):
# Obtain the first part of the output
stages_list = get_stages(stages, rows)
# Obtain the second part of the output
total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"), drop_only=True)
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"),
drop_only=True)
return stages_list, total_drop_due_to_issues

View file

@@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
f_k = f"f_value{i}"
values = {**values,
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
@@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
value_key=f"value{i + 1}")}
if sh.is_negation_operator(op) and i > 0:
op = sh.reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
@@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
main_condition = "TRUE"
else:
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id):
# Obtain the first part of the output
stages_list = get_stages(stages, rows)
# Obtain the second part of the output
total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"), drop_only=True)
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"),
drop_only=True)
return stages_list, total_drop_due_to_issues

View file

@@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
f_k = f"f_value{i}"
values = {**values,
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
@@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
continue
values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
value_key=f"value{i + 1}")}
if sh.is_negation_operator(op) and i > 0:
op = sh.reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
@@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
main_condition = "TRUE"
else:
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id):
# Obtain the first part of the output
stages_list = get_stages(stages, rows)
# Obtain the second part of the output
total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"), drop_only=True)
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
first_stage=filter_d.get("firstStage"),
last_stage=filter_d.get("lastStage"),
drop_only=True)
return stages_list, total_drop_due_to_issues