From ea036f0e9b602dc3e0731ac8e5afc71bfb8d6218 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 28 Feb 2023 13:00:42 +0100
Subject: [PATCH 1/2] feat(chalice): fixed funnels

---
 api/chalicelib/core/significance.py        | 12 +++++++-----
 ee/api/chalicelib/core/significance.py     | 12 +++++++-----
 ee/api/chalicelib/core/significance_exp.py | 12 +++++++-----
 3 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index 8477cc985..822753be0 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -57,7 +57,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
         f_k = f"f_value{i}"
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
-                                          value_key=f_k)}
+                                    value_key=f_k)}
         if filter_type == schemas.FilterType.user_browser:
             # op = sessions.__get_sql_operator_multiple(f["operator"])
             first_stage_extra_constraints.append(
@@ -166,7 +166,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             continue
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
-                                          value_key=f"value{i + 1}")}
+                                    value_key=f"value{i + 1}")}
         if sh.is_negation_operator(op) and i > 0:
             op = sh.reverse_sql_operator(op)
             main_condition = "left_not.session_id ISNULL"
@@ -180,7 +180,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
            main_condition = "TRUE"
        else:
            main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                      values=s["value"], value_key=f"value{i + 1}")
+                                                values=s["value"], value_key=f"value{i + 1}")
        n_stages_query.append(f"""
    (SELECT main.session_id,
            {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -574,8 +574,10 @@ def get_top_insights(filter_d, project_id):
     # Obtain the first part of the output
     stages_list = get_stages(stages, rows)
     # Obtain the second part of the output
-    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
-                                          last_stage=filter_d.get("lastStage"), drop_only=True)
+    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
+                                                                          first_stage=filter_d.get("firstStage"),
+                                                                          last_stage=filter_d.get("lastStage"),
+                                                                          drop_only=True)
 
     return stages_list, total_drop_due_to_issues
 
diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py
index 4e02eee41..bb9a53aaa 100644
--- a/ee/api/chalicelib/core/significance.py
+++ b/ee/api/chalicelib/core/significance.py
@@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
         f_k = f"f_value{i}"
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
-                                          value_key=f_k)}
+                                    value_key=f_k)}
         if filter_type == schemas.FilterType.user_browser:
             # op = sessions.__get_sql_operator_multiple(f["operator"])
             first_stage_extra_constraints.append(
@@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             continue
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
-                                          value_key=f"value{i + 1}")}
+                                    value_key=f"value{i + 1}")}
         if sh.is_negation_operator(op) and i > 0:
             op = sh.reverse_sql_operator(op)
             main_condition = "left_not.session_id ISNULL"
@@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
            main_condition = "TRUE"
        else:
            main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                      values=s["value"], value_key=f"value{i + 1}")
+                                                values=s["value"], value_key=f"value{i + 1}")
        n_stages_query.append(f"""
    (SELECT main.session_id,
            {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id):
     # Obtain the first part of the output
     stages_list = get_stages(stages, rows)
     # Obtain the second part of the output
-    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
-                                          last_stage=filter_d.get("lastStage"), drop_only=True)
+    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
+                                                                          first_stage=filter_d.get("firstStage"),
+                                                                          last_stage=filter_d.get("lastStage"),
+                                                                          drop_only=True)
 
     return stages_list, total_drop_due_to_issues
 
diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py
index 4e02eee41..bb9a53aaa 100644
--- a/ee/api/chalicelib/core/significance_exp.py
+++ b/ee/api/chalicelib/core/significance_exp.py
@@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
         f_k = f"f_value{i}"
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
-                                          value_key=f_k)}
+                                    value_key=f_k)}
         if filter_type == schemas.FilterType.user_browser:
             # op = sessions.__get_sql_operator_multiple(f["operator"])
             first_stage_extra_constraints.append(
@@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             continue
         values = {**values,
                   **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
-                                          value_key=f"value{i + 1}")}
+                                    value_key=f"value{i + 1}")}
         if sh.is_negation_operator(op) and i > 0:
             op = sh.reverse_sql_operator(op)
             main_condition = "left_not.session_id ISNULL"
@@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
            main_condition = "TRUE"
        else:
            main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                      values=s["value"], value_key=f"value{i + 1}")
+                                                values=s["value"], value_key=f"value{i + 1}")
        n_stages_query.append(f"""
    (SELECT main.session_id,
            {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id):
     # Obtain the first part of the output
     stages_list = get_stages(stages, rows)
     # Obtain the second part of the output
-    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
-                                          last_stage=filter_d.get("lastStage"), drop_only=True)
+    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows,
+                                                                          first_stage=filter_d.get("firstStage"),
+                                                                          last_stage=filter_d.get("lastStage"),
+                                                                          drop_only=True)
 
     return stages_list, total_drop_due_to_issues
 
From 557024bc80ae7540a8c8cc776429904a31316d13 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Tue, 28 Feb 2023 12:38:48 +0100
Subject: [PATCH 2/2] fix(ui) - table card - metricOf value

---
 frontend/app/mstore/metricStore.ts | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/frontend/app/mstore/metricStore.ts b/frontend/app/mstore/metricStore.ts
index 9ef49759d..dfdeaf0c9 100644
--- a/frontend/app/mstore/metricStore.ts
+++ b/frontend/app/mstore/metricStore.ts
@@ -130,6 +130,10 @@ export default class MetricStore {
 
     obj['metricValue'] = [];
 
+    if (value === TABLE) {
+      obj['metricOf'] = 'userId';
+    }
+
     if (value === TABLE || value === TIMESERIES) {
       obj['viewType'] = 'table';
     }