API v1.15.0 (#1526)

* feat(chalice): upgraded dependencies

* feat(chalice): changed path analysis schema

* feat(DB): click coordinate support

* feat(chalice): changed path analysis issues schema
feat(chalice): upgraded dependencies

* fix(chalice): fixed pydantic issue

* refactor(chalice): refresh token validator

* feat(chalice): role restrictions

* feat(chalice): EE path analysis changes

* refactor(DB): changed creation queries
refactor(DB): changed delete queries
feat(DB): support new path analysis payload

* feat(chalice): save path analysis card

* feat(chalice): restrict access

* feat(chalice): restrict access

* feat(chalice): EE save new path analysis card

* refactor(chalice): path analysis

* feat(chalice): path analysis new query

* fix(chalice): configurable CH config

* fix(chalice): assist autocomplete

* refactor(chalice): refactored permissions

* refactor(chalice): changed log level

* refactor(chalice): upgraded dependencies

* refactor(chalice): changed path analysis query

* refactor(chalice): changed path analysis query

* refactor(chalice): upgraded dependencies
refactor(alerts): upgraded dependencies
refactor(crons): upgraded dependencies

* feat(chalice): path analysis ignore start point

* feat(chalice): path analysis in progress

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis new query
refactor(chalice): authorizers

* refactor(chalice): refactored authorizer

* fix(chalice): fixed create card of PathAnalysis

* refactor(chalice): compute link-percentage for Path Analysis

* refactor(chalice): remove null starting point from Path Analysis

* feat(chalice): path analysis CH query

* refactor(chalice): changed Path Analysis links-value
fix(chalice): fixed search notes for EE

* feat(chalice): path analysis enhanced query results

* feat(chalice): include timezone in search sessions response

* refactor(chalice): refactored logs

* refactor(chalice): refactored logs
feat(chalice): get path analysis issues

* fix(chalice): fixed path analysis issues pagination

* fix(chalice): sessions-search handle null values

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis support mixed events with start-point

* fix(chalice): PathAnalysis fixed eventType value when metricValue is missing

* fix(chalice): PathAnalysis fixed wrong super-class model for update card

* fix(chalice): PathAnalysis fixed search issues
refactor(chalice): upgraded dependencies

* fix(chalice): enforce isEvent if missing

* fix(chalice): enforce isEvent if missing

* refactor(chalice): refactored custom-metrics

* refactor(chalice): small changes

* feat(chalice): path analysis EE new query

* fix(chalice): fixed hide-excess state for Path Analysis

* fix(chalice): fixed update start point and excludes for Path Analysis
Kraiem Taha Yassine 2023-10-18 18:12:22 +02:00 committed by GitHub
parent ca84dd0c0b
commit b7a8735cc4
8 changed files with 169 additions and 498 deletions


@@ -1,12 +1,12 @@
import json
import logging
from typing import Union
from decouple import config
from fastapi import HTTPException, status
import schemas
from chalicelib.core import sessions, funnels, errors, issues, metrics, click_maps, sessions_mobs, product_analytics
from chalicelib.core import sessions, funnels, errors, issues, click_maps, sessions_mobs, product_analytics, \
custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import StorageClient
@@ -85,10 +85,6 @@ def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_click_map(data: schemas.CardSchema):
return data.metric_type == schemas.MetricType.click_map
def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True):
if len(data.series) == 0:
return None
@@ -171,8 +167,10 @@ def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
if data.is_template:
return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
if data.is_predefined:
return custom_metrics_predefined.get_metric(key=data.metric_of,
project_id=project_id,
data=data.model_dump())
supported = {
schemas.MetricType.timeseries: __get_timeseries_chart,
@@ -320,7 +318,7 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
if data.is_template:
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.issues:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
@@ -338,14 +336,15 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
r = {"start_point": [s.model_dump() for s in data.start_point],
"start_type": data.start_type,
"exclude": [e.model_dump() for e in data.excludes]}
"excludes": [e.model_dump() for e in data.excludes],
"hideExcess": data.hide_excess}
return r
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if __is_click_map(data):
if data.metric_type == schemas.MetricType.click_map:
session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
data=data, include_mobs=False)
if session_data is not None:
@@ -423,6 +422,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
if i not in u_series_ids:
d_series_ids.append(i)
params["d_series_ids"] = tuple(d_series_ids)
params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
with pg_client.PostgresClient() as cur:
sub_queries = []
@@ -455,7 +457,8 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now()),
default_config = %(config)s,
thumbnail = %(thumbnail)s
thumbnail = %(thumbnail)s,
card_info = %(card_info)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
@@ -553,9 +556,10 @@ def delete_card(project_id, metric_id, user_id):
def __get_path_analysis_attributes(row):
card_info = row.pop("cardInfo")
row["exclude"] = card_info.get("exclude", [])
row["excludes"] = card_info.get("excludes", [])
row["startPoint"] = card_info.get("startPoint", [])
row["startType"] = card_info.get("startType", "start")
row["hideExcess"] = card_info.get("hideExcess", False)
return row
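
For reference, the card_info payload persisted by __get_path_analysis_card_info looks roughly like the dict below (all values invented for illustration). Note the mixed casing: the payload is written with snake_case keys plus a camelCase "hideExcess", while __get_path_analysis_attributes reads "startPoint"/"startType" back in camelCase, presumably because rows are camelCased by the helper layer before this function runs.

# Illustrative shape of the stored card_info JSON; all values are made up.
card_info = {
    "start_point": [{"type": "location", "operator": "is", "value": ["/checkout"]}],
    "start_type": "start",
    "excludes": [{"type": "click", "operator": "is", "value": ["logout"]}],
    "hideExcess": True,
}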
@@ -690,9 +694,11 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
raw_metric["startTimestamp"] = data.startTimestamp
raw_metric["endTimestamp"] = data.endTimestamp
metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump())
elif __is_click_map(metric):
if metric.is_predefined:
return custom_metrics_predefined.get_metric(key=metric.metric_of,
project_id=project_id,
data=data.model_dump())
elif metric.metric_type == schemas.MetricType.click_map:
if raw_metric["data"]:
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])
@@ -709,54 +715,3 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
return raw_metric["data"]
return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric)
def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data)
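
Both the legacy get_predefined_metric above and the new custom_metrics_predefined.get_metric it moves into resolve a metric key through a plain dict of handlers, falling back to a no-op for unknown keys. Below is a minimal self-contained sketch of that dispatch pattern; all names in it are illustrative, not the real schemas/metrics API. One detail worth noting: a bare `lambda *args: None` default, as used above, would raise a TypeError for an unknown key because the chosen handler is invoked with keyword arguments, so the sketch accepts **kwargs.

from enum import Enum

class MetricKey(str, Enum):
    count_sessions = "countSessions"
    avg_cpu = "avgCpu"

def get_processed_sessions(project_id: int, **data):
    return {"projectId": project_id, "metric": "countSessions", **data}

def get_avg_cpu(project_id: int, **data):
    return {"projectId": project_id, "metric": "avgCpu", **data}

SUPPORTED = {
    MetricKey.count_sessions: get_processed_sessions,
    MetricKey.avg_cpu: get_avg_cpu,
}

def get_metric(key, project_id: int, data: dict):
    # Unknown keys fall back to a no-op handler; **kwargs is needed because
    # the selected handler is always called with keyword arguments.
    return SUPPORTED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)

print(get_metric(MetricKey.avg_cpu, 1, {"density": 7}))  # {'projectId': 1, 'metric': 'avgCpu', 'density': 7}
print(get_metric("unknown", 1, {}))                      # None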


@@ -0,0 +1,61 @@
import logging
from typing import Union
import schemas
from chalicelib.core import metrics
logger = logging.getLogger(__name__)
def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data)


@@ -62,333 +62,6 @@ JOURNEY_TYPES = {
}
# query: Q4, the result is correct,
# startPoints are computed before ranked_events to reduce the number of window functions over rows
# replaced time_to_target by time_from_previous
# compute avg_time_from_previous at the same level as sessions_count
# sort by top 5 according to sessions_count at the CTE level
# final part project data without grouping
def path_analysis_deprecated(project_id: int, data: schemas.CardPathAnalysis):
sub_events = []
start_points_from = "pre_ranked_events"
start_points_conditions = []
sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s",
"project_id=%(project_id)s", "events_count > 1", "duration>0"]
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"],
"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
else:
for v in data.metric_value:
if JOURNEY_TYPES.get(v):
sub_events.append({"table": JOURNEY_TYPES[v]["table"],
"column": JOURNEY_TYPES[v]["column"],
"eventType": v})
extra_values = {}
reverse = data.start_type == "end"
for i, sf in enumerate(data.start_point):
f_k = f"start_point_{i}"
op = sh.get_sql_operator(sf.operator)
is_not = sh.is_negation_operator(sf.operator)
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
start_points_conditions.append(f"(event_type='{sf.type}' AND " +
sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
exclusions = {}
for i, ef in enumerate(data.excludes):
if ef.type in data.metric_value:
f_k = f"exclude_{i}"
extra_values = {**extra_values, **sh.multi_values(ef.value, value_key=f_k)}
exclusions[ef.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[ef.type]["column"]} != %({f_k})s', ef.value, is_not=True,
value_key=f_k)]
meta_keys = None
for i, f in enumerate(data.series[0].filter.filters):
op = sh.get_sql_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_not = sh.is_negation_operator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
f_k = f"f_value_{i}"
extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)}
if not is_any and len(f.value) == 0:
continue
# ---- meta-filters
if f.type == schemas.FilterType.user_browser:
if is_any:
sessions_conditions.append('user_browser IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_os]:
if is_any:
sessions_conditions.append('user_os IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_device]:
if is_any:
sessions_conditions.append('user_device IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_country]:
if is_any:
sessions_conditions.append('user_country IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_city:
if is_any:
sessions_conditions.append('user_city IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_state:
if is_any:
sessions_conditions.append('user_state IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.utm_source]:
if is_any:
sessions_conditions.append('utm_source IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_source IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_medium]:
if is_any:
sessions_conditions.append('utm_medium IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_medium IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_campaign]:
if is_any:
sessions_conditions.append('utm_campaign IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_campaign IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
sessions_conditions.append("duration >= %(minDuration)s")
extra_values["minDuration"] = f.value[0]
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
sessions_conditions.append("duration <= %(maxDuration)s")
extra_values["maxDuration"] = f.value[1]
elif f.type == schemas.FilterType.referrer:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
sessions_conditions.append('base_referrer IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.metadata:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if f.source in meta_keys.keys():
if is_any:
sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
elif is_undefined:
sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
sessions_conditions.append(
sh.multi_conditions(
f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
if is_any:
sessions_conditions.append('user_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('user_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
sessions_conditions.append('user_anonymous_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('user_anonymous_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
sessions_conditions.append('rev_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('rev_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.platform:
# op = __ sh.get_sql_operator(f.operator)
sessions_conditions.append(
sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.issue:
if is_any:
sessions_conditions.append("array_length(issue_types, 1) > 0")
else:
sessions_conditions.append(
sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.events_count:
sessions_conditions.append(
sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
events_subquery = []
for t in sub_events:
sub_events_conditions = ["e.timestamp >= %(startTimestamp)s",
"e.timestamp < %(endTimestamp)s"] + exclusions.get(t["eventType"], [])
events_subquery.append(f"""\
SELECT session_id, {t["column"]} AS e_value, timestamp, '{t["eventType"]}' AS event_type
FROM {t["table"]} AS e
INNER JOIN sub_sessions USING (session_id)
WHERE {" AND ".join(sub_events_conditions)}""")
events_subquery = "\n UNION ALL \n".join(events_subquery)
if reverse:
path_direction = "DESC"
else:
path_direction = ""
if len(start_points_conditions) == 0:
start_points_from = """(SELECT event_type, e_value
FROM pre_ranked_events
WHERE event_number_in_session = 1
GROUP BY event_type, e_value
ORDER BY count(1) DESC
LIMIT 1) AS top_start_events
INNER JOIN pre_ranked_events
USING (event_type, e_value)"""
else:
start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")"]
start_points_conditions.append("event_number_in_session = 1")
steps_query = ["""n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
AVG(time_from_previous) AS avg_time_from_previous,
COUNT(1) AS sessions_count
FROM ranked_events INNER JOIN start_points USING (session_id)
WHERE event_number_in_session = 1
AND next_value IS NOT NULL
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC
LIMIT %(eventThresholdNumberInGroup)s)"""]
projection_query = ["""(SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count,
avg_time_from_previous
FROM n1)"""]
for i in range(2, data.density):
steps_query.append(f"""n{i} AS (SELECT *
FROM (SELECT re.event_number_in_session,
re.event_type,
re.e_value,
re.next_type,
re.next_value,
AVG(re.time_from_previous) AS avg_time_from_previous,
COUNT(1) AS sessions_count
FROM ranked_events AS re
INNER JOIN n{i - 1} ON (n{i - 1}.next_value = re.e_value)
WHERE re.event_number_in_session = {i}
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value) AS sub_level
ORDER BY sessions_count DESC
LIMIT %(eventThresholdNumberInGroup)s)""")
projection_query.append(f"""(SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count,
avg_time_from_previous
FROM n{i})""")
with pg_client.PostgresClient() as cur:
pg_query = f"""\
WITH sub_sessions AS (SELECT session_id
FROM public.sessions
WHERE {" AND ".join(sessions_conditions)}),
sub_events AS ({events_subquery}),
pre_ranked_events AS (SELECT *
FROM (SELECT session_id,
event_type,
e_value,
timestamp,
row_number() OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS event_number_in_session
FROM sub_events
ORDER BY session_id) AS full_ranked_events
WHERE event_number_in_session < %(density)s),
start_points AS (SELECT session_id
FROM {start_points_from}
WHERE {" AND ".join(start_points_conditions)}),
ranked_events AS (SELECT *,
LEAD(e_value, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_value,
LEAD(event_type, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_type,
abs(LAG(timestamp, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) -
timestamp) AS time_from_previous
FROM pre_ranked_events INNER JOIN start_points USING (session_id)),
{",".join(steps_query)}
{"UNION ALL".join(projection_query)};"""
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
**extra_values}
query = cur.mogrify(pg_query, params)
_now = time()
cur.execute(query)
if True or time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
logger.info("----------------------")
logger.info(query)
logger.info("----------------------")
rows = cur.fetchall()
return __transform_journey(rows=rows, reverse_path=reverse)
# query: Q5, the result is correct,
# startPoints are computed before ranked_events to reduce the number of window functions over rows
# replaced time_to_target by time_from_previous
@@ -718,10 +391,10 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
cur.execute(query)
if time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
logger.info("----------------------")
logger.info(query)
logger.info("----------------------")
logger.warning(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
logger.warning("----------------------")
logger.warning(query)
logger.warning("----------------------")
rows = cur.fetchall()
return __transform_journey(rows=rows, reverse_path=reverse)
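
The deprecated query above derives its journey graph from three window functions: row_number() ranks each session's events into event_number_in_session, LEAD() pulls the following event's value, and LAG() on the timestamp yields time_from_previous. Here is a toy, pure-Python rendering of those semantics over made-up in-memory events, for readers who want to sanity-check the logic; it is not part of the commit.

# Pure-Python sketch of the row_number()/LEAD()/LAG() logic the query relies on.
from itertools import groupby
from operator import itemgetter

events = [  # (session_id, timestamp, e_value), data invented for illustration
    (1, 100, "/home"), (1, 250, "/search"), (1, 900, "/product"),
    (2, 50, "/home"), (2, 75, "/cart"),
]

journey_rows = []
for session_id, group in groupby(sorted(events), key=itemgetter(0)):
    group = list(group)
    for n, (_, ts, value) in enumerate(group, start=1):
        next_value = group[n][2] if n < len(group) else None  # LEAD(e_value, 1)
        prev_ts = group[n - 2][1] if n > 1 else None          # LAG(timestamp, 1)
        journey_rows.append({
            "session_id": session_id,
            "event_number_in_session": n,                     # row_number() per session
            "e_value": value,
            "next_value": next_value,
            "time_from_previous": None if prev_ts is None else abs(prev_ts - ts),
        })

for row in journey_rows:
    print(row)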


@@ -1029,7 +1029,7 @@ class __CardSchema(CardSessionsSchema):
@computed_field
@property
def is_template(self) -> bool:
def is_predefined(self) -> bool:
return self.metric_type in [MetricType.errors, MetricType.performance,
MetricType.resources, MetricType.web_vital]
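
The schema change above renames the computed is_template flag to is_predefined without changing its logic. A minimal pydantic v2 sketch of the same @computed_field/@property combination; the enum values are assumptions for illustration only.

from enum import Enum
from pydantic import BaseModel, computed_field

class MetricType(str, Enum):
    errors = "errors"
    performance = "performance"
    resources = "resources"
    web_vital = "webVitals"
    timeseries = "timeseries"

class CardSchema(BaseModel):
    metric_type: MetricType

    @computed_field
    @property
    def is_predefined(self) -> bool:
        # Predefined cards are routed to custom_metrics_predefined.get_metric.
        return self.metric_type in [MetricType.errors, MetricType.performance,
                                    MetricType.resources, MetricType.web_vital]

print(CardSchema(metric_type=MetricType.errors).is_predefined)      # True
print(CardSchema(metric_type=MetricType.timeseries).model_dump())   # computed field included in dump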

ee/api/.gitignore

@@ -188,6 +188,7 @@ Pipfile.lock
/chalicelib/core/collaboration_msteams.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/countries.py
/chalicelib/core/custom_metrics_predefined.py
/chalicelib/core/dashboards.py
#exp /chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
@@ -250,6 +251,7 @@ Pipfile.lock
/routers/subs/__init__.py
/routers/__init__.py
/chalicelib/core/assist.py
/chalicelib/core/assist_stats.py
/auth/__init__.py
/auth/auth_apikey.py
/build.sh
@@ -272,4 +274,4 @@ Pipfile.lock
#exp /chalicelib/core/dashboards.py
/schemas/overrides.py
/schemas/schemas.py
/chalicelib/core/authorizers.py
/chalicelib/core/authorizers.py


@@ -1,13 +1,12 @@
import json
import logging
from typing import Union
from decouple import config
from fastapi import HTTPException, status
import schemas
from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite, \
product_analytics
from chalicelib.core import funnels, issues, click_maps, sessions_insights, sessions_mobs, sessions_favorite, \
product_analytics, custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import StorageClient, extra
@@ -97,10 +96,6 @@ def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_click_map(data: schemas.CardSchema):
return data.metric_type == schemas.MetricType.click_map
def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True):
if len(data.series) == 0:
return None
@@ -192,8 +187,10 @@ def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
if data.is_template:
return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
if data.is_predefined:
return custom_metrics_predefined.get_metric(key=data.metric_of,
project_id=project_id,
data=data.model_dump())
supported = {
schemas.MetricType.timeseries: __get_timeseries_chart,
@@ -341,7 +338,7 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
if data.is_template:
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.issues:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
@@ -359,14 +356,15 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
r = {"start_point": [s.model_dump() for s in data.start_point],
"start_type": data.start_type,
"exclude": [e.model_dump() for e in data.excludes]}
"excludes": [e.model_dump() for e in data.excludes],
"hideExcess": data.hide_excess}
return r
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if __is_click_map(data):
if data.metric_type == schemas.MetricType.click_map:
session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
data=data, include_mobs=False)
if session_data is not None:
@@ -456,6 +454,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
if i not in u_series_ids:
d_series_ids.append(i)
params["d_series_ids"] = tuple(d_series_ids)
params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
with pg_client.PostgresClient() as cur:
sub_queries = []
@@ -488,7 +489,8 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now()),
default_config = %(config)s,
thumbnail = %(thumbnail)s
thumbnail = %(thumbnail)s,
card_info = %(card_info)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
@@ -600,9 +602,10 @@ def delete_card(project_id, metric_id, user_id):
def __get_path_analysis_attributes(row):
card_info = row.pop("cardInfo")
row["exclude"] = card_info.get("exclude", [])
row["excludes"] = card_info.get("excludes", [])
row["startPoint"] = card_info.get("startPoint", [])
row["startType"] = card_info.get("startType", "start")
row["hideExcess"] = card_info.get("hideExcess", False)
return row
@@ -737,9 +740,11 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
raw_metric["startTimestamp"] = data.startTimestamp
raw_metric["endTimestamp"] = data.endTimestamp
metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump())
elif __is_click_map(metric):
if metric.is_predefined:
return custom_metrics_predefined.get_metric(key=metric.metric_of,
project_id=project_id,
data=data.model_dump())
elif metric.metric_type == schemas.MetricType.click_map:
if raw_metric["data"]:
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])
@@ -756,54 +761,3 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
return raw_metric["data"]
return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric)
def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data)


@@ -64,15 +64,17 @@ JOURNEY_TYPES = {
}
# query: Q4, the result is correct,
# query: Q5, the result is correct,
# startPoints are computed before ranked_events to reduce the number of window functions over rows
# replaced time_to_target by time_from_previous
# compute avg_time_from_previous at the same level as sessions_count
# sort by top 5 according to sessions_count at the CTE level
# final part project data without grouping
# if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sub_events = []
start_points_conditions = []
start_point_conditions = []
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
@@ -89,24 +91,33 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
','.join([f"event_type='{s['eventType']}',{s['column']}" for s in sub_events[:-1]]),
sub_events[-1]["column"])
extra_values = {}
start_join = []
initial_event_cte = ""
reverse = data.start_type == "end"
for i, sf in enumerate(data.start_point):
f_k = f"start_point_{i}"
op = sh.get_sql_operator(sf.operator)
is_not = sh.is_negation_operator(sf.operator)
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
start_points_conditions.append(f"(event_type='{JOURNEY_TYPES[sf.type]['eventType']}' AND " +
event_column = JOURNEY_TYPES[sf.type]['column']
event_type = JOURNEY_TYPES[sf.type]['eventType']
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k),
f"start_event_type_{i}": event_type}
start_points_conditions.append(f"(event_type=%(start_event_type_{i})s AND " +
sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
start_point_conditions.append(f"(event_type=%(start_event_type_{i})s AND " +
sh.multi_conditions(f'{event_column} {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
exclusions = {}
for i, sf in enumerate(data.excludes):
if sf.type in data.metric_value:
for i, ef in enumerate(data.excludes):
if ef.type in data.metric_value:
f_k = f"exclude_{i}"
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
exclusions[sf.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[sf.type]["column"]} != %({f_k})s', sf.value, is_not=True,
extra_values = {**extra_values, **sh.multi_values(ef.value, value_key=f_k)}
exclusions[ef.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[ef.type]["column"]} != %({f_k})s', ef.value, is_not=True,
value_key=f_k)]
sessions_conditions = []
@@ -275,6 +286,11 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
if reverse:
path_direction = "DESC"
else:
path_direction = ""
# ch_sub_query = __get_basic_constraints(table_name="experimental.events", data=data.model_dump())
ch_sub_query = __get_basic_constraints(table_name="events")
selected_event_type_sub_query = []
@@ -287,7 +303,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
main_table = exp_ch_helper.get_main_events_table(data.startTimestamp)
if len(sessions_conditions) > 0:
sessions_conditions.append(f"sessions.project_id = %(project_id)s")
sessions_conditions.append(f"sessions.project_id = toUInt16(%(project_id)s)")
sessions_conditions.append(f"sessions.datetime >= toDateTime(%(startTimestamp)s / 1000)")
sessions_conditions.append(f"sessions.datetime < toDateTime(%(endTimestamp)s / 1000)")
sessions_conditions.append("sessions.events_count>1")
@@ -311,14 +327,21 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
else:
start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")",
"event_number_in_session = 1"]
start_point_conditions = ["(" + " OR ".join(start_point_conditions) + ")",
"events.project_id = toUInt16(%(project_id)s)",
"events.datetime >= toDateTime(%(startTimestamp)s / 1000)",
"events.datetime < toDateTime(%(endTimestamp)s / 1000)"]
start_points_subquery = f"""SELECT DISTINCT session_id
FROM pre_ranked_events
WHERE {" AND ".join(start_points_conditions)}"""
initial_event_cte = f"""\
initial_event AS (SELECT session_id, MIN(datetime) AS start_event_timestamp
FROM {main_table}
WHERE {" AND ".join(start_point_conditions)}
GROUP BY session_id),"""
ch_sub_query.append("events.datetime>=initial_event.start_event_timestamp")
main_table += " INNER JOIN initial_event USING (session_id)"
del start_points_conditions
if reverse:
path_direction = "DESC"
else:
path_direction = ""
steps_query = ["""n1 AS (SELECT event_number_in_session,
event_type,
@@ -370,12 +393,13 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
_now = time()
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 6 if data.hide_excess else 8,
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
**extra_values}
ch_query1 = f"""\
CREATE TEMPORARY TABLE pre_ranked_events_{time_key} AS
WITH pre_ranked_events AS (SELECT *
WITH {initial_event_cte}
pre_ranked_events AS (SELECT *
FROM (SELECT session_id,
event_type,
datetime,
@@ -390,11 +414,11 @@ WITH pre_ranked_events AS (SELECT *
SELECT *
FROM pre_ranked_events;"""
ch.execute(query=ch_query1, params=params)
if time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.info("---------Q1-----------")
logger.info(ch.format(ch_query1, params))
logger.info("----------------------")
if True or time() - _now > 2:
logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.warning("---------Q1-----------")
logger.warning(ch.format(ch_query1, params))
logger.warning("----------------------")
_now = time()
ch_query2 = f"""\
@@ -417,11 +441,11 @@ WITH pre_ranked_events AS (SELECT *
SELECT *
FROM ranked_events;"""
ch.execute(query=ch_query2, params=params)
if time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.info("---------Q2-----------")
logger.info(ch.format(ch_query2, params))
logger.info("----------------------")
if True or time() - _now > 2:
logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.warning("---------Q2-----------")
logger.warning(ch.format(ch_query2, params))
logger.warning("----------------------")
_now = time()
ch_query3 = f"""\
@@ -433,11 +457,11 @@ FROM ({" UNION ALL ".join(projection_query)}) AS chart_steps
ORDER BY event_number_in_session;"""
rows = ch.execute(query=ch_query3, params=params)
if time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.info("---------Q3-----------")
logger.info(ch.format(ch_query3, params))
logger.info("----------------------")
if True or time() - _now > 2:
logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
logger.warning("---------Q3-----------")
logger.warning(ch.format(ch_query3, params))
logger.warning("----------------------")
return __transform_journey(rows=rows, reverse_path=reverse)
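
The new initial_event CTE is what lets a start point match mid-session events: per session it takes MIN(datetime) over events matching the start-point filter, and the INNER JOIN plus the events.datetime >= initial_event.start_event_timestamp constraint drop everything before that first match, as well as whole sessions that never match. A plain-Python sketch of that clipping behaviour, with made-up data:

# Illustrative clipping logic; not part of the commit.
def clip_to_start(session_events, is_start):
    """session_events: [(timestamp, value)] for one session, sorted by timestamp."""
    # initial_event: MIN(datetime) of the events matching the start-point filter
    start_ts = min((ts for ts, value in session_events if is_start(value)), default=None)
    if start_ts is None:
        # No matching start event: the INNER JOIN drops the whole session.
        return []
    # events.datetime >= initial_event.start_event_timestamp
    return [(ts, value) for ts, value in session_events if ts >= start_ts]

session = [(10, "/home"), (20, "/search"), (30, "/product"), (40, "/cart")]
print(clip_to_start(session, lambda value: value == "/search"))
# [(20, '/search'), (30, '/product'), (40, '/cart')] -> "/search" is ranked n°1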


@@ -10,6 +10,7 @@ rm -rf ./chalicelib/core/collaboration_base.py
rm -rf ./chalicelib/core/collaboration_msteams.py
rm -rf ./chalicelib/core/collaboration_slack.py
rm -rf ./chalicelib/core/countries.py
rm -rf ./chalicelib/core/custom_metrics_predefined.py
rm -rf ./chalicelib/core/feature_flags.py
#exp rm -rf ./chalicelib/core/errors.py
rm -rf ./chalicelib/core/errors_favorite.py
@@ -72,6 +73,7 @@ rm -rf ./crons/__init__.py
rm -rf ./routers/subs/__init__.py
rm -rf ./routers/__init__.py
rm -rf ./chalicelib/core/assist.py
rm -rf ./chalicelib/core/assist_stats.py
rm -rf ./auth/__init__.py
rm -rf ./auth/auth_apikey.py
rm -rf ./build.sh