* feat(chalice): autocomplete return top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* fix(chalice): support low-density for cards
This commit is contained in:
Kraiem Taha Yassine 2024-12-26 13:23:19 +01:00 committed by GitHub
parent fe7e200dba
commit aad8542d97
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 41 additions and 44 deletions

View file

@@ -1,12 +1,12 @@
import json
from typing import Optional, List
import schemas
from chalicelib.core.sourcemaps import sourcemaps
from chalicelib.core.sessions import sessions_search
from chalicelib.core.sourcemaps import sourcemaps
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from typing import Optional, List, Union, Literal
from chalicelib.utils.metrics_helper import get_step_size
def get(error_id, family=False) -> dict | List[dict]:
@ -113,7 +113,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
step_size = get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)

View file

@ -3,7 +3,7 @@ from chalicelib.core import metadata
from chalicelib.core import sessions
from chalicelib.core.metrics import metrics
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils import helper, metrics_helper
from chalicelib.utils.TimeUTC import TimeUTC
from . import errors as errors_legacy
@@ -81,11 +81,6 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
return ch_sub_query
def __get_step_size(startTimestamp, endTimestamp, density):
    """Bucket width, in seconds, when splitting [startTimestamp, endTimestamp]
    (given in milliseconds) into *density* chart points."""
    span_seconds = int(endTimestamp) // 1000 - int(startTimestamp) // 1000
    return span_seconds // (int(density) - 1)
def __get_sort_key(key):
return {
schemas.ErrorSort.OCCURRENCE: "max_datetime",
@ -302,7 +297,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
value_key=f_k))
with ch_client.ClickHouseClient() as ch:
step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density)
step_size = metrics_helper.get_step_size(data.startTimestamp, data.endTimestamp, data.density)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)

View file

@ -2,7 +2,7 @@ from chalicelib.core.errors import errors_legacy as errors
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from chalicelib.utils.metrics_helper import get_step_size
def __flatten_sort_key_count_version(data, merge_nested=False):
@ -67,9 +67,9 @@ def get_details(project_id, error_id, user_id, **data):
data["startDate30"] = TimeUTC.now(-30)
data["endDate30"] = TimeUTC.now()
density24 = int(data.get("density24", 24))
step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
step_size24 = get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
density30 = int(data.get("density30", 30))
step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
step_size30 = get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
params = {
"startDate24": data['startDate24'],
"endDate24": data['endDate24'],

View file

@ -5,7 +5,7 @@ from chalicelib.core import metadata
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from chalicelib.utils.metrics_helper import get_step_size
logger = logging.getLogger(__name__)
@ -89,7 +89,7 @@ def __get_meta_constraint(project_id, data):
def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
@ -146,7 +146,7 @@ def __merge_rows_with_neutral(rows, neutral):
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="requests", time_column="timestamp", project=False,
@ -244,7 +244,7 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args)
pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")
@ -307,7 +307,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
@ -388,7 +388,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
@ -475,7 +475,7 @@ def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTi
def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
@ -506,7 +506,7 @@ def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp,
def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@ -550,7 +550,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)

View file

@ -6,7 +6,7 @@ from chalicelib.utils import ch_client
from chalicelib.utils import exp_ch_helper
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from chalicelib.utils.metrics_helper import get_step_size
logger = logging.getLogger(__name__)
@ -45,7 +45,7 @@ def __add_missing_keys(original, complete):
def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_key="timestamp", time_coefficient=1000):
if len(rows) == density:
return rows
step = __get_step_size(start_time, end_time, density, decimal=True)
step = get_step_size(start_time, end_time, density, decimal=True)
optimal = [(int(i * time_coefficient), int((i + step) * time_coefficient)) for i in
__frange(start_time // time_coefficient, end_time // time_coefficient, step)]
result = []
@ -150,7 +150,7 @@ def __get_generic_constraint(data, table_name):
def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
@ -211,7 +211,7 @@ def __merge_rows_with_neutral(rows, neutral):
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
ch_sub_query.append("requests.event_type='REQUEST'")
ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s")
@ -306,7 +306,7 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query_chart = __get_basic_constraints(table_name="events", round_start=True,
data=args)
ch_sub_query_chart.append("(events.event_type = 'REQUEST' OR events.event_type = 'ERROR')")
@ -339,7 +339,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args)
ch_sub_query_chart.append("errors.event_type='ERROR'")
ch_sub_query_chart.append("errors.source == 'js_exception'")
@ -379,7 +379,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
ch_sub_query.append("requests.event_type='REQUEST'")
ch_sub_query.append("requests.success = 0")
@ -470,7 +470,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim
def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
ch_sub_query_chart.append("pages.event_type='LOCATION'")
meta_condition = __get_meta_constraint(args)
@ -497,7 +497,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
ch_sub_query_chart.append("pages.event_type='LOCATION'")
meta_condition = __get_meta_constraint(args)
@ -538,7 +538,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
step_size = get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)

View file

@ -14,8 +14,8 @@ logger = logging.getLogger(__name__)
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_type: schemas.MetricType, metric_of: schemas.MetricOfTimeseries | schemas.MetricOfTable,
metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = "events.pages"
@ -133,8 +133,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_of: schemas.MetricOfTable, metric_value: List,
metric_format: Union[schemas.MetricExtendedFormatType, schemas.MetricExtendedFormatType]):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
extra_event = None
extra_conditions = None
if metric_of == schemas.MetricOfTable.VISITED_URL:

View file

@ -14,8 +14,8 @@ logger = logging.getLogger(__name__)
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_type: schemas.MetricType, metric_of: schemas.MetricOfTimeseries | schemas.MetricOfTable,
metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density))
step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density))
extra_event = None
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
@ -128,8 +128,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
metric_of: schemas.MetricOfTable, metric_value: List,
metric_format: Union[schemas.MetricExtendedFormatType, schemas.MetricExtendedFormatType]):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density))
step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density))
extra_event = None
extra_deduplication = []
extra_conditions = None

View file

@@ -1,5 +1,7 @@
def __get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
def get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
    """Return the chart bucket width for a time range split into *density* points.

    The range endpoints are scaled down by *factor* first (milliseconds to
    seconds by default), then the span is divided across the requested points.
    With decimal=True a fractional (float) step is returned instead of an
    integer floor-divided one.
    """
    span = endTimestamp // factor - startTimestamp // factor
    # One point (or fewer) means a single bucket covering the whole span.
    if density <= 1:
        return span
    return span / density if decimal else span // (density - 1)

View file

@ -105,9 +105,9 @@ def get_details(project_id, error_id, user_id, **data):
data["endDate30"] = TimeUTC.now()
density24 = int(data.get("density24", 24))
step_size24 = errors.__get_step_size(data["startDate24"], data["endDate24"], density24)
step_size24 = errors.get_step_size(data["startDate24"], data["endDate24"], density24)
density30 = int(data.get("density30", 30))
step_size30 = errors.__get_step_size(data["startDate30"], data["endDate30"], density30)
step_size30 = errors.get_step_size(data["startDate30"], data["endDate30"], density30)
params = {
"startDate24": data['startDate24'],
"endDate24": data['endDate24'],