Merge branch 'dev' into live-se-red
commit 15fe4e5994
16 changed files with 89 additions and 75 deletions
@@ -1,12 +1,12 @@
 import json
-from typing import Optional, List

 import schemas
-from chalicelib.core.sourcemaps import sourcemaps
 from chalicelib.core.sessions import sessions_search
+from chalicelib.core.sourcemaps import sourcemaps
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
+from typing import Optional, List, Union, Literal
+from chalicelib.utils.metrics_helper import get_step_size


 def get(error_id, family=False) -> dict | List[dict]:

@@ -113,7 +113,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
         return empty_response
     error_ids = [e["errorId"] for e in statuses]
     with pg_client.PostgresClient() as cur:
-        step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
+        step_size = get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
         sort = __get_sort_key('datetime')
         if data.sort is not None:
             sort = __get_sort_key(data.sort)

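Note: the recurring change across the Python hunks in this commit is a rename of the step-size helper from the module-private __get_step_size to a public get_step_size in chalicelib.utils.metrics_helper. A minimal sketch of the call-site pattern (names from the diff; the timestamp values are illustrative):

    from chalicelib.utils.metrics_helper import get_step_size

    # factor=1 keeps the result in milliseconds instead of converting to seconds
    step_size = get_step_size(startTimestamp=0, endTimestamp=86_400_000, density=24, factor=1)

Dropping the leading double underscore is more than style: a __-prefixed name signals "internal" and is name-mangled when referenced inside a class body, so importing it from other modules was fragile.
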
@@ -3,7 +3,7 @@ from chalicelib.core import metadata
 from chalicelib.core import sessions
 from chalicelib.core.metrics import metrics
 from chalicelib.utils import ch_client, exp_ch_helper
-from chalicelib.utils import pg_client, helper
+from chalicelib.utils import helper, metrics_helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from . import errors as errors_legacy

@@ -81,11 +81,6 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
     return ch_sub_query


-def __get_step_size(startTimestamp, endTimestamp, density):
-    step_size = (int(endTimestamp) // 1000 - int(startTimestamp) // 1000) // (int(density) - 1)
-    return step_size
-
-
 def __get_sort_key(key):
     return {
         schemas.ErrorSort.OCCURRENCE: "max_datetime",

@@ -302,7 +297,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
                                          value_key=f_k))

     with ch_client.ClickHouseClient() as ch:
-        step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density)
+        step_size = metrics_helper.get_step_size(data.startTimestamp, data.endTimestamp, data.density)
         sort = __get_sort_key('datetime')
         if data.sort is not None:
             sort = __get_sort_key(data.sort)

@@ -2,7 +2,7 @@ from chalicelib.core.errors import errors_legacy as errors
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
+from chalicelib.utils.metrics_helper import get_step_size


 def __flatten_sort_key_count_version(data, merge_nested=False):

@@ -67,9 +67,9 @@ def get_details(project_id, error_id, user_id, **data):
         data["startDate30"] = TimeUTC.now(-30)
         data["endDate30"] = TimeUTC.now()
     density24 = int(data.get("density24", 24))
-    step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
+    step_size24 = get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
     density30 = int(data.get("density30", 30))
-    step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
+    step_size30 = get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
     params = {
         "startDate24": data['startDate24'],
         "endDate24": data['endDate24'],

@@ -208,11 +208,15 @@ def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.Card
     return supported.get(data.metric_type, not_supported)()


+def __get_global_card_info(data: schemas.CardSchema):
+    r = {"hideExcess": data.hide_excess, "compareTo": data.compare_to}
+    return r
+
+
 def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
     r = {"start_point": [s.model_dump() for s in data.start_point],
          "start_type": data.start_type,
-         "excludes": [e.model_dump() for e in data.excludes],
-         "hideExcess": data.hide_excess}
+         "excludes": [e.model_dump() for e in data.excludes]}
     return r

@@ -237,8 +241,10 @@ def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSche
     series_len = len(data.series)
     params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
               "default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
+    params["card_info"] = __get_global_card_info(data=data)
     if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
-        params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
+        params["card_info"] = {**params["card_info"], **__get_path_analysis_card_info(data=data)}
+    params["card_info"] = json.dumps(params["card_info"])

     query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                     view_type, metric_type, metric_of, metric_value,

@@ -298,16 +304,18 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
         if i not in u_series_ids:
             d_series_ids.append(i)
     params["d_series_ids"] = tuple(d_series_ids)
-    params["card_info"] = None
     params["session_data"] = json.dumps(metric["data"])
+    params["card_info"] = __get_global_card_info(data=data)
     if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
-        params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
+        params["card_info"] = {**params["card_info"], **__get_path_analysis_card_info(data=data)}
     elif data.metric_type == schemas.MetricType.HEAT_MAP:
         if data.session_id is not None:
             params["session_data"] = json.dumps({"sessionId": data.session_id})
         elif metric.get("data") and metric["data"].get("sessionId"):
             params["session_data"] = json.dumps({"sessionId": metric["data"]["sessionId"]})
+
+    params["card_info"] = json.dumps(params["card_info"])

     with pg_client.PostgresClient() as cur:
         sub_queries = []
         if len(n_series) > 0:

@@ -442,8 +450,16 @@ def delete_card(project_id, metric_id, user_id):
     return {"state": "success"}


+def __get_global_attributes(row):
+    if row is None or row.get("cardInfo") is None:
+        return row
+    card_info = row.get("cardInfo", {})
+    row["compareTo"] = card_info.get("compareTo", [])
+    return row
+
+
 def __get_path_analysis_attributes(row):
-    card_info = row.pop("cardInfo")
+    card_info = row.get("cardInfo", {})
     row["excludes"] = card_info.get("excludes", [])
     row["startPoint"] = card_info.get("startPoint", [])
     row["startType"] = card_info.get("startType", "start")

@@ -496,6 +512,8 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
         row = helper.dict_to_camel_case(row)
         if row["metricType"] == schemas.MetricType.PATH_ANALYSIS:
             row = __get_path_analysis_attributes(row=row)
+        row = __get_global_attributes(row=row)
+        row.pop("cardInfo")
         return row

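Note: taken together, the card_info changes above make the global attributes (hideExcess, compareTo) part of every card's stored card_info, with path-analysis cards merging their extra keys on top, and get_card unpacking both on the way out. A condensed sketch of the round trip using the helpers from this diff (sample values are illustrative):

    import json

    # write side (create_card / update_card): global info first, path-analysis extras
    # merged in when relevant, a single json.dumps at the end
    card_info = {"hideExcess": False, "compareTo": None}              # __get_global_card_info
    card_info = {**card_info, "start_type": "start", "excludes": []}  # __get_path_analysis_card_info
    params = {"card_info": json.dumps(card_info)}

    # read side (get_card): keys arrive camel-cased via helper.dict_to_camel_case,
    # attributes are copied onto the row, then the raw blob is dropped
    row = {"metricType": "pathAnalysis", "cardInfo": json.loads(params["card_info"])}
    row["startType"] = row["cardInfo"].get("startType", "start")      # __get_path_analysis_attributes
    row["compareTo"] = row["cardInfo"].get("compareTo", [])           # __get_global_attributes
    row.pop("cardInfo")
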
@@ -5,7 +5,7 @@ from chalicelib.core import metadata
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
+from chalicelib.utils.metrics_helper import get_step_size

 logger = logging.getLogger(__name__)

@@ -89,7 +89,7 @@ def __get_meta_constraint(project_id, data):
 def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                            endTimestamp=TimeUTC.now(),
                            density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query = __get_constraints(project_id=project_id, data=args)
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
                                            chart=True, data=args)

@@ -146,7 +146,7 @@ def __merge_rows_with_neutral(rows, neutral):

 def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                      endTimestamp=TimeUTC.now(), density=6, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
                                            data=args, main_table="requests", time_column="timestamp", project=False,

@@ -244,7 +244,7 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d

 def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                         platform=None, density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)

     pg_sub_query_subset = __get_constraints(project_id=project_id, data=args)
     pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")

@@ -307,7 +307,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e

 def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query = __get_constraints(project_id=project_id, data=args)
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
                                            chart=True, data=args)

@@ -388,7 +388,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de

 def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                            endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
                                             chart=False, data=args)
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,

@@ -475,7 +475,7 @@ def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTi


 def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args):
-    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
+    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
     pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,

@@ -506,7 +506,7 @@ def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp,

 def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                    endTimestamp=TimeUTC.now(), value=None, density=20, **args):
-    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
+    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
     params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
               "endTimestamp": endTimestamp}
     pg_sub_query = __get_constraints(project_id=project_id, data=args)

@@ -550,7 +550,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
 def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                      endTimestamp=TimeUTC.now(),
                      density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    step_size = get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query = __get_constraints(project_id=project_id, data=args)
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
                                            chart=True, data=args)

@@ -6,7 +6,7 @@ from chalicelib.utils import ch_client
 from chalicelib.utils import exp_ch_helper
 from chalicelib.utils import helper
 from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
+from chalicelib.utils.metrics_helper import get_step_size

 logger = logging.getLogger(__name__)

@@ -45,7 +45,7 @@ def __add_missing_keys(original, complete):
 def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_key="timestamp", time_coefficient=1000):
     if len(rows) == density:
         return rows
-    step = __get_step_size(start_time, end_time, density, decimal=True)
+    step = get_step_size(start_time, end_time, density, decimal=True)
     optimal = [(int(i * time_coefficient), int((i + step) * time_coefficient)) for i in
                __frange(start_time // time_coefficient, end_time // time_coefficient, step)]
     result = []

@@ -150,7 +150,7 @@ def __get_generic_constraint(data, table_name):
 def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                            endTimestamp=TimeUTC.now(),
                            density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
     ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
     meta_condition = __get_meta_constraint(args)

@@ -211,7 +211,7 @@ def __merge_rows_with_neutral(rows, neutral):

 def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                      endTimestamp=TimeUTC.now(), density=6, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
     ch_sub_query.append("requests.event_type='REQUEST'")
     ch_sub_query.append("intDiv(requests.status, 100) == %(status_code)s")

@@ -306,7 +306,7 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d

 def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                         platform=None, density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query_chart = __get_basic_constraints(table_name="events", round_start=True,
                                                  data=args)
     ch_sub_query_chart.append("(events.event_type = 'REQUEST' OR events.event_type = 'ERROR')")

@@ -339,7 +339,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e

 def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query_chart = __get_basic_constraints(table_name="errors", round_start=True, data=args)
     ch_sub_query_chart.append("errors.event_type='ERROR'")
     ch_sub_query_chart.append("errors.source == 'js_exception'")

@@ -379,7 +379,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de

 def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                            endTimestamp=TimeUTC.now(), density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query = __get_basic_constraints(table_name="requests", round_start=True, data=args)
     ch_sub_query.append("requests.event_type='REQUEST'")
     ch_sub_query.append("requests.success = 0")

@@ -470,7 +470,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim


 def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args):
-    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
+    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
     ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
     ch_sub_query_chart.append("pages.event_type='LOCATION'")
     meta_condition = __get_meta_constraint(args)

@@ -497,7 +497,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,

 def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                    endTimestamp=TimeUTC.now(), value=None, density=20, **args):
-    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
+    step_size = get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
     ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
     ch_sub_query_chart.append("pages.event_type='LOCATION'")
     meta_condition = __get_meta_constraint(args)

@@ -538,7 +538,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
 def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                      endTimestamp=TimeUTC.now(),
                      density=7, **args):
-    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    step_size = get_step_size(startTimestamp, endTimestamp, density)
     ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
     ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
     meta_condition = __get_meta_constraint(args)

@@ -14,8 +14,8 @@ logger = logging.getLogger(__name__)
 def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                    metric_type: schemas.MetricType, metric_of: schemas.MetricOfTimeseries | schemas.MetricOfTable,
                    metric_value: List):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
-                                                   density=density, factor=1, decimal=True))
+    step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
+                                                 density=density, factor=1, decimal=True))
     extra_event = None
     if metric_of == schemas.MetricOfTable.VISITED_URL:
         extra_event = "events.pages"

@@ -133,8 +133,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
 def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                   metric_of: schemas.MetricOfTable, metric_value: List,
                   metric_format: Union[schemas.MetricExtendedFormatType, schemas.MetricExtendedFormatType]):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
-                                                   density=density, factor=1, decimal=True))
+    step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
+                                                 density=density, factor=1, decimal=True))
     extra_event = None
     extra_conditions = None
     if metric_of == schemas.MetricOfTable.VISITED_URL:

@@ -14,8 +14,8 @@ logger = logging.getLogger(__name__)
 def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                    metric_type: schemas.MetricType, metric_of: schemas.MetricOfTimeseries | schemas.MetricOfTable,
                    metric_value: List):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
-                                                   density=density))
+    step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
+                                                 density=density))
     extra_event = None
     if metric_of == schemas.MetricOfTable.VISITED_URL:
         extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path

@@ -128,8 +128,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
 def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                   metric_of: schemas.MetricOfTable, metric_value: List,
                   metric_format: Union[schemas.MetricExtendedFormatType, schemas.MetricExtendedFormatType]):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
-                                                   density=density))
+    step_size = int(metrics_helper.get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
+                                                 density=density))
     extra_event = None
     extra_deduplication = []
     extra_conditions = None

@@ -1,5 +1,7 @@
-def __get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
+def get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
     step_size = (endTimestamp // factor - startTimestamp // factor)
+    if density <= 1:
+        return step_size
     if decimal:
         return step_size / density
     return step_size // (density - 1)

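Note: this is the helper behind every rename above. A quick sketch of its behavior (timestamps in milliseconds; the one-day window is an illustrative value). The new density <= 1 guard returns the whole window instead of raising ZeroDivisionError on step_size // (density - 1):

    from chalicelib.utils.metrics_helper import get_step_size

    day_ms = 24 * 60 * 60 * 1000

    get_step_size(0, day_ms, density=24)                # 86400 // 23 = 3756 (seconds, factor=1000)
    get_step_size(0, day_ms, density=24, factor=1)      # 86400000 // 23 (milliseconds)
    get_step_size(0, day_ms, density=24, decimal=True)  # 86400 / 24 = 3600.0
    get_step_size(0, day_ms, density=1)                 # 86400 -- previously a ZeroDivisionError
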
@@ -933,6 +933,8 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
     # events: List[SessionSearchEventSchema2] = Field(default=[], doc_hidden=True)
     filters: List[GroupedFilterType] = Field(default=[])

+    compare_to: Optional[List[str]] = Field(default=None)
+
     # Used mainly for PathAnalysis, and could be used by other cards
     hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")

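Note: compare_to is the schema-side counterpart of the compareTo key stored in card_info above. A trimmed pydantic sketch of just the new field (the real CardSessionsSchema has many more fields; the list value is illustrative):

    from typing import List, Optional
    from pydantic import BaseModel, Field

    class CardSessionsSchema(BaseModel):  # trimmed to the relevant fields
        compare_to: Optional[List[str]] = Field(default=None)
        hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")

    CardSessionsSchema()                    # compare_to defaults to None
    CardSessionsSchema(compare_to=["-1w"])  # e.g. compare against a shifted period
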
@@ -1237,8 +1239,7 @@ class CardPathAnalysis(__CardSchema):
 # Union of cards-schemas that doesn't change between FOSS and EE
 __cards_union_base = Union[
     CardTimeSeries, CardTable, CardFunnel,
-    CardErrors,
-    CardWebVital, CardHeatMap,
+    CardErrors, CardWebVital, CardHeatMap,
     CardPathAnalysis]
 CardSchema = ORUnion(__cards_union_base, discriminator='metric_type')

@@ -1,6 +1,7 @@
 package analytics

 import (
+	"github.com/go-playground/validator/v10"
 	"openreplay/backend/pkg/analytics/charts"
 	"time"

@@ -31,11 +32,12 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
 	if err != nil {
 		return nil, err
 	}
+	reqValidator := validator.New()
 	cardsService, err := cards.New(log, pgconn)
 	if err != nil {
 		return nil, err
 	}
-	cardsHandlers, err := cards.NewHandlers(log, cfg, responser, cardsService)
+	cardsHandlers, err := cards.NewHandlers(log, cfg, responser, cardsService, reqValidator)
 	if err != nil {
 		return nil, err
 	}

@@ -43,7 +45,7 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
 	if err != nil {
 		return nil, err
 	}
-	dashboardsHandlers, err := dashboards.NewHandlers(log, cfg, responser, dashboardsService)
+	dashboardsHandlers, err := dashboards.NewHandlers(log, cfg, responser, dashboardsService, reqValidator)
 	if err != nil {
 		return nil, err
 	}

@@ -51,7 +53,7 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
 	if err != nil {
 		return nil, err
 	}
-	chartsHandlers, err := charts.NewHandlers(log, cfg, responser, chartsService)
+	chartsHandlers, err := charts.NewHandlers(log, cfg, responser, chartsService, reqValidator)
 	if err != nil {
 		return nil, err
 	}

@@ -36,6 +36,7 @@ type handlersImpl struct {
 	responser     *api.Responser
 	jsonSizeLimit int64
 	cards         Cards
+	validator     *validator.Validate
 }

 func (e *handlersImpl) GetAll() []*api.Description {

@@ -48,12 +49,13 @@ func (e *handlersImpl) GetAll() []*api.Description {
 	}
 }

-func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, cards Cards) (api.Handlers, error) {
+func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, cards Cards, validator *validator.Validate) (api.Handlers, error) {
 	return &handlersImpl{
 		log:           log,
 		responser:     responser,
 		jsonSizeLimit: cfg.JsonSizeLimit,
 		cards:         cards,
+		validator:     validator,
 	}, nil
 }

@@ -74,9 +76,7 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	validate := validator.New()
-	err = validate.Struct(req)
-	if err != nil {
+	if err = e.validator.Struct(req); err != nil {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
 		return
 	}

@@ -256,9 +256,7 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	validate := validator.New()
-	err = validate.Struct(req)
-	if err != nil {
+	if err = e.validator.Struct(req); err != nil {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
 		return
 	}

@@ -36,6 +36,7 @@ type handlersImpl struct {
 	responser     *api.Responser
 	jsonSizeLimit int64
 	charts        Charts
+	validator     *validator.Validate
 }

 func (e *handlersImpl) GetAll() []*api.Description {

@@ -45,12 +46,13 @@ func (e *handlersImpl) GetAll() []*api.Description {
 	}
 }

-func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, charts Charts) (api.Handlers, error) {
+func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, charts Charts, validator *validator.Validate) (api.Handlers, error) {
 	return &handlersImpl{
 		log:           log,
 		responser:     responser,
 		jsonSizeLimit: cfg.JsonSizeLimit,
 		charts:        charts,
+		validator:     validator,
 	}, nil
 }

@@ -77,9 +79,7 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request)
 		return
 	}

-	validate := validator.New()
-	err = validate.Struct(req)
-	if err != nil {
+	if err = e.validator.Struct(req); err != nil {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
 		return
 	}

@@ -36,6 +36,7 @@ type handlersImpl struct {
 	responser     *api.Responser
 	jsonSizeLimit int64
 	dashboards    Dashboards
+	validator     *validator.Validate
 }

 func (e *handlersImpl) GetAll() []*api.Description {

@@ -50,12 +51,13 @@ func (e *handlersImpl) GetAll() []*api.Description {
 	}
 }

-func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, dashboards Dashboards) (api.Handlers, error) {
+func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, dashboards Dashboards, validator *validator.Validate) (api.Handlers, error) {
 	return &handlersImpl{
 		log:           log,
 		responser:     responser,
 		jsonSizeLimit: cfg.JsonSizeLimit,
 		dashboards:    dashboards,
+		validator:     validator,
 	}, nil
 }

@@ -76,9 +78,7 @@ func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	validate := validator.New()
-	err = validate.Struct(req)
-	if err != nil {
+	if err = e.validator.Struct(req); err != nil {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
 		return
 	}

@@ -279,9 +279,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request
 		return
 	}

-	validate := validator.New()
-	err = validate.Struct(req)
-	if err != nil {
+	if err = e.validator.Struct(req); err != nil {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
 		return
 	}

@@ -105,9 +105,9 @@ def get_details(project_id, error_id, user_id, **data):
         data["endDate30"] = TimeUTC.now()

     density24 = int(data.get("density24", 24))
-    step_size24 = errors.__get_step_size(data["startDate24"], data["endDate24"], density24)
+    step_size24 = errors.get_step_size(data["startDate24"], data["endDate24"], density24)
     density30 = int(data.get("density30", 30))
-    step_size30 = errors.__get_step_size(data["startDate30"], data["endDate30"], density30)
+    step_size30 = errors.get_step_size(data["startDate30"], data["endDate30"], density30)
     params = {
         "startDate24": data['startDate24'],
         "endDate24": data['endDate24'],

@@ -33,8 +33,8 @@ function AssistSessionsModal(props: ConnectProps) {

   const sortOptions = metaList
     .map((i: any) => ({
-      label: capitalize(i),
-      value: i
+      label: capitalize(i.key),
+      value: i.key
     }));

   React.useEffect(() => {