feat(chalice): solved exp circular import
feat(chalice): fixed col-name typo for exp_search
feat(chalice): filter insights

commit c29423f0b6, parent df61a405a5
10 changed files with 113 additions and 111 deletions
@@ -2,6 +2,7 @@ import json
 import schemas
 from chalicelib.core import sourcemaps, sessions
+from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.metrics_helper import __get_step_size

@@ -277,7 +278,7 @@ def get_details(project_id, error_id, user_id, **data):
        status = cur.fetchone()

        if status is not None:
-            row["stack"] = format_first_stack_frame(status).pop("stack")
+            row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
            row["status"] = status.pop("status")
            row["parent_error_id"] = status.pop("parent_error_id")
            row["favorite"] = status.pop("favorite")

@@ -721,19 +722,6 @@ def __status_rank(status):
     }.get(status)


-def format_first_stack_frame(error):
-    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
-    for s in error["stack"]:
-        for c in s.get("context", []):
-            for sci, sc in enumerate(c):
-                if isinstance(sc, str) and len(sc) > 1000:
-                    c[sci] = sc[:1000]
-        # convert bytes to string:
-        if isinstance(s["filename"], bytes):
-            s["filename"] = s["filename"].decode("utf-8")
-    return error
-
-
 def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
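The function's new home, chalicelib/utils/errors_helper.py, is not itself part of this diff (the EE tree even gitignores that path below), but judging from the deleted lines it presumably looks like the following sketch. Moving it under chalicelib/utils takes it out of the chalicelib.core import cycle between errors and sessions that the commit title refers to:

    # chalicelib/utils/errors_helper.py: assumed contents, reconstructed from
    # the function deleted above
    from chalicelib.core import sourcemaps


    def format_first_stack_frame(error):
        # parse the raw payload and keep only the first stack frame
        error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
        for s in error["stack"]:
            # truncate overly long context strings to 1000 characters
            for c in s.get("context", []):
                for sci, sc in enumerate(c):
                    if isinstance(sc, str) and len(sc) > 1000:
                        c[sci] = sc[:1000]
            # convert bytes to string
            if isinstance(s["filename"], bytes):
                s["filename"] = s["filename"].decode("utf-8")
        return error

Callers switch from errors.format_first_stack_frame(...) to errors_helper.format_first_stack_frame(...), so sessions no longer needs to import errors at all.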
@@ -2,8 +2,9 @@ from typing import List

 import schemas
 from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_favorite, \
+    sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
     sessions_devtool, sessions_notes
+from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh

@@ -91,7 +92,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
         data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
-        # to keep only the first stack
-        data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
+        # limit the number of errors to reduce the response-body size
+        data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                           if e['source'] == "js_exception"][:500]
         data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
                                                               session_id=session_id)

@@ -360,7 +361,8 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):


 # this function generates the query and return the generated-query with the dict of query arguments
-def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
+def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
+                       project_id, user_id, extra_event=None):
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
                  "projectId": project_id, "userId": user_id}
@@ -24,12 +24,6 @@ class UserLoginSchema(_Grecaptcha):
     password: str = Field(...)
     _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)

-    @root_validator(pre=True)
-    def log_payload(cls, values):
-        print("login received:")
-        print(values)
-        return values
-

 class UserSignupSchema(UserLoginSchema):
     fullname: str = Field(...)
ee/api/.gitignore (vendored)

@@ -230,6 +230,7 @@ Pipfile.lock
 /chalicelib/utils/dev.py
 /chalicelib/utils/email_handler.py
 /chalicelib/utils/email_helper.py
+/chalicelib/utils/errors_helper.py
 /chalicelib/utils/event_filter_definition.py
 /chalicelib/utils/github_client_v3.py
 /chalicelib/utils/helper.py
@@ -1,13 +1,14 @@
 import json

+from decouple import config
+
 import schemas
 from chalicelib.core import sourcemaps
+from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.metrics_helper import __get_step_size

-from decouple import config
-
 if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
     from chalicelib.core import sessions_legacy as sessions
 else:

@@ -90,13 +91,14 @@ def __process_tags(row):
 def get_details(project_id, error_id, user_id, **data):
     pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
     pg_sub_query24.append("error_id = %(error_id)s")
-    pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
-                                                     endTime_arg_name="endDate30",project_key="sessions.project_id")
+    pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
+                                                     startTime_arg_name="startDate30",
+                                                     endTime_arg_name="endDate30", project_key="sessions.project_id")
     pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
     pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
     pg_sub_query30_session.append("error_id = %(error_id)s")
     pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
-                                                 endTime_arg_name="endDate30",project_key="errors.project_id")
+                                                 endTime_arg_name="endDate30", project_key="errors.project_id")
     pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
     pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
     pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")

@@ -283,7 +285,7 @@ def get_details(project_id, error_id, user_id, **data):
        status = cur.fetchone()

        if status is not None:
-            row["stack"] = format_first_stack_frame(status).pop("stack")
+            row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
            row["status"] = status.pop("status")
            row["parent_error_id"] = status.pop("parent_error_id")
            row["favorite"] = status.pop("favorite")

@@ -727,19 +729,6 @@ def __status_rank(status):
     }.get(status)


-def format_first_stack_frame(error):
-    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
-    for s in error["stack"]:
-        for c in s.get("context", []):
-            for sci, sc in enumerate(c):
-                if isinstance(sc, str) and len(sc) > 1000:
-                    c[sci] = sc[:1000]
-        # convert bytes to string:
-        if isinstance(s["filename"], bytes):
-            s["filename"] = s["filename"].decode("utf-8")
-    return error
-
-
 def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
@@ -1,11 +1,11 @@
-from typing import List, Union
+from typing import List

 import schemas
 import schemas_ee
 from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_favorite, \
+    sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
     sessions_devtool, sessions_notes
-from chalicelib.utils import pg_client, helper, metrics_helper
+from chalicelib.utils import pg_client, helper, metrics_helper, errors_helper
 from chalicelib.utils import sql_helper as sh

 SESSION_PROJECTION_COLS = """s.project_id,

@@ -88,15 +88,15 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
         data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
     else:
         data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
-                                                   group_clickrage=True)
+                                                  group_clickrage=True)
         all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
         data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
-        # to keep only the first stack
-        data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
+        # limit the number of errors to reduce the response-body size
+        data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                           if e['source'] == "js_exception"][:500]
         data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
-                                                              session_id=session_id)
+                                                               session_id=session_id)
         data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
         data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
         data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,

@@ -363,7 +363,8 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):


 # this function generates the query and return the generated-query with the dict of query arguments
-def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
+def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
+                       project_id, user_id, extra_event=None):
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
                  "projectId": project_id, "userId": user_id}
@@ -3,9 +3,10 @@ from typing import List, Union
 import schemas
 import schemas_ee
 from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics, sessions_devtool, \
+    sessions_mobs, issues, projects, resources, assist, performance_event, metrics, sessions_devtool, \
     sessions_notes
-from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
+from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper, errors_helper
 from chalicelib.utils import sql_helper as sh

 SESSION_PROJECTION_COLS_CH = """\
 s.project_id,

@@ -111,7 +112,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
         data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
-        # to keep only the first stack
-        data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
+        # limit the number of errors to reduce the response-body size
+        data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                           if e['source'] == "js_exception"][:500]
         data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
                                                               session_id=session_id)

@@ -287,9 +288,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
                               ORDER BY sort_key {data.order}
                               LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                              full_args)
-        # print("--------------------")
-        # print(main_query)
-        # print("--------------------")
+        print("--------------------")
+        print(main_query)
+        print("--------------------")
         try:
             sessions = cur.execute(main_query)
         except Exception as err:

@@ -451,7 +452,7 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
                                                  event.filters is None or len(event.filters) == 0))


-def __get_event_type(EventType: Union[schemas.EventType, schemas.PerformanceEventType]):
+def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType]):
     defs = {
         schemas.EventType.click: "CLICK",
         schemas.EventType.input: "INPUT",

@@ -475,7 +476,8 @@ def __get_event_type(EventType: Union[schemas.EventType, schemas.PerformanceEven


 # this function generates the query and return the generated-query with the dict of query arguments
-def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
+def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
+                          project_id, user_id, extra_event=None):
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
                  "projectId": project_id, "userId": user_id}
@@ -780,7 +782,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        if event_type == events.EventType.CLICK.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = events.EventType.CLICK.column
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -796,7 +798,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.INPUT.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = events.EventType.INPUT.column
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -816,7 +818,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.LOCATION.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = 'url_path'
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -831,7 +833,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.CUSTOM.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = events.EventType.CUSTOM.column
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -846,7 +848,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.REQUEST.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = 'url_path'
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -860,7 +862,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
                events_conditions[-1]["condition"] = event_where[-1]
        # elif EventType == events.EventType.GRAPHQL.ui_type:
        #     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
-        #     event_where.append(f"main.EventType='GRAPHQL'")
+        #     event_where.append(f"main.event_type='GRAPHQL'")
        #     events_conditions.append({"type": event_where[-1]})
        #     if not is_any:
        #         event_where.append(

@@ -870,7 +872,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.STATEACTION.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = events.EventType.STATEACTION.column
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:

@@ -886,7 +888,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == events.EventType.ERROR.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
            events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            event.source = tuple(event.source)
            events_conditions[-1]["condition"] = []

@@ -906,7 +908,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == schemas.PerformanceEventType.fetch_failed:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            _column = 'url_path'
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            events_conditions[-1]["condition"] = []
            if not is_any:

@@ -944,7 +946,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
                            schemas.PerformanceEventType.location_largest_contentful_paint_time,
                            schemas.PerformanceEventType.location_ttfb]:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            events_conditions[-1]["condition"] = []
            col = performance_event.get_col(event_type)

@@ -967,7 +969,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type in [schemas.PerformanceEventType.location_avg_cpu_load,
                            schemas.PerformanceEventType.location_avg_memory_usage]:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            events_conditions[-1]["condition"] = []
            col = performance_event.get_col(event_type)

@@ -990,9 +992,9 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        elif event_type == schemas.PerformanceEventType.time_between_events:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
            # event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
-            event_where.append(f"main.EventType='{__get_event_type(event.value[0].type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'")
            events_conditions.append({"type": event_where[-1]})
-            event_where.append(f"main.EventType='{__get_event_type(event.value[0].type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'")
            events_conditions.append({"type": event_where[-1]})

            if not isinstance(event.value[0].value, list):

@@ -1040,7 +1042,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        # TODO: no isNot for RequestDetails
        elif event_type == schemas.EventType.request_details:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-            event_where.append(f"main.EventType='{__get_event_type(event_type)}'")
+            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            apply = False
            events_conditions[-1]["condition"] = []

@@ -1093,7 +1095,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
        # TODO: no isNot for GraphQL
        elif event_type == schemas.EventType.graphql:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-            event_where.append(f"main.EventType='GRAPHQL'")
+            event_where.append(f"main.event_type='GRAPHQL'")
            events_conditions.append({"type": event_where[-1]})
            events_conditions[-1]["condition"] = []
            for j, f in enumerate(event.filters):
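All of the hunks above fix the same bug: the experimental ClickHouse events table names the column event_type (snake_case), so the generated predicates must read main.event_type=..., not main.EventType=... That is the "col-name typo for exp_search" from the commit message. A minimal illustration of the fragment being assembled (literal values are hypothetical):

    # building a WHERE fragment the way search_query_parts_ch does;
    # only the snake_case column exists in the experimental events schema
    event_where = []
    event_where.append("main.event_type='CLICK'")   # correct column name
    # event_where.append("main.EventType='CLICK'")  # old form: references a non-existent column
    print(" AND ".join(event_where))                # main.event_type='CLICK'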
|
|
@ -1,5 +1,5 @@
|
|||
import schemas, schemas_ee
|
||||
from typing import List
|
||||
from typing import List, Optional
|
||||
from chalicelib.core import metrics
|
||||
from chalicelib.utils import ch_client
|
||||
|
||||
|
|
@ -86,25 +86,19 @@ def __get_two_values(response, time_index='hh', name_index='name'):
|
|||
return table_hh1, table_hh2, columns, names_hh1, names_hh2
|
||||
|
||||
|
||||
def __handle_timestep(time_step):
|
||||
base = "{0}"
|
||||
if time_step == 'hour':
|
||||
return f"toStartOfHour({base})", 3600
|
||||
elif time_step == 'day':
|
||||
return f"toStartOfDay({base})", 24 * 3600
|
||||
elif time_step == 'week':
|
||||
return f"toStartOfWeek({base})", 7 * 24 * 3600
|
||||
else:
|
||||
assert type(
|
||||
time_step) == int, "time_step must be {'hour', 'day', 'week'} or an integer representing the time step in minutes"
|
||||
return f"toStartOfInterval({base}, INTERVAL {time_step} minute)", int(time_step) * 60
|
||||
|
||||
|
||||
def query_requests_by_period(project_id, start_time, end_time):
|
||||
def query_requests_by_period(project_id, start_time, end_time, filters: Optional[schemas.SessionsSearchPayloadSchema]):
|
||||
params = {
|
||||
"project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
|
||||
"step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
|
||||
}
|
||||
sub_query = ""
|
||||
if filters:
|
||||
qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id,
|
||||
error_status=None,
|
||||
errors_only=True, favorite_only=None,
|
||||
issue=None, user_id=None)
|
||||
params = {**params, **qp_params}
|
||||
sub_query = f"INNER JOIN {sub_query} USING(session_id)"
|
||||
conditions = ["event_type = 'REQUEST'"]
|
||||
query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
|
||||
toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
|
||||
|
|
@@ -113,6 +107,7 @@ def query_requests_by_period(project_id, start_time, end_time):
               FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
               LEFT JOIN (SELECT session_id, url_host, url_path, success, message, duration, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
                          FROM experimental.events
+                         {sub_query}
                          WHERE project_id = {project_id}
                            AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
      GROUP BY T1.hh, T2.url_host, T2.url_path
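When a filter payload is supplied, search_query_parts_ch returns the generated sessions sub-select plus its bound parameters, and the insight query narrows experimental.events to the matching sessions via the INNER JOIN ... USING(session_id) spliced into the FROM clause. A rough sketch of how the pieces combine; the sub-select text is a stand-in, not the real generated SQL:

    # hypothetical stand-ins, to show the assembled shape only
    sub_query = "INNER JOIN (SELECT session_id FROM sessions_matching_filters) USING(session_id)"
    conditions = ["event_type = 'REQUEST'"]
    project_id = 1
    query = f"""SELECT COUNT(1)
                FROM experimental.events
                {sub_query}
                WHERE project_id = {project_id}
                  AND {" AND ".join(conditions)}"""
    print(query)

With filters=None, sub_query stays an empty string and the statement degrades to the old, unfiltered form.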
@@ -185,11 +180,20 @@ def query_requests_by_period(project_id, start_time, end_time):
     return results


-def query_most_errors_by_period(project_id, start_time, end_time):
+def query_most_errors_by_period(project_id, start_time, end_time,
+                                filters: Optional[schemas.SessionsSearchPayloadSchema]):
     params = {
         "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
         "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
     }
+    sub_query = ""
+    if filters:
+        qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id,
+                                                                  error_status=None,
+                                                                  errors_only=True, favorite_only=None,
+                                                                  issue=None, user_id=None)
+        params = {**params, **qp_params}
+        sub_query = f"INNER JOIN {sub_query} USING(session_id)"
     conditions = ["event_type = 'ERROR'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end

@@ -198,6 +202,7 @@ def query_most_errors_by_period(project_id, start_time, end_time):
               FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
               LEFT JOIN (SELECT session_id, name, source, message, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
                          FROM experimental.events
+                         {sub_query}
                          WHERE project_id = {project_id}
                            AND datetime >= toDateTime(%(startTimestamp)s/1000)
                            AND datetime < toDateTime(%(endTimestamp)s/1000)

@@ -260,11 +265,20 @@ def query_most_errors_by_period(project_id, start_time, end_time,
     return results


-def query_cpu_memory_by_period(project_id, start_time, end_time):
+def query_cpu_memory_by_period(project_id, start_time, end_time,
+                               filters: Optional[schemas.SessionsSearchPayloadSchema]):
     params = {
         "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
         "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
     }
+    sub_query = ""
+    if filters:
+        qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id,
+                                                                  error_status=None,
+                                                                  errors_only=True, favorite_only=None,
+                                                                  issue=None, user_id=None)
+        params = {**params, **qp_params}
+        sub_query = f"INNER JOIN {sub_query} USING(session_id)"
     conditions = ["event_type = 'PERFORMANCE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end

@@ -273,6 +287,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time):
               FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
               LEFT JOIN (SELECT session_id, url_host, url_path, avg_used_js_heap_size, avg_cpu, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
                          FROM experimental.events
+                         {sub_query}
                          WHERE project_id = {project_id}
                            AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
      GROUP BY T1.hh, T2.url_host

@@ -310,11 +325,22 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     ]


-def query_click_rage_by_period(project_id, start_time, end_time):
+from chalicelib.core import sessions_exp
+
+
+def query_click_rage_by_period(project_id, start_time, end_time,
+                               filters: Optional[schemas.SessionsSearchPayloadSchema]):
     params = {
         "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
         "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)}

+    sub_query = ""
+    if filters:
+        qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id,
+                                                                  error_status=None,
+                                                                  errors_only=True, favorite_only=None,
+                                                                  issue=None, user_id=None)
+        params = {**params, **qp_params}
+        sub_query = f"INNER JOIN {sub_query} USING(session_id)"
     conditions = ["issue_type = 'click_rage'", "event_type = 'ISSUE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end

@@ -322,6 +348,7 @@ def query_click_rage_by_period(project_id, start_time, end_time,
               FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
               LEFT JOIN (SELECT session_id, url_host, url_path, toStartOfInterval(datetime, INTERVAL %(step_size)s second ) as dtime
                          FROM experimental.events
+                         {sub_query}
                          WHERE project_id = %(project_id)s
                            AND datetime >= toDateTime(%(startTimestamp)s/1000)
                            AND datetime < toDateTime(%(endTimestamp)s/1000)
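The from chalicelib.core import sessions_exp placed between function definitions, rather than at the top of the module, is presumably the "exp circular import" fix from the commit title: a top-of-file import would execute while this module is still half-initialized if sessions_exp (directly or transitively) imports it back. Note that the query_*_by_period helpers above already reference sessions_exp before the import statement appears in the file; that works because module-level names are resolved when a function is called, not when it is defined. A generic sketch of the deferred-import pattern (module names are hypothetical):

    # --- insights_mod.py ---
    STEP_DENSITY = 3            # bound before the deferred import below

    def build(a, b):
        # resolved at call time, after both modules are fully loaded
        return search_mod.query_parts(a, b)

    import search_mod           # deferred: search_mod may import insights_mod back,
                                # and by now the names it needs already exist

    # --- search_mod.py ---
    import insights_mod         # safe: attributes are only read at call time

    def query_parts(a, b):
        return (a, b, insights_mod.STEP_DENSITY)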
@@ -391,26 +418,24 @@ def query_click_rage_by_period(project_id, start_time, end_time,

 def fetch_selected(project_id, data: schemas_ee.GetInsightsSchema):
     output = list()
-    # TODO: Handle filters of GetInsightsSchema
-    # data.series[0].filter.filters
     if data.metricValue is None or len(data.metricValue) == 0:
         data.metricValue = []
         for v in schemas_ee.InsightCategories:
             data.metricValue.append(v)
+    filters = None
+    if len(data.series) > 0:
+        filters = data.series[0].filter
+
     if schemas_ee.InsightCategories.errors in data.metricValue:
-        output += query_most_errors_by_period(project_id=project_id,
-                                              start_time=data.startTimestamp,
-                                              end_time=data.endTimestamp)
+        output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp,
+                                              end_time=data.endTimestamp, filters=filters)
     if schemas_ee.InsightCategories.network in data.metricValue:
-        output += query_requests_by_period(project_id=project_id,
-                                           start_time=data.startTimestamp,
-                                           end_time=data.endTimestamp)
+        output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp,
+                                           end_time=data.endTimestamp, filters=filters)
     if schemas_ee.InsightCategories.rage in data.metricValue:
-        output += query_click_rage_by_period(project_id=project_id,
-                                             start_time=data.startTimestamp,
-                                             end_time=data.endTimestamp)
-    if schemas_ee.InsightCategories.resources in data.metricValue:
-        output += query_cpu_memory_by_period(project_id=project_id,
-                                             start_time=data.startTimestamp,
-                                             end_time=data.endTimestamp)
+        output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp,
+                                             end_time=data.endTimestamp, filters=filters)
+    if schemas_ee.InsightCategories.resources in data.metricValue:
+        output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp,
+                                             end_time=data.endTimestamp, filters=filters)
     return output
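With this change, the filter attached to the first card series (when the client sends one) is threaded into all four per-category insight queries. A hedged usage sketch; argument values are illustrative:

    # hypothetical call; with an empty series list, filters stays None and the
    # query_*_by_period helpers run unfiltered, exactly as before this commit
    payload = schemas_ee.GetInsightsSchema(
        metricValue=[schemas_ee.InsightCategories.errors,
                     schemas_ee.InsightCategories.network],
        series=[])
    insights = fetch_selected(project_id=1, data=payload)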
@@ -50,6 +50,7 @@ rm -rf ./chalicelib/utils/captcha.py
 rm -rf ./chalicelib/utils/dev.py
 rm -rf ./chalicelib/utils/email_handler.py
 rm -rf ./chalicelib/utils/email_helper.py
+rm -rf ./chalicelib/utils/errors_helper.py
 rm -rf ./chalicelib/utils/event_filter_definition.py
 rm -rf ./chalicelib/utils/github_client_v3.py
 rm -rf ./chalicelib/utils/helper.py
@@ -48,11 +48,10 @@ class InsightCategories(str, Enum):


 class GetInsightsSchema(BaseModel):
-    startTimestamp: int = Field(TimeUTC.now(-7))
-    endTimestamp: int = Field(TimeUTC.now())
-    # time_step: int = Field(default=3600)
+    startTimestamp: int = Field(default=TimeUTC.now(-7))
+    endTimestamp: int = Field(default=TimeUTC.now())
     metricValue: List[InsightCategories] = Field(...)
-    series: List[schemas.CardCreateSeriesSchema] = Field([...])
+    series: List[schemas.CardCreateSeriesSchema] = Field(default=[])

     class Config:
         alias_generator = schemas.attribute_to_camel_case
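The series line fixes a subtle default: Field([...]) passes a literal list containing the Ellipsis object as the default value, not pydantic's bare ... "required" marker, so an omitted series silently defaulted to [Ellipsis]; fetch_selected above would then see len(data.series) > 0 as true and crash on data.series[0].filter. Field(default=[]) yields a clean empty list, which pydantic copies per instance. A minimal, self-contained illustration under pydantic v1 semantics (which the validator/root_validator usage elsewhere in this diff suggests):

    from typing import List
    from pydantic import BaseModel, Field

    class Broken(BaseModel):
        items: List[int] = Field([...])        # default is the list [Ellipsis]

    class Fixed(BaseModel):
        items: List[int] = Field(default=[])   # clean empty default

    print(Broken().items)  # [Ellipsis] (pydantic v1 does not validate defaults)
    print(Fixed().items)   # []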