* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps (sketched below)

* feat(chalice): autocomplete now returns the top 10 results with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* refactor(DB): changed the list of integration providers

* refactor(chalice): enable exp funnels by default

* refactor(chalice): removed the issues list from the pathAnalysis card

* refactor(chalice): removed unused code
Author: Kraiem Taha Yassine (2024-11-13 12:16:00 +01:00, committed by GitHub)
Parent: 7c2326c7d2
Commit: c88bd374e9
20 changed files with 36 additions and 335 deletions
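
The heatmaps refactor above narrows the candidate session search to sessions that recorded at least one location (page) event. The heatmaps code itself is not part of the hunks shown here, but the same kind of constraint appears in the removed __get_path_analysis_issues helper below. A minimal sketch of that pattern, reusing the schema classes that appear in these diffs (the helper name and call site are hypothetical, not taken from this commit):

# Sketch only: restrict a session search to sessions with at least one
# LOCATION (page) event. Schema classes are taken from the hunks below;
# add_location_constraint is an illustrative, hypothetical helper.
import schemas

def add_location_constraint(search_data: schemas.SessionsSearchPayloadSchema) -> schemas.SessionsSearchPayloadSchema:
    search_data.events.append(
        schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
                                          operator=schemas.SearchEventOperator.IS_ANY,
                                          value=[]))
    return search_data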


@@ -12,20 +12,6 @@ from chalicelib.utils.TimeUTC import TimeUTC
ASSIST_KEY = config("ASSIST_KEY")
ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
s.user_device_type,
s.user_country,
s.start_ts,
s.user_anonymous_id,
s.platform
"""
def get_live_sessions_ws_user_id(project_id, user_id):


@@ -211,51 +211,19 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
return results
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.filters) > 0 or len(data.series) > 0:
filters = [f.model_dump(by_alias=True) for f in data.filters] \
+ [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
else:
return []
search_data = schemas.SessionsSearchPayloadSchema(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
limit=data.limit,
page=data.page,
filters=filters
)
# ---- To make issues response close to the chart response
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
operator=schemas.MathOperator.GREATER,
value=[1]))
if len(data.start_point) == 0:
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
operator=schemas.SearchEventOperator.IS_ANY,
value=[]))
# ---- End
for s in data.excludes:
search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator.NOT_ON,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.CardSchema):
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.ISSUES:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
return __get_table_of_issues(project=project, user_id=user_id, data=data)
supported = {
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
schemas.MetricType.PATH_ANALYSIS: not_supported,
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
return supported.get(data.metric_type, not_supported)()
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
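
Because the hunk above lost its +/- markers, the before/after pairs can be hard to read: __get_path_analysis_issues is removed, get_issues now receives a ProjectContext instead of a project_id, and every metric type except the table of issues resolves to not_supported, which is why the dispatch call no longer passes arguments. A sketch of get_issues as it reads after this change (reconstructed from the pairs above; not_supported is assumed to accept a call with no arguments):

def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.CardSchema):
    if data.is_predefined:
        return not_supported()
    if data.metric_of == schemas.MetricOfTable.ISSUES:
        return __get_table_of_issues(project=project, user_id=user_id, data=data)
    supported = {
        schemas.MetricType.TIMESERIES: not_supported,
        schemas.MetricType.TABLE: not_supported,
        schemas.MetricType.HEAT_MAP: not_supported,
        schemas.MetricType.INSIGHTS: not_supported,
        schemas.MetricType.PATH_ANALYSIS: not_supported,
    }
    # every remaining metric type maps to not_supported, so the selected
    # handler is called without project/data/user_id
    return supported.get(data.metric_type, not_supported)()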


@@ -35,28 +35,6 @@ def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
return events
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema,
metric_format: schemas.MetricExtendedFormatType):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
return {"stages": [], "totalDropDueToIssues": 0}
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data,
project_id=project_id,
metric_format=metric_format)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
if metric_format == schemas.MetricFormatType.SESSION_COUNT and total_drop_due_to_issues > (
insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]):
total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
elif metric_format == schemas.MetricExtendedFormatType.USER_COUNT and total_drop_due_to_issues > (
insights[0]["usersCount"] - insights[-1]["usersCount"]):
total_drop_due_to_issues = insights[0]["usersCount"] - insights[-1]["usersCount"]
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
@@ -69,7 +47,7 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem
last_stage=len(data.events)))}
def get_simple_funnel(project:schemas.ProjectContext, data: schemas.CardSeriesFilterSchema,
def get_simple_funnel(project: schemas.ProjectContext, data: schemas.CardSeriesFilterSchema,
metric_format: schemas.MetricExtendedFormatType):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
@@ -79,4 +57,4 @@ def get_simple_funnel(project:schemas.ProjectContext, data: schemas.CardSeriesFi
project=project,
metric_format=metric_format)
return {"stages": insights, "totalDropDueToIssues": 0}
return {"stages": insights}


@@ -1,32 +1,5 @@
from chalicelib.utils import pg_client, helper
ISSUE_TYPES = ['click_rage', 'dead_click', 'excessive_scrolling', 'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'ml_cpu', 'ml_memory', 'ml_dead_click', 'ml_click_rage',
'ml_mouse_thrashing', 'ml_excessive_scrolling', 'ml_slow_resources', 'custom', 'js_exception',
'custom_event_error', 'js_error']
ORDER_QUERY = """\
(CASE WHEN type = 'js_exception' THEN 0
WHEN type = 'bad_request' THEN 1
WHEN type = 'missing_resource' THEN 2
WHEN type = 'click_rage' THEN 3
WHEN type = 'dead_click' THEN 4
WHEN type = 'memory' THEN 5
WHEN type = 'cpu' THEN 6
WHEN type = 'crash' THEN 7
ELSE -1 END)::INTEGER
"""
NAME_QUERY = """\
(CASE WHEN type = 'js_exception' THEN 'Errors'
WHEN type = 'bad_request' THEN 'Bad Requests'
WHEN type = 'missing_resource' THEN 'Missing Images'
WHEN type = 'click_rage' THEN 'Click Rage'
WHEN type = 'dead_click' THEN 'Dead Clicks'
WHEN type = 'memory' THEN 'High Memory'
WHEN type = 'cpu' THEN 'High CPU'
WHEN type = 'crash' THEN 'Crashes'
ELSE type::text END)::text
"""
def get(project_id, issue_id):
with pg_client.PostgresClient() as cur:


@@ -46,20 +46,6 @@ def __get_constraint_values(data):
return params
METADATA_FIELDS = {"userId": "user_id",
"userAnonymousId": "user_anonymous_id",
"metadata1": "metadata_1",
"metadata2": "metadata_2",
"metadata3": "metadata_3",
"metadata4": "metadata_4",
"metadata5": "metadata_5",
"metadata6": "metadata_6",
"metadata7": "metadata_7",
"metadata8": "metadata_8",
"metadata9": "metadata_9",
"metadata10": "metadata_10"}
def __get_meta_constraint(project_id, data):
if len(data.get("filters", [])) == 0:
return []
@@ -101,14 +87,6 @@ def __get_meta_constraint(project_id, data):
return constraints
SESSIONS_META_FIELDS = {"revId": "rev_id",
"country": "user_country",
"os": "user_os",
"platform": "user_device_type",
"device": "user_device",
"browser": "user_browser"}
def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -265,33 +243,6 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d
return helper.dict_to_camel_case(row)
def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("requests.type = 'fetch'")
pg_sub_query.append("requests.method IS NOT NULL")
pg_sub_query.append(f"requests.status_code/100 = {status}")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT requests.method,
requests.host,
requests.path,
COUNT(requests.session_id) AS all_requests
FROM events_common.requests INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY requests.method, requests.host, requests.path
ORDER BY all_requests DESC
LIMIT 10;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
rows = cur.fetchall()
for r in rows:
r["url_hostpath"] = r.pop("host") + r.pop("path")
return helper.list_to_camel_case(rows)
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)


@@ -438,43 +438,6 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)
with pg_client.PostgresClient() as cur:
full_args["issues_limit"] = data.limit
full_args["issues_limit_s"] = (data.page - 1) * data.limit
full_args["issues_limit_e"] = data.page * data.limit
main_query = cur.mogrify(f"""SELECT COUNT(1) AS count,
COALESCE(SUM(session_count), 0) AS count,
COALESCE(JSONB_AGG(ranked_issues)
FILTER ( WHERE rn > %(issues_limit_s)s
AND rn <= %(issues_limit_e)s ), '[]'::JSONB) AS values
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY session_count DESC) AS rn
FROM (SELECT type AS name, context_string AS value, COUNT(DISTINCT session_id) AS total
FROM (SELECT session_id
{query_part}) AS filtered_sessions
INNER JOIN events_common.issues USING (session_id)
INNER JOIN public.issues USING (issue_id)
WHERE project_id = %(project_id)s
AND timestamp >= %(startDate)s
AND timestamp <= %(endDate)s
GROUP BY type, context_string
ORDER BY session_count DESC) AS filtered_issues
) AS ranked_issues;""", full_args)
logger.debug("--------------------")
logger.debug(main_query)
logger.debug("--------------------")
cur.execute(main_query)
sessions = helper.dict_to_camel_case(cur.fetchone())
for s in sessions["values"]:
s.pop("rn")
return sessions
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \


@@ -18,7 +18,7 @@ def __group_metadata(session, project_metadata):
return meta
def get_pre_replay(project_id, session_id, context: schemas.CurrentContext):
def get_pre_replay(project_id, session_id):
return {
'domURL': [sessions_mobs.get_first_url(project_id=project_id, session_id=session_id, check_existence=False)]}


@@ -273,7 +273,7 @@ def get_first_mob_file(projectId: int, sessionId: Union[int, str],
return {"errors": ["session not found"]}
else:
sessionId = int(sessionId)
data = sessions_replay.get_pre_replay(project_id=projectId, session_id=sessionId, context=context)
data = sessions_replay.get_pre_replay(project_id=projectId, session_id=sessionId)
if data is None:
return {"errors": ["session not found"]}
return {


@@ -95,7 +95,7 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(..
@app.post('/{projectId}/cards/try/issues', tags=["cards"])
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)}
return {"data": custom_metrics.get_issues(project=context.project, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards', tags=["cards"])


@@ -233,51 +233,19 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
return results
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.filters) > 0 or len(data.series) > 0:
filters = [f.model_dump(by_alias=True) for f in data.filters] \
+ [f.model_dump(by_alias=True) for f in data.series[0].filter.filters]
else:
return []
search_data = schemas.SessionsSearchPayloadSchema(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
limit=data.limit,
page=data.page,
filters=filters
)
# ---- To make issues response close to the chart response
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
operator=schemas.MathOperator.GREATER,
value=[1]))
if len(data.start_point) == 0:
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
operator=schemas.SearchEventOperator.IS_ANY,
value=[]))
# ---- End
for s in data.excludes:
search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator.NOT_ON,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.CardSchema):
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.ISSUES:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
return __get_table_of_issues(project=project, user_id=user_id, data=data)
supported = {
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
schemas.MetricType.PATH_ANALYSIS: not_supported,
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
return supported.get(data.metric_type, not_supported)()
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):


@@ -3,7 +3,7 @@ from decouple import config
import schemas
from chalicelib.core import errors_legacy
from chalicelib.core import metrics, metadata
from chalicelib.core import sourcemaps, sessions
from chalicelib.core import sessions
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -106,30 +106,6 @@ def __flatten_sort_key_count(data):
]
def __rearrange_chart_details(start_at, end_at, density, chart):
chart = list(chart)
for i in range(len(chart)):
chart[i] = {"timestamp": chart[i][0], "count": chart[i][1]}
chart = metrics.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density,
neutral={"count": 0})
return chart
def __process_tags(row):
return [
{"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
{"name": "browser.ver",
"partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
{"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
{"name": "OS.ver",
"partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
{"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
{"name": "device",
"partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
{"name": "country", "partitions": __flatten_sort_key_count(data=row.pop("country_partition"))}
]
def __process_tags_map(row):
browsers_partition = row.pop("browsers_partition")
os_partition = row.pop("os_partition")
@@ -181,35 +157,23 @@ def get_details(project_id, error_id, user_id, **data):
MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0)
MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0)
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
MAIN_EVENTS_TABLE_24 = exp_ch_helper.get_main_events_table(TimeUTC.now())
ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24")
ch_sub_query24.append("error_id = %(error_id)s")
# pg_sub_query30_err = __get_basic_constraints(time_constraint=True, startTime_arg_name="startDate30",
# endTime_arg_name="endDate30", project_key="errors.project_id",
# table_name="errors")
# pg_sub_query30_err.append("sessions.project_id = toUInt16(%(project_id)s)")
# pg_sub_query30_err.append("sessions.datetime >= toDateTime(%(startDate30)s/1000)")
# pg_sub_query30_err.append("sessions.datetime <= toDateTime(%(endDate30)s/1000)")
# pg_sub_query30_err.append("error_id = %(error_id)s")
# pg_sub_query30_err.append("source ='js_exception'")
ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30",
project_key="errors.project_id")
ch_sub_query30.append("error_id = %(error_id)s")
ch_basic_query = __get_basic_constraints(time_constraint=False)
ch_basic_query.append("error_id = %(error_id)s")
# ch_basic_query_session = ch_basic_query[:]
# ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)")
with ch_client.ClickHouseClient() as ch:
data["startDate24"] = TimeUTC.now(-1)
data["endDate24"] = TimeUTC.now()
data["startDate30"] = TimeUTC.now(-30)
data["endDate30"] = TimeUTC.now()
# # TODO: remove time limits
# data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000
# data["endDate24"] = 1650470729000
# data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000
# data["endDate30"] = 1650470729000
density24 = int(data.get("density24", 24))
step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24)
density30 = int(data.get("density30", 30))
@@ -353,17 +317,11 @@ def get_details(project_id, error_id, user_id, **data):
if status is not None:
status = status[0]
# row["stack"] = format_first_stack_frame(status).pop("stack")
# row["status"] = status.pop("status")
# row["parent_error_id"] = status.pop("parent_error_id")
row["favorite"] = status.pop("favorite")
row["viewed"] = status.pop("viewed")
row["last_hydrated_session"] = status
else:
# row["stack"] = []
row["last_hydrated_session"] = None
# row["status"] = "untracked"
# row["parent_error_id"] = None
row["favorite"] = False
row["viewed"] = False
row["chart24"] = metrics.__complete_missing_steps(start_time=data["startDate24"], end_time=data["endDate24"],
@@ -712,10 +670,6 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
}
def __save_stacktrace(error_id, data):
errors_legacy.__save_stacktrace(error_id=error_id, data=data)
def get_trace(project_id, error_id):
return errors_legacy.get_trace(project_id=project_id, error_id=error_id)


@@ -485,50 +485,6 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)
with ch_client.ClickHouseClient() as cur:
full_args["issues_limit"] = data.limit
full_args["issues_limit_s"] = (data.page - 1) * data.limit
full_args["issues_limit_e"] = data.page * data.limit
main_query = cur.format(f"""SELECT issues.type AS name,
issues.context_string AS value,
COUNT(DISTINCT raw_sessions.session_id) AS session_count,
sum(session_count) OVER () AS total_sessions,
COUNT(1) OVER () AS count
FROM (SELECT session_id
{query_part}) AS raw_sessions
INNER JOIN experimental.events ON (raw_sessions.session_id = events.session_id)
INNER JOIN experimental.issues ON (events.issue_id = issues.issue_id)
WHERE event_type = 'ISSUE'
AND events.datetime >= toDateTime(%(startDate)s / 1000)
AND events.datetime <= toDateTime(%(endDate)s / 1000)
AND events.project_id = %(projectId)s
AND issues.project_id = %(projectId)s
GROUP BY issues.type, issues.context_string
ORDER BY session_count DESC
LIMIT %(issues_limit)s OFFSET %(issues_limit_s)s""", full_args)
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
issues = cur.execute(main_query)
issues = helper.list_to_camel_case(issues)
if len(issues) > 0:
total_sessions = issues[0]["totalSessions"]
issues_count = issues[0]["count"]
for s in issues:
s.pop("totalSessions")
s.pop("count")
else:
total_sessions = 0
issues_count = 0
return {"total": issues_count, "count": total_sessions, "values": issues}
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \


@@ -18,7 +18,7 @@ def __group_metadata(session, project_metadata):
return meta
def get_pre_replay(project_id, session_id, context: schemas.CurrentContext):
def get_pre_replay(project_id, session_id):
return {
'domURL': [sessions_mobs.get_first_url(project_id=project_id, session_id=session_id, check_existence=False)]}


@@ -32,7 +32,7 @@ EXP_ALERTS=false
EXP_AUTOCOMPLETE=false
EXP_ERRORS_GET=false
EXP_ERRORS_SEARCH=false
EXP_FUNNELS=false
EXP_FUNNELS=true
EXP_RESOURCES=true
EXP_SESSIONS_SEARCH=false
FS_DIR=/mnt/efs
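
This hunk flips the EXP_FUNNELS default so funnel cards go through the experimental implementation out of the box. The code that consumes the flag is not part of these hunks; below is a sketch of how such a flag is typically read, assuming python-decouple as imported elsewhere in this commit (the variable name matches the env file above, but the call site and default shown are assumptions):

# Sketch only: reading the EXP_FUNNELS switch; the real call site for the
# funnels toggle is not shown in this commit's hunks.
from decouple import config

EXP_FUNNELS = config("EXP_FUNNELS", default=True, cast=bool)

if EXP_FUNNELS:
    # route funnel cards through the experimental implementation
    pass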


@@ -289,7 +289,7 @@ def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_ta
return {"errors": ["session not found"]}
else:
sessionId = int(sessionId)
data = sessions_replay.get_pre_replay(project_id=projectId, session_id=sessionId, context=context)
data = sessions_replay.get_pre_replay(project_id=projectId, session_id=sessionId)
if data is None:
return {"errors": ["session not found"]}
return {


@@ -95,7 +95,7 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(..
@app.post('/{projectId}/cards/try/issues', tags=["cards"])
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)}
return {"data": custom_metrics.get_issues(project=context.project, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards', tags=["cards"])


@@ -48,13 +48,13 @@ DELETE
FROM public.metrics
WHERE metric_of IN ('avgCpu', 'avgDomContentLoaded',
'avgDomContentLoadStart', 'avgFirstContentfulPixel',
'avgFirstPaint',
'avgFirstPaint',
'avgFps', 'avgImageLoadTime',
'avgPageLoadTime', 'avgRequestLoadTime',
'avgResponseTime', 'avgSessionDuration',
'avgTillFirstByte', 'avgTimeToRender')
or metric_of IN ('timeToRender', 'cpu','crashes'
'fps', 'avgTimeToInteractive',
or metric_of IN ('timeToRender', 'cpu', 'crashes'
'fps', 'avgTimeToInteractive',
'avgPagesResponseTime', 'avgUsedJsHeapSize',
'memoryConsumption', 'pagesResponseTime',
'pagesDomBuildtime', 'pagesResponseTimeDistribution',
@@ -62,7 +62,7 @@ WHERE metric_of IN ('avgCpu', 'avgDomContentLoaded',
'slowestDomains', 'speedLocation', 'impactedSessionsBySlowPages',
'avgPagesDomBuildtime')
or metric_of IN ('missingResources', 'resourcesLoadingTime',
'slowestResources', 'callsErrors','resourceTypeVsResponseEnd',
'slowestResources', 'callsErrors', 'resourceTypeVsResponseEnd',
'resourcesCountByType');
DELETE
@@ -76,6 +76,8 @@ DROP TABLE IF EXISTS events.resources;
DROP TYPE IF EXISTS events.resource_type;
DROP TYPE IF EXISTS events.resource_method;
ALTER TYPE integration_provider ADD VALUE IF NOT EXISTS 'dynatrace';
COMMIT;
\elif :is_next


@@ -300,7 +300,7 @@ CREATE TABLE public.announcements
);
CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --,'jira','github');
CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch', 'dynatrace');
CREATE TABLE public.integrations
(
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,


@@ -43,13 +43,13 @@ DELETE
FROM public.metrics
WHERE metric_of IN ('avgCpu', 'avgDomContentLoaded',
'avgDomContentLoadStart', 'avgFirstContentfulPixel',
'avgFirstPaint',
'avgFirstPaint',
'avgFps', 'avgImageLoadTime',
'avgPageLoadTime', 'avgRequestLoadTime',
'avgResponseTime', 'avgSessionDuration',
'avgTillFirstByte', 'avgTimeToRender')
or metric_of IN ('timeToRender', 'cpu','crashes'
'fps', 'avgTimeToInteractive',
or metric_of IN ('timeToRender', 'cpu', 'crashes'
'fps', 'avgTimeToInteractive',
'avgPagesResponseTime', 'avgUsedJsHeapSize',
'memoryConsumption', 'pagesResponseTime',
'pagesDomBuildtime', 'pagesResponseTimeDistribution',
@@ -57,7 +57,7 @@ WHERE metric_of IN ('avgCpu', 'avgDomContentLoaded',
'slowestDomains', 'speedLocation', 'impactedSessionsBySlowPages',
'avgPagesDomBuildtime')
or metric_of IN ('missingResources', 'resourcesLoadingTime',
'slowestResources', 'callsErrors','resourceTypeVsResponseEnd',
'slowestResources', 'callsErrors', 'resourceTypeVsResponseEnd',
'resourcesCountByType');
DELETE
@@ -71,6 +71,8 @@ DROP TABLE IF EXISTS events.resources;
DROP TYPE IF EXISTS events.resource_type;
DROP TYPE IF EXISTS events.resource_method;
ALTER TYPE integration_provider ADD VALUE IF NOT EXISTS 'dynatrace';
COMMIT;
\elif :is_next


@@ -262,7 +262,7 @@ CREATE TABLE public.announcements
);
CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github');
CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch', 'dynatrace');
CREATE TABLE public.integrations
(
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,