Merge branch 'dev' of github.com:openreplay/openreplay into funnels

Shekar Siri 2022-04-22 12:47:03 +02:00
commit a287a9ca47
48 changed files with 2104 additions and 1629 deletions

View file

@ -105,8 +105,10 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
return results
def try_sessions(project_id, user_id, data: schemas.TryCustomMetricsPayloadSchema):
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
results = []
if data.series is None:
return results
for s in data.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
@ -251,7 +253,7 @@ def get_all(project_id, user_id, include_series=False):
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (user_id = %(user_id)s OR metrics.is_public)
ORDER BY metrics.edited_at, metrics.created_at;""",
ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
{"project_id": project_id, "user_id": user_id}
)
)

View file

@ -339,9 +339,9 @@ def __generic_autocomplete(event: Event):
class event_type:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="base_path")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="base_path")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",

View file

@ -21,7 +21,7 @@ def __transform_journey(rows):
JOURNEY_DEPTH = 5
JOURNEY_TYPES = {
"PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"},
"PAGES": {"table": "events.pages", "column": "path", "table_id": "message_id"},
"CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"},
# "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only
"EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"}

View file

@ -353,8 +353,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
main_col = "base_path"
extra_col = ", base_path"
main_col = "path"
extra_col = ", path"
main_query = cur.mogrify(f"""{pre_query}
SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values
FROM (SELECT {main_col} AS name,

View file

@ -108,7 +108,7 @@ def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchem
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int,
data: schemas.TryCustomMetricsPayloadSchema = Body(...),
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data}

View file

@ -70,16 +70,14 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
session_id, message_id, timestamp, referrer, base_referrer, host, path, query,
dom_content_loaded_time, load_time, response_end, first_paint_time, first_contentful_paint_time,
speed_index, visually_complete, time_to_interactive,
response_time, dom_building_time,
base_path
response_time, dom_building_time
) VALUES (
$1, $2, $3,
$4, $5,
$6, $7, $8,
NULLIF($9, 0), NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0),
NULLIF($14, 0), NULLIF($15, 0), NULLIF($16, 0),
NULLIF($17, 0), NULLIF($18, 0),
''
NULLIF($17, 0), NULLIF($18, 0)
)
`,
sessionID, e.MessageID, e.Timestamp,
@ -230,7 +228,7 @@ func (conn *Conn) InsertWebFetchEvent(sessionID uint64, savePayload bool, e *Fet
duration, success
) VALUES (
$1, $2, $3,
$4, $5, $6, $7
$4, $5, $6, $7,
$8, $9, $10::smallint, NULLIF($11, '')::http_method,
$12, $13
) ON CONFLICT DO NOTHING`,

View file

@ -14,6 +14,10 @@ func GetURLParts(rawURL string) (string, string, string, error) {
if err != nil {
return "", "", "", err
}
// u.Scheme ?
return u.Host, u.RawPath, u.RawQuery, nil
// u.Scheme u.Fragment / RawFragment ?
path := u.Path
if u.RawPath != "" {
path = u.RawPath
}
return u.Host, path, u.RawQuery, nil
}
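The updated helper prefers u.RawPath, which holds the original percent-encoded path and is only set when it differs from the decoded u.Path, and falls back to u.Path otherwise. A rough Python sketch of the same host/path/query split; the function name and example URL are illustrative, not part of this patch:

from urllib.parse import urlsplit

def get_url_parts(raw_url: str):
    # urlsplit keeps the path percent-encoded, matching the preference for
    # u.RawPath over the decoded u.Path in the Go helper above.
    u = urlsplit(raw_url)
    return u.netloc, u.path, u.query

# ('app.example.com', '/users/a%2Fb', 'tab=events')
print(get_url_parts("https://app.example.com/users/a%2Fb?tab=events"))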

View file

@ -344,8 +344,8 @@ def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query += meta_condition
ch_sub_query.append("(pages.dom_content_loaded_event_end>0 OR pages.first_contentful_paint>0)")
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
ch_query = f"""SELECT COALESCE(avgOrNull(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
COALESCE(avgOrNull(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -375,7 +375,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""SELECT AVG(pages.load_event_end) AS avg_page_load_time
ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS avg_page_load_time
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND pages.load_event_end>0;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -387,7 +387,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_query = f"""SELECT AVG(resources.duration) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0;"""
row = ch.execute(query=ch_query,
@ -433,8 +433,8 @@ def __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args):
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
ch_query = f"""SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(avgOrNull(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -460,7 +460,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
AVG(resources.duration) AS avg,
COALESCE(avgOrNull(resources.duration),0) AS avg,
COUNT(resources.session_id) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
@ -477,7 +477,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
charts = {}
ch_query = f"""SELECT url,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
GROUP BY url, timestamp
@ -540,7 +540,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
"endTimestamp": endTimestamp}
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'img' AND resources.duration>0
@ -553,7 +553,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
end_time=endTimestamp,
density=density, neutral={"avg": 0})]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'fetch' AND resources.duration>0
@ -571,7 +571,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
ch_sub_query_chart += meta_condition
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(pages.load_event_end) AS avg
COALESCE(avgOrNull(pages.load_event_end),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND pages.load_event_end>0
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
@ -896,7 +896,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -906,7 +906,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
"endTimestamp": endTimestamp,
"value": url, "type": type, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(resources.duration) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -929,7 +929,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(pages.dom_building_time) AS value
COALESCE(avgOrNull(pages.dom_building_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -939,7 +939,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
"endTimestamp": endTimestamp,
"value": url, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(pages.dom_building_time) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_building_time),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -970,7 +970,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT any(url) AS url, any(type) AS type,
splitByChar('/', resources.url_hostpath)[-1] AS name,
AVG(NULLIF(resources.duration,0)) AS avg
COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY name
@ -987,7 +987,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
names = {f"name_{i}": r["name"] for i, r in enumerate(rows)}
ch_query = f"""SELECT splitByChar('/', resources.url_hostpath)[-1] AS name,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND ({" OR ".join([f"endsWith(resources.url_hostpath, %(name_{i})s)>0" for i in range(len(names.keys()))])})
@ -1044,7 +1044,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT pages.user_country, AVG(pages.speed_index) AS avg
ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY pages.user_country
@ -1053,7 +1053,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(pages.speed_index) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(pages.speed_index),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1073,7 +1073,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
ch_sub_query_chart.append(f"url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(pages.response_time) AS value
COALESCE(avgOrNull(pages.response_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1084,7 +1084,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
"endTimestamp": endTimestamp,
"value": url, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(pages.response_time) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1114,7 +1114,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
ch_query = f"""SELECT AVG(pages.response_time) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query,
@ -1247,12 +1247,12 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT (SELECT COALESCE(AVG(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0) AS avg_response_time,
ch_query = f"""SELECT (SELECT COALESCE(avgOrNull(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0) AS avg_response_time,
(SELECT COUNT(pages.session_id) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)}) AS count_requests,
(SELECT COALESCE(AVG(pages.first_paint),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0) AS avg_first_paint,
(SELECT COALESCE(AVG(pages.dom_content_loaded_event_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0) AS avg_dom_content_loaded,
(SELECT COALESCE(AVG(pages.ttfb),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0) AS avg_till_first_bit,
(SELECT COALESCE(AVG(pages.time_to_interactive),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0) AS avg_time_to_interactive;"""
(SELECT COALESCE(avgOrNull(pages.first_paint),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0) AS avg_first_paint,
(SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0) AS avg_dom_content_loaded,
(SELECT COALESCE(avgOrNull(pages.ttfb),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0) AS avg_till_first_bit,
(SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0) AS avg_time_to_interactive;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
@ -1274,7 +1274,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(pages.visually_complete) AS value
COALESCE(avgOrNull(pages.visually_complete),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1284,7 +1284,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(pages.visually_complete) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(pages.visually_complete),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1311,7 +1311,7 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d
COUNT(DISTINCT pages.session_id) AS count
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
AND (pages.response_time)>(SELECT AVG(pages.response_time)
AND (pages.response_time)>(SELECT COALESCE(avgOrNull(pages.response_time),0)
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(sch_sub_query)})*2
GROUP BY timestamp
@ -1337,7 +1337,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(performance.avg_used_js_heap_size) AS value
COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1347,7 +1347,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(performance.avg_used_js_heap_size) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS avg
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1369,7 +1369,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(performance.avg_cpu) AS value
COALESCE(avgOrNull(performance.avg_cpu),0) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1379,7 +1379,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(performance.avg_cpu) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_cpu),0) AS avg
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1401,7 +1401,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(performance.avg_fps) AS value
COALESCE(avgOrNull(performance.avg_fps),0) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1411,7 +1411,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(performance.avg_fps) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_fps),0) AS avg
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1657,7 +1657,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url_host AS domain,
AVG(resources.duration) AS avg
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.url_host
@ -1667,7 +1667,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(resources.duration) AS avg
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -1900,7 +1900,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
density=density,
neutral={"total": 0, "xhr": 0})
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
AVG(pages.response_end) AS avg_response_end
COALESCE(avgOrNull(pages.response_end),0) AS avg_response_end
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart_response_end)}
GROUP BY timestamp
@ -1963,7 +1963,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp,
AVG(NULLIF(s.count,0)) AS avg,
COALESCE(avgOrNull(NULLIF(s.count,0)),0) AS avg,
groupArray([toString(t.type), toString(t.xavg)]) AS types
FROM
( SELECT resources.session_id,
@ -1976,7 +1976,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
INNER JOIN
(SELECT session_id,
type,
AVG(NULLIF(count,0)) AS xavg
COALESCE(avgOrNull(NULLIF(count,0)),0) AS xavg
FROM (SELECT resources.session_id, resources.type, COUNT(resources.url) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
@ -2111,7 +2111,7 @@ def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("pages.load_event_end>0")
ch_query = f"""SELECT AVG(pages.load_event_end) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2148,7 +2148,7 @@ def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.no
ch_sub_query_chart.append("pages.load_event_end>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(pages.load_event_end),0) AS value
COALESCE(avgOrNull(pages.load_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
@ -2190,7 +2190,7 @@ def __get_application_activity_avg_image_load_time(ch, project_id, startTimestam
ch_sub_query.append("resources.type= %(type)s")
ch_sub_query.append("resources.duration>0")
ch_query = f"""\
SELECT COALESCE(AVG(resources.duration),0) AS value
SELECT COALESCE(avgOrNull(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
row = ch.execute(query=ch_query,
@ -2224,7 +2224,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("resources.duration>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(resources.duration),0) AS value
COALESCE(avgOrNull(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'img'
@ -2265,7 +2265,7 @@ def __get_application_activity_avg_request_load_time(ch, project_id, startTimest
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_sub_query.append("resources.duration>0")
ch_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
row = ch.execute(query=ch_query,
@ -2298,7 +2298,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("resources.duration>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(resources.duration),0) AS value
COALESCE(avgOrNull(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'fetch'
@ -2343,7 +2343,7 @@ def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("pages.dom_content_loaded_event_end>0")
ch_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_event_end),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2363,7 +2363,7 @@ def __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTim
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("pages.dom_content_loaded_event_end>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(pages.dom_content_loaded_event_end),0) AS value
COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2406,7 +2406,7 @@ def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp
ch_sub_query.append("pages.first_contentful_paint>0")
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""\
SELECT COALESCE(AVG(pages.first_contentful_paint),0) AS value
SELECT COALESCE(avgOrNull(pages.first_contentful_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2426,7 +2426,7 @@ def __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTim
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("pages.first_contentful_paint>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(pages.first_contentful_paint),0) AS value
COALESCE(avgOrNull(pages.first_contentful_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2469,7 +2469,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("sessions.pages_count>0")
ch_query = f"""SELECT COALESCE(CEIL(AVG(sessions.pages_count)),0) AS value
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(sessions.pages_count)),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2490,7 +2490,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("sessions.pages_count>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(sessions.pages_count),0) AS value
COALESCE(avgOrNull(sessions.pages_count),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2534,7 +2534,7 @@ def __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, end
ch_sub_query.append("isNotNull(sessions.duration)")
ch_sub_query.append("sessions.duration>0")
ch_query = f"""SELECT COALESCE(AVG(sessions.duration),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(sessions.duration),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2556,7 +2556,7 @@ def __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestam
"endTimestamp": endTimestamp}
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(sessions.duration),0) AS value
COALESCE(avgOrNull(sessions.duration),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2582,7 +2582,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.response_time),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0;"""
params = {"step_size": step_size, "project_id": project_id,
@ -2657,7 +2657,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.first_paint),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.first_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0;"""
params = {"step_size": step_size, "project_id": project_id,
@ -2667,7 +2667,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.first_paint),0) AS value
COALESCE(avgOrNull(pages.first_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.first_paint) AND pages.first_paint>0
GROUP BY timestamp
@ -2700,7 +2700,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
ch_sub_query_chart.append("isNotNull(pages.dom_content_loaded_event_time)")
ch_sub_query_chart.append("pages.dom_content_loaded_event_time>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_event_time),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
@ -2710,7 +2710,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
rows = ch.execute(query=ch_query, params=params)
results = helper.dict_to_camel_case(rows[0])
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.dom_content_loaded_event_time),0) AS value
COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2742,7 +2742,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
ch_sub_query_chart.append("isNotNull(pages.ttfb)")
ch_sub_query_chart.append("pages.ttfb>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.ttfb),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.ttfb),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
@ -2752,7 +2752,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.ttfb),0) AS value
COALESCE(avgOrNull(pages.ttfb),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -2784,7 +2784,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
ch_sub_query_chart.append("isNotNull(pages.time_to_interactive)")
ch_sub_query_chart.append("pages.time_to_interactive >0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive),0) AS value
ch_query = f"""SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
@ -2794,7 +2794,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.time_to_interactive),0) AS value
COALESCE(avgOrNull(pages.time_to_interactive),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
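Throughout this file, AVG(x) and COALESCE(AVG(x), 0) are replaced with COALESCE(avgOrNull(x), 0). The likely reason: ClickHouse's avg() returns nan over an empty set, and COALESCE does not replace nan, whereas avgOrNull() returns NULL, so the fallback to 0 actually takes effect. A simplified sketch of the rewritten shape, with the filters reduced to a single condition for illustration:

# Sketch only: simplified form of the rewritten aggregates above.
ch_query = """
    SELECT COALESCE(avgOrNull(pages.load_event_end), 0) AS avg_page_load_time
    FROM pages
    WHERE pages.project_id = %(project_id)s
      AND pages.load_event_end > 0;
"""
# avgOrNull yields NULL when no rows match, so COALESCE(..., 0) returns 0
# instead of the nan that plain AVG() would produce on an empty set.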

View file

@ -24,7 +24,7 @@ ALTER TABLE IF EXISTS metrics
DROP CONSTRAINT IF EXISTS unique_key;
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS edited_at timestamp NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
ADD COLUMN IF NOT EXISTS is_pinned boolean NOT NULL DEFAULT FALSE,
ADD COLUMN IF NOT EXISTS category text NULL DEFAULT 'custom',
ADD COLUMN IF NOT EXISTS is_predefined boolean NOT NULL DEFAULT FALSE,
@ -53,12 +53,36 @@ CREATE TABLE IF NOT EXISTS dashboard_widgets
);
ALTER TABLE events_common.requests
ADD COLUMN IF NOT EXISTS host text NULL,
ADD COLUMN IF NOT EXISTS base_path text NULL,
ADD COLUMN IF NOT EXISTS query text NULL;
ADD COLUMN IF NOT EXISTS host text NULL,
ADD COLUMN IF NOT EXISTS path text NULL,
ADD COLUMN IF NOT EXISTS query text NULL;
ALTER TABLE events.pages
ADD COLUMN IF NOT EXISTS query text NULL;
DO
$$
BEGIN
IF EXISTS(SELECT *
FROM information_schema.columns
WHERE table_schema = 'events'
AND table_name = 'pages'
AND column_name = 'base_path')
THEN
ALTER TABLE events.pages
DROP COLUMN IF EXISTS path;
ALTER TABLE events.pages
RENAME COLUMN base_path TO path;
DROP INDEX IF EXISTS events.pages_base_path_gin_idx2;
DROP INDEX IF EXISTS pages_base_path_idx2;
ALTER INDEX IF EXISTS events.pages_base_path_gin_idx RENAME TO pages_path_gin_idx;
ALTER INDEX IF EXISTS events.pages_base_path_idx RENAME TO pages_path_idx;
ALTER INDEX IF EXISTS events.pages_base_path_session_id_timestamp_idx RENAME TO pages_path_session_id_timestamp_idx;
ALTER INDEX IF EXISTS events.pages_base_path_base_pathLNGT2_idx RENAME TO pages_path_pathLNGT2_idx;
END IF;
END
$$;
COMMIT;
ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'areaChart';
@ -312,10 +336,13 @@ ON CONFLICT (predefined_key) DO UPDATE
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_base_path_nn_idx ON events_common.requests (base_path) WHERE base_path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_base_path_nn_gin_idx ON events_common.requests USING GIN (base_path gin_trgm_ops) WHERE base_path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp);
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2;
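The DO block above renames events.pages.base_path to path only when the old column still exists, which keeps the migration safe to re-run. A hedged sketch of the same existence check driven from Python; the DSN and helper name are illustrative:

import psycopg2

def column_exists(cur, schema, table, column):
    # The same information_schema probe the migration's DO block performs.
    cur.execute(
        """SELECT 1 FROM information_schema.columns
           WHERE table_schema = %s AND table_name = %s AND column_name = %s""",
        (schema, table, column),
    )
    return cur.fetchone() is not None

with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
    if column_exists(cur, "events", "pages", "base_path"):
        cur.execute("ALTER TABLE events.pages RENAME COLUMN base_path TO path;")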

View file

@ -100,38 +100,36 @@ $$ LANGUAGE plpgsql;
DO
$$
BEGIN
IF (with to_check (name) as (
values ('alerts'),
('announcements'),
('assigned_sessions'),
('autocomplete'),
('basic_authentication'),
('dashboards'),
('dashboard_widgets'),
('errors'),
('funnels'),
('integrations'),
('issues'),
('jira_cloud'),
('jobs'),
('metric_series'),
('metrics'),
('notifications'),
('oauth_authentication'),
('projects'),
('roles'),
('roles_projects'),
('searches'),
('sessions'),
('tenants'),
('traces'),
('user_favorite_errors'),
('user_favorite_sessions'),
('user_viewed_errors'),
('user_viewed_sessions'),
('users'),
('webhooks')
)
IF (with to_check (name) as (values ('alerts'),
('announcements'),
('assigned_sessions'),
('autocomplete'),
('basic_authentication'),
('dashboards'),
('dashboard_widgets'),
('errors'),
('funnels'),
('integrations'),
('issues'),
('jira_cloud'),
('jobs'),
('metric_series'),
('metrics'),
('notifications'),
('oauth_authentication'),
('projects'),
('roles'),
('roles_projects'),
('searches'),
('sessions'),
('tenants'),
('traces'),
('user_favorite_errors'),
('user_favorite_sessions'),
('user_viewed_errors'),
('user_viewed_sessions'),
('users'),
('webhooks'))
select bool_and(exists(select *
from information_schema.tables t
where table_schema = 'public'
@ -798,9 +796,9 @@ $$
name text NOT NULL,
is_public boolean NOT NULL DEFAULT FALSE,
active boolean NOT NULL DEFAULT TRUE,
created_at timestamp default timezone('utc'::text, now()) not null,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp,
edited_at timestamp,
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type metric_type NOT NULL DEFAULT 'timeseries',
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',
@ -914,16 +912,14 @@ LANGUAGE plpgsql;
DO
$$
BEGIN
IF (with to_check (name) as (
values ('clicks'),
('errors'),
('graphql'),
('inputs'),
('pages'),
('performance'),
('resources'),
('state_actions')
)
IF (with to_check (name) as (values ('clicks'),
('errors'),
('graphql'),
('inputs'),
('pages'),
('performance'),
('resources'),
('state_actions'))
select bool_and(exists(select *
from information_schema.tables t
where table_schema = 'events'
@ -938,7 +934,6 @@ $$
timestamp bigint NOT NULL,
host text NOT NULL,
path text NOT NULL,
base_path text NOT NULL,
query text NULL,
referrer text DEFAULT NULL,
base_referrer text DEFAULT NULL,
@ -956,13 +951,9 @@ $$
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX IF NOT EXISTS pages_session_id_idx ON events.pages (session_id);
CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops);
CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops);
CREATE INDEX IF NOT EXISTS pages_timestamp_idx ON events.pages (timestamp);
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops);
CREATE INDEX IF NOT EXISTS pages_base_path_idx ON events.pages (base_path);
CREATE INDEX IF NOT EXISTS pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1));
CREATE INDEX IF NOT EXISTS pages_base_referrer_idx ON events.pages (base_referrer);
CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
length(base_referrer) -
@ -997,8 +988,8 @@ $$
0;
CREATE INDEX IF NOT EXISTS pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
CREATE INDEX IF NOT EXISTS pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
CREATE INDEX IF NOT EXISTS pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp);
CREATE INDEX IF NOT EXISTS pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2;
CREATE INDEX IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
@ -1176,11 +1167,9 @@ LANGUAGE plpgsql;
DO
$$
BEGIN
IF (with to_check (name) as (
values ('customs'),
('issues'),
('requests')
)
IF (with to_check (name) as (values ('customs'),
('issues'),
('requests'))
select bool_and(exists(select *
from information_schema.tables t
where table_schema = 'events_common'
@ -1238,7 +1227,7 @@ $$
status_code smallint NULL,
method http_method NULL,
host text NULL,
base_path text NULL,
path text NULL,
query text NULL,
PRIMARY KEY (session_id, timestamp, seq_index)
);
@ -1263,8 +1252,8 @@ $$
CREATE INDEX IF NOT EXISTS requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_base_path_nn_idx ON events_common.requests (base_path) WHERE base_path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_base_path_nn_gin_idx ON events_common.requests USING GIN (base_path gin_trgm_ops) WHERE base_path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;

View file

@ -1,3 +1,4 @@
//@ts-nocheck
import React, { useState, FC, useEffect } from 'react'
import VideoContainer from '../components/VideoContainer'
import { Icon, Popup, Button } from 'UI'

View file

@ -2,3 +2,10 @@
opacity: 0.5;
pointer-events: none;
}
.divider {
width: 1px;
height: 49px;
margin: 0 10px;
background-color: $gray-light;
}

View file

@ -5,7 +5,7 @@ import cn from 'classnames'
import { toggleChatWindow } from 'Duck/sessions';
import { connectPlayer } from 'Player/store';
import ChatWindow from '../../ChatWindow';
import { callPeer, requestReleaseRemoteControl } from 'Player'
import { callPeer, requestReleaseRemoteControl, toggleAnnotation } from 'Player'
import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager';
import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
@ -31,13 +31,14 @@ interface Props {
userId: String,
toggleChatWindow: (state) => void,
calling: CallingState,
annotating: boolean,
peerConnectionStatus: ConnectionStatus,
remoteControlStatus: RemoteControlStatus,
hasPermission: boolean,
isEnterprise: boolean,
}
function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlStatus, hasPermission, isEnterprise }: Props) {
function AssistActions({ toggleChatWindow, userId, calling, annotating, peerConnectionStatus, remoteControlStatus, hasPermission, isEnterprise }: Props) {
const [ incomeStream, setIncomeStream ] = useState<MediaStream | null>(null);
const [ localStream, setLocalStream ] = useState<LocalStream | null>(null);
const [ callObject, setCallObject ] = useState<{ end: ()=>void } | null >(null);
@ -81,6 +82,23 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
return (
<div className="flex items-center">
{onCall && (
<>
<div
className={
cn(
'cursor-pointer p-2 flex items-center',
{[stl.disabled]: !onCall}
)
}
onClick={ toggleAnnotation }
role="button"
>
<IconButton label={`Annotate`} icon={ annotating ? "pencil-stop" : "pencil"} primaryText redText={annotating} />
</div>
<div className={ stl.divider } />
</>
)}
<div
className={
cn(
@ -91,8 +109,9 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
onClick={ requestReleaseRemoteControl }
role="button"
>
<IconButton label={`${remoteActive ? 'Stop ' : ''} Remote Control`} icon="remote-control" primaryText redText={remoteActive} />
<IconButton label={`Remote Control`} icon={ remoteActive ? "window-x" : "remote-control"} primaryText redText={remoteActive} />
</div>
<div className={ stl.divider } />
<Popup
trigger={
@ -132,6 +151,7 @@ const con = connect(state => {
export default con(connectPlayer(state => ({
calling: state.calling,
annotating: state.annotating,
remoteControlStatus: state.remoteControl,
peerConnectionStatus: state.peerConnectionStatus,
}))(AssistActions))

View file

@ -20,7 +20,7 @@ function CustomMetriLineChart(props: Props) {
margin={Styles.chartMargins}
// syncId={ showSync ? "domainsErrors_4xx" : undefined }
onClick={onClick}
isAnimationActive={ false }
// isAnimationActive={ false }
>
<CartesianGrid strokeDasharray="3 3" vertical={ false } stroke="#EEEEEE" />
<XAxis

View file

@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react'
import { ResponsiveContainer, Tooltip } from 'recharts';
import { PieChart, Pie, Cell } from 'recharts';
@ -57,7 +58,6 @@ function CustomMetricPieChart(props: Props) {
innerRadius,
outerRadius,
value,
index
}) => {
const RADIAN = Math.PI / 180;
let radius1 = 15 + innerRadius + (outerRadius - innerRadius);

View file

@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react';
import { NoContent } from 'UI';
import { Styles } from '../../common';
@ -39,6 +40,8 @@ function ErrorsByOrigin(props: Props) {
<Tooltip {...Styles.tooltip} />
<Bar minPointSize={1} name={<span className="float">1<sup>st</sup> Party</span>} dataKey="firstParty" stackId="a" fill={Styles.colors[0]} />
<Bar name={<span className="float">3<sup>rd</sup> Party</span>} dataKey="thirdParty" stackId="a" fill={Styles.colors[2]} />
{/* <Bar minPointSize={1} name={<span className="float">1<sup>st</sup> Party</span>} dataKey="firstParty" stackId="a" fill={Styles.colors[0]} />
<Bar name={<span className="float">3<sup>rd</sup> Party</span>} dataKey="thirdParty" stackId="a" fill={Styles.colors[2]} /> */}
</BarChart>
</ResponsiveContainer>
</NoContent>

View file

@ -99,7 +99,7 @@ function ResponseTimeDistribution(props: Props) {
label={`${item.percentile}th Percentile (${item.responseTime}ms)`}
/>
}
allowDecimals={false}
// allowDecimals={false}
x={item.responseTime}
strokeWidth={0}
strokeOpacity={1}

View file

@ -1,3 +1,4 @@
//@ts-nocheck
import { useObserver } from 'mobx-react-lite';
import React from 'react';
import { SideMenuitem, SideMenuHeader, Icon, Button } from 'UI';

View file

@ -13,8 +13,7 @@ import { IconButton, Icon } from 'UI';
import FilterSelection from 'Shared/Filters/FilterSelection';
import SeriesName from './SeriesName';
import cn from 'classnames';
import { useDashboardStore } from '../../store/store';
import { observer, useObserver } from 'mobx-react-lite';
import { observer } from 'mobx-react-lite';
interface Props {
seriesIndex: number;
@ -37,7 +36,7 @@ function FilterSeries(props: Props) {
const [expanded, setExpanded] = useState(true)
const { series, seriesIndex } = props;
useEffect(observeChanges, [series])
useEffect(observeChanges, [series.filter]);
const onAddFilter = (filter) => {
series.filter.addFilter(filter)
@ -49,12 +48,10 @@ function FilterSeries(props: Props) {
const onChangeEventsOrder = (e, { name, value }) => {
series.filter.updateKey(name, value)
// props.editSeriesFilter(seriesIndex, { eventsOrder: value });
}
const onRemoveFilter = (filterIndex) => {
series.filter.removeFilter(filterIndex)
// props.removeSeriesFilterFilter(seriesIndex, filterIndex);
}
return (

View file

@ -4,29 +4,32 @@ import CustomMetricPercentage from 'App/components/Dashboard/Widgets/CustomMetri
import CustomMetricTable from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricTable';
import CustomMetricPieChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart';
import { Styles } from 'App/components/Dashboard/Widgets/common';
import { useObserver } from 'mobx-react-lite';
import { observer, useObserver } from 'mobx-react-lite';
import { Loader } from 'UI';
import { useStore } from 'App/mstore';
import WidgetPredefinedChart from '../WidgetPredefinedChart';
import CustomMetricOverviewChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart';
import { getStartAndEndTimestampsByDensity } from 'Types/dashboard/helper';
import { debounce } from 'App/utils';
interface Props {
metric: any;
isWidget?: boolean
}
function WidgetChart(props: Props) {
const { isWidget = false, metric } = props;
const { dashboardStore } = useStore();
const { dashboardStore, metricStore } = useStore();
const _metric: any = useObserver(() => metricStore.instance);
const period = useObserver(() => dashboardStore.period);
const drillDownFilter = useObserver(() => dashboardStore.drillDownFilter);
const colors = Styles.customMetricColors;
const [loading, setLoading] = useState(false)
const [loading, setLoading] = useState(true)
const isOverviewWidget = metric.metricType === 'predefined' && metric.viewType === 'overview';
const params = { density: isOverviewWidget ? 7 : 70 }
const metricParams = { ...params }
const prevMetricRef = useRef<any>();
const [data, setData] = useState<any>(metric.data);
const isTableWidget = metric.metricType === 'table' && metric.viewType === 'table';
const isPieChart = metric.metricType === 'table' && metric.viewType === 'pieChart';
@@ -52,21 +55,28 @@ function WidgetChart(props: Props) {
}
}
const depsString = JSON.stringify(_metric.series);
const fetchMetricChartData = (metric, payload, isWidget) => {
setLoading(true)
dashboardStore.fetchMetricChartData(metric, payload, isWidget).then((res: any) => {
setData(res);
}).finally(() => {
setLoading(false);
});
}
const debounceRequest: any = React.useCallback(debounce(fetchMetricChartData, 500), []);
useEffect(() => {
if (prevMetricRef.current && prevMetricRef.current.name !== metric.name) {
prevMetricRef.current = metric;
return
};
prevMetricRef.current = metric;
setLoading(true);
const payload = isWidget ? { ...params } : { ...metricParams, ...metric.toJson() };
dashboardStore.fetchMetricChartData(metric, payload, isWidget).then((res: any) => {
setData(res);
}).finally(() => {
setLoading(false);
});
}, [period]);
debounceRequest(metric, payload, isWidget);
}, [period, depsString]);
const renderChart = () => {
const { metricType, viewType } = metric;
@@ -121,10 +131,10 @@ function WidgetChart(props: Props) {
return <div>Unknown</div>;
}
return useObserver(() => (
<Loader loading={loading}>
<Loader loading={loading} size="small" style={{ height: `${isOverviewWidget ? 100 : 240}px` }}>
{renderChart()}
</Loader>
));
}
export default WidgetChart;
export default observer(WidgetChart);
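The WidgetChart hunks above replace the direct fetch inside useEffect with a debounced request and re-key the effect on a JSON-serialized copy of the metric series. Below is a minimal sketch of that pattern, assuming a plain trailing-edge debounce; `useDebouncedChart` and the inline `debounce` are illustrative names, not the exact App/utils or mstore implementations.

```ts
// Sketch: debounce chart fetches and key the effect on a serialized series list.
import { useCallback, useEffect, useState } from 'react';

function debounce<A extends any[]>(fn: (...args: A) => void, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A) => {
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => fn(...args), wait);
  };
}

function useDebouncedChart(metric: any, fetchChart: (m: any) => Promise<any>) {
  const [data, setData] = useState<any>(null);
  const [loading, setLoading] = useState(true);

  // useCallback keeps one debounced instance across renders, so a burst of
  // series edits collapses into a single request after 500ms of quiet.
  const request = useCallback(
    debounce((m: any) => {
      setLoading(true);
      fetchChart(m)
        .then(setData)
        .finally(() => setLoading(false));
    }, 500),
    []
  );

  // JSON.stringify turns the observable series array into a stable string,
  // so the effect re-runs only when the series content actually changes.
  const depsString = JSON.stringify(metric.series);
  useEffect(() => {
    request(metric);
  }, [depsString]);

  return { data, loading };
}
```

Note that the loading flag now starts as true, so the first render shows the loader instead of an empty chart while the initial request is in flight.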

View file

@@ -4,9 +4,8 @@ import { metricTypes, metricOf, issueOptions } from 'App/constants/filterOptions
import { FilterKey } from 'Types/filter/filterType';
import { useStore } from 'App/mstore';
import { useObserver } from 'mobx-react-lite';
import { HelpText, Button, Icon } from 'UI'
import { Button, Icon } from 'UI'
import FilterSeries from '../FilterSeries';
import { withRouter } from 'react-router-dom';
import { confirm } from 'UI/Confirmation';
import { withSiteId, dashboardMetricDetails, metricDetails } from 'App/routes'
import DashboardSelectionModal from '../DashboardSelectionModal/DashboardSelectionModal';
@@ -28,31 +27,34 @@ function WidgetForm(props: Props) {
const timeseriesOptions = metricOf.filter(i => i.type === 'timeseries');
const tableOptions = metricOf.filter(i => i.type === 'table');
const isTable = metric.metricType === 'table';
const isTimeSeries = metric.metricType === 'timeseries';
const _issueOptions = [{ text: 'All', value: 'all' }].concat(issueOptions);
const canAddToDashboard = metric.exists() && dashboards.length > 0;
const write = ({ target: { value, name } }) => metricStore.merge({ [ name ]: value });
const writeOption = (e, { value, name }) => {
metricStore.merge({ [ name ]: value });
const obj = { [ name ]: value };
if (name === 'metricValue') {
metricStore.merge({ metricValue: [value] });
obj['metricValue'] = [value];
}
if (name === 'metricOf') {
if (value === FilterKey.ISSUE) {
metricStore.merge({ metricValue: ['all'] });
obj['metricValue'] = ['all'];
}
}
if (name === 'metricType') {
if (value === 'timeseries') {
metricStore.merge({ metricOf: timeseriesOptions[0].value, viewType: 'lineChart' });
obj['metricOf'] = timeseriesOptions[0].value;
obj['viewType'] = 'lineChart';
} else if (value === 'table') {
metricStore.merge({ metricOf: tableOptions[0].value, viewType: 'table' });
obj['metricOf'] = tableOptions[0].value;
obj['viewType'] = 'table';
}
}
metricStore.merge(obj);
};
const onSave = () => {
@@ -172,7 +174,7 @@ function WidgetForm(props: Props) {
'Filter data using any event or attribute. Use Add Step button below to do so.' :
'Add user event or filter to define the series by clicking Add Step.'
}
observeChanges={onObserveChanges}
// observeChanges={onObserveChanges}
/>
</div>
))}
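In the writeOption handler above, the dependent fields are now collected into a single object and applied with one metricStore.merge(obj) call instead of several successive merges, so observers see a single atomic update. A reduced sketch of that accumulation; the string 'issue' stands in for FilterKey.ISSUE and the defaults parameter is illustrative:

```ts
// Sketch: build the whole patch first, then merge once.
type MetricPatch = Record<string, any>;

function buildMetricPatch(
  name: string,
  value: any,
  defaults: { timeseriesOf: string; tableOf: string }
): MetricPatch {
  const obj: MetricPatch = { [name]: value };
  if (name === 'metricValue') {
    obj.metricValue = [value];
  }
  if (name === 'metricOf' && value === 'issue') { // stand-in for FilterKey.ISSUE
    obj.metricValue = ['all'];
  }
  if (name === 'metricType') {
    if (value === 'timeseries') {
      obj.metricOf = defaults.timeseriesOf;
      obj.viewType = 'lineChart';
    } else if (value === 'table') {
      obj.metricOf = defaults.tableOf;
      obj.viewType = 'table';
    }
  }
  return obj; // caller applies it with metricStore.merge(obj)
}
```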

View file

@@ -5,12 +5,14 @@ import { useStore } from 'App/mstore';
import SessionItem from 'Shared/SessionItem';
import { observer, useObserver } from 'mobx-react-lite';
import { DateTime } from 'luxon';
import { debounce } from 'App/utils';
interface Props {
className?: string;
}
function WidgetSessions(props: Props) {
const { className = '' } = props;
const [data, setData] = useState<any>([]);
const [loading, setLoading] = useState(false);
const [seriesOptions, setSeriesOptions] = useState([
{ text: 'All', value: 'all' },
]);
@@ -30,18 +32,27 @@ function WidgetSessions(props: Props) {
]);
}, [data]);
const fetchSessions = (metricId, filter) => {
setLoading(true)
widget.fetchSessions(metricId, filter).then(res => {
setData(res)
}).finally(() => {
setLoading(false)
});
}
const filteredSessions = getListSessionsBySeries(data, activeSeries);
const { dashboardStore, metricStore } = useStore();
const filter = useObserver(() => dashboardStore.drillDownFilter);
const widget: any = metricStore.instance;
const widget: any = useObserver(() => metricStore.instance);
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm a');
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm a');
const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []);
const depsString = JSON.stringify(widget.series);
useEffect(() => {
widget.fetchSessions({ ...filter, filter: widget.toJsonDrilldown()}).then(res => {
setData(res);
});
}, [filter.startTimestamp, filter.endTimestamp, filter.filters]);
debounceRequest(widget.metricId, { ...filter, series: widget.toJsonDrilldown() });
}, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString]);
return useObserver(() => (
<div className={cn(className)}>
@@ -71,7 +82,7 @@ function WidgetSessions(props: Props) {
</div>
<div className="mt-3">
<Loader loading={widget.sessionsLoading}>
<Loader loading={loading}>
<NoContent
title="No recordings found"
show={filteredSessions.length === 0}

View file

@@ -1,5 +1,4 @@
import React, { useEffect, useState } from 'react';
import { withRouter } from 'react-router-dom';
import React, { useState } from 'react';
import { useStore } from 'App/mstore';
import WidgetForm from '../WidgetForm';
import WidgetPreview from '../WidgetPreview';

View file

@@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react';
import { Tooltip } from 'react-tippy';

View file

@@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react';
import { Icon } from 'UI';
import { Tooltip } from 'react-tippy';

View file

@@ -33,6 +33,7 @@ function WidgetWrapper(props: Props) {
const widget: any = useObserver(() => props.widget);
const isPredefined = widget.metricType === 'predefined';
const dashboard = useObserver(() => dashboardStore.selectedDashboard);
const isOverviewWidget = widget.widgetType === 'predefined' && widget.viewType === 'overview';
const [{ opacity, isDragging }, dragRef] = useDrag({
type: 'item',
@@ -117,7 +118,7 @@ function WidgetWrapper(props: Props) {
)}
</div>
<LazyLoad height={100} offset={120} >
<LazyLoad height={!isTemplate ? 300 : 10} offset={!isTemplate ? 100 : 10} >
<div className="px-4" onClick={onChartClick}>
<WidgetChart metric={widget} isWidget={isWidget} />
</div>

View file

@@ -1,3 +1,4 @@
//@ts-nocheck
import React, { Component, createContext } from 'react';
import Modal from './Modal';

View file

@@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
import { Tooltip } from 'react-tippy';

View file

@@ -8,7 +8,7 @@
.divider {
width: 1px;
height: 49px;
margin: 0 15px;
margin: 0 10px;
background-color: $gray-light;
}

View file

@@ -98,7 +98,7 @@ function CustomMetricForm(props: Props) {
autoFocus={ true }
className="text-lg"
name="name"
style={{ fontSize: '18px', padding: '10px', fontWeight: '600'}}
style={{ fontSize: '18px', padding: '10px', fontWeight: 600}}
value={ metric.name }
onChange={ write }
placeholder="Metric Title"

View file

@@ -1,3 +1,4 @@
//@ts-nocheck
import React from 'react'
import { Icon } from 'UI'
import cn from 'classnames'

View file

@@ -73,13 +73,13 @@ export default class MetricStore implements IMetricStore {
paginatedList: computed,
})
reaction(
() => this.metricsSearch,
(metricsSearch) => { // TODO filter the list for View
this.page = 1
this.paginatedList
}
)
// reaction(
// () => this.metricsSearch,
// (metricsSearch) => { // TODO filter the list for View
// this.page = 1
// this.paginatedList
// }
// )
}
// State Actions
@@ -92,7 +92,7 @@ export default class MetricStore implements IMetricStore {
}
merge(object: any) {
this.instance = Object.assign(this.instance, object)
Object.assign(this.instance, object)
}
reset(id: string) {

View file

@@ -1,6 +1,4 @@
import { makeAutoObservable, runInAction, observable, action, reaction } from "mobx"
import { FilterKey, FilterType } from 'Types/filter/filterType'
import { filtersMap } from 'Types/filter/newFilter'
import { makeAutoObservable, runInAction, observable, action } from "mobx"
import FilterItem from "./filterItem"
export interface IFilter {

View file

@@ -4,6 +4,7 @@ import { DateTime } from 'luxon';
import { IFilter } from "./filter";
import { metricService } from "App/services";
import Session, { ISession } from "App/mstore/types/session";
export interface IWidget {
metricId: any
widgetId: any
@@ -45,7 +46,7 @@ export interface IWidget {
exists(): boolean
toWidget(): any
setData(data: any): void
fetchSessions(filter: any): Promise<any>
fetchSessions(metricId: any, filter: any): Promise<any>
}
export default class Widget implements IWidget {
public static get ID_KEY():string { return "metricId" }
@@ -158,9 +159,7 @@ export default class Widget implements IWidget {
}
toJsonDrilldown() {
return {
series: this.series.map((series: any) => series.toJson()),
}
return this.series.map((series: any) => series.toJson())
}
toJson() {
@@ -197,18 +196,15 @@ export default class Widget implements IWidget {
})
}
fetchSessions(filter: any): Promise<any> {
this.sessionsLoading = true
fetchSessions(metricId: any, filter: any): Promise<any> {
return new Promise((resolve, reject) => {
metricService.fetchSessions(this.metricId, filter).then(response => {
metricService.fetchSessions(metricId, filter).then(response => {
resolve(response.map(cat => {
return {
...cat,
sessions: cat.sessions.map(s => new Session().fromJson(s))
}
}))
}).finally(() => {
this.sessionsLoading = false
})
})
}
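fetchSessions on the Widget model now takes the metric id explicitly and no longer owns a sessionsLoading flag; the calling component keeps its own loading state (see the WidgetSessions hunks above). A sketch of the resulting call site, with the store types reduced to what is used here and the wrapper name purely illustrative:

```ts
// Sketch: component-side wrapper around the new fetchSessions(metricId, filter) shape.
async function loadDrilldownSessions(
  widget: {
    metricId: any;
    toJsonDrilldown(): any[];
    fetchSessions(id: any, f: any): Promise<any>;
  },
  drillDownFilter: any,
  setLoading: (v: boolean) => void
) {
  setLoading(true);
  try {
    // toJsonDrilldown() now returns the plain array of serialized series,
    // sent under `series` alongside the drill-down filter.
    return await widget.fetchSessions(widget.metricId, {
      ...drillDownFilter,
      series: widget.toJsonDrilldown(),
    });
  } finally {
    setLoading(false);
  }
}
```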

View file

@@ -59,12 +59,14 @@ export interface State {
calling: CallingState,
peerConnectionStatus: ConnectionStatus,
remoteControl: RemoteControlStatus,
annotating: boolean,
}
export const INITIAL_STATE: State = {
calling: CallingState.NoCall,
peerConnectionStatus: ConnectionStatus.Connecting,
remoteControl: RemoteControlStatus.Disabled,
annotating: false,
}
const MAX_RECONNECTION_COUNT = 4;
@@ -321,7 +323,7 @@ export default class AssistManager {
private handleCallEnd() {
this.callArgs && this.callArgs.onCallEnd()
this.callConnection && this.callConnection.close()
update({ calling: CallingState.NoCall })
update({ calling: CallingState.NoCall, annotating: false })
this.callArgs = null
this.annot?.remove()
this.annot = null
@@ -370,6 +372,45 @@ export default class AssistManager {
}
}
toggleAnnotation(enable?: boolean) {
if (getState().calling !== CallingState.OnCall) { return }
if (typeof enable !== "boolean") {
enable = !!getState().annotating
}
if (!enable && !this.annot) {
const annot = this.annot = new AnnotationCanvas()
annot.mount(this.md.overlay)
annot.canvas.addEventListener("mousedown", e => {
if (!this.socket) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.start([ data.x, data.y ])
this.socket.emit("startAnnotation", [ data.x, data.y ])
})
annot.canvas.addEventListener("mouseleave", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mouseup", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mousemove", e => {
if (!this.socket || !annot.isPainting()) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.move([ data.x, data.y ])
this.socket.emit("moveAnnotation", [ data.x, data.y ])
})
update({ annotating: true })
} else if (enable && !!this.annot) {
this.annot.remove()
this.annot = null
update({ annotating: false })
}
}
private annot: AnnotationCanvas | null = null
private _call() {
@@ -396,34 +437,6 @@ export default class AssistManager {
call.on('stream', stream => {
update({ calling: CallingState.OnCall })
this.callArgs && this.callArgs.onStream(stream)
if (!this.annot) {
const annot = this.annot = new AnnotationCanvas()
annot.mount(this.md.overlay)
annot.canvas.addEventListener("mousedown", e => {
if (!this.socket) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.start([ data.x, data.y ])
this.socket.emit("startAnnotation", [ data.x, data.y ])
})
annot.canvas.addEventListener("mouseleave", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mouseup", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mousemove", e => {
if (!this.socket || !annot.isPainting()) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.move([ data.x, data.y ])
this.socket.emit("moveAnnotation", [ data.x, data.y ])
})
}
});
//call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
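The annotation canvas is no longer wired up automatically when the call stream arrives; toggleAnnotation now owns creation and teardown and mirrors the result into the player store's annotating flag. Below is a condensed sketch of that toggle shape, with the canvas event wiring collapsed into a factory; apart from `annotating`, the names are illustrative rather than the exact class members.

```ts
// Sketch: explicit enable/disable plus no-arg toggling for the annotation canvas.
type Canvas = { remove(): void };

function makeAnnotationToggle(deps: {
  createCanvas: () => Canvas;                       // mounts the canvas and wires
                                                    // mousedown/mousemove/mouseup/mouseleave
  update: (patch: { annotating: boolean }) => void; // player store update()
}) {
  let annot: Canvas | null = null;
  return (enable?: boolean) => {
    const next = typeof enable === 'boolean' ? enable : annot === null; // no arg => toggle
    if (next && !annot) {
      annot = deps.createCanvas();
      deps.update({ annotating: true });
    } else if (!next && annot) {
      annot.remove();
      annot = null;
      deps.update({ annotating: false });
    }
  };
}
```

handleCallEnd above also resets annotating to false and removes the canvas, so a dropped call cannot leave a stale drawing layer behind.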

View file

@@ -72,6 +72,7 @@ export const callPeer = initCheck((...args) => instance.assistManager.call(...ar
export const requestReleaseRemoteControl = initCheck((...args) => instance.assistManager.requestReleaseRemoteControl(...args))
export const markTargets = initCheck((...args) => instance.markTargets(...args))
export const activeTarget = initCheck((...args) => instance.activeTarget(...args))
export const toggleAnnotation = initCheck((...args) => instance.assistManager.toggleAnnotation(...args))
export const Controls = {
jump,

View file

@@ -0,0 +1,11 @@
<svg viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_670_5221)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.4999 -0.207153L16.2071 3.49995L5.78093 13.9261L-0.397583 16.3975L2.07382 10.219L12.4999 -0.207153ZM2.92607 10.7809L1.39747 14.6024L5.21896 13.0738L14.7928 3.49995L12.4999 1.20706L2.92607 10.7809Z" fill="black"/>
<path d="M9 4.5C9 5.69347 8.52589 6.83807 7.68198 7.68198C6.83807 8.52589 5.69347 9 4.5 9C3.30653 9 2.16193 8.52589 1.31802 7.68198C0.474106 6.83807 0 5.69347 0 4.5C0 3.30653 0.474106 2.16193 1.31802 1.31802C2.16193 0.474106 3.30653 0 4.5 0C5.69347 0 6.83807 0.474106 7.68198 1.31802C8.52589 2.16193 9 3.30653 9 4.5V4.5ZM3.01163 2.61337C2.95881 2.56056 2.88719 2.53089 2.8125 2.53089C2.73781 2.53089 2.66619 2.56056 2.61337 2.61337C2.56056 2.66619 2.53089 2.73781 2.53089 2.8125C2.53089 2.88719 2.56056 2.95881 2.61337 3.01163L4.10231 4.5L2.61337 5.98837C2.58723 6.01452 2.56648 6.04557 2.55233 6.07973C2.53818 6.1139 2.53089 6.15052 2.53089 6.1875C2.53089 6.22448 2.53818 6.2611 2.55233 6.29527C2.56648 6.32943 2.58723 6.36048 2.61337 6.38663C2.66619 6.43944 2.73781 6.46911 2.8125 6.46911C2.84948 6.46911 2.8861 6.46182 2.92027 6.44767C2.95443 6.43352 2.98548 6.41277 3.01163 6.38663L4.5 4.89769L5.98837 6.38663C6.01452 6.41277 6.04557 6.43352 6.07973 6.44767C6.1139 6.46182 6.15052 6.46911 6.1875 6.46911C6.22448 6.46911 6.2611 6.46182 6.29527 6.44767C6.32943 6.43352 6.36048 6.41277 6.38663 6.38663C6.41277 6.36048 6.43352 6.32943 6.44767 6.29527C6.46182 6.2611 6.46911 6.22448 6.46911 6.1875C6.46911 6.15052 6.46182 6.1139 6.44767 6.07973C6.43352 6.04557 6.41277 6.01452 6.38663 5.98837L4.89769 4.5L6.38663 3.01163C6.41277 2.98548 6.43352 2.95443 6.44767 2.92027C6.46182 2.8861 6.46911 2.84948 6.46911 2.8125C6.46911 2.77552 6.46182 2.7389 6.44767 2.70473C6.43352 2.67057 6.41277 2.63952 6.38663 2.61337C6.36048 2.58723 6.32943 2.56648 6.29527 2.55233C6.2611 2.53818 6.22448 2.53089 6.1875 2.53089C6.15052 2.53089 6.1139 2.53818 6.07973 2.55233C6.04557 2.56648 6.01452 2.58723 5.98837 2.61337L4.5 4.10231L3.01163 2.61337Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_670_5221">
<rect width="16" height="16" fill="white"/>
</clipPath>
</defs>
</svg>


View file

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" class="bi bi-window-x" viewBox="0 0 16 16">
<path d="M2.5 5a.5.5 0 1 0 0-1 .5.5 0 0 0 0 1ZM4 5a.5.5 0 1 0 0-1 .5.5 0 0 0 0 1Zm2-.5a.5.5 0 1 1-1 0 .5.5 0 0 1 1 0Z"/>
<path d="M0 4a2 2 0 0 1 2-2h11a2 2 0 0 1 2 2v4a.5.5 0 0 1-1 0V7H1v5a1 1 0 0 0 1 1h5.5a.5.5 0 0 1 0 1H2a2 2 0 0 1-2-2V4Zm1 2h13V4a1 1 0 0 0-1-1H2a1 1 0 0 0-1 1v2Z"/>
<path d="M16 12.5a3.5 3.5 0 1 1-7 0 3.5 3.5 0 0 1 7 0Zm-4.854-1.354a.5.5 0 0 0 0 .708l.647.646-.647.646a.5.5 0 0 0 .708.708l.646-.647.646.647a.5.5 0 0 0 .708-.708l-.647-.646.647-.646a.5.5 0 0 0-.708-.708l-.646.647-.646-.647a.5.5 0 0 0-.708 0Z"/>
</svg>


File diff suppressed because it is too large

View file

@@ -28,7 +28,7 @@
"luxon": "^1.24.1",
"mobx": "^6.3.8",
"mobx-react-lite": "^3.1.6",
"moment": "^2.27.0",
"moment": "^2.29.2",
"moment-range": "^4.0.2",
"peerjs": "^1.3.2",
"rc-time-picker": "^3.7.3",
@@ -53,7 +53,7 @@
"react-tippy": "^1.4.0",
"react-toastify": "^5.5.0",
"react-virtualized": "^9.22.2",
"recharts": "^1.8.5",
"recharts": "^2.1.9",
"redux": "^4.0.5",
"redux-immutable": "^4.0.0",
"redux-thunk": "^2.3.0",

View file

@@ -3,7 +3,8 @@
"target": "es5",
"module": "es2020",
"moduleResolution": "node", //?
//"allowJs": true,
// "allowJs": true,
"declaration": true,
"allowSyntheticDefaultImports": true,
"downlevelIteration": true,
//"sourceMap": false,

View file

@@ -24,13 +24,17 @@ ALTER TABLE IF EXISTS metrics
DROP CONSTRAINT IF EXISTS unique_key;
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS edited_at timestamp NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
ADD COLUMN IF NOT EXISTS is_pinned boolean NOT NULL DEFAULT FALSE,
ADD COLUMN IF NOT EXISTS category text NULL DEFAULT 'custom',
ADD COLUMN IF NOT EXISTS category text NULL DEFAULT 'custom',
ADD COLUMN IF NOT EXISTS is_predefined boolean NOT NULL DEFAULT FALSE,
ADD COLUMN IF NOT EXISTS is_template boolean NOT NULL DEFAULT FALSE,
ADD COLUMN IF NOT EXISTS predefined_key text NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS default_config jsonb NOT NULL DEFAULT '{"col": 2,"row": 2,"position": 0}'::jsonb,
ADD COLUMN IF NOT EXISTS predefined_key text NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS default_config jsonb NOT NULL DEFAULT '{
"col": 2,
"row": 2,
"position": 0
}'::jsonb,
ALTER COLUMN project_id DROP NOT NULL,
ADD CONSTRAINT null_project_id_for_template_only
CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ),
@@ -49,12 +53,36 @@ CREATE TABLE IF NOT EXISTS dashboard_widgets
);
ALTER TABLE events_common.requests
ADD COLUMN IF NOT EXISTS host text NULL,
ADD COLUMN IF NOT EXISTS base_path text NULL,
ADD COLUMN IF NOT EXISTS query text NULL;
ADD COLUMN IF NOT EXISTS host text NULL,
ADD COLUMN IF NOT EXISTS path text NULL,
ADD COLUMN IF NOT EXISTS query text NULL;
ALTER TABLE events.pages
ADD COLUMN IF NOT EXISTS query text NULL;
DO
$$
BEGIN
IF EXISTS(SELECT *
FROM information_schema.columns
WHERE table_schema = 'events'
AND table_name = 'pages'
AND column_name = 'base_path')
THEN
ALTER TABLE events.pages
DROP COLUMN IF EXISTS path;
ALTER TABLE events.pages
RENAME COLUMN base_path TO path;
DROP INDEX IF EXISTS events.pages_base_path_gin_idx2;
DROP INDEX IF EXISTS pages_base_path_idx2;
ALTER INDEX IF EXISTS events.pages_base_path_gin_idx RENAME TO pages_path_gin_idx;
ALTER INDEX IF EXISTS events.pages_base_path_idx RENAME TO pages_path_idx;
ALTER INDEX IF EXISTS events.pages_base_path_session_id_timestamp_idx RENAME TO pages_path_session_id_timestamp_idx;
ALTER INDEX IF EXISTS events.pages_base_path_base_pathLNGT2_idx RENAME TO pages_path_pathLNGT2_idx;
END IF;
END
$$;
COMMIT;
ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'areaChart';
@@ -65,55 +93,236 @@ ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'overview';
ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'map';
ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined';
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type)
VALUES ('Captured sessions', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_fps', 'predefined', 'overview'),
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'overview', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_fps', 'predefined', 'overview'),
('Sessions Affected by JS Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
('Top Domains with 4xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
('Top Domains with 5xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
('Errors per Domain', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'calls_errors', 'predefined', 'table'),
('Errors by Type', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
('Errors by Origin', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
('Sessions Affected by JS Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
('Top Domains with 4xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
('Top Domains with 5xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
('Errors per Domain', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'calls_errors', 'predefined', 'table'),
('Errors by Type', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
('Errors by Origin', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
('Speed Index by Location', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'speed_location', 'predefined', 'map'),
('Slowest Domains', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'slowest_domains', 'predefined', 'table'),
('Sessions per Browser', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
('Time To Render', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
('Sessions Impacted by Slow Pages', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
('Memory Consumption', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
('CPU Load', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'cpu', 'predefined', 'areaChart'),
('Frame Rate', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'fps', 'predefined', 'areaChart'),
('Crashes', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'crashes', 'predefined', 'areaChart'),
('Resources Loaded vs Visually Complete', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
('DOM Build Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
('Pages Response Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{"col":4,"row":2,"position":0}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Speed Index by Location', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'speed_location', 'predefined', 'map'),
('Slowest Domains', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'slowest_domains', 'predefined', 'table'),
('Sessions per Browser', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
('Time To Render', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
('Sessions Impacted by Slow Pages', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
('Memory Consumption', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
('CPU Load', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'cpu', 'predefined', 'areaChart'),
('Frame Rate', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'fps', 'predefined', 'areaChart'),
('Crashes', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'crashes', 'predefined', 'areaChart'),
('Resources Loaded vs Visually Complete', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
('DOM Build Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
('Pages Response Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Missing Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'missing_resources', 'predefined', 'table'),
('Slowest Resources', 'resources', '{"col":4,"row":2,"position":0}', true, true, true, 'slowest_resources', 'predefined', 'table'),
('Resources Fetch Time', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
('Resource Loaded vs Response End', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
('Breakdown of Loaded Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
('Missing Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'missing_resources', 'predefined', 'table'),
('Slowest Resources', 'resources', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'slowest_resources', 'predefined', 'table'),
('Resources Fetch Time', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
('Resource Loaded vs Response End', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
('Breakdown of Loaded Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
ON CONFLICT (predefined_key) DO UPDATE
SET name=excluded.name,
category=excluded.category,
@@ -127,10 +336,13 @@ ON CONFLICT (predefined_key) DO UPDATE
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_base_path_nn_idx ON events_common.requests (base_path) WHERE base_path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_base_path_nn_gin_idx ON events_common.requests USING GIN (base_path gin_trgm_ops) WHERE base_path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp);
CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2;

View file

@@ -600,7 +600,6 @@
-- --- events_common.sql ---
CREATE SCHEMA IF NOT EXISTS events_common;
CREATE TYPE events_common.custom_level AS ENUM ('info','error');
@@ -646,7 +645,7 @@
status_code smallint NULL,
method http_method NULL,
host text NULL,
base_path text NULL,
path text NULL,
query text NULL,
PRIMARY KEY (session_id, timestamp, seq_index)
);
@@ -669,13 +668,12 @@
CREATE INDEX requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL;
CREATE INDEX requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX requests_base_path_nn_idx ON events_common.requests (base_path) WHERE base_path IS NOT NULL;
CREATE INDEX requests_base_path_nn_gin_idx ON events_common.requests USING GIN (base_path gin_trgm_ops) WHERE base_path IS NOT NULL;
CREATE INDEX requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
-- --- events.sql ---
CREATE SCHEMA IF NOT EXISTS events;
CREATE TABLE events.pages
(
@@ -684,7 +682,6 @@
timestamp bigint NOT NULL,
host text NOT NULL,
path text NOT NULL,
base_path text NOT NULL,
query text NULL,
referrer text DEFAULT NULL,
base_referrer text DEFAULT NULL,
@@ -702,13 +699,9 @@
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX pages_session_id_idx ON events.pages (session_id);
CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops);
CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops);
CREATE INDEX pages_timestamp_idx ON events.pages (timestamp);
CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops);
CREATE INDEX pages_base_path_idx ON events.pages (base_path);
CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1));
CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer);
CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
length(base_referrer) - (CASE
@@ -739,10 +732,10 @@
time_to_interactive > 0;
CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
CREATE INDEX IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE INDEX pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp);
CREATE INDEX pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2;
CREATE INDEX pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL;
CREATE INDEX pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE TABLE events.clicks
@@ -957,9 +950,9 @@
name text NOT NULL,
is_public boolean NOT NULL DEFAULT FALSE,
active boolean NOT NULL DEFAULT TRUE,
created_at timestamp default timezone('utc'::text, now()) not null,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp,
edited_at timestamp,
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type metric_type NOT NULL DEFAULT 'timeseries',
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',

View file

@@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-axios",
"description": "Tracker plugin for axios requests recording",
"version": "3.5.0",
"version": "3.5.1",
"keywords": [
"axios",
"logging",
@@ -21,11 +21,11 @@
"dependencies": {},
"peerDependencies": {
"@openreplay/tracker": "^3.4.8",
"axios": "^0.21.2"
"axios": "0.x"
},
"devDependencies": {
"@openreplay/tracker": "^3.4.9",
"axios": "^0.21.2",
"axios": "^0.26.0",
"prettier": "^1.18.2",
"replace-in-files-cli": "^1.0.0",
"typescript": "^4.6.0-dev.20211126"

View file

@@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "3.5.7",
"version": "3.5.9",
"keywords": [
"logging",
"replay"

View file

@@ -83,10 +83,6 @@ function _getTarget(target: Element): Element | null {
}
export default function (app: App): void {
// const options: Options = Object.assign(
// {},
// opts,
// );
function getTargetLabel(target: Element): string {
const dl = getLabelAttribute(target);

View file

@@ -28,9 +28,10 @@ export default class BatchWriter {
this.beaconSizeLimit = limit
}
// TODO: clear workflow
writeMessage(message: Message) {
if (message instanceof Timestamp) {
this.timestamp = (<any>message).timestamp;
this.timestamp = (<any>message).timestamp
}
if (!message.encode(this.writer)) {
@@ -58,10 +59,11 @@ export default class BatchWriter {
this.isEmpty = false
}
flush(): Uint8Array | null {
if (this.isEmpty) { return null }
finaliseBatch() {
if (this.isEmpty) { return }
this.onBatch(this.writer.flush())
this.prepareBatchMeta()
this.isEmpty = true
return this.writer.flush()
}
clean() {

View file

@@ -2,6 +2,7 @@ import Message from "../messages/message.js";
import {
classes,
SetPageVisibility,
MouseMove,
} from "../messages/index.js";
import QueueSender from "./QueueSender.js";
import BatchWriter from "./BatchWriter.js";
@@ -15,11 +16,10 @@ let sender: QueueSender | null = null
let writer: BatchWriter | null = null
function send(): void {
if (!sender || !writer) {
if (!writer) {
return
}
const batch = writer.flush()
batch && sender.push(batch)
writer.finaliseBatch()
}
@@ -58,6 +58,10 @@ self.onmessage = ({ data }: MessageEvent<WorkerMessageData>) => {
}
if (Array.isArray(data)) {
if (!writer) {
throw new Error("WebWorker: writer not initialised.")
}
const w = writer
// Message[]
data.forEach((data) => {
const message: Message = new (<any>classes.get(data._id))();
@@ -68,8 +72,8 @@ self.onmessage = ({ data }: MessageEvent<WorkerMessageData>) => {
} else {
clearTimeout(restartTimeoutID)
}
}
writer && writer.writeMessage(message)
}
w.writeMessage(message)
})
return
}
@@ -100,8 +104,14 @@ self.onmessage = ({ data }: MessageEvent<WorkerMessageData>) => {
}
if (data.type === "auth") {
sender && sender.authorise(data.token)
data.beaconSizeLimit && writer && writer.setBeaconSizeLimit(data.beaconSizeLimit)
if (!sender) {
throw new Error("WebWorker: sender not initialised. Recieved auth.")
}
if (!writer) {
throw new Error("WebWorker: writer not initialised. Recieved auth.")
}
sender.authorise(data.token)
data.beaconSizeLimit && writer.setBeaconSizeLimit(data.beaconSizeLimit)
return
}
};
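With flush() replaced by finaliseBatch(), the worker no longer pulls a Uint8Array out of the writer and hands it to the sender; the writer pushes every finished batch through the onBatch callback it was constructed with, and send() only asks for finalisation. A reduced sketch of that inversion; class bodies are trimmed to the members involved and the Sketch* names are placeholders, not the real QueueSender/BatchWriter.

```ts
// Sketch: callback-based batch hand-off between writer and sender.
class SketchSender {
  push(batch: Uint8Array): void {
    // queue the batch for upload (beacon/XHR in the real QueueSender)
  }
}

class SketchWriter {
  private isEmpty = true;
  constructor(private onBatch: (batch: Uint8Array) => void) {}
  writeMessage(_msg: unknown): void {
    this.isEmpty = false; // the real writer encodes the message into its buffer
  }
  finaliseBatch(): void {
    if (this.isEmpty) return;
    this.onBatch(new Uint8Array(0)); // stand-in for the encoded batch bytes
    this.isEmpty = true;             // the real writer re-writes batch meta here
  }
}

// Wiring as in the worker: send() needs only the writer.
const sender = new SketchSender();
const writer: SketchWriter | null = new SketchWriter((batch) => sender.push(batch));

function send(): void {
  if (!writer) return;
  writer.finaliseBatch();
}
```

The throw-on-missing-writer guards added in the hunks above make the same assumption explicit: once the worker has been initialised, messages and auth data should never arrive before a writer and sender exist.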