Merge remote-tracking branch 'origin/api-v1.5.5' into dev

Taha Yassine Kraiem 2022-04-15 15:10:58 +02:00
commit a2fac8a353
28 changed files with 717 additions and 417 deletions

View file

@@ -28,8 +28,8 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
jwt_secret="SET A RANDOM STRING HERE"
peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
assist=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
assistList=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
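
Note: both .env files in this commit (this FOSS one and the EE one further down) get the same change: the socket-inspection endpoints move from the utilities service to the assist service, and the keys are renamed from peers/peersList to assist/assistList. A minimal sketch of how the API code below consumes the renamed keys, assuming python-decouple's config() and the S3_KEY variable used in the surrounding code:

    # Sketch only: key names come from this diff; the %s in the URL template
    # is filled with S3_KEY, as in the assist code below.
    import requests
    from decouple import config

    def live_socket_ids(project_key):
        url = config("assist") % config("S3_KEY") + f"/{project_key}"
        resp = requests.get(url)
        if resp.status_code != 200:
            print("!! issue with the peer-server")
            print(resp.text)
            return []
        return resp.json().get("data", [])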

View file

@@ -21,56 +21,13 @@ SESSION_PROJECTION_COLS = """s.project_id,
"""
def get_live_sessions(project_id, filters=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
connected_peers = connected_peers.json().get("data", [])
if len(connected_peers) == 0:
return []
connected_peers = tuple(connected_peers)
extra_constraints = ["project_id = %(project_id)s", "session_id IN %(connected_peers)s"]
extra_params = {}
if filters is not None:
for i, f in enumerate(filters):
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = sessions.__get_sql_value_multiple(f["value"])
if filter_type == schemas.FilterType.user_id:
op = sessions.__get_sql_operator(f["operator"])
extra_constraints.append(f"user_id {op} %(value_{i})s")
extra_params[f"value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT {SESSION_PROJECTION_COLS}, %(project_key)s||'-'|| session_id AS peer_id
FROM public.sessions AS s
WHERE {" AND ".join(extra_constraints)}
ORDER BY start_ts DESC
LIMIT 500;""",
{"project_id": project_id,
"connected_peers": connected_peers,
"project_key": project_key,
**extra_params})
cur.execute(query)
results = cur.fetchall()
return helper.list_to_camel_case(results)
def get_live_sessions_ws(project_id, user_id=None):
project_key = projects.get_project_key(project_id)
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
try:
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@@ -105,7 +62,7 @@ def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
try:
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)

View file

@@ -57,6 +57,8 @@ def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
schemas.CustomMetricSessionsPayloadSchema]) \
-> Union[schemas.CreateCustomMetricsSchema, None]:
if data.series is not None and len(data.series) > 0:
metric["series"] = data.series
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
if len(data.filters) > 0 or len(data.events) > 0:
for s in metric.series:
@@ -95,6 +97,8 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})
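
Note: the two added lines forward the request's pagination into every series filter, so sessions.search2_pg pages each series instead of always returning its default first page. Combined with the _PaginatedSchema mixin introduced in schemas.py at the end of this commit, a caller-side sketch (field names from this diff) looks like:

    # Sketch: limit/page now flow from the payload into each series search.
    payload = schemas.CustomMetricSessionsPayloadSchema(limit=50, page=2)
    for s in metric.series:
        s.filter.limit = payload.limit
        s.filter.page = payload.page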

View file

@@ -134,16 +134,15 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
with pg_client.PostgresClient() as cur:
pg_query = f"""\
SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(sessions), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(sessions), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
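
Note: the query above (only reindented in this hunk) is the time-bucketing pattern the whole dashboard module relies on: generate_series emits one row per bucket, and the LEFT JOIN LATERAL probe runs once per bucket, so empty buckets still yield a zero instead of disappearing. A stripped-down sketch; the real bucket predicates come from __get_constraints(chart=True), which this diff does not show, so they are spelled out here as assumptions:

    # Sketch of the generate_series + LEFT JOIN LATERAL bucketing pattern.
    pg_query = """
    SELECT generated_timestamp AS timestamp,
           COALESCE(COUNT(sessions), 0) AS value
    FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
    LEFT JOIN LATERAL (SELECT 1
                       FROM public.sessions
                       WHERE project_id = %(project_id)s
                         AND start_ts >= generated_timestamp
                         AND start_ts < generated_timestamp + %(step_size)s
                      ) AS sessions ON (TRUE)
    GROUP BY generated_timestamp
    ORDER BY generated_timestamp;"""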
@@ -157,8 +156,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
pg_query = f"""\
SELECT COUNT(sessions.session_id) AS count
pg_query = f"""SELECT COUNT(sessions.session_id) AS count
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@@ -205,8 +203,8 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
results = {
"count": 0 if len(rows) == 0 else __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp,
pg_sub_query_subset),
"count": 0 if len(rows) == 0 else \
__count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_sub_query_subset),
"impactedSessions": sum([r["count"] for r in rows]),
"chart": rows
}
@@ -352,10 +350,9 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, **
pg_sub_query.append("pages.timestamp > %(endTimestamp)s")
pg_sub_query.append("pages.load_time > 0")
pg_sub_query.append("pages.load_time IS NOT NULL")
pg_query = f"""\
SELECT COALESCE(AVG(pages.load_time) ,0) AS avg_page_load_time
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
pg_query = f"""SELECT COALESCE(AVG(pages.load_time) ,0) AS avg_page_load_time
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
@@ -365,10 +362,9 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, **
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.duration > 0")
pg_sub_query.append("resources.type= %(type)s")
pg_query = f"""\
SELECT COALESCE(AVG(resources.duration),0) AS avg
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS avg
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
@@ -401,12 +397,11 @@ def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_query = f"""\
SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
pg_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
pg_query = f"""SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
@@ -451,8 +446,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
COALESCE(AVG(duration), 0) AS avg_duration
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT resources.duration
FROM events.resources
INNER JOIN public.sessions USING (session_id)
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
@@ -640,7 +634,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
ORDER BY url, type ASC) AS ranked_values
WHERE ranked_values.r<=5;"""
print(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)}))
rows = cur.fetchall()
rows = [{"value": i["value"], "type": __get_resource_type_from_db_type(i["key"])} for i in rows]
@@ -660,9 +653,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.pages INNER JOIN public.sessions USING(session_id)
WHERE {" AND ".join(pg_sub_query)} AND positionUTF8(url_path, %(value)s) != 0
LIMIT 10);"""
print(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text.lower()),
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text.lower()),
"platform_0": platform}))
@@ -679,10 +669,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"resource_type": resource_type,
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"resource_type": resource_type,
@@ -697,9 +683,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -711,9 +694,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.inputs INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -725,9 +705,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.clicks INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -746,9 +723,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key,
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key,
"platform_0": platform}))
@@ -773,10 +747,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
AND sessions.{SESSIONS_META_FIELDS[k]} ILIKE %(value)s
LIMIT 10)""")
pg_query = " UNION ALL ".join(pg_query)
print(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"key": key,
"platform_0": platform}))
cur.execute(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"key": key,
@@ -1491,10 +1461,11 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query.append("performance.avg_fps>0")
pg_sub_query_chart.append("performance.avg_fps>0")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS value
COALESCE(AVG(performance.avg_fps),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT avg_fps
@@ -1509,7 +1480,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
pg_query = f"""SELECT COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS avg
pg_query = f"""SELECT COALESCE(AVG(performance.avg_fps),0) AS avg
FROM events.performance INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
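
Note: this hunk shows a change repeated throughout the commit: the zero filter moves out of the aggregate (AVG(NULLIF(avg_fps, 0))) and into the row constraints (performance.avg_fps>0 appended to both sub-queries), leaving a plain AVG. The average is unchanged, since AVG ignores NULLs, but the explicit predicate also keeps zero rows out of the chart's per-bucket probe and is visible to the query planner. In miniature:

    # Equivalent averages, assuming avg_fps is NOT NULL on stored rows:
    q_old = "SELECT COALESCE(AVG(NULLIF(p.avg_fps, 0)), 0) FROM events.performance p"
    q_new = ("SELECT COALESCE(AVG(p.avg_fps), 0) FROM events.performance p "
             "WHERE p.avg_fps > 0")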
@@ -1843,7 +1814,7 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests,
COUNT(resources.session_id) AS all_requests,
SUM(CASE WHEN resources.status/100 = 4 THEN 1 ELSE 0 END) AS _4xx,
SUM(CASE WHEN resources.status/100 = 5 THEN 1 ELSE 0 END) AS _5xx
FROM events.resources INNER JOIN sessions USING (session_id)
@@ -1868,7 +1839,7 @@ def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
COUNT(resources.session_id) AS all_requests
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.method, resources.url_hostpath
@@ -1891,7 +1862,7 @@ def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
COUNT(resources.session_id) AS all_requests
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.method, resources.url_hostpath
@@ -1940,7 +1911,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
WHERE {" AND ".join(pg_sub_query_subset_e)}
AND source = 'js_exception'
)
SELECT generated_timestamp AS timestamp,
SELECT generated_timestamp AS timestamp,
COALESCE(SUM(CASE WHEN status / 100 = 4 THEN 1 ELSE 0 END), 0) AS _4xx,
COALESCE(SUM(CASE WHEN status / 100 = 5 THEN 1 ELSE 0 END), 0) AS _5xx,
COALESCE((SELECT COUNT(*)
@@ -2248,10 +2219,9 @@ def __get_application_activity_avg_image_load_time(cur, project_id, startTimesta
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.duration > 0")
pg_sub_query.append("resources.type= %(type)s")
pg_query = f"""\
SELECT COALESCE(AVG(resources.duration),0) AS value
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
@@ -2322,15 +2292,15 @@ def __get_application_activity_avg_page_load_time(cur, project_id, startTimestam
pg_sub_query.append("pages.timestamp > %(endTimestamp)s")
pg_sub_query.append("pages.load_time > 0")
pg_sub_query.append("pages.load_time IS NOT NULL")
pg_query = f"""\
SELECT COALESCE(AVG(pages.load_time) ,0) AS value
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
pg_query = f"""SELECT COALESCE(AVG(pages.load_time) ,0) AS value
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return row
@@ -2389,10 +2359,9 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.duration > 0")
pg_sub_query.append("resources.type= %(type)s")
pg_query = f"""\
SELECT COALESCE(AVG(resources.duration),0) AS value
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
@@ -2400,6 +2369,7 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return row
@@ -2481,7 +2451,7 @@ def __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestam
pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
pg_sub_query.append("pages.timestamp<%(endTimestamp)s")
pg_sub_query.append("pages.dom_content_loaded_time > 0")
pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_time, 0)), 0) AS value
pg_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_time), 0) AS value
FROM (SELECT pages.dom_content_loaded_time
FROM events.pages
INNER JOIN public.sessions USING (session_id)
@@ -2533,6 +2503,8 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
results = helper.dict_to_camel_case(rows[0])
results["chart"] = __get_page_metrics_avg_first_contentful_pixel_chart(cur, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@@ -2549,7 +2521,7 @@ def __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestam
pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
pg_sub_query.append("pages.timestamp<%(endTimestamp)s")
pg_sub_query.append("pages.first_contentful_paint_time > 0")
pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.first_contentful_paint_time, 0)), 0) AS value
pg_query = f"""SELECT COALESCE(AVG(pages.first_contentful_paint_time), 0) AS value
FROM (SELECT pages.first_contentful_paint_time
FROM events.pages
INNER JOIN public.sessions USING (session_id)
@@ -2562,11 +2534,47 @@ def __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestam
return rows
def __get_page_metrics_avg_first_contentful_pixel_chart(cur, project_id, startTimestamp, endTimestamp, density=20,
**args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="pages", time_column="timestamp",
duration=False)
pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
pg_sub_query_subset.append("pages.first_contentful_paint_time > 0")
pg_query = f"""WITH pages AS(SELECT pages.first_contentful_paint_time, pages.timestamp
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.first_contentful_paint_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT pages.first_contentful_paint_time
FROM pages
WHERE {" AND ".join(pg_sub_query_chart)}
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
rows = cur.fetchall()
return rows
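
Note: the chart helpers added in this commit (this one, plus the avg_visited_pages and avg_session_duration variants below) share one design: a WITH CTE applies the expensive subset constraints once, and the per-bucket LATERAL probe then scans only the pre-filtered CTE. They all lean on __get_step_size, which is not part of this diff; a hypothetical sketch of what it plausibly computes:

    # Hypothetical: the real __get_step_size is not shown in this diff.
    def step_size(start_ts, end_ts, density, factor=1):
        # width of one chart bucket, in the same unit as the timestamps
        return int((end_ts - start_ts) / density / factor)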
def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
row = __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args)
results = helper.dict_to_camel_case(row)
results["chart"] = __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@@ -2580,11 +2588,10 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_query = f"""\
SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS value
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
pg_sub_query.append("sessions.pages_count>0")
pg_query = f"""SELECT COALESCE(CEIL(AVG(sessions.pages_count)),0) AS value
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
@@ -2593,11 +2600,44 @@ def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTi
return row
def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="sessions", time_column="start_ts",
duration=False)
pg_sub_query_subset.append("sessions.duration IS NOT NULL")
pg_query = f"""WITH sessions AS(SELECT sessions.pages_count, sessions.start_ts
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(AVG(sessions.pages_count),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT sessions.pages_count
FROM sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
rows = cur.fetchall()
return rows
def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
row = __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args)
results = helper.dict_to_camel_case(row)
results["chart"] = __get_user_activity_avg_session_duration_chart(cur, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@@ -2611,11 +2651,11 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_query = f"""\
SELECT COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS value
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
pg_sub_query.append("sessions.duration IS NOT NULL")
pg_sub_query.append("sessions.duration > 0")
pg_query = f"""SELECT COALESCE(AVG(sessions.duration),0) AS value
FROM public.sessions
WHERE {" AND ".join(pg_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
@@ -2624,12 +2664,46 @@ def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, en
return row
def __get_user_activity_avg_session_duration_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="sessions", time_column="start_ts",
duration=False)
pg_sub_query_subset.append("sessions.duration IS NOT NULL")
pg_sub_query_subset.append("sessions.duration > 0")
pg_query = f"""WITH sessions AS(SELECT sessions.duration, sessions.start_ts
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(AVG(sessions.duration),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT sessions.duration
FROM sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
rows = cur.fetchall()
return rows
def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COALESCE(AVG(pages.response_time), 0) AS value
FROM events.pages
@@ -2638,21 +2712,39 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
AND pages.timestamp >= %(startTimestamp)s
AND pages.timestamp < %(endTimestamp)s
AND pages.response_time > 0;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.response_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT response_time
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)} AND pages.response_time > 0
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp ASC;"""
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(row)
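
Note: each get_top_metrics_avg_* endpoint in this commit gains a density parameter and now returns the time series alongside the scalar, so the response grows from a bare value to value + chart + unit. A sketch of the new shape, with illustrative numbers:

    # Illustrative values; key names are taken from the code above.
    response = {
        "value": 243.0,  # overall average, in milliseconds
        "chart": [{"timestamp": 1650000000000, "value": 251.2}],
        "unit": schemas.TemplatePredefinedUnits.millisecond,
    }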
def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COALESCE(AVG(pages.first_paint_time), 0) AS value
FROM events.pages
@@ -2661,11 +2753,25 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
AND pages.timestamp >= %(startTimestamp)s
AND pages.timestamp < %(endTimestamp)s
AND pages.first_paint_time > 0;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.first_paint_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT first_paint_time
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)} AND pages.first_paint_time > 0
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp ASC;"""
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(row)
@@ -2676,7 +2782,8 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query.append("pages.dom_content_loaded_time>0")
pg_sub_query_chart.append("pages.dom_content_loaded_time>0")
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
@@ -2697,7 +2804,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
row = cur.fetchone()
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(NULLIF(pages.dom_content_loaded_time,0)),0) AS value
COALESCE(AVG(pages.dom_content_loaded_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT dom_content_loaded_time
@@ -2708,17 +2815,21 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
ORDER BY generated_timestamp ASC;"""
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows),
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(row)
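
Note: the one-character change above (dropping the trailing comma) is a genuine bug fix: in Python a trailing comma builds a one-element tuple, so row["chart"] previously held the chart list wrapped in a tuple and serialized as a nested array. For example:

    rows = [{"timestamp": 1, "value": 2}]
    chart = rows,   # old line: a tuple, ([{"timestamp": 1, "value": 2}],)
    chart = rows    # fixed line: the list itself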
def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COALESCE(AVG(pages.ttfb), 0) AS value
FROM events.pages
@@ -2727,44 +2838,84 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
AND pages.timestamp >= %(startTimestamp)s
AND pages.timestamp < %(endTimestamp)s
AND pages.ttfb > 0;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.ttfb),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT ttfb
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)} AND pages.ttfb > 0
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp ASC;"""
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(row)
def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query.append("pages.time_to_interactive > 0")
pg_sub_query_chart.append("pages.time_to_interactive > 0")
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive), 0) AS value
FROM events.pages
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND pages.timestamp >= %(startTimestamp)s
AND pages.timestamp < %(endTimestamp)s
AND pages.time_to_interactive > 0;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
AND pages.timestamp < %(endTimestamp)s;"""
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(AVG(pages.time_to_interactive),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT time_to_interactive
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)}
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp ASC;"""
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(row)
def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="pages", time_column="timestamp",
duration=False)
if value is not None:
pg_sub_query.append("pages.path = %(value)s")
pg_sub_query_chart.append("pages.path = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COUNT(pages.session_id) AS value
FROM events.pages INNER JOIN public.sessions USING (session_id)
@@ -2774,5 +2925,22 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}))
row = cur.fetchone()
pg_query = f"""WITH pages AS(SELECT pages.timestamp
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
)
SELECT generated_timestamp AS timestamp,
COUNT(pages.*) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT 1
FROM pages
WHERE {" AND ".join(pg_sub_query_chart)}
) AS pages ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
rows = cur.fetchall()
row["chart"] = rows
row["unit"] = schemas.TemplatePredefinedUnits.count
return helper.dict_to_camel_case(row)

View file

@@ -85,7 +85,7 @@ def get_dashboard(project_id, user_id, dashboard_id):
FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
FROM metrics
INNER JOIN dashboard_widgets USING (metric_id)
LEFT JOIN LATERAL (SELECT JSONB_AGG(metric_series.* ORDER BY index) AS series
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
@@ -102,9 +102,12 @@ def get_dashboard(project_id, user_id, dashboard_id):
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
for w in row["widgets"]:
row["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
row["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
return helper.dict_to_camel_case(row)
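
Note: two related fixes in this dashboards hunk. COALESCE(JSONB_AGG(...), '[]') makes series an empty JSON array instead of SQL NULL for a widget with no series, which the new "for s in w["series"]" loop would otherwise trip over; and the timestamp conversion now writes to each widget w instead of overwriting row["created_at"] on every iteration. In miniature:

    w = {"series": None}     # old SQL: JSONB_AGG over zero rows yields NULL
    # for s in w["series"]:  # would raise TypeError: 'NoneType' is not iterable
    w = {"series": []}       # with COALESCE(..., '[]') the loop is a safe no-op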

View file

@@ -341,7 +341,7 @@ class event_type:
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="base_path")
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="url")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="base_path")
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",

View file

@@ -261,7 +261,6 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
}}
@dev.timed
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
@@ -313,7 +312,6 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
return f
@dev.timed
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:

View file

@@ -3,7 +3,6 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils import dev
@dev.timed
def get_by_url(project_id, data):
args = {"startDate": data.get('startDate', TimeUTC.now(delta_days=-30)),
"endDate": data.get('endDate', TimeUTC.now()),

View file

@@ -28,7 +28,6 @@ JOURNEY_TYPES = {
}
@dev.timed
def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args):
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
@@ -181,7 +180,6 @@ def __complete_acquisition(rows, start_date, end_date=None):
return rows
@dev.timed
def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@@ -229,7 +227,6 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT
}
@dev.timed
def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -277,7 +274,6 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -367,7 +363,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -460,7 +456,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70),
}
@dev.timed
def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -525,7 +521,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da
return popularity
@dev.timed
def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -595,7 +591,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -655,7 +651,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -720,7 +716,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -757,7 +753,7 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
return rows
@dev.timed
def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -799,7 +795,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime
return row_users
@dev.timed
def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
@@ -824,7 +820,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes
return helper.dict_to_camel_case(row_users)
@dev.timed
def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -889,7 +885,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
}
@dev.timed
def search(text, feature_type, project_id, platform=None):
if not feature_type:
resource_type = "ALL"

View file

@@ -273,7 +273,6 @@ def add_edit_delete(tenant_id, project_id, new_metas):
return {"data": get(project_id)}
@dev.timed
def get_remaining_metadata_with_count(tenant_id):
all_projects = projects.get_projects(tenant_id=tenant_id)
results = []

View file

@@ -41,7 +41,6 @@ def __create(tenant_id, name):
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@dev.timed
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False,
last_tracker_version=None):
with pg_client.PostgresClient() as cur:

View file

@@ -3,7 +3,7 @@ from typing import List
import schemas
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event
from chalicelib.utils import pg_client, helper, dev, metrics_helper
from chalicelib.utils import pg_client, helper, metrics_helper
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
@@ -168,7 +168,6 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
@@ -659,11 +658,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
# if event_type not in list(events.SUPPORTED_TYPES.keys()) \
# or event.value in [None, "", "*"] \
# and (event_type != events.event_type.ERROR.ui_type \
# or event_type != events.event_type.ERROR_IOS.ui_type):
# continue
if event_type == events.event_type.CLICK.ui_type:
event_from = event_from % f"{events.event_type.CLICK.table} AS main "
if not is_any:

View file

@@ -24,7 +24,7 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
@dev.timed
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
"""
Add minimal timestamp
@@ -293,7 +293,7 @@ def pearson_corr(x: list, y: list):
return r, confidence, False
@dev.timed
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
"""
Returns two lists with binary values 0/1:
@@ -363,7 +363,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
return transitions, errors, all_errors, n_sess_affected
@dev.timed
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
"""
@@ -415,7 +415,7 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
@dev.timed
def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows):
@@ -467,7 +467,7 @@ def get_stages(stages, rows):
return stages_list
@dev.timed
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
"""
@@ -544,7 +544,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues
@dev.timed
def get_top_insights(filter_d, project_id):
output = []
stages = filter_d.get("events", [])
@@ -582,7 +582,7 @@ def get_top_insights(filter_d, project_id):
return stages_list, total_drop_due_to_issues
@dev.timed
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
output = dict({'critical_issues_count': 0})
stages = filter_d.get("events", [])

View file

@@ -51,8 +51,6 @@ def login(data: schemas.UserLoginSchema = Body(...)):
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]
@@ -219,8 +217,6 @@ def get_client(context: schemas.CurrentContext = Depends(OR_context)):
r = tenants.get_by_tenant_id(context.tenant_id)
if r is not None:
r.pop("createdAt")
r["projects"] = projects.get_projects(tenant_id=context.tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
return {
'data': r
}

View file

@@ -342,7 +342,8 @@ def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body
{"key": "avg_time_to_render", "data": dashboard.get_time_to_render(project_id=projectId, **data.dict())},
{"key": "avg_used_js_heap_size", "data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": "avg_cpu", "data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps, "data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}

View file

@@ -613,7 +613,12 @@ class SessionSearchFilterSchema(__MixedSearchFilter):
return values
class SessionsSearchPayloadSchema(BaseModel):
class _PaginatedSchema(BaseModel):
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
class SessionsSearchPayloadSchema(_PaginatedSchema):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[SessionSearchFilterSchema] = Field([])
startDate: int = Field(None)
@@ -622,8 +627,6 @@ class SessionsSearchPayloadSchema(BaseModel):
order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
bookmarked: bool = Field(default=False)
class Config:
@@ -803,9 +806,10 @@ class TimeseriesMetricOfType(str, Enum):
session_count = "sessionCount"
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch):
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
startTimestamp: int = Field(TimeUTC.now(-7))
endTimestamp: int = Field(TimeUTC.now())
series: Optional[List[CustomMetricCreateSeriesSchema]] = Field(default=None)
class Config:
alias_generator = attribute_to_camel_case

View file

@@ -37,8 +37,8 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
jwt_secret="SET A RANDOM STRING HERE"
peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
assist=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
assistList=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres

View file

@@ -1,5 +1,4 @@
import math
import random
import schemas
from chalicelib.utils import pg_client
@@ -203,7 +202,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
count = count[0]["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
return results
@@ -249,15 +248,15 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta
return results
def __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, ch_sub_query, meta=False, **args):
def __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp, ch_sub_query, meta=False, **args):
ch_query = f"""\
SELECT
COUNT(DISTINCT errors.message) AS count
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if meta else ""}
WHERE {" AND ".join(ch_sub_query)};"""
count = cur.execute(query=ch_query,
params={"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
count = ch.execute(query=ch_query,
params={"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
if count is not None and len(count) > 0:
return count[0]["count"]
@@ -343,12 +342,12 @@ def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("(pages.dom_content_loaded_event_end>0 OR pages.first_contentful_paint>0)")
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""\
SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
@@ -376,10 +375,9 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""\
SELECT AVG(NULLIF(pages.load_event_end ,0)) AS avg_page_load_time
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_query = f"""SELECT AVG(pages.load_event_end) AS avg_page_load_time
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND pages.load_event_end>0;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
row = ch.execute(query=ch_query, params=params)[0]
@@ -389,10 +387,9 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_query = f"""\
SELECT AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_query = f"""SELECT AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0;"""
row = ch.execute(query=ch_query,
params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]
@ -431,20 +428,19 @@ def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return results
def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args):
def __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""\
SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
ch_query = f"""SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = cur.execute(query=ch_query, params=params)
rows = ch.execute(query=ch_query, params=params)
return rows
@ -464,14 +460,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
AVG(NULLIF(resources.duration,0)) AS avg,
AVG(resources.duration) AS avg,
COUNT(resources.session_id) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
# print(ch.client().substitute_params(ch_query, params))
rows = ch.execute(query=ch_query, params=params)
rows = [{"url": i["url"], "avgDuration": i["avg"], "sessions": i["count"]} for i in rows]
@ -480,17 +475,15 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
urls = [row["url"] for row in rows]
charts = {}
ch_query = f"""\
SELECT url,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY url, timestamp
ORDER BY url, timestamp;"""
ch_query = f"""SELECT url,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
GROUP BY url, timestamp
ORDER BY url, timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, "url": urls, **__get_constraint_values(args)}
# print(ch.client().substitute_params(ch_query, params))
u_rows = ch.execute(query=ch_query, params=params)
for url in urls:
sub_rows = []
@ -547,10 +540,10 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
"endTimestamp": endTimestamp}
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS avg
AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'img'
AND resources.type = 'img' AND resources.duration>0
{(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
GROUP BY timestamp
ORDER BY timestamp;"""
@ -560,10 +553,10 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
end_time=endTimestamp,
density=density, neutral={"avg": 0})]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS avg
AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'fetch'
AND resources.type = 'fetch' AND resources.duration>0
{(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
GROUP BY timestamp
ORDER BY timestamp;"""
@ -578,9 +571,9 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
ch_sub_query_chart += meta_condition
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(pages.load_event_end ,0)) AS avg
AVG(pages.load_event_end) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
WHERE {" AND ".join(ch_sub_query_chart)} AND pages.load_event_end>0
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
GROUP BY timestamp
ORDER BY timestamp;"""
@ -899,10 +892,11 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
ch_sub_query_chart.append(f"resources.url = %(value)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("resources.duration>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS avg
AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -912,7 +906,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
"endTimestamp": endTimestamp,
"value": url, "type": type, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT AVG(NULLIF(resources.duration,0)) AS avg
ch_query = f"""SELECT AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
@ -952,7 +946,8 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})}
density=density, neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -977,16 +972,16 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
splitByChar('/', resources.url_hostpath)[-1] AS name,
AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY name
ORDER BY avg DESC
LIMIT 10;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
        # print(ch.format(query=ch_query, params=params))
rows = ch.execute(query=ch_query, params=params)
if len(rows) == 0:
return []
ch_sub_query.append(ch_sub_query_chart[-1])
results = []
names = {f"name_{i}": r["name"] for i, r in enumerate(rows)}
@ -994,7 +989,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
WHERE {" AND ".join(ch_sub_query_chart)}
AND ({" OR ".join([f"endsWith(resources.url_hostpath, %(name_{i})s)>0" for i in range(len(names.keys()))])})
GROUP BY name,timestamp
ORDER BY name,timestamp;"""
@ -1002,7 +997,6 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
**names, **__get_constraint_values(args)}
# print(ch.format(query=ch_query, params=params))
charts = ch.execute(query=ch_query, params=params)
for r in rows:
sub_chart = []
@ -1097,7 +1091,8 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})}
density=density, neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1279,7 +1274,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
                               AVG(pages.visually_complete) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -1295,7 +1290,8 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp, density=density,
neutral={"value": 0})}
neutral={"value": 0}),
"unit": schemas.TemplatePredefinedUnits.millisecond}
def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1359,7 +1355,8 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))}
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.memory}
def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1390,7 +1387,8 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))}
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.percentage}
def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1421,7 +1419,8 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))}
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.frame}
def __get_crashed_sessions_ids(project_id, startTimestamp, endTimestamp):
@ -2010,8 +2009,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
return helper.list_to_camel_case(
__merge_charts(
[{"timestamp": i["timestamp"], "avgCountResources": i["avg"], "types": i["types"]} for i in resources],
[{"timestamp": i["timestamp"], "avgTimeToRender": i["avg"]} for i in
time_to_render["chart"]]))
[{"timestamp": i["timestamp"], "avgTimeToRender": i["value"]} for i in time_to_render["chart"]]))
def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2104,6 +2102,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
@ -2111,11 +2110,10 @@ def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""\
SELECT AVG(NULLIF(pages.load_event_end ,0)) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_sub_query.append("pages.load_event_end>0")
ch_query = f"""SELECT AVG(pages.load_event_end) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
row = ch.execute(query=ch_query, params=params)[0]
@ -2147,26 +2145,25 @@ def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.no
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True,
data=args)
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("pages.load_event_end>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(pages.load_event_end ,0)) AS value
COALESCE(AVG(pages.load_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query,
params={**params, **location_constraints_vals, **__get_constraint_values(args)})
pages = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})]
rows = ch.execute(query=ch_query, params={**params, **location_constraints_vals, **__get_constraint_values(args)})
pages = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
for s in pages:
for k in s:
if s[k] is None:
s[k] = 0
# for s in pages:
# for k in s:
# if s[k] is None:
# s[k] = 0
return pages
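__complete_missing_steps is defined elsewhere in this module; its role here is to pad the sparse chart so every density bucket has a point, which is why the per-row None-zeroing loops above could be retired once COALESCE took over. A rough sketch of the assumed behavior (the real helper may round bucket edges differently):

def complete_missing_steps(rows, start_time, end_time, density, neutral):
    # Hypothetical stand-in: insert `neutral` for buckets ClickHouse
    # returned no row for, keyed on the bucket timestamps the queries emit.
    step = max((end_time - start_time) // density, 1)
    present = {r["timestamp"]: r for r in rows}
    return [present.get(ts, {"timestamp": ts, **neutral})
            for ts in range(start_time, start_time + step * density, step)]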
@ -2182,6 +2179,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
@ -2190,17 +2188,18 @@ def __get_application_activity_avg_image_load_time(ch, project_id, startTimestam
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_sub_query.append("resources.duration>0")
ch_query = f"""\
SELECT AVG(NULLIF(resources.duration,0)) AS value
SELECT COALESCE(AVG(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
row = ch.execute(query=ch_query,
params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]
result = row
for k in result:
if result[k] is None:
result[k] = 0
# for k in result:
# if result[k] is None:
# result[k] = 0
return result
@ -2223,8 +2222,9 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("resources.duration>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS value
COALESCE(AVG(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'img'
@ -2232,15 +2232,14 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **img_constraints_vals, **__get_constraint_values(args)})
images = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})]
images = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
for s in images:
for k in s:
if s[k] is None:
s[k] = 0
# for s in images:
# for k in s:
# if s[k] is None:
# s[k] = 0
return images
@ -2256,6 +2255,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
@ -2264,17 +2264,17 @@ def __get_application_activity_avg_request_load_time(ch, project_id, startTimest
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_query = f"""\
SELECT AVG(NULLIF(resources.duration,0)) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_sub_query.append("resources.duration>0")
ch_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
row = ch.execute(query=ch_query,
params={"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]
result = row
for k in result:
if result[k] is None:
result[k] = 0
# for k in result:
# if result[k] is None:
# result[k] = 0
return result
@ -2296,9 +2296,9 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("resources.duration>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(resources.duration,0)) AS value
COALESCE(AVG(resources.duration),0) AS value
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'fetch'
@ -2307,15 +2307,14 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query,
params={**params, **request_constraints_vals, **__get_constraint_values(args)})
requests = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp, density=density,
neutral={"value": 0})]
requests = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp, density=density,
neutral={"value": 0})
for s in requests:
for k in s:
if s[k] is None:
s[k] = 0
# for s in requests:
# for k in s:
# if s[k] is None:
# s[k] = 0
return requests
@ -2343,10 +2342,10 @@ def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""\
SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_sub_query.append("pages.dom_content_loaded_event_end>0")
ch_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
@ -2362,23 +2361,22 @@ def __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTim
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("pages.dom_content_loaded_event_end>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
AVG(NULLIF(pages.dom_content_loaded_event_end,0)) AS value
COALESCE(AVG(pages.dom_content_loaded_event_end),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})]
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
for s in rows:
for k in s:
if s[k] is None:
s[k] = 0
# for s in rows:
# for k in s:
# if s[k] is None:
# s[k] = 0
return rows
@ -2388,6 +2386,8 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
rows = __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args)
if len(rows) > 0:
results = helper.dict_to_camel_case(rows[0])
results["chart"] = __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@ -2395,6 +2395,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
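The previous-period comparison used for progress simply mirrors the selected window backwards: diff = endTimestamp - startTimestamp, then the same query runs over [start - diff, start]. Minimal sketch of that window arithmetic:

def previous_window(start_ts, end_ts):
    # Mirror the selected range onto the period immediately before it.
    diff = end_ts - start_ts
    return start_ts - diff, start_ts

assert previous_window(100, 160) == (40, 100)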
@ -2402,9 +2403,10 @@ def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("pages.first_contentful_paint>0")
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""\
SELECT COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS value
SELECT COALESCE(AVG(pages.first_contentful_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -2413,6 +2415,29 @@ def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp
return rows
def __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTimestamp, endTimestamp, density=20,
**args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("pages.first_contentful_paint>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(pages.first_contentful_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
return rows
def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
results = {}
@ -2424,6 +2449,9 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
for key in results:
if isnan(results[key]):
results[key] = 0
results["chart"] = __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@ -2432,26 +2460,48 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
return results
def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args):
def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_query = f"""\
SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_sub_query.append("sessions.pages_count>0")
ch_query = f"""SELECT COALESCE(CEIL(AVG(sessions.pages_count)),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = cur.execute(query=ch_query, params=params)
rows = ch.execute(query=ch_query, params=params)
return rows
def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_sub_query_chart.append("sessions.pages_count>0")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(sessions.pages_count),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
return rows
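The chart timestamps in all of these queries come from toUnixTimestamp(toStartOfInterval(datetime, INTERVAL step second)) * 1000: each event time floored to the step grid, converted to milliseconds. A Python equivalent for one timestamp (assumption: step_size is in seconds, as the queries imply):

def bucket_ms(unix_seconds: int, step_size: int) -> int:
    # Floor to the step grid, then convert seconds to milliseconds.
    return (unix_seconds - unix_seconds % step_size) * 1000

assert bucket_ms(1_650_000_123, 300) == 1_650_000_000_000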
def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
results = {}
@ -2463,6 +2513,8 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
for key in results:
if isnan(results[key]):
results[key] = 0
results["chart"] = __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestamp,
endTimestamp, **args)
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
@ -2471,84 +2523,163 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args):
def __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query.append("isNotNull(sessions.duration)")
ch_sub_query.append("sessions.duration>0")
ch_query = f"""\
SELECT COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
ch_query = f"""SELECT COALESCE(AVG(sessions.duration),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
**__get_constraint_values(args)}
rows = cur.execute(query=ch_query, params=params)
rows = ch.execute(query=ch_query, params=params)
return rows
def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
def __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("isNotNull(sessions.duration)")
ch_sub_query_chart.append("sessions.duration>0")
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(AVG(sessions.duration),0) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
return rows
def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.response_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
return helper.dict_to_camel_case(rows[0])
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
                                COALESCE(AVG(pages.response_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.response_time) AND pages.response_time>0
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
results["chart"] = rows
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(results)
def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COUNT(pages.session_id) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
return helper.dict_to_camel_case(rows[0])
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
result = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COUNT(pages.session_id) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
result["chart"] = rows
result["unit"] = schemas.TemplatePredefinedUnits.count
return helper.dict_to_camel_case(result)
def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.first_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
return helper.dict_to_camel_case(rows[0])
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.first_paint),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.first_paint) AND pages.first_paint>0
GROUP BY timestamp
ORDER BY timestamp;;"""
rows = ch.execute(query=ch_query, params=params)
results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(results)
def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2564,10 +2695,14 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
ch_sub_query.append("isNotNull(pages.dom_content_loaded_event_time)")
ch_sub_query.append("pages.dom_content_loaded_event_time>0")
ch_sub_query_chart.append("isNotNull(pages.dom_content_loaded_event_time)")
ch_sub_query_chart.append("pages.dom_content_loaded_event_time>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_event_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0;"""
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
@ -2575,54 +2710,99 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
rows = ch.execute(query=ch_query, params=params)
results = helper.dict_to_camel_case(rows[0])
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_time ,0)),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;;"""
COALESCE(AVG(pages.dom_content_loaded_event_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params=params)
results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return results
def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
ch_sub_query.append("isNotNull(pages.ttfb)")
ch_sub_query.append("pages.ttfb>0")
ch_sub_query_chart.append("isNotNull(pages.ttfb)")
ch_sub_query_chart.append("pages.ttfb>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.ttfb),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
return helper.dict_to_camel_case(rows[0])
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.ttfb),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params=params)
results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(results)
def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
endTimestamp=TimeUTC.now(), value=None, density=20, **args):
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
ch_sub_query += meta_condition
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
ch_sub_query.append("isNotNull(pages.time_to_interactive)")
ch_sub_query.append("pages.time_to_interactive >0")
ch_sub_query_chart.append("isNotNull(pages.time_to_interactive)")
ch_sub_query_chart.append("pages.time_to_interactive >0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
return helper.dict_to_camel_case(rows[0])
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COALESCE(AVG(pages.time_to_interactive),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params=params)
results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"value": 0}))
results["unit"] = schemas.TemplatePredefinedUnits.millisecond
return helper.dict_to_camel_case(results)

View file

@ -29,7 +29,7 @@ JOURNEY_TYPES = {
}
@dev.timed
def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args):
event_start = None
event_table = JOURNEY_TYPES["CLICK"]["table"]
@ -190,7 +190,7 @@ def __complete_acquisition(rows, start_date, end_date=None):
return rows
@dev.timed
def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@ -233,7 +233,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT
}
@dev.timed
def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@ -286,7 +286,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@ -386,7 +386,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@ -497,7 +497,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70),
}
@dev.timed
def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@ -572,7 +572,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da
return popularity
@dev.timed
def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
event_type = "CLICK"
@ -658,7 +658,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
event_type = "CLICK"
@ -728,7 +728,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
event_type = "CLICK"
@ -796,7 +796,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
event_table = JOURNEY_TYPES["CLICK"]["table"]
@ -838,7 +838,7 @@ PERIOD_TO_FUNCTION = {
}
@dev.timed
def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
meta_condition = __get_meta_constraint(args)
@ -885,7 +885,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime
return {"avg": avg, "chart": rows}
@dev.timed
def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args):
ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args)
meta_condition = __get_meta_constraint(args)
@ -925,7 +925,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes
return {"avg": avg, "partition": helper.list_to_camel_case(rows)}
@dev.timed
def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
ch_sub_query = __get_basic_constraints(table_name="feature", data=args)
@ -1008,7 +1008,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
}
@dev.timed
def search(text, feature_type, project_id, platform=None):
if not feature_type:
resource_type = "ALL"

View file

@ -56,8 +56,6 @@ def login(data: schemas.UserLoginSchema = Body(...)):
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True, user_id=r["id"])
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]

View file

@ -270,7 +270,7 @@ VALUES ('Captured sessions', 'overview', '{
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 2,
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),

View file

@ -1474,7 +1474,7 @@ VALUES ('Captured sessions', 'overview', '{
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 2,
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),

View file

@ -268,9 +268,9 @@ function extractSessionInfo(socket) {
socket.handshake.query.sessionInfo.userDevice = ua.device.model || null;
socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop';
socket.handshake.query.sessionInfo.userCountry = null;
if (geoip !== null) {
if (geoip() !== null) {
debug && console.log(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`);
let country = geoip.country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
let country = geoip().country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
socket.handshake.query.sessionInfo.userCountry = country.country.isoCode;
}
}

View file

@ -247,9 +247,9 @@ function extractSessionInfo(socket) {
socket.handshake.query.sessionInfo.userDevice = ua.device.model || null;
socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop';
socket.handshake.query.sessionInfo.userCountry = null;
if (geoip !== null) {
if (geoip() !== null) {
debug && console.log(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`);
let country = geoip.country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
let country = geoip().country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
socket.handshake.query.sessionInfo.userCountry = country.country.isoCode;
}
}

View file

@ -107,7 +107,7 @@ VALUES ('Captured sessions', 'overview', '{"col":1,"row":1,"position":0}', true,
('Resources Loaded vs Visually Complete', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
('DOM Build Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
('Pages Response Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Pages Response Time Distribution', 'performance', '{"col":4,"row":2,"position":0}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Missing Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'missing_resources', 'predefined', 'table'),
('Slowest Resources', 'resources', '{"col":4,"row":2,"position":0}', true, true, true, 'slowest_resources', 'predefined', 'table'),

View file

@ -1264,7 +1264,7 @@ VALUES ('Captured sessions', 'overview', '{
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 2,
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),

View file

@ -218,9 +218,9 @@ function extractSessionInfo(socket) {
socket.handshake.query.sessionInfo.userDevice = ua.device.model || null;
socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop';
socket.handshake.query.sessionInfo.userCountry = null;
if (geoip !== null) {
if (geoip() !== null) {
debug && console.log(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`);
let country = geoip.country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
let country = geoip().country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address);
socket.handshake.query.sessionInfo.userCountry = country.country.isoCode;
}
}

View file

@ -13,4 +13,8 @@ if (process.env.MAXMINDDB_FILE !== undefined) {
console.error("!!! please provide a valid value for MAXMINDDB_FILE env var.");
}
module.exports = {geoip}
module.exports = {
geoip: () => {
return geoip;
}
}
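Exporting the geoip value directly snapshots whatever it was at require time, which stays null until the MaxMind file finishes loading; exporting an accessor defers the read to call time, which is why the call sites above change from geoip to geoip(). A Python analogue of the pattern, illustrative only:

_db = None  # populated later, e.g. once the MaxMind database has loaded

def geoip():
    # Consumers call geoip() at use time instead of importing the value,
    # so an assignment to _db after module load is still observed.
    return _db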