* refactor(chalice): upgraded dependencies

* refactor(chalice): upgraded dependencies
feat(chalice): support heatmaps

* feat(chalice): support predefined metric users-count

* feat(chalice): support timeseries of users-count

* refactor(sourcemap-uploader): refactored code
Kraiem Taha Yassine 2024-06-21 14:26:18 +02:00 committed by GitHub
parent fca98c8c85
commit 7b6c02a955
7 changed files with 209 additions and 73 deletions


@@ -56,6 +56,7 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
         schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
         schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
         schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-        schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
+        schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type,
+        schemas.MetricOfWebVitals.count_users: metrics.get_unique_users, }
 
     return supported.get(key, lambda *args: None)(project_id=project_id, **data)
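For readers outside the codebase: `supported` is a plain dict dispatching enum keys to handler functions, and this hunk registers the new `count_users` metric. A minimal, self-contained sketch of the pattern (hypothetical handlers and values, not the real chalice module); note that a fallback for unknown keys must accept keyword arguments to truly no-op, since it is invoked with `project_id=...`:

```python
# Minimal sketch of the dispatch-table pattern above, with hypothetical handlers.
from enum import Enum


class MetricOfWebVitals(str, Enum):
    count_sessions = "countSessions"
    count_users = "countUsers"


def get_count_sessions(project_id, **data):
    return {"value": 42, "unit": "count"}  # placeholder payload


def get_unique_users(project_id, **data):
    return {"value": 7, "unit": "count"}  # placeholder payload


SUPPORTED = {
    MetricOfWebVitals.count_sessions: get_count_sessions,
    MetricOfWebVitals.count_users: get_unique_users,
}


def get_metric(key, project_id, data):
    # The fallback must accept **kwargs: a bare `lambda *args: None`
    # would raise TypeError when called with project_id=... below.
    return SUPPORTED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)


print(get_metric(MetricOfWebVitals.count_users, 1, {}))  # {'value': 7, 'unit': 'count'}
```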


@@ -2913,3 +2913,52 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
     row["chart"] = rows
     row["unit"] = schemas.TemplatePredefinedUnits.count
     return helper.dict_to_camel_case(row)
+
+
+def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                     endTimestamp=TimeUTC.now(),
+                     density=7, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
+                                           chart=True, data=args)
+    pg_sub_query.append("user_id IS NOT NULL")
+    pg_sub_query.append("user_id != ''")
+    pg_sub_query_chart.append("user_id IS NOT NULL")
+    pg_sub_query_chart.append("user_id != ''")
+    with pg_client.PostgresClient() as cur:
+        pg_query = f"""SELECT generated_timestamp AS timestamp,
+                              COALESCE(COUNT(sessions), 0) AS value
+                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                            LEFT JOIN LATERAL ( SELECT DISTINCT user_id
+                                                FROM public.sessions
+                                                WHERE {" AND ".join(pg_sub_query_chart)}
+                                              ) AS sessions ON (TRUE)
+                       GROUP BY generated_timestamp
+                       ORDER BY generated_timestamp;"""
+        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+        cur.execute(cur.mogrify(pg_query, params))
+        rows = cur.fetchall()
+        results = {
+            "value": sum([r["value"] for r in rows]),
+            "chart": rows
+        }
+
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+
+        pg_query = f"""SELECT COUNT(DISTINCT sessions.user_id) AS count
+                       FROM public.sessions
+                       WHERE {" AND ".join(pg_sub_query)};"""
+        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+                  **__get_constraint_values(args)}
+        cur.execute(cur.mogrify(pg_query, params))
+        count = cur.fetchone()["count"]
+
+        results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
+    results["unit"] = schemas.TemplatePredefinedUnits.count
+    return results
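The `diff`/swap block above computes the immediately preceding window of equal width, so `progress` compares the current period against the one just before it. The arithmetic in isolation (the `progress` helper below is a hypothetical stand-in; `helper.__progress` itself is not shown in this diff):

```python
# The window shift used for the "progress" comparison: same width,
# ending where the current window starts.
def previous_window(start_ts: int, end_ts: int) -> tuple:
    width = end_ts - start_ts
    return start_ts - width, start_ts


# Hypothetical stand-in for helper.__progress (assumed behavior):
# relative change of the current value vs the previous-window value.
def progress(old_val: float, new_val: float) -> float:
    if old_val == 0:
        return 100.0 if new_val > 0 else 0.0
    return round((new_val - old_val) / old_val * 100, 2)


print(previous_window(1_000_000, 1_086_400))  # (913600, 1000000)
print(progress(80, 100))                      # 25.0
```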


@@ -189,17 +189,35 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     with pg_client.PostgresClient() as cur:
         if metric_type == schemas.MetricType.timeseries:
             if view_type == schemas.MetricTimeseriesViewType.line_chart:
-                main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
-                                                                    {query_part})
-                                             SELECT generated_timestamp AS timestamp,
-                                                    COUNT(s) AS count
-                                             FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
-                                                  LEFT JOIN LATERAL ( SELECT 1 AS s
-                                                                      FROM full_sessions
-                                                                      WHERE start_ts >= generated_timestamp
-                                                                        AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
-                                             GROUP BY generated_timestamp
-                                             ORDER BY generated_timestamp;""", full_args)
+                if metric_of == schemas.MetricOfTimeseries.session_count:
+                    # main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
+                    main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.session_id, s.start_ts
+                                                                        {query_part})
+                                                 SELECT generated_timestamp AS timestamp,
+                                                        COUNT(s) AS count
+                                                 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+                                                      LEFT JOIN LATERAL ( SELECT 1 AS s
+                                                                          FROM full_sessions
+                                                                          WHERE start_ts >= generated_timestamp
+                                                                            AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
+                                                 GROUP BY generated_timestamp
+                                                 ORDER BY generated_timestamp;""", full_args)
+                elif metric_of == schemas.MetricOfTimeseries.user_count:
+                    main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.user_id, s.start_ts
+                                                                        {query_part}
+                                                                        AND s.user_id IS NOT NULL
+                                                                        AND s.user_id != '')
+                                                 SELECT generated_timestamp AS timestamp,
+                                                        COUNT(s) AS count
+                                                 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+                                                      LEFT JOIN LATERAL ( SELECT DISTINCT user_id AS s
+                                                                          FROM full_sessions
+                                                                          WHERE start_ts >= generated_timestamp
+                                                                            AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
+                                                 GROUP BY generated_timestamp
+                                                 ORDER BY generated_timestamp;""", full_args)
+                else:
+                    raise Exception(f"Unsupported metricOf:{metric_of}")
             else:
                 main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count
                                              {query_part};""", full_args)
@@ -726,7 +744,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
             event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
             if not is_any:
                 event_where.append(
-                    sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s", event.value,
+                    sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
+                                        event.value,
                                         value_key=e_k))
 
         elif event_type == events.EventType.TAG.ui_type:
@@ -750,7 +769,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
             event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
             if not is_any:
                 event_where.append(
-                    sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s", event.value,
+                    sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
+                                        event.value,
                                         value_key=e_k))
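Both timeseries branches above share one shape: `generate_series` emits every bucket boundary, and the `LEFT JOIN LATERAL` keeps empty buckets in the result at zero. A rough Python sketch of the same computation (illustrative only; note the SQL's closed interval `start_ts <= generated_timestamp + step_size` can credit a session sitting exactly on a boundary to two adjacent buckets, while this sketch uses half-open buckets):

```python
# Illustrative sketch: fixed-width buckets over [start, end], counting
# distinct non-empty user_ids per bucket; buckets with no sessions stay 0.
def bucket_unique_users(sessions, start, end, step):
    # sessions: iterable of (start_ts, user_id) pairs
    buckets = {ts: set() for ts in range(start, end + 1, step)}
    for start_ts, user_id in sessions:
        if user_id and start <= start_ts <= end:
            bucket_ts = start + ((start_ts - start) // step) * step
            buckets[bucket_ts].add(user_id)
    return [{"timestamp": ts, "count": len(users)}
            for ts, users in sorted(buckets.items())]


data = [(0, "a"), (5, "a"), (5, "b"), (17, None)]
print(bucket_unique_users(data, 0, 20, 10))
# -> [{'timestamp': 0, 'count': 2}, {'timestamp': 10, 'count': 0}, {'timestamp': 20, 'count': 0}]
```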


@@ -994,6 +994,7 @@ class MetricOfWebVitals(str, Enum):
     avg_visited_pages = "avgVisitedPages"
     count_requests = "countRequests"
     count_sessions = "countSessions"
+    count_users = "countUsers"
 
 
 class MetricOfTable(str, Enum):
@@ -1012,6 +1013,7 @@ class MetricOfTable(str, Enum):
 
 class MetricOfTimeseries(str, Enum):
     session_count = "sessionCount"
+    user_count = "userCount"
 
 
 class MetricOfFunnels(str, Enum):
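Because these enums subclass `str`, the camelCase wire values parse and compare directly, which is what lets the handlers above switch on `metric_of`. A quick self-contained illustration of the new members:

```python
from enum import Enum


class MetricOfTimeseries(str, Enum):
    session_count = "sessionCount"
    user_count = "userCount"


# The camelCase string from an API payload parses straight into the enum,
# and the member compares equal to its string value:
assert MetricOfTimeseries("userCount") is MetricOfTimeseries.user_count
assert MetricOfTimeseries.user_count == "userCount"
```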


@@ -2798,3 +2798,54 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
                                                 neutral={"value": 0}))
     helper.__time_value(results)
     return helper.dict_to_camel_case(results)
+
+
+def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                     endTimestamp=TimeUTC.now(),
+                     density=7, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
+    ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
+    meta_condition = __get_meta_constraint(args)
+    ch_sub_query += meta_condition
+    ch_sub_query_chart += meta_condition
+    ch_sub_query_chart.append("isNotNull(sessions.user_id)")
+    ch_sub_query_chart.append("sessions.user_id!=''")
+
+    with ch_client.ClickHouseClient() as ch:
+        ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
+                              COUNT(DISTINCT sessions.user_id) AS value
+                       FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
+                       WHERE {" AND ".join(ch_sub_query_chart)}
+                       GROUP BY timestamp
+                       ORDER BY timestamp;"""
+        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+        rows = ch.execute(query=ch_query, params=params)
+        results = {
+            "value": sum([r["value"] for r in rows]),
+            "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
+                                              density=density,
+                                              neutral={"value": 0})
+        }
+
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+
+        ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count
+                       FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
+                       WHERE {" AND ".join(ch_sub_query)};"""
+        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+                  **__get_constraint_values(args)}
+        count = ch.execute(query=ch_query, params=params)
+        count = count[0]["count"]
+
+        results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
+    results["unit"] = schemas.TemplatePredefinedUnits.count
+    return results
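Two details worth noting against the PostgreSQL twin above: here only `ch_sub_query_chart` receives the user-id filters, so the previous-period `progress` query also counts sessions with an empty `user_id`; and because ClickHouse emits rows only for non-empty buckets, gaps are back-filled in Python. A hedged sketch of what `__complete_missing_steps` appears to do (its implementation is not part of this diff, so names and behavior here are assumptions):

```python
# Hedged sketch (assumed behavior): pad ClickHouse rows so every expected
# bucket between start_time and end_time is present, substituting `neutral`
# where the GROUP BY produced no row. The real __complete_missing_steps is
# not shown in this diff and may align buckets differently.
def complete_missing_steps(rows, start_time, end_time, density, neutral):
    step = (end_time - start_time) // density
    by_ts = {row["timestamp"]: row for row in rows}
    chart = []
    for i in range(density + 1):
        ts = start_time + i * step
        chart.append(by_ts.get(ts, {"timestamp": ts, **neutral}))
    return chart


rows = [{"timestamp": 10, "value": 3}]
print(complete_missing_steps(rows, 0, 20, 2, neutral={"value": 0}))
# -> [{'timestamp': 0, 'value': 0}, {'timestamp': 10, 'value': 3}, {'timestamp': 20, 'value': 0}]
```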


@@ -269,15 +269,30 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     with ch_client.ClickHouseClient() as cur:
         if metric_type == schemas.MetricType.timeseries:
             if view_type == schemas.MetricTimeseriesViewType.line_chart:
-                query = f"""SELECT toUnixTimestamp(
-                                    toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
-                                   ) * 1000 AS timestamp,
-                                   COUNT(processed_sessions.session_id) AS count
-                            FROM (SELECT DISTINCT ON(s.session_id) s.session_id AS session_id,
-                                         s.datetime AS datetime
-                                  {query_part}) AS processed_sessions
-                            GROUP BY timestamp
-                            ORDER BY timestamp;"""
+                if metric_of == schemas.MetricOfTimeseries.session_count:
+                    query = f"""SELECT toUnixTimestamp(
+                                        toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
+                                       ) * 1000 AS timestamp,
+                                       COUNT(processed_sessions.session_id) AS count
+                                FROM (SELECT s.session_id AS session_id,
+                                             s.datetime AS datetime
+                                      {query_part}) AS processed_sessions
+                                GROUP BY timestamp
+                                ORDER BY timestamp;"""
+                elif metric_of == schemas.MetricOfTimeseries.user_count:
+                    query = f"""SELECT toUnixTimestamp(
+                                        toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
+                                       ) * 1000 AS timestamp,
+                                       COUNT(DISTINCT processed_sessions.user_id) AS count
+                                FROM (SELECT s.user_id AS user_id,
+                                             s.datetime AS datetime
+                                      {query_part}
+                                      WHERE isNotNull(s.user_id)
+                                        AND s.user_id != '') AS processed_sessions
+                                GROUP BY timestamp
+                                ORDER BY timestamp;"""
+                else:
+                    raise Exception(f"Unsupported metricOf:{metric_of}")
                 main_query = cur.format(query, full_args)
             else:
                 main_query = cur.format(f"""SELECT count(DISTINCT s.session_id) AS count
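Unlike the PostgreSQL path, which builds buckets with `generate_series`, both ClickHouse branches derive the bucket key directly from the session row. In Python terms the key is just a floor to a `step_size`-second boundary, reported in epoch milliseconds (a sketch of the arithmetic, assuming epoch-aligned intervals, which is how `toStartOfInterval` behaves for second-granularity intervals):

```python
# The bucket key used in both branches, expressed in Python: floor the
# session's epoch time to a step_size-second boundary, in milliseconds.
def bucket_timestamp_ms(epoch_seconds: int, step_size: int) -> int:
    return (epoch_seconds // step_size) * step_size * 1000


print(bucket_timestamp_ms(1_718_972_778, 3600))  # hour bucket -> 1718971200000
```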


The sourcemap-uploader CLI entry point was reformatted end to end (brace spacing and indentation), with one behavioral change: a leftover `console.log('asd')` was removed from the `.then` handler. The resulting file:

@@ -1,89 +1,87 @@
#!/usr/bin/env node
'use strict';

const {ArgumentParser} = require('argparse');
const {version, description} = require('./package.json');
const {uploadFile, uploadDir} = require('./index.js');

const parser = new ArgumentParser({
    description,
});
parser.add_argument('-v', '--version', {action: 'version', version});
parser.add_argument('-k', '--api-key', {
    help: 'API key',
    required: true,
});
parser.add_argument('-p', '-i', '--project-key', {
    // -i is deprecated
    help: 'Project Key',
    required: true,
});
parser.add_argument('-s', '--server', {
    help: 'OpenReplay API server URL for upload',
});
// Should be verbose, but conflicting on npm compilation into bin
parser.add_argument('-l', '--logs', {
    help: 'Log requests information',
    action: 'store_true',
});

const subparsers = parser.add_subparsers({
    title: 'commands',
    dest: 'command',
    required: true,
});
const file = subparsers.add_parser('file');
file.add_argument('-m', '--sourcemap-file-path', {
    help: 'Local path to the sourcemap file',
    required: true,
});
file.add_argument('-u', '--js-file-url', {
    help: 'URL to the minified js file',
    required: true,
});
const dir = subparsers.add_parser('dir');
dir.add_argument('-m', '--sourcemap-dir-path', {
    help: 'Dir with the sourcemap files',
    required: true,
});
dir.add_argument('-u', '--js-dir-url', {
    help: 'Base URL where the corresponding dir will be placed',
    required: true,
});

// TODO: exclude in dir

const {command, api_key, project_key, server, logs, ...args} =
    parser.parse_args();
global._VERBOSE = !!logs;
console.log(command);
(command === 'file'
        ? uploadFile(
            api_key,
            project_key,
            args.sourcemap_file_path,
            args.js_file_url,
            server,
        )
        : uploadDir(
            api_key,
            project_key,
            args.sourcemap_dir_path,
            args.js_dir_url,
            server,
        )
)
    .then((sourceFiles) => sourceFiles.length > 0 ? console.log(
            `Successfully uploaded ${sourceFiles.length} sourcemap file${
                sourceFiles.length > 1 ? 's' : ''
            } for: \n` + sourceFiles.join('\t\n'),
        )
        : console.log(`No sourcemaps found in ${args.sourcemap_dir_path}`),
    )
    .catch((e) => console.error(`Sourcemap Uploader: ${e}`));