diff --git a/api/chalicelib/core/custom_metrics_predefined.py b/api/chalicelib/core/custom_metrics_predefined.py
index 71eb9d7d3..49b984fe1 100644
--- a/api/chalicelib/core/custom_metrics_predefined.py
+++ b/api/chalicelib/core/custom_metrics_predefined.py
@@ -56,6 +56,7 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
         schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
         schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
         schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-        schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
+        schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type,
+        schemas.MetricOfWebVitals.count_users: metrics.get_unique_users,}
     return supported.get(key, lambda *args: None)(project_id=project_id, **data)
diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py
index cf263da88..436556897 100644
--- a/api/chalicelib/core/metrics.py
+++ b/api/chalicelib/core/metrics.py
@@ -2913,3 +2913,52 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
     row["chart"] = rows
     row["unit"] = schemas.TemplatePredefinedUnits.count
     return helper.dict_to_camel_case(row)
+
+
+def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                     endTimestamp=TimeUTC.now(),
+                     density=7, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
+                                           chart=True, data=args)
+    pg_sub_query.append("user_id IS NOT NULL")
+    pg_sub_query.append("user_id != ''")
+    pg_sub_query_chart.append("user_id IS NOT NULL")
+    pg_sub_query_chart.append("user_id != ''")
+    with pg_client.PostgresClient() as cur:
+        pg_query = f"""SELECT generated_timestamp AS timestamp,
+                              COALESCE(COUNT(sessions), 0) AS value
+                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                            LEFT JOIN LATERAL ( SELECT DISTINCT user_id
+                                                FROM public.sessions
+                                                WHERE {" AND ".join(pg_sub_query_chart)}
+                              ) AS sessions ON (TRUE)
+                       GROUP BY generated_timestamp
+                       ORDER BY generated_timestamp;"""
+        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+        cur.execute(cur.mogrify(pg_query, params))
+        rows = cur.fetchall()
+        results = {
+            "value": sum([r["value"] for r in rows]),
+            "chart": rows
+        }
+
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+
+        pg_query = f"""SELECT COUNT(DISTINCT sessions.user_id) AS count
+                       FROM public.sessions
+                       WHERE {" AND ".join(pg_sub_query)};"""
+        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+                  **__get_constraint_values(args)}
+
+        cur.execute(cur.mogrify(pg_query, params))
+
+        count = cur.fetchone()["count"]
+
+    results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
+    results["unit"] = schemas.TemplatePredefinedUnits.count
+    return results
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index e24cf54d0..07a8ebfff 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -189,17 +189,35 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     with pg_client.PostgresClient() as cur:
         if metric_type == schemas.MetricType.timeseries:
            if view_type == schemas.MetricTimeseriesViewType.line_chart:
-                main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
-                                                                    {query_part})
-                                             SELECT generated_timestamp AS timestamp,
-                                                    COUNT(s) AS count
-                                             FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
-                                                      LEFT JOIN LATERAL ( SELECT 1 AS s
-                                                                          FROM full_sessions
-                                                                          WHERE start_ts >= generated_timestamp
-                                                                            AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
-                                             GROUP BY generated_timestamp
-                                             ORDER BY generated_timestamp;""", full_args)
+                if metric_of == schemas.MetricOfTimeseries.session_count:
+                    # main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
+                    main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.session_id, s.start_ts
+                                                                        {query_part})
+                                                 SELECT generated_timestamp AS timestamp,
+                                                        COUNT(s) AS count
+                                                 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+                                                          LEFT JOIN LATERAL ( SELECT 1 AS s
+                                                                              FROM full_sessions
+                                                                              WHERE start_ts >= generated_timestamp
+                                                                                AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
+                                                 GROUP BY generated_timestamp
+                                                 ORDER BY generated_timestamp;""", full_args)
+                elif metric_of == schemas.MetricOfTimeseries.user_count:
+                    main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.user_id, s.start_ts
+                                                                        {query_part}
+                                                                        AND s.user_id IS NOT NULL
+                                                                        AND s.user_id != '')
+                                                 SELECT generated_timestamp AS timestamp,
+                                                        COUNT(s) AS count
+                                                 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+                                                          LEFT JOIN LATERAL ( SELECT DISTINCT user_id AS s
+                                                                              FROM full_sessions
+                                                                              WHERE start_ts >= generated_timestamp
+                                                                                AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
+                                                 GROUP BY generated_timestamp
+                                                 ORDER BY generated_timestamp;""", full_args)
+                else:
+                    raise Exception(f"Unsupported metricOf: {metric_of}")
             else:
                 main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count
                                              {query_part};""", full_args)
@@ -726,7 +744,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
             event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
             if not is_any:
                 event_where.append(
-                    sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s", event.value,
+                    sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
+                                        event.value,
                                         value_key=e_k))

         elif event_type == events.EventType.TAG.ui_type:
@@ -750,7 +769,8 @@
             event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
             if not is_any:
                 event_where.append(
-                    sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s", event.value,
+                    sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
+                                        event.value,
                                         value_key=e_k))
diff --git a/api/schemas/schemas.py b/api/schemas/schemas.py
index bcee02924..33ee07a89 100644
--- a/api/schemas/schemas.py
+++ b/api/schemas/schemas.py
@@ -994,6 +994,7 @@ class MetricOfWebVitals(str, Enum):
     avg_visited_pages = "avgVisitedPages"
     count_requests = "countRequests"
     count_sessions = "countSessions"
+    count_users = "countUsers"


 class MetricOfTable(str, Enum):
@@ -1012,6 +1013,7 @@ class MetricOfTimeseries(str, Enum):
     session_count = "sessionCount"
+    user_count = "userCount"


 class MetricOfFunnels(str, Enum):
diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py
index c41676d4a..9d6ec289d 100644
--- a/ee/api/chalicelib/core/metrics.py
+++ b/ee/api/chalicelib/core/metrics.py
@@ -2798,3 +2798,56 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
                                                         neutral={"value": 0}))
     helper.__time_value(results)
     return helper.dict_to_camel_case(results)
+
+
+def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                     endTimestamp=TimeUTC.now(),
+                     density=7, **args):
+    step_size = __get_step_size(startTimestamp, endTimestamp, density)
+    ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
+    ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args)
+    meta_condition = __get_meta_constraint(args)
+    ch_sub_query += meta_condition
+    ch_sub_query_chart += meta_condition
+    ch_sub_query.append("isNotNull(sessions.user_id)")
+    ch_sub_query.append("sessions.user_id!=''")
+    ch_sub_query_chart.append("isNotNull(sessions.user_id)")
+    ch_sub_query_chart.append("sessions.user_id!=''")
+    with ch_client.ClickHouseClient() as ch:
+        ch_query = f"""\
+SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
+       COUNT(DISTINCT sessions.user_id) AS value
+FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
+WHERE {" AND ".join(ch_sub_query_chart)}
+GROUP BY timestamp
+ORDER BY timestamp;\
+"""
+        params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+
+        rows = ch.execute(query=ch_query, params=params)
+
+        results = {
+            "value": sum([r["value"] for r in rows]),
+            "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
+                                              density=density,
+                                              neutral={"value": 0})
+        }
+
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+
+        ch_query = f""" SELECT COUNT(DISTINCT user_id) AS count
+                        FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
+                        WHERE {" AND ".join(ch_sub_query)};"""
+        params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+                  **__get_constraint_values(args)}
+
+        count = ch.execute(query=ch_query, params=params)
+
+        count = count[0]["count"]
+
+    results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
+    results["unit"] = schemas.TemplatePredefinedUnits.count
+    return results
diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py
index 72a746a52..24a4885ed 100644
--- a/ee/api/chalicelib/core/sessions_exp.py
+++ b/ee/api/chalicelib/core/sessions_exp.py
@@ -269,15 +269,30 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     with ch_client.ClickHouseClient() as cur:
         if metric_type == schemas.MetricType.timeseries:
             if view_type == schemas.MetricTimeseriesViewType.line_chart:
-                query = f"""SELECT toUnixTimestamp(
-                                   toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
-                                   ) * 1000 AS timestamp,
-                                   COUNT(processed_sessions.session_id) AS count
-                            FROM (SELECT DISTINCT ON(s.session_id) s.session_id AS session_id,
-                                         s.datetime AS datetime
-                                  {query_part}) AS processed_sessions
-                            GROUP BY timestamp
-                            ORDER BY timestamp;"""
+                if metric_of == schemas.MetricOfTimeseries.session_count:
+                    query = f"""SELECT toUnixTimestamp(
+                                       toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
+                                       ) * 1000 AS timestamp,
+                                       COUNT(processed_sessions.session_id) AS count
+                                FROM (SELECT s.session_id AS session_id,
+                                             s.datetime AS datetime
+                                      {query_part}) AS processed_sessions
+                                GROUP BY timestamp
+                                ORDER BY timestamp;"""
+                elif metric_of == schemas.MetricOfTimeseries.user_count:
+                    query = f"""SELECT toUnixTimestamp(
+                                       toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
+                                       ) * 1000 AS timestamp,
+                                       COUNT(DISTINCT processed_sessions.user_id) AS count
+                                FROM (SELECT s.user_id AS user_id,
+                                             s.datetime AS datetime
+                                      {query_part}
+                                      WHERE isNotNull(s.user_id)
+                                            AND s.user_id != '') AS processed_sessions
+                                GROUP BY timestamp
+                                ORDER BY timestamp;"""
+                else:
+                    raise Exception(f"Unsupported metricOf: {metric_of}")
                 main_query = cur.format(query, full_args)
             else:
                 main_query = cur.format(f"""SELECT count(DISTINCT s.session_id) AS count
diff --git a/sourcemap-uploader/cli.js b/sourcemap-uploader/cli.js
index 10977a57a..df3649598 100755
--- a/sourcemap-uploader/cli.js
+++ b/sourcemap-uploader/cli.js
@@ -1,89 +1,87 @@
 #!/usr/bin/env node
 'use strict';
-const { ArgumentParser } = require('argparse');
-const { version, description } = require('./package.json');
+const {ArgumentParser} = require('argparse');
+const {version, description} = require('./package.json');

-const { uploadFile, uploadDir } = require('./index.js');
+const {uploadFile, uploadDir} = require('./index.js');

 const parser = new ArgumentParser({
-  description,
+    description,
 });
-parser.add_argument('-v', '--version', { action: 'version', version });
+parser.add_argument('-v', '--version', {action: 'version', version});
 parser.add_argument('-k', '--api-key', {
-  help: 'API key',
-  required: true,
+    help: 'API key',
+    required: true,
 });
 parser.add_argument('-p', '-i', '--project-key', {
-  // -i is depricated
-  help: 'Project Key',
-  required: true,
+    // -i is deprecated
+    help: 'Project Key',
+    required: true,
 });
 parser.add_argument('-s', '--server', {
-  help: 'OpenReplay API server URL for upload',
+    help: 'OpenReplay API server URL for upload',
 });
 // Should be verbose, but conflicting on npm compilation into bin
 parser.add_argument('-l', '--logs', {
-  help: 'Log requests information',
-  action: 'store_true',
+    help: 'Log requests information',
+    action: 'store_true',
 });

 const subparsers = parser.add_subparsers({
-  title: 'commands',
-  dest: 'command',
-  required: true,
+    title: 'commands',
+    dest: 'command',
+    required: true,
 });

 const file = subparsers.add_parser('file');
 file.add_argument('-m', '--sourcemap-file-path', {
-  help: 'Local path to the sourcemap file',
-  required: true,
+    help: 'Local path to the sourcemap file',
+    required: true,
 });
 file.add_argument('-u', '--js-file-url', {
-  help: 'URL to the minified js file',
-  required: true,
+    help: 'URL to the minified js file',
+    required: true,
 });

 const dir = subparsers.add_parser('dir');
 dir.add_argument('-m', '--sourcemap-dir-path', {
-  help: 'Dir with the sourcemap files',
-  required: true,
+    help: 'Dir with the sourcemap files',
+    required: true,
 });
 dir.add_argument('-u', '--js-dir-url', {
-  help: 'Base URL where the corresponding dir will be placed',
-  required: true,
+    help: 'Base URL where the corresponding dir will be placed',
+    required: true,
 });
 // TODO: exclude in dir

-const { command, api_key, project_key, server, logs, ...args } =
-  parser.parse_args();
+const {command, api_key, project_key, server, logs, ...args} =
+    parser.parse_args();

 global._VERBOSE = !!logs;

 console.log(command);

 (command === 'file'
-  ? uploadFile(
-      api_key,
-      project_key,
-      args.sourcemap_file_path,
-      args.js_file_url,
-      server,
-    )
-  : uploadDir(
-      api_key,
-      project_key,
-      args.sourcemap_dir_path,
-      args.js_dir_url,
-      server,
-    )
-)
-  .then((sourceFiles) => console.log('asd') ||
-    sourceFiles.length > 0
-      ? console.log(
-          `Successfully uploaded ${sourceFiles.length} sourcemap file${
-            sourceFiles.length > 1 ? 's' : ''
-          } for: \n` + sourceFiles.join('\t\n'),
-        )
-      : console.log(`No sourcemaps found in ${args.sourcemap_dir_path}`),
-  )
-  .catch((e) => console.error(`Sourcemap Uploader: ${e}`));
+    ? uploadFile(
+        api_key,
+        project_key,
+        args.sourcemap_file_path,
+        args.js_file_url,
+        server,
+    )
+    : uploadDir(
+        api_key,
+        project_key,
+        args.sourcemap_dir_path,
+        args.js_dir_url,
+        server,
+    )
+)
+    .then((sourceFiles) => sourceFiles.length > 0 ? console.log(
+            `Successfully uploaded ${sourceFiles.length} sourcemap file${
+                sourceFiles.length > 1 ? 's' : ''
+            } for: \n` + sourceFiles.join('\t\n'),
+        )
+        : console.log(`No sourcemaps found in ${args.sourcemap_dir_path}`),
+    )
+    .catch((e) => console.error(`Sourcemap Uploader: ${e}`));
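A few notes on the patch follow; the Python snippets are illustrative sketches, not code from the repository. First, the custom_metrics_predefined.py hunk registers the new countUsers metric in the same dispatch table the other predefined metrics use, with unknown keys falling through to a no-op. The pattern, reduced to a self-contained sketch (simplified names; the fallback lambda here takes **kw so the keyword call succeeds, which the repo's `lambda *args: None` fallback would not if it were ever hit with keyword arguments):

```python
from enum import Enum
from typing import Any, Callable, Dict


class MetricOfWebVitals(str, Enum):
    count_sessions = "countSessions"
    count_users = "countUsers"


def get_unique_users(project_id: int, **data) -> dict:
    # Stand-in for metrics.get_unique_users added by this patch.
    return {"value": 0, "chart": [], "unit": "count"}


SUPPORTED: Dict[Any, Callable] = {
    MetricOfWebVitals.count_users: get_unique_users,
}


def get_metric(key, project_id: int, data: dict):
    # Unknown keys dispatch to a lambda returning None, mirroring
    # supported.get(key, lambda *args: None) in the hunk.
    return SUPPORTED.get(key, lambda *args, **kw: None)(project_id=project_id, **data)


assert get_metric(MetricOfWebVitals.count_users, 1, {})["unit"] == "count"
assert get_metric("unknown", 1, {}) is None
```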
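The Postgres get_unique_users builds its chart with generate_series plus a LATERAL join: one row per step, each counting the distinct non-empty user_ids of sessions that fall inside that step, and the top-level value is the sum of those per-step counts (so a user active in several steps contributes to each of them). The same bucketing, sketched in plain Python over in-memory rows rather than SQL:

```python
from collections import defaultdict


def unique_users_chart(sessions, start_ts, end_ts, density=7):
    """sessions: iterable of (start_ts_ms, user_id); timestamps in ms."""
    step = max(1, (end_ts - start_ts) // density)  # rough stand-in for __get_step_size
    buckets = defaultdict(set)
    for ts, user_id in sessions:
        # Mirrors the user_id IS NOT NULL / user_id != '' constraints.
        if user_id and start_ts <= ts <= end_ts:
            buckets[start_ts + ((ts - start_ts) // step) * step].add(user_id)
    chart = [{"timestamp": t, "value": len(buckets[t])}
             for t in range(start_ts, end_ts, step)]
    return {"value": sum(p["value"] for p in chart), "chart": chart}


demo = unique_users_chart([(10, "a"), (15, "b"), (75, "a"), (80, "")], 0, 140, density=7)
assert demo["value"] == 3  # "a" lands in two buckets; the empty user_id is filtered out
```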
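search2_series in both backends now branches on metricOf: sessionCount keeps the old per-step session count, the new userCount counts distinct non-empty user_ids per step, and any other value raises. The MetricOfTimeseries.user_count member added in schemas.py is what reaches this branch from the API. Reduced to its control flow (query bodies elided; see the hunks above for the real SQL):

```python
from enum import Enum


class MetricOfTimeseries(str, Enum):
    session_count = "sessionCount"
    user_count = "userCount"


def pick_timeseries_expression(metric_of: MetricOfTimeseries) -> str:
    # Sketch of the new branch; the real code wraps these in full
    # generate_series (PG) or toStartOfInterval (ClickHouse) queries.
    if metric_of == MetricOfTimeseries.session_count:
        return "COUNT(s)"
    elif metric_of == MetricOfTimeseries.user_count:
        return "COUNT(DISTINCT user_id)"
    raise Exception(f"Unsupported metricOf: {metric_of}")


assert pick_timeseries_expression(MetricOfTimeseries("userCount")) == "COUNT(DISTINCT user_id)"
```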
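The EE/ClickHouse variant buckets on the database side with toStartOfInterval and then fills empty steps in Python via __complete_missing_steps, which is used but not defined in this diff. Assuming millisecond timestamps, a step in seconds, and gap filling that keeps DB rows where present and a neutral point otherwise, the equivalent arithmetic looks roughly like this (an assumption-labeled sketch, not the actual helper):

```python
def to_start_of_interval_ms(ts_ms: int, step_s: int) -> int:
    # toUnixTimestamp(toStartOfInterval(dt, INTERVAL step second)) * 1000
    return ts_ms // (step_s * 1000) * step_s * 1000


def complete_missing_steps(rows, start_ms, end_ms, step_s, neutral):
    # Assumed behaviour of __complete_missing_steps: one point per step,
    # reusing DB rows where they exist and `neutral` elsewhere.
    by_ts = {r["timestamp"]: r for r in rows}
    return [by_ts.get(t, {"timestamp": t, **neutral})
            for t in range(to_start_of_interval_ms(start_ms, step_s),
                           end_ms, step_s * 1000)]


rows = [{"timestamp": 0, "value": 4}]
assert len(complete_missing_steps(rows, 0, 3_000, 1, {"value": 0})) == 3
```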
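Finally, both implementations derive progress the same way: shift the search window back by its own length, count distinct users in that previous window, and compare against the current value via helper.__progress. That helper is not part of the diff; the sketch below assumes a plain percentage-change definition, which may differ from the real one:

```python
def previous_window(start_ts: int, end_ts: int) -> tuple:
    # Mirrors the diff: the comparison window ends where the current
    # one starts and spans the same duration.
    diff = end_ts - start_ts
    return start_ts - diff, start_ts


def progress(old_val: float, new_val: float) -> float:
    # Hypothetical stand-in for helper.__progress (not shown in the diff):
    # percentage change relative to the previous period.
    if old_val == 0:
        return 100.0 if new_val > 0 else 0.0
    return (new_val - old_val) / old_val * 100.0


assert previous_window(1_000, 2_000) == (0, 1_000)
assert progress(old_val=50, new_val=75) == 50.0
```

Note that new_val here is the sum of per-step distinct counts while old_val is a whole-window distinct count, so the resulting percentage is an approximation whenever users span multiple steps.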