From 18f8ee9d15f12d2a6b089706ef1c715d397025ed Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 6 May 2025 16:21:02 +0200 Subject: [PATCH 01/25] ui: fix max meta length, add horizontal layout for player --- .../Player/ReplayPlayer/PlayerBlockHeader.tsx | 12 +++++------- .../shared/SessionItem/MetaItem/MetaItem.tsx | 2 ++ .../SessionItem/SessionMetaList/SessionMetaList.tsx | 5 +++-- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx index ef769ec5f..b7060595b 100644 --- a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx +++ b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx @@ -116,13 +116,11 @@ function PlayerBlockHeader(props: any) { )} {_metaList.length > 0 && ( -
-
-
+ )} {uiPlayerStore.showSearchEventsSwitchButton ? ( diff --git a/frontend/app/components/shared/SessionItem/MetaItem/MetaItem.tsx b/frontend/app/components/shared/SessionItem/MetaItem/MetaItem.tsx index 4812bd122..f661d8a77 100644 --- a/frontend/app/components/shared/SessionItem/MetaItem/MetaItem.tsx +++ b/frontend/app/components/shared/SessionItem/MetaItem/MetaItem.tsx @@ -19,11 +19,13 @@ export default function MetaItem(props: Props) { diff --git a/frontend/app/components/shared/SessionItem/SessionMetaList/SessionMetaList.tsx b/frontend/app/components/shared/SessionItem/SessionMetaList/SessionMetaList.tsx index 3c7608138..4f4c96578 100644 --- a/frontend/app/components/shared/SessionItem/SessionMetaList/SessionMetaList.tsx +++ b/frontend/app/components/shared/SessionItem/SessionMetaList/SessionMetaList.tsx @@ -8,13 +8,14 @@ interface Props { metaList: any[]; maxLength?: number; onMetaClick?: (meta: { name: string, value: string }) => void; + horizontal?: boolean; } export default function SessionMetaList(props: Props) { - const { className = '', metaList, maxLength = 14 } = props; + const { className = '', metaList, maxLength = 14, horizontal = false } = props; return ( -
+
{metaList.slice(0, maxLength).map(({ label, value }, index) => (
props.onMetaClick?.({ name: `_${label}`, value })}> From b3cb8df65b717023443b220dacf9c56e402061e4 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 6 May 2025 16:47:53 +0200 Subject: [PATCH 02/25] ui: fix sankey start calculation --- frontend/app/components/Charts/SankeyChart.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Charts/SankeyChart.tsx b/frontend/app/components/Charts/SankeyChart.tsx index 4142e395e..6f51140e0 100644 --- a/frontend/app/components/Charts/SankeyChart.tsx +++ b/frontend/app/components/Charts/SankeyChart.tsx @@ -111,9 +111,13 @@ const EChartsSankey: React.FC = (props) => { if (echartNodes.length === 0) return; - const mainNodeLink = startPoint === 'end' ? echartNodes.findIndex(n => n.id === 0) : 0; + const startDepth = startPoint === 'end' ? Math.max(...echartNodes.map(n => n.depth ?? 0)) : 0; + const mainNodeLinks = echartNodes.filter(n => n.depth === startDepth).map(n => echartNodes.findIndex(node => node.id === n.id)) const startNodeValue = echartLinks - .filter((link) => startPoint === 'start' ? link.source === mainNodeLink : link.target === mainNodeLink) + .filter((link) => startPoint === 'start' + ? mainNodeLinks.includes(link.source) + : mainNodeLinks.includes(link.target) + ) .reduce((sum, link) => sum + link.value, 0); Object.keys(nodeValues).forEach((nodeId) => { From 95d4df7a1b44996644d71491ab634d6017d12612 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 7 May 2025 10:48:16 +0200 Subject: [PATCH 03/25] ui: loading badges for spot videos --- .../Spots/SpotPlayer/components/SpotPlayerHeader.tsx | 9 +++++++-- frontend/app/components/Spots/SpotsList/SpotListItem.tsx | 4 ++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx b/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx index 6bf8671ce..29023c4d7 100644 --- a/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx +++ b/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx @@ -32,6 +32,7 @@ import { Avatar, Icon } from 'UI'; import { TABS, Tab } from '../consts'; import AccessModal from './AccessModal'; import { useTranslation } from 'react-i18next'; +import { toast } from 'react-toastify' const spotLink = spotsList(); @@ -89,8 +90,12 @@ function SpotPlayerHeader({ const onMenuClick = async ({ key }: { key: string }) => { if (key === '1') { + const loader = toast.loading('Retrieving Spot video...') const { url } = await spotStore.getVideo(spotStore.currentSpot!.spotId); await downloadFile(url, `${spotStore.currentSpot!.title}.mp4`); + setTimeout(() => { + toast.dismiss(loader) + }, 0) } else if (key === '2') { spotStore.deleteSpot([spotStore.currentSpot!.spotId]).then(() => { history.push(spotsList()); @@ -245,12 +250,11 @@ function SpotPlayerHeader({ } async function downloadFile(url: string, fileName: string) { - const { t } = useTranslation(); try { const response = await fetch(url); if (!response.ok) { - throw new Error(t('Network response was not ok')); + throw new Error('Network response was not ok'); } const blob = await response.blob(); @@ -263,6 +267,7 @@ async function downloadFile(url: string, fileName: string) { document.body.removeChild(a); URL.revokeObjectURL(blobUrl); } catch (error) { + toast.error('Error downloading file.') console.error('Error downloading file:', error); } } diff --git a/frontend/app/components/Spots/SpotsList/SpotListItem.tsx 
b/frontend/app/components/Spots/SpotsList/SpotListItem.tsx index 573a2dd81..598706654 100644 --- a/frontend/app/components/Spots/SpotsList/SpotListItem.tsx +++ b/frontend/app/components/Spots/SpotsList/SpotListItem.tsx @@ -80,8 +80,12 @@ function SpotListItem({ case 'rename': return setIsEdit(true); case 'download': + const loader = toast.loading('Retrieving Spot video...') const { url } = await onVideo(spot.spotId); await downloadFile(url, `${spot.title}.mp4`); + setTimeout(() => { + toast.dismiss(loader) + }, 0) return; case 'copy': copy( From d9fe5342235cc53ebf83f1668b793e0c4e5fae80 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 7 May 2025 12:12:50 +0200 Subject: [PATCH 04/25] fix(chalice): fixed get error's details (cherry picked from commit 39eb943b86387c01c3c7da3d25341996d22e9b10) --- ee/api/chalicelib/core/errors/errors_details_exp.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/core/errors/errors_details_exp.py b/ee/api/chalicelib/core/errors/errors_details_exp.py index 2287c5215..2898493f2 100644 --- a/ee/api/chalicelib/core/errors/errors_details_exp.py +++ b/ee/api/chalicelib/core/errors/errors_details_exp.py @@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data): MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False) - ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s") + ch_basic_query.append("error_id = %(error_id)s") with ch_client.ClickHouseClient() as ch: data["startDate24"] = TimeUTC.now(-1) @@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data): "error_id": error_id} main_ch_query = f"""\ - WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id, + WITH pre_processed AS (SELECT error_id, toString(`$properties`.name) AS name, toString(`$properties`.message) AS message, session_id, @@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data): AND `$event_name` = 'ERROR' AND events.created_at >= toDateTime(timestamp / 1000) AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000) - AND toString(`$properties`.error_id) = %(error_id)s + AND error_id = %(error_id)s GROUP BY timestamp ORDER BY timestamp) AS chart_details ) AS chart_details24 ON TRUE @@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data): AND `$event_name` = 'ERROR' AND events.created_at >= toDateTime(timestamp / 1000) AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000) - AND toString(`$properties`.error_id) = %(error_id)s + AND error_id = %(error_id)s GROUP BY timestamp ORDER BY timestamp) AS chart_details ) AS chart_details30 ON TRUE;""" From 1df4a92901f96c8afb7c84c0581fd1d9e8333df4 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 7 May 2025 16:52:03 +0200 Subject: [PATCH 05/25] chore(cli): pin dns Signed-off-by: rjshrjndrn --- scripts/helmcharts/init.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helmcharts/init.sh b/scripts/helmcharts/init.sh index 86cb99062..d31223164 100644 --- a/scripts/helmcharts/init.sh +++ b/scripts/helmcharts/init.sh @@ -22,7 +22,7 @@ usr=$(whoami) # Installing k3s function install_k8s() { echo "nameserver 1.1.1.1" | sudo tee /etc/k3s-resolv.conf - curl -sL https://get.k3s.io | sudo K3S_KUBECONFIG_MODE="644" INSTALL_K3S_VERSION='v1.31.5+k3s1' INSTALL_K3S_EXEC="--disable=traefik server --resolv-conf=/etc/k3s-resolv.conf" sh - + curl -sL https://get.k3s.io | sudo 
K3S_KUBECONFIG_MODE="644" K3S_RESOLV_CONF="/etc/k3s-resolv.conf" INSTALL_K3S_VERSION='v1.31.5+k3s1' INSTALL_K3S_EXEC="--disable=traefik" sh - [[ -d ~/.kube ]] || mkdir ~/.kube sudo cp /etc/rancher/k3s/k3s.yaml ~/.kube/config sudo chmod 0644 ~/.kube/config From 812983f97c1f8289a8976ef87c6197524aab4765 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 29 Apr 2025 16:16:07 +0100 Subject: [PATCH 06/25] refactor(chalice): changed properties response --- .../core/product_analytics/events.py | 4 +-- .../core/product_analytics/properties.py | 28 +++++++++++++------ api/chalicelib/utils/exp_ch_helper.py | 11 +++++--- 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/api/chalicelib/core/product_analytics/events.py b/api/chalicelib/core/product_analytics/events.py index 41363e7c6..26f71834c 100644 --- a/api/chalicelib/core/product_analytics/events.py +++ b/api/chalicelib/core/product_analytics/events.py @@ -27,7 +27,7 @@ def get_events(project_id: int, page: schemas.PaginatedSchema): total = rows[0]["total"] for i, row in enumerate(rows): row["id"] = f"event_{i}" - row["icon"] = None + row["dataType"] = "string" row["possibleTypes"] = ["string"] row.pop("total") return {"total": total, "list": helper.list_to_camel_case(rows)} @@ -133,7 +133,7 @@ def get_lexicon(project_id: int, page: schemas.PaginatedSchema): total = rows[0]["total"] for i, row in enumerate(rows): row["id"] = f"event_{i}" - row["icon"] = None + row["dataType"] = "string" row["possibleTypes"] = ["string"] row.pop("total") return {"total": total, "list": helper.list_to_camel_case(rows)} diff --git a/api/chalicelib/core/product_analytics/properties.py b/api/chalicelib/core/product_analytics/properties.py index 704f1794c..635ced5f4 100644 --- a/api/chalicelib/core/product_analytics/properties.py +++ b/api/chalicelib/core/product_analytics/properties.py @@ -52,11 +52,11 @@ def get_all_properties(project_id: int, page: schemas.PaginatedSchema): predefined_properties = get_predefined_property_types() for i, p in enumerate(properties): p["id"] = f"prop_{i}" - p["icon"] = None + p["_foundInPredefinedList"] = False if p["name"] in predefined_properties: - p["possibleTypes"].insert(0, predefined_properties[p["name"]]) - p["possibleTypes"] = list(set(p["possibleTypes"])) - p["possibleTypes"] = exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]) + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(predefined_properties[p["name"]]) + p["_foundInPredefinedList"] = True + p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) p.pop("total") return {"total": total, "list": properties} @@ -64,18 +64,29 @@ def get_all_properties(project_id: int, page: schemas.PaginatedSchema): def get_event_properties(project_id: int, event_name): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT all_properties.property_name, - all_properties.display_name + """SELECT all_properties.property_name AS name, + all_properties.display_name, + array_agg(DISTINCT event_properties.value_type) AS possible_types FROM product_analytics.event_properties INNER JOIN product_analytics.all_properties USING (property_name) WHERE event_properties.project_id=%(project_id)s AND all_properties.project_id=%(project_id)s AND event_properties.event_name=%(event_name)s - ORDER BY created_at;""", + GROUP BY ALL + ORDER BY 1;""", parameters={"project_id": project_id, "event_name": event_name}) properties = ch_client.execute(r) + properties = helper.list_to_camel_case(properties) + 
predefined_properties = get_predefined_property_types() + for i, p in enumerate(properties): + p["id"] = f"prop_{i}" + p["_foundInPredefinedList"] = False + if p["name"] in predefined_properties: + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(predefined_properties[p["name"]]) + p["_foundInPredefinedList"] = True + p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) - return helper.list_to_camel_case(properties) + return properties def get_lexicon(project_id: int, page: schemas.PaginatedSchema): @@ -108,6 +119,5 @@ def get_lexicon(project_id: int, page: schemas.PaginatedSchema): total = properties[0]["total"] for i, p in enumerate(properties): p["id"] = f"prop_{i}" - p["icon"] = None p.pop("total") return {"total": total, "list": helper.list_to_camel_case(properties)} diff --git a/api/chalicelib/utils/exp_ch_helper.py b/api/chalicelib/utils/exp_ch_helper.py index b2c061533..babef4d57 100644 --- a/api/chalicelib/utils/exp_ch_helper.py +++ b/api/chalicelib/utils/exp_ch_helper.py @@ -99,12 +99,13 @@ def simplify_clickhouse_type(ch_type: str) -> str: return "int" # Floats: Float32, Float64 - if re.match(r'^float(32|64)$', normalized_type): + if re.match(r'^float(32|64)|double$', normalized_type): return "float" # Decimal: Decimal(P, S) if normalized_type.startswith("decimal"): - return "decimal" + # return "decimal" + return "float" # Date/DateTime if normalized_type.startswith("date"): @@ -120,11 +121,13 @@ def simplify_clickhouse_type(ch_type: str) -> str: # UUID if normalized_type.startswith("uuid"): - return "uuid" + # return "uuid" + return "string" # Enums: Enum8(...) or Enum16(...) if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"): - return "enum" + # return "enum" + return "string" # Arrays: Array(T) if normalized_type.startswith("array"): From 39d3d8db4c7ed6c26b4248b779069d2add623055 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 29 Apr 2025 16:38:23 +0100 Subject: [PATCH 07/25] refactor(chalice): changed predefined properties types handling refactor(DB): changed predefined properties types --- .../core/product_analytics/properties.py | 90 ++++++++++++------- .../db/init_dbs/clickhouse/1.23.0/1.23.0.sql | 60 ------------- .../clickhouse/create/init_schema.sql | 60 +------------ .../db/init_dbs/clickhouse/1.23.0/1.23.0.sql | 60 ------------- .../clickhouse/create/init_schema.sql | 60 +------------ 5 files changed, 63 insertions(+), 267 deletions(-) diff --git a/api/chalicelib/core/product_analytics/properties.py b/api/chalicelib/core/product_analytics/properties.py index 635ced5f4..a34810d69 100644 --- a/api/chalicelib/core/product_analytics/properties.py +++ b/api/chalicelib/core/product_analytics/properties.py @@ -1,32 +1,62 @@ -import re -from functools import cache - import schemas from chalicelib.utils import helper, exp_ch_helper from chalicelib.utils.ch_client import ClickHouseClient - -@cache -def get_predefined_property_types(): - with ClickHouseClient() as ch_client: - properties_type = ch_client.execute("""\ - SELECT type - FROM system.columns - WHERE database = 'product_analytics' - AND table = 'events' - AND name = '$properties';""") - if len(properties_type) == 0: - return {} - properties_type = properties_type[0]["type"] - - pattern = r'(\w+)\s+(Enum8\([^\)]+\)|[A-Za-z0-9_]+(?:\([^\)]+\))?)' - - # Find all matches - matches = re.findall(pattern, properties_type) - - # Create a dictionary of attribute names and types - attributes = {match[0]: match[1] for match in matches} - 
return attributes +PREDEFINED_PROPERTY_TYPES = { + "label": "String", + "hesitation_time": "UInt32", + "name": "String", + "payload": "String", + "level": "Enum8", + "source": "Enum8", + "message": "String", + "error_id": "String", + "duration": "UInt16", + "context": "Enum8", + "url_host": "String", + "url_path": "String", + "url_hostpath": "String", + "request_start": "UInt16", + "response_start": "UInt16", + "response_end": "UInt16", + "dom_content_loaded_event_start": "UInt16", + "dom_content_loaded_event_end": "UInt16", + "load_event_start": "UInt16", + "load_event_end": "UInt16", + "first_paint": "UInt16", + "first_contentful_paint_time": "UInt16", + "speed_index": "UInt16", + "visually_complete": "UInt16", + "time_to_interactive": "UInt16", + "ttfb": "UInt16", + "ttlb": "UInt16", + "response_time": "UInt16", + "dom_building_time": "UInt16", + "dom_content_loaded_event_time": "UInt16", + "load_event_time": "UInt16", + "min_fps": "UInt8", + "avg_fps": "UInt8", + "max_fps": "UInt8", + "min_cpu": "UInt8", + "avg_cpu": "UInt8", + "max_cpu": "UInt8", + "min_total_js_heap_size": "UInt64", + "avg_total_js_heap_size": "UInt64", + "max_total_js_heap_size": "UInt64", + "min_used_js_heap_size": "UInt64", + "avg_used_js_heap_size": "UInt64", + "max_used_js_heap_size": "UInt64", + "method": "Enum8", + "status": "UInt16", + "success": "UInt8", + "request_body": "String", + "response_body": "String", + "transfer_size": "UInt32", + "selector": "String", + "normalized_x": "Float32", + "normalized_y": "Float32", + "message_id": "UInt64" +} def get_all_properties(project_id: int, page: schemas.PaginatedSchema): @@ -49,12 +79,11 @@ def get_all_properties(project_id: int, page: schemas.PaginatedSchema): return {"total": 0, "list": []} total = properties[0]["total"] properties = helper.list_to_camel_case(properties) - predefined_properties = get_predefined_property_types() for i, p in enumerate(properties): p["id"] = f"prop_{i}" p["_foundInPredefinedList"] = False - if p["name"] in predefined_properties: - p["dataType"] = exp_ch_helper.simplify_clickhouse_type(predefined_properties[p["name"]]) + if p["name"] in PREDEFINED_PROPERTY_TYPES: + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTY_TYPES[p["name"]]) p["_foundInPredefinedList"] = True p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) p.pop("total") @@ -77,12 +106,11 @@ def get_event_properties(project_id: int, event_name): parameters={"project_id": project_id, "event_name": event_name}) properties = ch_client.execute(r) properties = helper.list_to_camel_case(properties) - predefined_properties = get_predefined_property_types() for i, p in enumerate(properties): p["id"] = f"prop_{i}" p["_foundInPredefinedList"] = False - if p["name"] in predefined_properties: - p["dataType"] = exp_ch_helper.simplify_clickhouse_type(predefined_properties[p["name"]]) + if p["name"] in PREDEFINED_PROPERTY_TYPES: + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTY_TYPES[p["name"]]) p["_foundInPredefinedList"] = True p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql index dcd616e5f..889bb4d49 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql @@ -1,65 +1,5 @@ CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee'; 
-SET allow_experimental_json_type = 1; -SET enable_json_type = 1; -ALTER TABLE product_analytics.events - MODIFY COLUMN `$properties` JSON( -max_dynamic_paths=0, -label String , -hesitation_time UInt32 , -name String , -payload String , -level Enum8 ('info'=0, 'error'=1), -source Enum8 ('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9), -message String , -error_id String , -duration UInt16, -context Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8), -url_host String , -url_path String , -url_hostpath String , -request_start UInt16 , -response_start UInt16 , -response_end UInt16 , -dom_content_loaded_event_start UInt16 , -dom_content_loaded_event_end UInt16 , -load_event_start UInt16 , -load_event_end UInt16 , -first_paint UInt16 , -first_contentful_paint_time UInt16 , -speed_index UInt16 , -visually_complete UInt16 , -time_to_interactive UInt16, -ttfb UInt16, -ttlb UInt16, -response_time UInt16, -dom_building_time UInt16, -dom_content_loaded_event_time UInt16, -load_event_time UInt16, -min_fps UInt8, -avg_fps UInt8, -max_fps UInt8, -min_cpu UInt8, -avg_cpu UInt8, -max_cpu UInt8, -min_total_js_heap_size UInt64, -avg_total_js_heap_size UInt64, -max_total_js_heap_size UInt64, -min_used_js_heap_size UInt64, -avg_used_js_heap_size UInt64, -max_used_js_heap_size UInt64, -method Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8), -status UInt16, -success UInt8, -request_body String, -response_body String, -transfer_size UInt32, -selector String, -normalized_x Float32, -normalized_y Float32, -message_id UInt64 -) DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events'; - DROP TABLE IF EXISTS product_analytics.all_events; CREATE TABLE IF NOT EXISTS product_analytics.all_events ( diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 5f6a06511..f1c2fbb66 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -431,62 +431,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events "$source" LowCardinality(String) DEFAULT '' COMMENT 'the name of the integration that sent the event', "$duration_s" UInt16 DEFAULT 0 COMMENT 'the duration from session-start in seconds', properties JSON DEFAULT '{}', - "$properties" JSON( -max_dynamic_paths=0, -label String , -hesitation_time UInt32 , -name String , -payload String , -level Enum8 ('info'=0, 'error'=1), -source Enum8 ('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9), -message String , -error_id String , -duration UInt16, -context Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8), -url_host String , -url_path String , -url_hostpath String , -request_start UInt16 , -response_start UInt16 , -response_end UInt16 , -dom_content_loaded_event_start UInt16 , -dom_content_loaded_event_end UInt16 , -load_event_start UInt16 , -load_event_end UInt16 , -first_paint UInt16 , 
-first_contentful_paint_time UInt16 , -speed_index UInt16 , -visually_complete UInt16 , -time_to_interactive UInt16, -ttfb UInt16, -ttlb UInt16, -response_time UInt16, -dom_building_time UInt16, -dom_content_loaded_event_time UInt16, -load_event_time UInt16, -min_fps UInt8, -avg_fps UInt8, -max_fps UInt8, -min_cpu UInt8, -avg_cpu UInt8, -max_cpu UInt8, -min_total_js_heap_size UInt64, -avg_total_js_heap_size UInt64, -max_total_js_heap_size UInt64, -min_used_js_heap_size UInt64, -avg_used_js_heap_size UInt64, -max_used_js_heap_size UInt64, -method Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8), -status UInt16, -success UInt8, -request_body String, -response_body String, -transfer_size UInt32, -selector String, -normalized_x Float32, -normalized_y Float32, -message_id UInt64 -) DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events', + "$properties" JSON DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events', description String DEFAULT '', group_id1 Array(String) DEFAULT [], group_id2 Array(String) DEFAULT [], @@ -846,8 +791,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples ENGINE = ReplacingMergeTree(_timestamp) ORDER BY (project_id, property_name, is_event_property); -- Incremental materialized view to get random examples of property values using $properties & properties -CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv - REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS SELECT project_id, property_name, TRUE AS is_event_property, diff --git a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql index 7bab7f7a1..1be61c988 100644 --- a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql +++ b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql @@ -12,66 +12,6 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions ORDER BY (project_id, user_id, session_id) TTL _timestamp + INTERVAL 3 MONTH; -SET allow_experimental_json_type = 1; -SET enable_json_type = 1; -ALTER TABLE product_analytics.events - MODIFY COLUMN `$properties` JSON( -max_dynamic_paths=0, -label String , -hesitation_time UInt32 , -name String , -payload String , -level Enum8 ('info'=0, 'error'=1), -source Enum8 ('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9), -message String , -error_id String , -duration UInt16, -context Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8), -url_host String , -url_path String , -url_hostpath String , -request_start UInt16 , -response_start UInt16 , -response_end UInt16 , -dom_content_loaded_event_start UInt16 , -dom_content_loaded_event_end UInt16 , -load_event_start UInt16 , -load_event_end UInt16 , -first_paint UInt16 , -first_contentful_paint_time UInt16 , -speed_index UInt16 , -visually_complete UInt16 , -time_to_interactive UInt16, -ttfb UInt16, -ttlb UInt16, -response_time UInt16, -dom_building_time UInt16, -dom_content_loaded_event_time UInt16, -load_event_time UInt16, -min_fps UInt8, -avg_fps UInt8, -max_fps UInt8, 
-min_cpu UInt8, -avg_cpu UInt8, -max_cpu UInt8, -min_total_js_heap_size UInt64, -avg_total_js_heap_size UInt64, -max_total_js_heap_size UInt64, -min_used_js_heap_size UInt64, -avg_used_js_heap_size UInt64, -max_used_js_heap_size UInt64, -method Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8), -status UInt16, -success UInt8, -request_body String, -response_body String, -transfer_size UInt32, -selector String, -normalized_x Float32, -normalized_y Float32, -message_id UInt64 -) DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events'; - DROP TABLE IF EXISTS product_analytics.all_events; CREATE TABLE IF NOT EXISTS product_analytics.all_events ( diff --git a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 1283dc4e9..6d6c196eb 100644 --- a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -330,62 +330,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events "$source" LowCardinality(String) DEFAULT '' COMMENT 'the name of the integration that sent the event', "$duration_s" UInt16 DEFAULT 0 COMMENT 'the duration from session-start in seconds', properties JSON DEFAULT '{}', - "$properties" JSON( -max_dynamic_paths=0, -label String , -hesitation_time UInt32 , -name String , -payload String , -level Enum8 ('info'=0, 'error'=1), -source Enum8 ('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9), -message String , -error_id String , -duration UInt16, -context Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8), -url_host String , -url_path String , -url_hostpath String , -request_start UInt16 , -response_start UInt16 , -response_end UInt16 , -dom_content_loaded_event_start UInt16 , -dom_content_loaded_event_end UInt16 , -load_event_start UInt16 , -load_event_end UInt16 , -first_paint UInt16 , -first_contentful_paint_time UInt16 , -speed_index UInt16 , -visually_complete UInt16 , -time_to_interactive UInt16, -ttfb UInt16, -ttlb UInt16, -response_time UInt16, -dom_building_time UInt16, -dom_content_loaded_event_time UInt16, -load_event_time UInt16, -min_fps UInt8, -avg_fps UInt8, -max_fps UInt8, -min_cpu UInt8, -avg_cpu UInt8, -max_cpu UInt8, -min_total_js_heap_size UInt64, -avg_total_js_heap_size UInt64, -max_total_js_heap_size UInt64, -min_used_js_heap_size UInt64, -avg_used_js_heap_size UInt64, -max_used_js_heap_size UInt64, -method Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8), -status UInt16, -success UInt8, -request_body String, -response_body String, -transfer_size UInt32, -selector String, -normalized_x Float32, -normalized_y Float32, -message_id UInt64 -) DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events', + "$properties" JSON DEFAULT '{}' COMMENT 'these properties belongs to the auto-captured events', description String DEFAULT '', group_id1 Array(String) DEFAULT [], group_id2 Array(String) DEFAULT [], @@ -745,8 +690,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples ENGINE = ReplacingMergeTree(_timestamp) ORDER BY (project_id, property_name, is_event_property); -- 
Incremental materialized view to get random examples of property values using $properties & properties -CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv - REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS SELECT project_id, property_name, TRUE AS is_event_property, From 3ac5c30c5f1103b0b62f0e83035db66b65d166c9 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 5 May 2025 17:31:44 +0200 Subject: [PATCH 08/25] refactor(DB): remove TTL for CH tables --- scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql | 3 +-- .../schema/db/init_dbs/clickhouse/create/init_schema.sql | 9 +++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql index 1be61c988..cebb68586 100644 --- a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql +++ b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql @@ -9,8 +9,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMM(_timestamp) - ORDER BY (project_id, user_id, session_id) - TTL _timestamp + INTERVAL 3 MONTH; + ORDER BY (project_id, user_id, session_id); DROP TABLE IF EXISTS product_analytics.all_events; CREATE TABLE IF NOT EXISTS product_analytics.all_events diff --git a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 6d6c196eb..1bd5d0def 100644 --- a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -149,8 +149,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions sign Int8 ) ENGINE = CollapsingMergeTree(sign) PARTITION BY toYYYYMM(_timestamp) - ORDER BY (project_id, user_id, session_id) - TTL _timestamp + INTERVAL 3 MONTH; + ORDER BY (project_id, user_id, session_id); CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions ( @@ -160,8 +159,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMM(_timestamp) - ORDER BY (project_id, user_id, session_id) - TTL _timestamp + INTERVAL 3 MONTH; + ORDER BY (project_id, user_id, session_id); CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors ( @@ -171,8 +169,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) PARTITION BY toYYYYMM(_timestamp) - ORDER BY (project_id, user_id, error_id) - TTL _timestamp + INTERVAL 3 MONTH; + ORDER BY (project_id, user_id, error_id); CREATE TABLE IF NOT EXISTS experimental.issues ( From 1576208e2586a735399810ac3132ec6ae8676c8e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 6 May 2025 19:03:10 +0200 Subject: [PATCH 09/25] refactor(chalice): return all events & properties --- .../core/product_analytics/events.py | 77 +++++++++++---- .../core/product_analytics/properties.py | 98 +++++++++++-------- 2 files changed, 116 insertions(+), 59 deletions(-) diff --git a/api/chalicelib/core/product_analytics/events.py b/api/chalicelib/core/product_analytics/events.py index 26f71834c..f902d91d5 100644 --- a/api/chalicelib/core/product_analytics/events.py +++ 
b/api/chalicelib/core/product_analytics/events.py @@ -7,30 +7,69 @@ from chalicelib.utils.ch_client import ClickHouseClient from chalicelib.utils.exp_ch_helper import get_sub_condition logger = logging.getLogger(__name__) +PREDEFINED_EVENTS = { + "CLICK": "String", + "INPUT": "String", + "LOCATION": "String", + "ERROR": "String", + "PERFORMANCE": "String", + "REQUEST": "String" +} def get_events(project_id: int, page: schemas.PaginatedSchema): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT DISTINCT ON(event_name,auto_captured) - COUNT(1) OVER () AS total, - event_name AS name, display_name, description, - auto_captured - FROM product_analytics.all_events - WHERE project_id=%(project_id)s - ORDER BY auto_captured,display_name - LIMIT %(limit)s OFFSET %(offset)s;""", + """SELECT DISTINCT + ON(event_name,auto_captured) + COUNT (1) OVER () AS total, + event_name AS name, display_name, description, + auto_captured + FROM product_analytics.all_events + WHERE project_id=%(project_id)s + ORDER BY auto_captured, display_name + LIMIT %(limit)s + OFFSET %(offset)s;""", parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit}) rows = ch_client.execute(r) if len(rows) == 0: - return {"total": 0, "list": []} + return {"total": len(PREDEFINED_EVENTS), "list": [{ + "name": e, + "displayName": "", + "description": "", + "autoCaptured": True, + "id": "event_0", + "dataType": "string", + "possibleTypes": [ + "string" + ], + "_foundInPredefinedList": False + } for e in PREDEFINED_EVENTS]} total = rows[0]["total"] + rows = helper.list_to_camel_case(rows) for i, row in enumerate(rows): row["id"] = f"event_{i}" row["dataType"] = "string" row["possibleTypes"] = ["string"] + row["_foundInPredefinedList"] = True row.pop("total") - return {"total": total, "list": helper.list_to_camel_case(rows)} + keys = [r["name"] for r in rows] + for e in PREDEFINED_EVENTS: + if e not in keys: + total += 1 + rows.append({ + "name": e, + "displayName": "", + "description": "", + "autoCaptured": True, + "id": "event_0", + "dataType": "string", + "possibleTypes": [ + "string" + ], + "_foundInPredefinedList": False + }) + return {"total": total, "list": rows} def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema): @@ -119,21 +158,23 @@ def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema): def get_lexicon(project_id: int, page: schemas.PaginatedSchema): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT COUNT(1) OVER () AS total, - all_events.event_name AS name, - * - FROM product_analytics.all_events - WHERE project_id=%(project_id)s - ORDER BY display_name - LIMIT %(limit)s OFFSET %(offset)s;""", + """SELECT COUNT(1) OVER () AS total, all_events.event_name AS name, + * + FROM product_analytics.all_events + WHERE project_id = %(project_id)s + ORDER BY display_name + LIMIT %(limit)s + OFFSET %(offset)s;""", parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit}) rows = ch_client.execute(r) if len(rows) == 0: return {"total": 0, "list": []} total = rows[0]["total"] + rows = helper.list_to_camel_case(rows) for i, row in enumerate(rows): row["id"] = f"event_{i}" row["dataType"] = "string" row["possibleTypes"] = ["string"] + row["_foundInPredefinedList"] = True row.pop("total") - return {"total": total, "list": helper.list_to_camel_case(rows)} + return {"total": total, "list": rows} diff --git a/api/chalicelib/core/product_analytics/properties.py 
b/api/chalicelib/core/product_analytics/properties.py index a34810d69..c88fe1c7d 100644 --- a/api/chalicelib/core/product_analytics/properties.py +++ b/api/chalicelib/core/product_analytics/properties.py @@ -2,7 +2,7 @@ import schemas from chalicelib.utils import helper, exp_ch_helper from chalicelib.utils.ch_client import ClickHouseClient -PREDEFINED_PROPERTY_TYPES = { +PREDEFINED_PROPERTIES = { "label": "String", "hesitation_time": "UInt32", "name": "String", @@ -62,15 +62,16 @@ PREDEFINED_PROPERTY_TYPES = { def get_all_properties(project_id: int, page: schemas.PaginatedSchema): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT COUNT(1) OVER () AS total, - property_name AS name, display_name, - array_agg(DISTINCT event_properties.value_type) AS possible_types - FROM product_analytics.all_properties + """SELECT COUNT(1) OVER () AS total, property_name AS name, + display_name, + array_agg(DISTINCT event_properties.value_type) AS possible_types + FROM product_analytics.all_properties LEFT JOIN product_analytics.event_properties USING (project_id, property_name) - WHERE all_properties.project_id=%(project_id)s - GROUP BY property_name,display_name - ORDER BY display_name - LIMIT %(limit)s OFFSET %(offset)s;""", + WHERE all_properties.project_id = %(project_id)s + GROUP BY property_name, display_name + ORDER BY display_name + LIMIT %(limit)s + OFFSET %(offset)s;""", parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit}) @@ -82,35 +83,48 @@ def get_all_properties(project_id: int, page: schemas.PaginatedSchema): for i, p in enumerate(properties): p["id"] = f"prop_{i}" p["_foundInPredefinedList"] = False - if p["name"] in PREDEFINED_PROPERTY_TYPES: - p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTY_TYPES[p["name"]]) + if p["name"] in PREDEFINED_PROPERTIES: + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]]) p["_foundInPredefinedList"] = True p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) p.pop("total") + keys = [p["name"] for p in properties] + for p in PREDEFINED_PROPERTIES: + if p not in keys: + total += 1 + properties.append({ + "name": p, + "displayName": "", + "possibleTypes": [ + ], + "id": f"prop_{len(properties) + 1}", + "_foundInPredefinedList": False, + "dataType": PREDEFINED_PROPERTIES[p] + }) return {"total": total, "list": properties} def get_event_properties(project_id: int, event_name): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT all_properties.property_name AS name, - all_properties.display_name, - array_agg(DISTINCT event_properties.value_type) AS possible_types - FROM product_analytics.event_properties - INNER JOIN product_analytics.all_properties USING (property_name) - WHERE event_properties.project_id=%(project_id)s - AND all_properties.project_id=%(project_id)s - AND event_properties.event_name=%(event_name)s - GROUP BY ALL - ORDER BY 1;""", + """SELECT all_properties.property_name AS name, + all_properties.display_name, + array_agg(DISTINCT event_properties.value_type) AS possible_types + FROM product_analytics.event_properties + INNER JOIN product_analytics.all_properties USING (property_name) + WHERE event_properties.project_id = %(project_id)s + AND all_properties.project_id = %(project_id)s + AND event_properties.event_name = %(event_name)s + GROUP BY ALL + ORDER BY 1;""", parameters={"project_id": project_id, "event_name": event_name}) properties = 
ch_client.execute(r) properties = helper.list_to_camel_case(properties) for i, p in enumerate(properties): p["id"] = f"prop_{i}" p["_foundInPredefinedList"] = False - if p["name"] in PREDEFINED_PROPERTY_TYPES: - p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTY_TYPES[p["name"]]) + if p["name"] in PREDEFINED_PROPERTIES: + p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]]) p["_foundInPredefinedList"] = True p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"]))) @@ -120,24 +134,26 @@ def get_event_properties(project_id: int, event_name): def get_lexicon(project_id: int, page: schemas.PaginatedSchema): with ClickHouseClient() as ch_client: r = ch_client.format( - """SELECT COUNT(1) OVER () AS total, - all_properties.property_name AS name, - all_properties.*, - possible_types.values AS possible_types, - possible_values.values AS sample_values - FROM product_analytics.all_properties - LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS values - FROM product_analytics.event_properties - WHERE project_id=%(project_id)s - GROUP BY 1, 2) AS possible_types - USING (project_id, property_name) - LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS values - FROM product_analytics.property_values_samples - WHERE project_id=%(project_id)s - GROUP BY 1, 2) AS possible_values USING (project_id, property_name) - WHERE project_id=%(project_id)s - ORDER BY display_name - LIMIT %(limit)s OFFSET %(offset)s;""", + """SELECT COUNT(1) OVER () AS total, all_properties.property_name AS name, + all_properties.*, + possible_types.values AS possible_types, + possible_values.values AS sample_values + FROM product_analytics.all_properties + LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS + values + FROM product_analytics.event_properties + WHERE project_id=%(project_id)s + GROUP BY 1, 2) AS possible_types + USING (project_id, property_name) + LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS + values + FROM product_analytics.property_values_samples + WHERE project_id=%(project_id)s + GROUP BY 1, 2) AS possible_values USING (project_id, property_name) + WHERE project_id = %(project_id)s + ORDER BY display_name + LIMIT %(limit)s + OFFSET %(offset)s;""", parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit}) From 8a69316b8274b08832a72d53cd0d1a0517dac3b7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 7 May 2025 17:32:40 +0200 Subject: [PATCH 10/25] refactor(chalice): upgraded dependencies refactor(alerts): upgraded dependencies refactor(crons): upgraded dependencies --- api/requirements-alerts.txt | 14 +++++++------- api/requirements.txt | 16 ++++++++-------- ee/api/requirements-alerts.txt | 16 ++++++++-------- ee/api/requirements-crons.txt | 12 ++++++------ ee/api/requirements.txt | 18 +++++++++--------- 5 files changed, 38 insertions(+), 38 deletions(-) diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index d4cd202c3..cac321549 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -1,16 +1,16 @@ -urllib3==2.3.0 +urllib3==2.4.0 requests==2.32.3 -boto3==1.37.21 +boto3==1.38.10 pyjwt==2.10.1 psycopg2-binary==2.9.10 -psycopg[pool,binary]==3.2.6 -clickhouse-connect==0.8.15 -elasticsearch==8.17.2 +psycopg[pool,binary]==3.2.7 +clickhouse-connect==0.8.17 +elasticsearch==9.0.1 jira==3.8.0 cachetools==5.5.2 fastapi==0.115.12 
-uvicorn[standard]==0.34.0 +uvicorn[standard]==0.34.2 python-decouple==3.8 -pydantic[email]==2.10.6 +pydantic[email]==2.11.4 apscheduler==3.11.0 diff --git a/api/requirements.txt b/api/requirements.txt index dca445128..de8243ad4 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,18 +1,18 @@ -urllib3==2.3.0 +urllib3==2.4.0 requests==2.32.3 -boto3==1.37.21 +boto3==1.38.10 pyjwt==2.10.1 psycopg2-binary==2.9.10 -psycopg[pool,binary]==3.2.6 -clickhouse-connect==0.8.15 -elasticsearch==8.17.2 +psycopg[pool,binary]==3.2.7 +clickhouse-connect==0.8.17 +elasticsearch==9.0.1 jira==3.8.0 cachetools==5.5.2 fastapi==0.115.12 -uvicorn[standard]==0.34.0 +uvicorn[standard]==0.34.2 python-decouple==3.8 -pydantic[email]==2.10.6 +pydantic[email]==2.11.4 apscheduler==3.11.0 -redis==5.2.1 +redis==6.0.0 diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index f07a5a381..fff5eb968 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -1,18 +1,18 @@ -urllib3==2.3.0 +urllib3==2.4.0 requests==2.32.3 -boto3==1.37.21 +boto3==1.38.10 pyjwt==2.10.1 psycopg2-binary==2.9.10 -psycopg[pool,binary]==3.2.6 -clickhouse-connect==0.8.15 -elasticsearch==8.17.2 +psycopg[pool,binary]==3.2.7 +clickhouse-connect==0.8.17 +elasticsearch==9.0.1 jira==3.8.0 cachetools==5.5.2 fastapi==0.115.12 -uvicorn[standard]==0.34.0 +uvicorn[standard]==0.34.2 python-decouple==3.8 -pydantic[email]==2.10.6 +pydantic[email]==2.11.4 apscheduler==3.11.0 -azure-storage-blob==12.25.0 +azure-storage-blob==12.25.1 diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt index 5c51cabf6..f960d68c7 100644 --- a/ee/api/requirements-crons.txt +++ b/ee/api/requirements-crons.txt @@ -3,16 +3,16 @@ requests==2.32.3 boto3==1.37.21 pyjwt==2.10.1 psycopg2-binary==2.9.10 -psycopg[pool,binary]==3.2.6 -clickhouse-connect==0.8.15 -elasticsearch==8.17.2 +psycopg[pool,binary]==3.2.7 +clickhouse-connect==0.8.17 +elasticsearch==9.0.1 jira==3.8.0 cachetools==5.5.2 fastapi==0.115.12 python-decouple==3.8 -pydantic[email]==2.10.6 +pydantic[email]==2.11.4 apscheduler==3.11.0 -redis==5.2.1 -azure-storage-blob==12.25.0 +redis==6.0.0 +azure-storage-blob==12.25.1 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 65c3c78ac..075415335 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,19 +1,19 @@ -urllib3==2.3.0 +urllib3==2.4.0 requests==2.32.3 -boto3==1.37.21 +boto3==1.38.10 pyjwt==2.10.1 psycopg2-binary==2.9.10 -psycopg[pool,binary]==3.2.6 -clickhouse-connect==0.8.15 -elasticsearch==8.17.2 +psycopg[pool,binary]==3.2.7 +clickhouse-connect==0.8.17 +elasticsearch==9.0.1 jira==3.8.0 cachetools==5.5.2 fastapi==0.115.12 -uvicorn[standard]==0.34.0 +uvicorn[standard]==0.34.2 gunicorn==23.0.0 python-decouple==3.8 -pydantic[email]==2.10.6 +pydantic[email]==2.11.4 apscheduler==3.11.0 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252 @@ -21,6 +21,6 @@ apscheduler==3.11.0 python3-saml==1.16.0 --no-binary=lxml python-multipart==0.0.20 -redis==5.2.1 +redis==6.0.0 #confluent-kafka==2.1.0 -azure-storage-blob==12.25.0 +azure-storage-blob==12.25.1 From a13f427816a5bb3cd35fb7ba0f1227b8089dc91b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 8 May 2025 18:51:44 +0200 Subject: [PATCH 11/25] refactor(chalice): autocomplete for event-names refactor(chalice): autocomplete for properties-names refactor(chalice): autocomplete for properties-values --- .../core/product_analytics/autocomplete.py | 57 ++++++++++++ 
.../core/product_analytics/events.py | 4 +- .../core/sessions/sessions_search_ch.py | 4 +- .../core/sessions/sessions_search_pg.py | 10 +- api/routers/subs/product_analytics.py | 20 +++- .../db/init_dbs/clickhouse/1.23.0/1.23.0.sql | 89 ++++++++++++++++++ .../clickhouse/create/init_schema.sql | 92 +++++++++++++++++- .../db/init_dbs/clickhouse/1.23.0/1.23.0.sql | 93 ++++++++++++++++++- .../clickhouse/create/init_schema.sql | 92 +++++++++++++++++- 9 files changed, 448 insertions(+), 13 deletions(-) create mode 100644 api/chalicelib/core/product_analytics/autocomplete.py diff --git a/api/chalicelib/core/product_analytics/autocomplete.py b/api/chalicelib/core/product_analytics/autocomplete.py new file mode 100644 index 000000000..5915a8ab6 --- /dev/null +++ b/api/chalicelib/core/product_analytics/autocomplete.py @@ -0,0 +1,57 @@ +from typing import Optional + +from chalicelib.utils import helper +from chalicelib.utils.ch_client import ClickHouseClient + + +def search_events(project_id: int, q: Optional[str] = None): + with ClickHouseClient() as ch_client: + full_args = {"project_id": project_id, "limit": 20} + + constraints = ["project_id = %(project_id)s", + "_timestamp >= now()-INTERVAL 1 MONTH"] + if q: + constraints += ["value ILIKE %(q)s"] + full_args["q"] = helper.string_to_sql_like(q) + query = ch_client.format( + f"""SELECT value,data_count + FROM product_analytics.autocomplete_events_grouped + WHERE {" AND ".join(constraints)} + ORDER BY data_count DESC + LIMIT %(limit)s;""", + parameters=full_args) + rows = ch_client.execute(query) + + return {"values": helper.list_to_camel_case(rows), "_src": 2} + + +def search_properties(project_id: int, property_name: Optional[str] = None, event_name: Optional[str] = None, + q: Optional[str] = None): + with ClickHouseClient() as ch_client: + select = "value" + full_args = {"project_id": project_id, "limit": 20, + "event_name": event_name, "property_name": property_name} + + constraints = ["project_id = %(project_id)s", + "_timestamp >= now()-INTERVAL 1 MONTH"] + if event_name: + constraints += ["event_name = %(event_name)s"] + if property_name and q: + constraints += ["property_name = %(property_name)s"] + elif property_name: + select = "DISTINCT ON(property_name) property_name AS value" + constraints += ["property_name ILIKE %(property_name)s"] + full_args["property_name"] = helper.string_to_sql_like(property_name) + if q: + constraints += ["value ILIKE %(q)s"] + full_args["q"] = helper.string_to_sql_like(q) + query = ch_client.format( + f"""SELECT {select},data_count + FROM product_analytics.autocomplete_event_properties_grouped + WHERE {" AND ".join(constraints)} + ORDER BY data_count DESC + LIMIT %(limit)s;""", + parameters=full_args) + rows = ch_client.execute(query) + + return {"values": helper.list_to_camel_case(rows), "_src": 2} diff --git a/api/chalicelib/core/product_analytics/events.py b/api/chalicelib/core/product_analytics/events.py index f902d91d5..10e578c7d 100644 --- a/api/chalicelib/core/product_analytics/events.py +++ b/api/chalicelib/core/product_analytics/events.py @@ -148,11 +148,11 @@ def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema): parameters=full_args) rows = ch_client.execute(query) if len(rows) == 0: - return {"total": 0, "rows": [], "src": 2} + return {"total": 0, "rows": [], "_src": 2} total = rows[0]["total"] for r in rows: r.pop("total") - return {"total": total, "rows": rows, "src": 2} + return {"total": total, "rows": rows, "_src": 2} def get_lexicon(project_id: int, page: 
schemas.PaginatedSchema): diff --git a/api/chalicelib/core/sessions/sessions_search_ch.py b/api/chalicelib/core/sessions/sessions_search_ch.py index 38ada500d..c0142bae4 100644 --- a/api/chalicelib/core/sessions/sessions_search_ch.py +++ b/api/chalicelib/core/sessions/sessions_search_ch.py @@ -73,7 +73,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas. return { 'total': 0, 'sessions': [], - 'src': 2 + '_src': 2 } if project.platform == "web": full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=error_status, @@ -216,7 +216,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas. return { 'total': total, 'sessions': sessions_list, - 'src': 2 + '_src': 2 } diff --git a/api/chalicelib/core/sessions/sessions_search_pg.py b/api/chalicelib/core/sessions/sessions_search_pg.py index f28af757a..9036e2686 100644 --- a/api/chalicelib/core/sessions/sessions_search_pg.py +++ b/api/chalicelib/core/sessions/sessions_search_pg.py @@ -49,7 +49,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas. return { 'total': 0, 'sessions': [], - 'src': 1 + '_src': 1 } full_args, query_part = sessions_legacy.search_query_parts(data=data, error_status=error_status, errors_only=errors_only, @@ -177,7 +177,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas. return { 'total': total, 'sessions': helper.list_to_camel_case(sessions), - 'src': 1 + '_src': 1 } @@ -240,7 +240,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): cur.execute("\nUNION\n".join(sub_queries)) rows = cur.fetchall() for i in rows: - i["src"] = 1 + i["_src"] = 1 results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) return results @@ -248,7 +248,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id', ascending: bool = False) -> dict: if session_ids is None or len(session_ids) == 0: - return {"total": 0, "sessions": [], "src": 1} + return {"total": 0, "sessions": [], "_src": 1} with pg_client.PostgresClient() as cur: meta_keys = metadata.get(project_id=project_id) params = {"project_id": project_id, "session_ids": tuple(session_ids)} @@ -267,4 +267,4 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's s["metadata"] = {} for m in meta_keys: s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}') - return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "src": 1} + return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "_src": 1} diff --git a/api/routers/subs/product_analytics.py b/api/routers/subs/product_analytics.py index 5b18ca93e..d7dbcba23 100644 --- a/api/routers/subs/product_analytics.py +++ b/api/routers/subs/product_analytics.py @@ -4,9 +4,10 @@ from fastapi import Body, Depends, Query import schemas from chalicelib.core import metadata -from chalicelib.core.product_analytics import events, properties +from chalicelib.core.product_analytics import events, properties, autocomplete from or_dependencies import OR_context from routers.base import get_routers +from typing import Optional public_app, app, app_apikey = get_routers() @@ -53,3 +54,20 @@ def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.Pagin def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()], context: 
schemas.CurrentContext = Depends(OR_context)): return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)} + + +@app.get('/{projectId}/events/autocomplete', tags=["autocomplete"]) +def autocomplete_events(projectId: int, q: Optional[str] = None, + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": autocomplete.search_events(project_id=projectId, q=None if not q or len(q) == 0 else q)} + + +@app.get('/{projectId}/properties/autocomplete', tags=["autocomplete"]) +def autocomplete_properties(projectId: int, propertyName: str, eventName: Optional[str] = None, + q: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": autocomplete.search_properties(project_id=projectId, + event_name=None if not eventName \ + or len(eventName) == 0 else eventName, + property_name=None if not propertyName \ + or len(propertyName) == 0 else propertyName, + q=None if not q or len(q) == 0 else q)} diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql index 889bb4d49..953c86662 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql @@ -165,3 +165,92 @@ FROM product_analytics.events WHERE randCanonical() < 0.5 -- This randomly skips inserts AND value != '' LIMIT 2 BY project_id,property_name; + +-- Autocomplete + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events +( + project_id UInt16, + value String COMMENT 'The $event_name', + _timestamp DateTime +) ENGINE = MergeTree() + ORDER BY (project_id, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_mv + TO product_analytics.autocomplete_events AS +SELECT project_id, + `$event_name` AS value, + _timestamp +FROM product_analytics.events +WHERE _timestamp > now() - INTERVAL 1 MONTH; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events_grouped +( + project_id UInt16, + value String COMMENT 'The $event_name', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_events_grouped AS +SELECT project_id, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_events +WHERE autocomplete_events._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, value; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + _timestamp DateTime DEFAULT now() +) ENGINE = MergeTree() + ORDER BY (project_id, event_name, property_name, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_mv + TO product_analytics.autocomplete_event_properties AS +SELECT project_id, + `$event_name` AS event_name, + property_name, + JSONExtractString(toString(`$properties`), property_name) AS value, + _timestamp +FROM product_analytics.events + ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name +WHERE length(value) > 0 AND 
isNull(toFloat64OrNull(value)) + AND _timestamp > now() - INTERVAL 1 MONTH; + + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, event_name, property_name, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_event_properties_grouped AS +SELECT project_id, + event_name, + property_name, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_event_properties +WHERE length(value) > 0 + AND autocomplete_event_properties._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, event_name, property_name, value; + diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index f1c2fbb66..6c32c70c6 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -791,7 +791,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples ENGINE = ReplacingMergeTree(_timestamp) ORDER BY (project_id, property_name, is_event_property); -- Incremental materialized view to get random examples of property values using $properties & properties -CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv + REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS SELECT project_id, property_name, TRUE AS is_event_property, @@ -812,3 +813,92 @@ FROM product_analytics.events WHERE randCanonical() < 0.5 -- This randomly skips inserts AND value != '' LIMIT 2 BY project_id,property_name; + +-- Autocomplete + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events +( + project_id UInt16, + value String COMMENT 'The $event_name', + _timestamp DateTime +) ENGINE = MergeTree() + ORDER BY (project_id, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_mv + TO product_analytics.autocomplete_events AS +SELECT project_id, + `$event_name` AS value, + _timestamp +FROM product_analytics.events +WHERE _timestamp > now() - INTERVAL 1 MONTH; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events_grouped +( + project_id UInt16, + value String COMMENT 'The $event_name', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_events_grouped AS +SELECT project_id, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_events +WHERE autocomplete_events._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, value; + +CREATE TABLE IF NOT EXISTS 
product_analytics.autocomplete_event_properties +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + _timestamp DateTime DEFAULT now() +) ENGINE = MergeTree() + ORDER BY (project_id, event_name, property_name, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_mv + TO product_analytics.autocomplete_event_properties AS +SELECT project_id, + `$event_name` AS event_name, + property_name, + JSONExtractString(toString(`$properties`), property_name) AS value, + _timestamp +FROM product_analytics.events + ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name +WHERE length(value) > 0 AND isNull(toFloat64OrNull(value)) + AND _timestamp > now() - INTERVAL 1 MONTH; + + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, event_name, property_name, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_event_properties_grouped AS +SELECT project_id, + event_name, + property_name, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_event_properties +WHERE length(value) > 0 + AND autocomplete_event_properties._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, event_name, property_name, value; + diff --git a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql index cebb68586..d8472807d 100644 --- a/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql +++ b/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql @@ -155,7 +155,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples ENGINE = ReplacingMergeTree(_timestamp) ORDER BY (project_id, property_name, is_event_property); -CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv + REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS SELECT project_id, property_name, TRUE AS is_event_property, @@ -175,3 +176,93 @@ FROM product_analytics.events WHERE randCanonical() < 0.5 -- This randomly skips inserts AND value != '' LIMIT 2 BY project_id,property_name; + + +-- Autocomplete + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events +( + project_id UInt16, + value String COMMENT 'The $event_name', + _timestamp DateTime +) ENGINE = MergeTree() + ORDER BY (project_id, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_mv + TO product_analytics.autocomplete_events AS +SELECT project_id, + `$event_name` AS value, + _timestamp +FROM product_analytics.events +WHERE _timestamp > now() - INTERVAL 1 MONTH; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events_grouped +( + project_id UInt16, + value String COMMENT 
'The $event_name', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_events_grouped AS +SELECT project_id, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_events +WHERE autocomplete_events._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, value; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + _timestamp DateTime DEFAULT now() +) ENGINE = MergeTree() + ORDER BY (project_id, event_name, property_name, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_mv + TO product_analytics.autocomplete_event_properties AS +SELECT project_id, + `$event_name` AS event_name, + property_name, + JSONExtractString(toString(`$properties`), property_name) AS value, + _timestamp +FROM product_analytics.events + ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name +WHERE length(value) > 0 AND isNull(toFloat64OrNull(value)) + AND _timestamp > now() - INTERVAL 1 MONTH; + + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, event_name, property_name, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_event_properties_grouped AS +SELECT project_id, + event_name, + property_name, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_event_properties +WHERE length(value) > 0 + AND autocomplete_event_properties._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, event_name, property_name, value; + diff --git a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 1bd5d0def..b9cb4b173 100644 --- a/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -687,7 +687,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples ENGINE = ReplacingMergeTree(_timestamp) ORDER BY (project_id, property_name, is_event_property); -- Incremental materialized view to get random examples of property values using $properties & properties -CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv + REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS SELECT project_id, property_name, TRUE AS is_event_property, @@ -708,3 +709,92 @@ FROM product_analytics.events 
WHERE randCanonical() < 0.5 -- This randomly skips inserts AND value != '' LIMIT 2 BY project_id,property_name; + +-- Autocomplete + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events +( + project_id UInt16, + value String COMMENT 'The $event_name', + _timestamp DateTime +) ENGINE = MergeTree() + ORDER BY (project_id, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_mv + TO product_analytics.autocomplete_events AS +SELECT project_id, + `$event_name` AS value, + _timestamp +FROM product_analytics.events +WHERE _timestamp > now() - INTERVAL 1 MONTH; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events_grouped +( + project_id UInt16, + value String COMMENT 'The $event_name', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_events_grouped AS +SELECT project_id, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_events +WHERE autocomplete_events._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, value; + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + _timestamp DateTime DEFAULT now() +) ENGINE = MergeTree() + ORDER BY (project_id, event_name, property_name, value, _timestamp) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_mv + TO product_analytics.autocomplete_event_properties AS +SELECT project_id, + `$event_name` AS event_name, + property_name, + JSONExtractString(toString(`$properties`), property_name) AS value, + _timestamp +FROM product_analytics.events + ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name +WHERE length(value) > 0 AND isNull(toFloat64OrNull(value)) + AND _timestamp > now() - INTERVAL 1 MONTH; + + +CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped +( + project_id UInt16, + event_name String COMMENT 'The $event_name', + property_name String, + value String COMMENT 'The property-value as a string', + data_count UInt16 COMMENT 'The number of appearance during the past month', + _timestamp DateTime DEFAULT now() +) ENGINE = ReplacingMergeTree(_timestamp) + ORDER BY (project_id, event_name, property_name, value) + TTL _timestamp + INTERVAL 1 MONTH; + +CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mv + REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_event_properties_grouped AS +SELECT project_id, + event_name, + property_name, + value, + count(1) AS data_count, + max(_timestamp) AS _timestamp +FROM product_analytics.autocomplete_event_properties +WHERE length(value) > 0 + AND autocomplete_event_properties._timestamp > now() - INTERVAL 1 MONTH +GROUP BY project_id, event_name, property_name, value; + From a009ff928ce335aa650eba096776c3175b6afc62 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 7 May 2025 16:24:29 +0200 Subject: [PATCH 12/25] spot: refactor popup code, split audio logic from ui code --- 
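Note (kept out of the commit message): this patch splits the popup into a recording-state hook (useAppState), an audio/permissions hook (useAudioDevices) and three presentational components, leaving App.tsx as a thin composition layer. The sketch below is illustrative only — it simply recomposes the hooks and components introduced in this diff the same way the new App.tsx does, so a reviewer can see the intended wiring at a glance before reading the hunks.

    // Sketch, not shipped code: composing the new hooks/components from this patch.
    import RecordingControls from "./components/RecordingControls";
    import AudioPicker from "./components/AudioPicker";
    import { useAppState } from "./hooks/useAppState";
    import { useAudioDevices } from "./hooks/useAudioDevices";
    import { AppState, RecordingArea } from "./types";

    function PopupSketch() {
      // Recording lifecycle and settings visibility live in useAppState.
      const { state, startRecording, stopRecording } = useAppState();
      // Device enumeration, permission state and the mic toggle live in useAudioDevices.
      const {
        mic,
        audioDevices,
        selectedAudioDevice,
        hasPermissions,
        isChecking,
        checkAudioDevices,
        handleMicToggle,
        selectAudioDevice,
      } = useAudioDevices();

      // UI handlers only translate accessor values into the hook calls.
      const start = (area: RecordingArea) =>
        startRecording(area, mic(), selectedAudioDevice(), hasPermissions());
      const stop = () => stopRecording(mic(), selectedAudioDevice());

      return (
        <>
          <RecordingControls state={state()} startRecording={start} stopRecording={stop} />
          {state() === AppState.READY && (
            <AudioPicker
              mic={mic}
              audioDevices={audioDevices}
              selectedAudioDevice={selectedAudioDevice}
              isChecking={isChecking}
              onMicToggle={handleMicToggle}
              onCheckAudio={checkAudioDevices}
              onSelectDevice={selectAudioDevice}
            />
          )}
        </>
      );
    }

Splitting the hooks out this way should also let other entrypoints reuse the audio-device logic without pulling in popup UI. One detail worth a second look in the diff: the retained popup:mic-status listener in App.tsx still calls setMic, which after this split is no longer created there, so that update presumably has to go through the audio hook (e.g. an exposed setter or toggleMic).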
spot/entrypoints/popup/App.tsx | 349 +++--------------- .../popup/components/AudioPicker.tsx | 57 +++ spot/entrypoints/popup/components/Header.tsx | 73 ++++ .../popup/components/RecordingControls.tsx | 48 +++ spot/entrypoints/popup/hooks/useAppState.ts | 63 ++++ .../popup/hooks/useAudioDevices.ts | 100 +++++ spot/entrypoints/popup/types/index.ts | 14 + spot/entrypoints/popup/utils/audio.ts | 24 ++ 8 files changed, 428 insertions(+), 300 deletions(-) create mode 100644 spot/entrypoints/popup/components/AudioPicker.tsx create mode 100644 spot/entrypoints/popup/components/Header.tsx create mode 100644 spot/entrypoints/popup/components/RecordingControls.tsx create mode 100644 spot/entrypoints/popup/hooks/useAppState.ts create mode 100644 spot/entrypoints/popup/hooks/useAudioDevices.ts create mode 100644 spot/entrypoints/popup/types/index.ts create mode 100644 spot/entrypoints/popup/utils/audio.ts diff --git a/spot/entrypoints/popup/App.tsx b/spot/entrypoints/popup/App.tsx index 3d9314f76..3317418aa 100644 --- a/spot/entrypoints/popup/App.tsx +++ b/spot/entrypoints/popup/App.tsx @@ -1,165 +1,49 @@ -import orLogo from "~/assets/orSpot.svg"; -import micOff from "~/assets/mic-off-red.svg"; -import micOn from "~/assets/mic-on-dark.svg"; +import { createEffect, onMount } from "solid-js"; import Login from "~/entrypoints/popup/Login"; import Settings from "~/entrypoints/popup/Settings"; -import { createSignal, createEffect, onMount } from "solid-js"; -import Dropdown from "~/entrypoints/popup/Dropdown"; -import Button from "~/entrypoints/popup/Button"; -import { - ChevronSvg, - RecordDesktopSvg, - RecordTabSvg, - HomePageSvg, - SlackSvg, - SettingsSvg, -} from "./Icons"; - -async function getAudioDevices() { - try { - await navigator.mediaDevices.getUserMedia({ audio: true }); - const devices = await navigator.mediaDevices.enumerateDevices(); - const audioDevices = devices - .filter((device) => device.kind === "audioinput") - .map((device) => ({ label: device.label, id: device.deviceId })); - - return { granted: true, audioDevices }; - } catch (error) { - console.error("Error accessing audio devices:", error); - const msg = error.message ?? ""; - return { - granted: false, - denied: msg.includes("denied"), - audioDevices: [], - }; - } -} - -const orSite = () => { - window.open("https://openreplay.com", "_blank"); -}; - -function Header({ openSettings }: { openSettings: () => void }) { - const openHomePage = async () => { - const { settings } = await chrome.storage.local.get("settings"); - return window.open(`${settings.ingestPoint}/spots`, "_blank"); - }; - return ( -
-
- {"OpenReplay -
- OpenReplay Spot -
-
- -
-
-
-
- -
-
-
- - - -
-
- -
-
-
-
- ); -} - -const STATE = { - empty: "empty", - login: "login", - ready: "ready", - starting: "starting", - recording: "recording", -}; +import Header from "./components/Header"; +import RecordingControls from "./components/RecordingControls"; +import AudioPicker from "./components/AudioPicker"; +import { useAppState } from "./hooks/useAppState"; +import { useAudioDevices } from "./hooks/useAudioDevices"; +import { AppState } from "./types"; function App() { - const [state, setState] = createSignal(STATE.empty); - const [isSettingsOpen, setIsSettingsOpen] = createSignal(false); - const [mic, setMic] = createSignal(false); - const [selectedAudioDevice, setSelectedAudioDevice] = createSignal(""); - const [hasPermissions, setHasPermissions] = createSignal(false); + const { + state, + isSettingsOpen, + startRecording, + stopRecording, + openSettings, + closeSettings, + } = useAppState(); + const { + audioDevices, + selectedAudioDevice, + mic, + hasPermissions, + isChecking, + checkAudioDevices, + handleMicToggle, + selectAudioDevice, + } = useAudioDevices(); + + // Listen for mic status updates from background onMount(() => { browser.runtime.onMessage.addListener((message) => { - if (message.type === "popup:no-login") { - setState(STATE.login); - } - if (message.type === "popup:login") { - setState(STATE.ready); - } - if (message.type === "popup:stopped") { - setState(STATE.ready); - } - if (message.type === "popup:started") { - setState(STATE.recording); - } if (message.type === "popup:mic-status") { setMic(message.status); } }); - void browser.runtime.sendMessage({ type: "popup:check-status" }); }); - const startRecording = async (reqTab: "tab" | "desktop") => { - setState(STATE.starting); - await browser.runtime.sendMessage({ - type: "popup:start", - area: reqTab, - mic: mic(), - audioId: selectedAudioDevice(), - permissions: hasPermissions(), - }); - window.close(); + const handleStartRecording = (area: "tab" | "desktop") => { + startRecording(area, mic(), selectedAudioDevice(), hasPermissions()); }; - const stopRecording = () => { - void browser.runtime.sendMessage({ - type: "popup:stop", - mic: mic(), - audioId: selectedAudioDevice(), - }); - }; - - const toggleMic = async () => { - setMic(!mic()); - }; - - const openSettings = () => { - setIsSettingsOpen(true); - }; - const closeSettings = () => { - setIsSettingsOpen(false); + const handleStopRecording = () => { + stopRecording(mic(), selectedAudioDevice()); }; return ( @@ -167,58 +51,30 @@ function App() { {isSettingsOpen() ? ( ) : ( -
+
- {state() === STATE.login ? ( + {state() === AppState.LOGIN ? ( ) : ( <> - {state() === STATE.recording ? ( - + - -
- - - ) : null} + {state() === AppState.READY && ( + + )} )}
@@ -227,111 +83,4 @@ function App() { ); } -interface IAudioPicker { - mic: () => boolean; - toggleMic: () => void; - selectedAudioDevice: () => string; - setSelectedAudioDevice: (value: string) => void; - setHasPermissions: (value: boolean) => void; -} -function AudioPicker(props: IAudioPicker) { - const [audioDevices, setAudioDevices] = createSignal( - [] as { label: string; id: string }[], - ); - const [checkedAudioDevices, setCheckedAudioDevices] = createSignal(0); - - createEffect(() => { - chrome.storage.local.get("audioPerm", (data) => { - if (data.audioPerm && audioDevices().length === 0) { - props.setHasPermissions(true); - void checkAudioDevices(); - } - }); - }); - - const checkAudioDevices = async () => { - const { granted, audioDevices, denied } = await getAudioDevices(); - if (!granted && !denied) { - void browser.runtime.sendMessage({ - type: "popup:get-audio-perm", - }); - browser.runtime.onMessage.addListener((message) => { - if (message.type === "popup:audio-perm") { - void checkAudioDevices(); - } - }); - } else if (audioDevices.length > 0) { - chrome.storage.local.set({ audioPerm: granted }); - setAudioDevices(audioDevices); - props.setSelectedAudioDevice(audioDevices[0]?.id || ""); - } - }; - - const checkAudio = async () => { - if (checkedAudioDevices() > 0) { - return; - } - setCheckedAudioDevices(1); - await checkAudioDevices(); - setCheckedAudioDevices(2); - }; - const onSelect = (value) => { - props.setSelectedAudioDevice(value); - if (!props.mic()) { - props.toggleMic(); - } - }; - - const onMicToggle = async () => { - if (!audioDevices().length) { - return await checkAudioDevices(); - } - if (!props.selectedAudioDevice() && audioDevices().length) { - onSelect(audioDevices()[0].id); - } else { - props.toggleMic(); - } - }; - - return ( -
-
- {props.mic() -
-
- {audioDevices().length === 0 ? ( -
- {checkedAudioDevices() === 1 - ? "Loading audio devices" - : "Grant microphone access"} -
- ) : ( - - )} - -
-
- ); -} - export default App; diff --git a/spot/entrypoints/popup/components/AudioPicker.tsx b/spot/entrypoints/popup/components/AudioPicker.tsx new file mode 100644 index 000000000..627b5c840 --- /dev/null +++ b/spot/entrypoints/popup/components/AudioPicker.tsx @@ -0,0 +1,57 @@ +import { Component, For } from "solid-js"; +import micOff from "~/assets/mic-off-red.svg"; +import micOn from "~/assets/mic-on-dark.svg"; +import Dropdown from "~/entrypoints/popup/Dropdown"; +import { ChevronSvg } from "../Icons"; +import { AudioDevice } from "../types"; + +interface AudioPickerProps { + mic: () => boolean; + audioDevices: () => AudioDevice[]; + selectedAudioDevice: () => string; + isChecking: () => boolean; + onMicToggle: () => void; + onCheckAudio: () => void; + onSelectDevice: (deviceId: string) => void; +} + +const AudioPicker: Component = (props) => { + return ( +
+
+ {props.mic() +
+ +
+ {props.audioDevices().length === 0 ? ( +
+ {props.isChecking() + ? "Loading audio devices" + : "Grant microphone access"} +
+ ) : ( + + )} + +
+
+ ); +}; + +export default AudioPicker; diff --git a/spot/entrypoints/popup/components/Header.tsx b/spot/entrypoints/popup/components/Header.tsx new file mode 100644 index 000000000..ec6f9d5b8 --- /dev/null +++ b/spot/entrypoints/popup/components/Header.tsx @@ -0,0 +1,73 @@ +import { Component } from "solid-js"; +import orLogo from "~/assets/orSpot.svg"; +import { + HomePageSvg, + SlackSvg, + SettingsSvg, +} from "../Icons"; + +interface HeaderProps { + openSettings: () => void; +} + +const Header: Component = (props) => { + const openHomePage = async () => { + const { settings } = await chrome.storage.local.get("settings"); + return window.open(`${settings.ingestPoint}/spots`, "_blank"); + }; + + const openOrSite = () => { + window.open("https://openreplay.com", "_blank"); + }; + + return ( +
+
+ OpenReplay Spot +
+ OpenReplay Spot +
+
+ +
+
+
+
+ +
+
+
+ + + +
+
+ +
+
+
+
+ ); +}; + +export default Header; diff --git a/spot/entrypoints/popup/components/RecordingControls.tsx b/spot/entrypoints/popup/components/RecordingControls.tsx new file mode 100644 index 000000000..1fdd8fa61 --- /dev/null +++ b/spot/entrypoints/popup/components/RecordingControls.tsx @@ -0,0 +1,48 @@ +import { Component } from "solid-js"; +import { RecordTabSvg, RecordDesktopSvg } from "../Icons"; +import Button from "~/entrypoints/popup/Button"; +import { AppState, RecordingArea } from "../types"; + +interface RecordingControlsProps { + state: AppState; + startRecording: (area: RecordingArea) => void; + stopRecording: () => void; +} + +const RecordingControls: Component = (props) => { + return ( + <> + {props.state === AppState.RECORDING && ( + + + +
+ )} + + ); +}; + +export default RecordingControls; diff --git a/spot/entrypoints/popup/hooks/useAppState.ts b/spot/entrypoints/popup/hooks/useAppState.ts new file mode 100644 index 000000000..4c7b6a844 --- /dev/null +++ b/spot/entrypoints/popup/hooks/useAppState.ts @@ -0,0 +1,63 @@ +import { createSignal, onMount } from "solid-js"; +import { AppState, RecordingArea } from "../types"; + +export function useAppState() { + const [state, setState] = createSignal(AppState.EMPTY); + const [isSettingsOpen, setIsSettingsOpen] = createSignal(false); + + onMount(() => { + browser.runtime.onMessage.addListener((message) => { + if (message.type === "popup:no-login") { + setState(AppState.LOGIN); + } + if (message.type === "popup:login") { + setState(AppState.READY); + } + if (message.type === "popup:stopped") { + setState(AppState.READY); + } + if (message.type === "popup:started") { + setState(AppState.RECORDING); + } + }); + + void browser.runtime.sendMessage({ type: "popup:check-status" }); + }); + + const startRecording = async ( + area: RecordingArea, + mic: boolean, + audioId: string, + permissions: boolean + ) => { + setState(AppState.STARTING); + await browser.runtime.sendMessage({ + type: "popup:start", + area, + mic, + audioId, + permissions, + }); + window.close(); + }; + + const stopRecording = (mic: boolean, audioId: string) => { + void browser.runtime.sendMessage({ + type: "popup:stop", + mic, + audioId, + }); + }; + + const openSettings = () => setIsSettingsOpen(true); + const closeSettings = () => setIsSettingsOpen(false); + + return { + state, + isSettingsOpen, + startRecording, + stopRecording, + openSettings, + closeSettings, + }; +} diff --git a/spot/entrypoints/popup/hooks/useAudioDevices.ts b/spot/entrypoints/popup/hooks/useAudioDevices.ts new file mode 100644 index 000000000..25ae807d3 --- /dev/null +++ b/spot/entrypoints/popup/hooks/useAudioDevices.ts @@ -0,0 +1,100 @@ +import { createSignal, createEffect } from "solid-js"; +import { AudioDevice } from "../types"; +import { getAudioDevices } from "../utils/audio"; + +export function useAudioDevices() { + const [audioDevices, setAudioDevices] = createSignal([]); + const [selectedAudioDevice, setSelectedAudioDevice] = createSignal(""); + const [mic, setMic] = createSignal(false); + const [hasPermissions, setHasPermissions] = createSignal(false); + const [isChecking, setIsChecking] = createSignal(false); + + createEffect(() => { + chrome.storage.local.get("audioPerm", (data) => { + if (data.audioPerm && audioDevices().length === 0) { + setHasPermissions(true); + checkAudioDevices().then(async (devices) => { + const { selectedAudioId, micOn } = await chrome.storage.local.get([ + "selectedAudioId", + "micOn", + ]); + + if (selectedAudioId) { + const selectedDevice = devices.find( + (device) => device.id === selectedAudioId + ); + if (selectedDevice) { + setSelectedAudioDevice(selectedDevice.id); + } + } + + if (micOn) { + toggleMic(); + } + }); + } + }); + }); + + const checkAudioDevices = async (): Promise => { + setIsChecking(true); + + const { granted, audioDevices, denied } = await getAudioDevices(); + + if (!granted && !denied) { + void browser.runtime.sendMessage({ + type: "popup:get-audio-perm", + }); + + browser.runtime.onMessage.addListener((message) => { + if (message.type === "popup:audio-perm") { + void checkAudioDevices(); + } + }); + } else if (audioDevices.length > 0) { + chrome.storage.local.set({ audioPerm: granted }); + setAudioDevices(audioDevices); + setSelectedAudioDevice(audioDevices[0]?.id || ""); + } + + 
setIsChecking(false); + return audioDevices; + }; + + const toggleMic = () => { + setMic(!mic()); + }; + + const selectAudioDevice = (deviceId: string) => { + setSelectedAudioDevice(deviceId); + if (!mic()) { + toggleMic(); + } + chrome.storage.local.set({ selectedAudioId: deviceId, micOn: true }); + }; + + const handleMicToggle = async () => { + if (!audioDevices().length) { + return await checkAudioDevices(); + } + + if (!selectedAudioDevice() && audioDevices().length) { + selectAudioDevice(audioDevices()[0].id); + } else { + chrome.storage.local.set({ micOn: !mic() }); + toggleMic(); + } + }; + + return { + audioDevices, + selectedAudioDevice, + mic, + hasPermissions, + isChecking, + checkAudioDevices, + toggleMic, + selectAudioDevice, + handleMicToggle, + }; +} diff --git a/spot/entrypoints/popup/types/index.ts b/spot/entrypoints/popup/types/index.ts new file mode 100644 index 000000000..71f13490f --- /dev/null +++ b/spot/entrypoints/popup/types/index.ts @@ -0,0 +1,14 @@ +export type AudioDevice = { + label: string; + id: string; +}; + +export enum AppState { + EMPTY = "empty", + LOGIN = "login", + READY = "ready", + STARTING = "starting", + RECORDING = "recording", +} + +export type RecordingArea = "tab" | "desktop"; diff --git a/spot/entrypoints/popup/utils/audio.ts b/spot/entrypoints/popup/utils/audio.ts new file mode 100644 index 000000000..dab6cf210 --- /dev/null +++ b/spot/entrypoints/popup/utils/audio.ts @@ -0,0 +1,24 @@ +import type { AudioDevice } from "../types"; +export async function getAudioDevices(): Promise<{ + granted: boolean; + denied?: boolean; + audioDevices: AudioDevice[]; +}> { + try { + await navigator.mediaDevices.getUserMedia({ audio: true }); + const devices = await navigator.mediaDevices.enumerateDevices(); + const audioDevices = devices + .filter((device) => device.kind === "audioinput") + .map((device) => ({ label: device.label, id: device.deviceId })); + + return { granted: true, audioDevices }; + } catch (error) { + console.error("Error accessing audio devices:", error); + const msg = error.message ?? ""; + return { + granted: false, + denied: msg.includes("denied"), + audioDevices: [], + }; + } +} From d2d886b3221ef7ca41185d936d710136ebe7176a Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 9 May 2025 11:20:52 +0200 Subject: [PATCH 13/25] ui: fix filter options reset, fix dashboard chart density --- .../components/WidgetChart/WidgetChart.tsx | 5 ++-- .../WidgetDateRange/RangeGranularity.tsx | 2 +- .../FilterAutoComplete/AutocompleteModal.tsx | 6 ++--- .../FilterAutoComplete/FilterAutoComplete.tsx | 4 ++- frontend/app/mstore/dashboardStore.ts | 26 +++++++++++++++---- frontend/app/mstore/types/widget.ts | 1 + 6 files changed, 32 insertions(+), 12 deletions(-) diff --git a/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx b/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx index 0460053a5..abdbc9066 100644 --- a/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx +++ b/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx @@ -181,9 +181,10 @@ function WidgetChart(props: Props) { } prevMetricRef.current = _metric; const timestmaps = drillDownPeriod.toTimestamps(); + const density = props.isPreview ? metric.density : dashboardStore.selectedDensity const payload = isSaved - ? { ...metricParams } - : { ...params, ...timestmaps, ..._metric.toJson() }; + ? 
{ ...metricParams, density } + : { ...params, ...timestmaps, ..._metric.toJson(), density }; debounceRequest( _metric, payload, diff --git a/frontend/app/components/Dashboard/components/WidgetDateRange/RangeGranularity.tsx b/frontend/app/components/Dashboard/components/WidgetDateRange/RangeGranularity.tsx index 02d9a970c..18a2b7815 100644 --- a/frontend/app/components/Dashboard/components/WidgetDateRange/RangeGranularity.tsx +++ b/frontend/app/components/Dashboard/components/WidgetDateRange/RangeGranularity.tsx @@ -55,7 +55,7 @@ function RangeGranularity({ } const PAST_24_HR_MS = 24 * 60 * 60 * 1000; -function calculateGranularities(periodDurationMs: number) { +export function calculateGranularities(periodDurationMs: number) { const granularities = [ { label: 'Hourly', durationMs: 60 * 60 * 1000 }, { label: 'Daily', durationMs: 24 * 60 * 60 * 1000 }, diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/AutocompleteModal.tsx b/frontend/app/components/shared/Filters/FilterAutoComplete/AutocompleteModal.tsx index e8d86beb4..88042887c 100644 --- a/frontend/app/components/shared/Filters/FilterAutoComplete/AutocompleteModal.tsx +++ b/frontend/app/components/shared/Filters/FilterAutoComplete/AutocompleteModal.tsx @@ -125,7 +125,7 @@ export function AutocompleteModal({ if (index === blocksAmount - 1 && blocksAmount > 1) { str += ' and '; } - str += `"${block.trim()}"`; + str += block.trim(); if (index < blocksAmount - 2) { str += ', '; } @@ -188,10 +188,10 @@ export function AutocompleteModal({ {query.length ? (
- {t('Apply')} {queryStr} + {t('Apply')} {queryStr}
) : null} diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx index 2b6fa260a..41fc685b1 100644 --- a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx +++ b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx @@ -128,8 +128,10 @@ const FilterAutoComplete = observer( }; const handleFocus = () => { + if (!initialFocus) { + setOptions(topValues.map((i) => ({ value: i.value, label: i.value }))); + } setInitialFocus(true); - setOptions(topValues.map((i) => ({ value: i.value, label: i.value }))); }; return ( diff --git a/frontend/app/mstore/dashboardStore.ts b/frontend/app/mstore/dashboardStore.ts index 723b97c7d..f985b97ca 100644 --- a/frontend/app/mstore/dashboardStore.ts +++ b/frontend/app/mstore/dashboardStore.ts @@ -1,4 +1,4 @@ -import { makeAutoObservable, runInAction } from 'mobx'; +import { makeAutoObservable, runInAction, reaction } from 'mobx'; import { dashboardService, metricService } from 'App/services'; import { toast } from 'react-toastify'; import Period, { LAST_24_HOURS, LAST_7_DAYS } from 'Types/app/period'; @@ -6,6 +6,7 @@ import { getRE } from 'App/utils'; import Filter from './types/filter'; import Widget from './types/widget'; import Dashboard from './types/dashboard'; +import { calculateGranularities } from '@/components/Dashboard/components/WidgetDateRange/RangeGranularity'; interface DashboardFilter { query?: string; @@ -36,7 +37,7 @@ export default class DashboardStore { drillDownPeriod: Record = Period({ rangeName: LAST_24_HOURS }); - selectedDensity: number = 7; // depends on default drilldown, 7 points here!!!; + selectedDensity: number = 7; comparisonPeriods: Record = {}; @@ -83,10 +84,25 @@ export default class DashboardStore { makeAutoObservable(this); this.resetDrillDownFilter(); + + this.createDensity(this.period.getDuration()); + reaction( + () => this.period, + (period) => { + this.createDensity(period.getDuration()); + } + ) } - setDensity = (density: any) => { - this.selectedDensity = parseInt(density, 10); + createDensity = (duration: number) => { + const densityOpts = calculateGranularities(duration); + const defaultOption = densityOpts[densityOpts.length - 2]; + + this.setDensity(defaultOption.key) + } + + setDensity = (density: number) => { + this.selectedDensity = density; }; get sortedDashboards() { @@ -529,7 +545,7 @@ export default class DashboardStore { const data = await metricService.getMetricChartData( metric, params, - isSaved, + isSaved ); resolve(metric.setData(data, period, isComparison, density)); } catch (error) { diff --git a/frontend/app/mstore/types/widget.ts b/frontend/app/mstore/types/widget.ts index 2809ef63f..5ce757500 100644 --- a/frontend/app/mstore/types/widget.ts +++ b/frontend/app/mstore/types/widget.ts @@ -163,6 +163,7 @@ export default class Widget { fromJson(json: any, period?: any) { json.config = json.config || {}; runInAction(() => { + this.dashboardId = json.dashboardId; this.metricId = json.metricId; this.widgetId = json.widgetId; this.metricValue = this.metricValueFromArray( From c6b0649613c903202a9d9caf5f5ca306f3ca38a4 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 29 Apr 2025 14:32:21 +0200 Subject: [PATCH 14/25] ui: starting dark theme --- frontend/app/ThemeContext.tsx | 64 ++ .../components/AddCardSelectionModal.tsx | 10 - .../ReplayPlayer/playerBlockHeader.module.css | 2 +- .../Session_/EventsBlock/EventsBlock.tsx | 2 +- 
.../Session_/EventsBlock/event.module.css | 4 +- .../Session_/playerBlockHeader.module.css | 2 +- frontend/app/components/ThemeToggle/index.tsx | 31 + .../shared/SearchActions/SearchActions.tsx | 2 +- .../app/components/shared/Select/Select.tsx | 184 +----- .../shared/SessionItem/sessionItem.module.css | 4 +- .../components/ListingVisibility.tsx | 3 + .../components/ui/SlideModal/SlideModal.js | 2 - frontend/app/initialize.js | 75 --- frontend/app/initialize.tsx | 188 ++++-- frontend/app/layout/TopRight.tsx | 2 + frontend/app/styles/colors-autogen.css | 582 ++++++++++-------- frontend/app/styles/general.css | 4 - frontend/app/styles/global.css | 2 +- frontend/app/styles/main.css | 135 +--- frontend/app/theme/colors.js | 57 +- frontend/postcss.config.js | 31 +- frontend/scripts/colors.js | 26 +- frontend/tailwind.config.js | 111 +++- 23 files changed, 764 insertions(+), 759 deletions(-) create mode 100644 frontend/app/ThemeContext.tsx create mode 100644 frontend/app/components/ThemeToggle/index.tsx delete mode 100644 frontend/app/initialize.js diff --git a/frontend/app/ThemeContext.tsx b/frontend/app/ThemeContext.tsx new file mode 100644 index 000000000..51e340231 --- /dev/null +++ b/frontend/app/ThemeContext.tsx @@ -0,0 +1,64 @@ +import React, { createContext, useContext, useState, useEffect, ReactNode } from 'react'; + +type ThemeType = 'light' | 'dark'; + +interface ThemeContextType { + theme: ThemeType; + toggleTheme: () => void; +} + +const ThemeContext = createContext(undefined); + +export const ThemeProvider: React.FC<{ children: ReactNode }> = ({ children }) => { + const getInitialTheme = (): ThemeType => { + const savedTheme = localStorage.getItem('theme'); + if (savedTheme && (savedTheme === 'light' || savedTheme === 'dark')) { + return savedTheme; + } + return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'; + }; + + const [theme, setTheme] = useState(getInitialTheme); + + useEffect(() => { + if (theme === 'dark') { + document.documentElement.classList.add('dark'); + } else { + document.documentElement.classList.remove('dark'); + } + + localStorage.setItem('theme', theme); + }, [theme]); + + useEffect(() => { + const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)'); + + const handleChange = (e: MediaQueryListEvent) => { + // Only apply system preference if user hasn't manually set a preference + if (!localStorage.getItem('theme')) { + setTheme(e.matches ? 'dark' : 'light'); + } + }; + + mediaQuery.addEventListener('change', handleChange); + return () => mediaQuery.removeEventListener('change', handleChange); + }, []); + + const toggleTheme = () => { + setTheme(prevTheme => (prevTheme === 'dark' ? 'light' : 'dark')); + }; + + return ( + + {children} + + ); +}; + +export const useTheme = (): ThemeContextType => { + const context = useContext(ThemeContext); + if (context === undefined) { + throw new Error('useTheme must be used within a ThemeProvider'); + } + return context; +}; diff --git a/frontend/app/components/Dashboard/components/AddCardSelectionModal.tsx b/frontend/app/components/Dashboard/components/AddCardSelectionModal.tsx index dd2de2cf0..63b2521b2 100644 --- a/frontend/app/components/Dashboard/components/AddCardSelectionModal.tsx +++ b/frontend/app/components/Dashboard/components/AddCardSelectionModal.tsx @@ -44,16 +44,6 @@ function AddCardSelectionModal(props: Props) { className="addCard" width={isSaas ? 900 : undefined} > - {isSaas ? ( - <> - - - -
- {t('or')} -
- - ) : null}
{uxtestingStore.isUxt() ? ( diff --git a/frontend/app/components/Session_/EventsBlock/event.module.css b/frontend/app/components/Session_/EventsBlock/event.module.css index 5b5cec4a5..f6685d02b 100644 --- a/frontend/app/components/Session_/EventsBlock/event.module.css +++ b/frontend/app/components/Session_/EventsBlock/event.module.css @@ -13,7 +13,7 @@ .event { position: relative; - background: #f6f6f6; + background: $gray-lightest; /* border-radius: 3px; */ user-select: none; transition: all 0.2s; @@ -147,5 +147,5 @@ } .lastInGroup { - background: white; + background: $white; } diff --git a/frontend/app/components/Session_/playerBlockHeader.module.css b/frontend/app/components/Session_/playerBlockHeader.module.css index 29c6e1648..96a8cecad 100644 --- a/frontend/app/components/Session_/playerBlockHeader.module.css +++ b/frontend/app/components/Session_/playerBlockHeader.module.css @@ -3,7 +3,7 @@ border-bottom: solid thin $gray-light; padding-left: 15px; padding-right: 0; - background-color: white; + background-color: $white; } .divider { diff --git a/frontend/app/components/ThemeToggle/index.tsx b/frontend/app/components/ThemeToggle/index.tsx new file mode 100644 index 000000000..1b5bde18f --- /dev/null +++ b/frontend/app/components/ThemeToggle/index.tsx @@ -0,0 +1,31 @@ +import React from 'react'; +import { Button } from 'antd'; +import { BulbOutlined, BulbFilled } from '@ant-design/icons'; +import { useTheme } from 'App/ThemeContext'; + +interface ThemeToggleProps { + className?: string; + style?: React.CSSProperties; + size?: 'large' | 'middle' | 'small'; +} + +const ThemeToggle: React.FC = ({ + className = '', + style = {}, + size = 'middle' +}) => { + const { theme, toggleTheme } = useTheme(); + + return ( +