From a654e30df2c5acfc7fda53ef830415ee15d3e372 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 12 Dec 2024 12:37:39 +0100 Subject: [PATCH 01/10] Dev (#2866) * refactor(chalice): refactored errors * refactor(chalice): refactored metrics/cards/dashboards refactor(chalice): refactored sessions refactor(chalice): refactored sourcemaps --- api/chalicelib/core/boarding.py | 22 +- api/chalicelib/core/errors/__init__.py | 12 + api/chalicelib/core/{ => errors}/errors.py | 3 +- .../chalicelib/core/errors/errors_ch.py | 6 +- .../core/{ => errors}/errors_favorite.py | 0 .../core/{ => errors}/errors_viewed.py | 0 .../core/errors/modules/__init__.py | 10 + .../issue_tracking/integrations_global.py | 2 +- api/chalicelib/core/log_tools/log_tools.py | 2 +- .../core/log_tools/modules/__init__.py | 1 - api/chalicelib/core/metrics/__init__.py | 13 + .../core/{ => metrics}/custom_metrics.py | 3 +- .../custom_metrics_predefined.py | 2 +- .../core/{ => metrics}/dashboards.py | 2 +- api/chalicelib/core/{ => metrics}/funnels.py | 2 +- api/chalicelib/core/{ => metrics}/heatmaps.py | 0 .../chalicelib/core/metrics/heatmaps_ch.py | 8 +- api/chalicelib/core/{ => metrics}/metrics.py | 0 .../chalicelib/core/metrics}/metrics_ch.py | 0 .../core/metrics/modules/__init__.py | 12 + .../metrics/modules/significance/__init__.py | 10 + .../modules/significance}/significance.py | 15 +- .../modules/significance/significance_ch.py | 8 +- .../core/{ => metrics}/product_analytics.py | 0 .../core/{ => metrics}/product_anaytics2.py | 0 .../{issue_tracking => }/modules/__init__.py | 1 + .../core/sessions/sessions_devtool.py | 3 +- .../core/sessions/sessions_replay.py | 4 +- api/chalicelib/core/sourcemaps/__init__.py | 0 .../core/{ => sourcemaps}/sourcemaps.py | 2 +- .../{ => sourcemaps}/sourcemaps_parser.py | 0 api/env.default | 4 +- api/routers/core.py | 3 +- api/routers/core_dynamic.py | 5 +- api/routers/subs/insights.py | 4 - api/routers/subs/metrics.py | 2 +- api/routers/subs/product_anaytics.py | 6 +- ee/api/.gitignore | 29 +- ee/api/chalicelib/core/__init__.py | 24 +- ee/api/chalicelib/core/boarding.py | 119 ---- ee/api/chalicelib/core/custom_metrics_ee.py | 236 ------- ee/api/chalicelib/core/errors.py | 609 ------------------ ee/api/chalicelib/core/errors/__init__.py | 14 + .../errors_viewed_ee.py} | 7 +- ee/api/chalicelib/core/errors_viewed.py | 39 -- ee/api/chalicelib/core/events.py | 223 ------- ee/api/chalicelib/core/integrations_global.py | 67 -- .../core/issue_tracking/modules/__init__.py | 1 - ee/api/chalicelib/core/metrics/__init__.py | 9 + .../core/metrics/custom_metrics_ee.py | 99 +++ .../core/{log_tools => }/modules/__init__.py | 1 + ee/api/chalicelib/core/product_analytics.py | 4 +- ee/api/chalicelib/core/sessions/__init__.py | 4 + .../core/sessions/sessions_devtool.py | 39 -- .../core/sessions/sessions_devtool_ee.py | 13 + .../core/sessions/sessions_favorite.py | 97 --- .../core/sessions/sessions_favorite_ee.py | 75 +++ .../core/sessions/sessions_favorite_exp.py | 24 - .../core/sessions/sessions_notes.py | 4 +- .../core/sessions/sessions_replay.py | 157 ----- .../core/sessions/sessions_viewed.py | 13 - ...ns_viewed_exp.py => sessions_viewed_ee.py} | 4 + ee/api/clean-dev.sh | 29 +- ee/api/routers/core_dynamic.py | 4 +- ee/api/routers/subs/metrics.py | 2 +- 65 files changed, 386 insertions(+), 1727 deletions(-) create mode 100644 api/chalicelib/core/errors/__init__.py rename api/chalicelib/core/{ => errors}/errors.py (99%) rename ee/api/chalicelib/core/errors_exp.py => 
api/chalicelib/core/errors/errors_ch.py (99%) rename api/chalicelib/core/{ => errors}/errors_favorite.py (100%) rename api/chalicelib/core/{ => errors}/errors_viewed.py (100%) create mode 100644 api/chalicelib/core/errors/modules/__init__.py delete mode 100644 api/chalicelib/core/log_tools/modules/__init__.py create mode 100644 api/chalicelib/core/metrics/__init__.py rename api/chalicelib/core/{ => metrics}/custom_metrics.py (99%) rename api/chalicelib/core/{ => metrics}/custom_metrics_predefined.py (96%) rename api/chalicelib/core/{ => metrics}/dashboards.py (99%) rename api/chalicelib/core/{ => metrics}/funnels.py (97%) rename api/chalicelib/core/{ => metrics}/heatmaps.py (100%) rename ee/api/chalicelib/core/heatmaps.py => api/chalicelib/core/metrics/heatmaps_ch.py (99%) rename api/chalicelib/core/{ => metrics}/metrics.py (100%) rename {ee/api/chalicelib/core => api/chalicelib/core/metrics}/metrics_ch.py (100%) create mode 100644 api/chalicelib/core/metrics/modules/__init__.py create mode 100644 api/chalicelib/core/metrics/modules/significance/__init__.py rename api/chalicelib/core/{ => metrics/modules/significance}/significance.py (99%) rename ee/api/chalicelib/core/significance_exp.py => api/chalicelib/core/metrics/modules/significance/significance_ch.py (98%) rename api/chalicelib/core/{ => metrics}/product_analytics.py (100%) rename api/chalicelib/core/{ => metrics}/product_anaytics2.py (100%) rename api/chalicelib/core/{issue_tracking => }/modules/__init__.py (72%) create mode 100644 api/chalicelib/core/sourcemaps/__init__.py rename api/chalicelib/core/{ => sourcemaps}/sourcemaps.py (99%) rename api/chalicelib/core/{ => sourcemaps}/sourcemaps_parser.py (100%) delete mode 100644 ee/api/chalicelib/core/boarding.py delete mode 100644 ee/api/chalicelib/core/custom_metrics_ee.py delete mode 100644 ee/api/chalicelib/core/errors.py create mode 100644 ee/api/chalicelib/core/errors/__init__.py rename ee/api/chalicelib/core/{errors_viewed_exp.py => errors/errors_viewed_ee.py} (70%) delete mode 100644 ee/api/chalicelib/core/errors_viewed.py delete mode 100644 ee/api/chalicelib/core/events.py delete mode 100644 ee/api/chalicelib/core/integrations_global.py delete mode 100644 ee/api/chalicelib/core/issue_tracking/modules/__init__.py create mode 100644 ee/api/chalicelib/core/metrics/__init__.py create mode 100644 ee/api/chalicelib/core/metrics/custom_metrics_ee.py rename ee/api/chalicelib/core/{log_tools => }/modules/__init__.py (51%) delete mode 100644 ee/api/chalicelib/core/sessions/sessions_devtool.py create mode 100644 ee/api/chalicelib/core/sessions/sessions_devtool_ee.py delete mode 100644 ee/api/chalicelib/core/sessions/sessions_favorite.py create mode 100644 ee/api/chalicelib/core/sessions/sessions_favorite_ee.py delete mode 100644 ee/api/chalicelib/core/sessions/sessions_favorite_exp.py delete mode 100644 ee/api/chalicelib/core/sessions/sessions_replay.py delete mode 100644 ee/api/chalicelib/core/sessions/sessions_viewed.py rename ee/api/chalicelib/core/sessions/{sessions_viewed_exp.py => sessions_viewed_ee.py} (81%) diff --git a/api/chalicelib/core/boarding.py b/api/chalicelib/core/boarding.py index 99a93a645..26f2e3dcb 100644 --- a/api/chalicelib/core/boarding.py +++ b/api/chalicelib/core/boarding.py @@ -1,8 +1,8 @@ -from chalicelib.utils import pg_client from chalicelib.core import projects -from chalicelib.core.log_tools import datadog, stackdriver, sentry - from chalicelib.core import users +from chalicelib.core.log_tools import datadog, stackdriver, sentry +from 
chalicelib.core.modules import TENANT_CONDITION +from chalicelib.utils import pg_client def get_state(tenant_id): @@ -21,21 +21,23 @@ def get_state(tenant_id): recorded = cur.fetchone()["exists"] meta = False if recorded: - cur.execute("""SELECT EXISTS((SELECT 1 + query = cur.mogrify(f"""SELECT EXISTS((SELECT 1 FROM public.projects AS p LEFT JOIN LATERAL ( SELECT 1 FROM public.sessions WHERE sessions.project_id = p.project_id AND sessions.user_id IS NOT NULL LIMIT 1) AS sessions(user_id) ON (TRUE) - WHERE p.deleted_at ISNULL + WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL OR p.metadata_10 IS NOT NULL ) - )) AS exists;""") + )) AS exists;""", + {"tenant_id": tenant_id}) + cur.execute(query) meta = cur.fetchone()["exists"] @@ -78,21 +80,23 @@ def get_state_installing(tenant_id): def get_state_identify_users(tenant_id): with pg_client.PostgresClient() as cur: - cur.execute("""SELECT EXISTS((SELECT 1 + query = cur.mogrify(f"""SELECT EXISTS((SELECT 1 FROM public.projects AS p LEFT JOIN LATERAL ( SELECT 1 FROM public.sessions WHERE sessions.project_id = p.project_id AND sessions.user_id IS NOT NULL LIMIT 1) AS sessions(user_id) ON (TRUE) - WHERE p.deleted_at ISNULL + WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL OR p.metadata_10 IS NOT NULL ) - )) AS exists;""") + )) AS exists;""", + {"tenant_id": tenant_id}) + cur.execute(query) meta = cur.fetchone()["exists"] diff --git a/api/chalicelib/core/errors/__init__.py b/api/chalicelib/core/errors/__init__.py new file mode 100644 index 000000000..2bb054058 --- /dev/null +++ b/api/chalicelib/core/errors/__init__.py @@ -0,0 +1,12 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +if config("EXP_ERRORS_SEARCH", cast=bool, default=False): + logger.info(">>> Using experimental error search") + from . import errors as errors_legacy + from . import errors_ch as errors +else: + from . import errors diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors/errors.py similarity index 99% rename from api/chalicelib/core/errors.py rename to api/chalicelib/core/errors/errors.py index 9ba9c1a39..4ea1e1f89 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors/errors.py @@ -1,7 +1,8 @@ import json import schemas -from chalicelib.core import sourcemaps, sessions +from chalicelib.core import sourcemaps +from chalicelib.core.errors.modules import sessions from chalicelib.utils import errors_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC diff --git a/ee/api/chalicelib/core/errors_exp.py b/api/chalicelib/core/errors/errors_ch.py similarity index 99% rename from ee/api/chalicelib/core/errors_exp.py rename to api/chalicelib/core/errors/errors_ch.py index 5665e658e..c7d28adbf 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/api/chalicelib/core/errors/errors_ch.py @@ -1,7 +1,7 @@ from decouple import config import schemas -from chalicelib.core import errors_legacy +from . 
import errors as errors_legacy from chalicelib.core import metrics, metadata from chalicelib.core import sessions from chalicelib.utils import ch_client, exp_ch_helper @@ -151,9 +151,6 @@ def __process_tags_map(row): def get_details(project_id, error_id, user_id, **data): - if not config("EXP_ERRORS_GET", cast=bool, default=False): - return errors_legacy.get_details(project_id, error_id, user_id, **data) - MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0) MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0) MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) @@ -167,7 +164,6 @@ def get_details(project_id, error_id, user_id, **data): ch_basic_query = __get_basic_constraints(time_constraint=False) ch_basic_query.append("error_id = %(error_id)s") - with ch_client.ClickHouseClient() as ch: data["startDate24"] = TimeUTC.now(-1) data["endDate24"] = TimeUTC.now() diff --git a/api/chalicelib/core/errors_favorite.py b/api/chalicelib/core/errors/errors_favorite.py similarity index 100% rename from api/chalicelib/core/errors_favorite.py rename to api/chalicelib/core/errors/errors_favorite.py diff --git a/api/chalicelib/core/errors_viewed.py b/api/chalicelib/core/errors/errors_viewed.py similarity index 100% rename from api/chalicelib/core/errors_viewed.py rename to api/chalicelib/core/errors/errors_viewed.py diff --git a/api/chalicelib/core/errors/modules/__init__.py b/api/chalicelib/core/errors/modules/__init__.py new file mode 100644 index 000000000..1ef2bd659 --- /dev/null +++ b/api/chalicelib/core/errors/modules/__init__.py @@ -0,0 +1,10 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +if config("EXP_ERRORS_SEARCH", cast=bool, default=False): + from chalicelib.core.sessions import sessions_ch as sessions +else: + from chalicelib.core.sessions import sessions diff --git a/api/chalicelib/core/issue_tracking/integrations_global.py b/api/chalicelib/core/issue_tracking/integrations_global.py index 7fcb19e74..a71141750 100644 --- a/api/chalicelib/core/issue_tracking/integrations_global.py +++ b/api/chalicelib/core/issue_tracking/integrations_global.py @@ -1,5 +1,5 @@ import schemas -from chalicelib.core.issue_tracking.modules import TENANT_CONDITION +from chalicelib.core.modules import TENANT_CONDITION from chalicelib.utils import pg_client diff --git a/api/chalicelib/core/log_tools/log_tools.py b/api/chalicelib/core/log_tools/log_tools.py index 78047ddb5..9b223a66a 100644 --- a/api/chalicelib/core/log_tools/log_tools.py +++ b/api/chalicelib/core/log_tools/log_tools.py @@ -1,6 +1,6 @@ from chalicelib.utils import pg_client, helper import json -from chalicelib.core.log_tools.modules import TENANT_CONDITION +from chalicelib.core.modules import TENANT_CONDITION EXCEPT = ["jira_server", "jira_cloud"] diff --git a/api/chalicelib/core/log_tools/modules/__init__.py b/api/chalicelib/core/log_tools/modules/__init__.py deleted file mode 100644 index a9100a3ac..000000000 --- a/api/chalicelib/core/log_tools/modules/__init__.py +++ /dev/null @@ -1 +0,0 @@ -TENANT_CONDITION = "TRUE" diff --git a/api/chalicelib/core/metrics/__init__.py b/api/chalicelib/core/metrics/__init__.py new file mode 100644 index 000000000..4f297cedb --- /dev/null +++ b/api/chalicelib/core/metrics/__init__.py @@ -0,0 +1,13 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +if config("EXP_METRICS", cast=bool, default=False): + logger.info(">>> Using experimental metrics") + from chalicelib.core.metrics import heatmaps_ch 
as heatmaps + from chalicelib.core.metrics import metrics_ch as metrics +else: + from chalicelib.core.metrics import heatmaps + from chalicelib.core.metrics import metrics diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/metrics/custom_metrics.py similarity index 99% rename from api/chalicelib/core/custom_metrics.py rename to api/chalicelib/core/metrics/custom_metrics.py index a9d51dfc9..fb5e33ba9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/metrics/custom_metrics.py @@ -4,7 +4,8 @@ import logging from fastapi import HTTPException, status import schemas -from chalicelib.core import funnels, errors, issues, heatmaps, product_analytics, custom_metrics_predefined +from chalicelib.core import errors, issues +from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined from chalicelib.core.sessions import sessions from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC diff --git a/api/chalicelib/core/custom_metrics_predefined.py b/api/chalicelib/core/metrics/custom_metrics_predefined.py similarity index 96% rename from api/chalicelib/core/custom_metrics_predefined.py rename to api/chalicelib/core/metrics/custom_metrics_predefined.py index 36c061717..923a2d296 100644 --- a/api/chalicelib/core/custom_metrics_predefined.py +++ b/api/chalicelib/core/metrics/custom_metrics_predefined.py @@ -2,7 +2,7 @@ import logging from typing import Union import schemas -from chalicelib.core import metrics +from chalicelib.core.metrics import metrics logger = logging.getLogger(__name__) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/metrics/dashboards.py similarity index 99% rename from api/chalicelib/core/dashboards.py rename to api/chalicelib/core/metrics/dashboards.py index 436397044..c02dea35e 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/metrics/dashboards.py @@ -1,7 +1,7 @@ import json import schemas -from chalicelib.core import custom_metrics +from chalicelib.core.metrics import custom_metrics from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/metrics/funnels.py similarity index 97% rename from api/chalicelib/core/funnels.py rename to api/chalicelib/core/metrics/funnels.py index 9d9e59915..40643f8d1 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/metrics/funnels.py @@ -1,7 +1,7 @@ from typing import List import schemas -from chalicelib.core import significance +from chalicelib.core.metrics.modules import significance from chalicelib.utils import helper from chalicelib.utils import sql_helper as sh diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/metrics/heatmaps.py similarity index 100% rename from api/chalicelib/core/heatmaps.py rename to api/chalicelib/core/metrics/heatmaps.py diff --git a/ee/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/metrics/heatmaps_ch.py similarity index 99% rename from ee/api/chalicelib/core/heatmaps.py rename to api/chalicelib/core/metrics/heatmaps_ch.py index 41cffa237..d4a6a72cb 100644 --- a/ee/api/chalicelib/core/heatmaps.py +++ b/api/chalicelib/core/metrics/heatmaps_ch.py @@ -3,14 +3,10 @@ import logging from decouple import config import schemas -from chalicelib.core import sessions_mobs, events +from chalicelib.core import events +from chalicelib.core.metrics.modules import sessions, sessions_mobs from 
chalicelib.utils import sql_helper as sh -if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from chalicelib.core import sessions_ch as sessions -else: - from chalicelib.core import sessions - from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper logger = logging.getLogger(__name__) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics/metrics.py similarity index 100% rename from api/chalicelib/core/metrics.py rename to api/chalicelib/core/metrics/metrics.py diff --git a/ee/api/chalicelib/core/metrics_ch.py b/api/chalicelib/core/metrics/metrics_ch.py similarity index 100% rename from ee/api/chalicelib/core/metrics_ch.py rename to api/chalicelib/core/metrics/metrics_ch.py diff --git a/api/chalicelib/core/metrics/modules/__init__.py b/api/chalicelib/core/metrics/modules/__init__.py new file mode 100644 index 000000000..30a6fb567 --- /dev/null +++ b/api/chalicelib/core/metrics/modules/__init__.py @@ -0,0 +1,12 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +if config("EXP_METRICS", cast=bool, default=False): + from chalicelib.core.sessions import sessions_ch as sessions +else: + from chalicelib.core.sessions import sessions + +from chalicelib.core.sessions import sessions_mobs diff --git a/api/chalicelib/core/metrics/modules/significance/__init__.py b/api/chalicelib/core/metrics/modules/significance/__init__.py new file mode 100644 index 000000000..34e574d85 --- /dev/null +++ b/api/chalicelib/core/metrics/modules/significance/__init__.py @@ -0,0 +1,10 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +from .significance import * + +if config("EXP_METRICS", cast=bool, default=False): + from .significance_ch import * diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/metrics/modules/significance/significance.py similarity index 99% rename from api/chalicelib/core/significance.py rename to api/chalicelib/core/metrics/modules/significance/significance.py index d3ae2a443..48162836d 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/metrics/modules/significance/significance.py @@ -1,20 +1,15 @@ import logging - -import schemas -from chalicelib.core import events, metadata -from chalicelib.utils import sql_helper as sh - -""" -todo: remove LIMIT from the query -""" - -from typing import List import math import warnings from collections import defaultdict +from typing import List from psycopg2.extras import RealDictRow + +import schemas +from chalicelib.core import events, metadata from chalicelib.utils import pg_client, helper +from chalicelib.utils import sql_helper as sh logger = logging.getLogger(__name__) SIGNIFICANCE_THRSH = 0.4 diff --git a/ee/api/chalicelib/core/significance_exp.py b/api/chalicelib/core/metrics/modules/significance/significance_ch.py similarity index 98% rename from ee/api/chalicelib/core/significance_exp.py rename to api/chalicelib/core/metrics/modules/significance/significance_ch.py index 0e04fe8c5..0dae4b59a 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/api/chalicelib/core/metrics/modules/significance/significance_ch.py @@ -1,6 +1,12 @@ +import logging +from typing import List + +from psycopg2.extras import RealDictRow + +import schemas from chalicelib.utils import ch_client from chalicelib.utils import exp_ch_helper -from .significance import * +from chalicelib.utils import helper logger = logging.getLogger(__name__) diff --git a/api/chalicelib/core/product_analytics.py 
b/api/chalicelib/core/metrics/product_analytics.py similarity index 100% rename from api/chalicelib/core/product_analytics.py rename to api/chalicelib/core/metrics/product_analytics.py diff --git a/api/chalicelib/core/product_anaytics2.py b/api/chalicelib/core/metrics/product_anaytics2.py similarity index 100% rename from api/chalicelib/core/product_anaytics2.py rename to api/chalicelib/core/metrics/product_anaytics2.py diff --git a/api/chalicelib/core/issue_tracking/modules/__init__.py b/api/chalicelib/core/modules/__init__.py similarity index 72% rename from api/chalicelib/core/issue_tracking/modules/__init__.py rename to api/chalicelib/core/modules/__init__.py index a9100a3ac..9b76dd704 100644 --- a/api/chalicelib/core/issue_tracking/modules/__init__.py +++ b/api/chalicelib/core/modules/__init__.py @@ -1 +1,2 @@ TENANT_CONDITION = "TRUE" +MOB_KEY="" \ No newline at end of file diff --git a/api/chalicelib/core/sessions/sessions_devtool.py b/api/chalicelib/core/sessions/sessions_devtool.py index 4512a3b0c..04b6e7236 100644 --- a/api/chalicelib/core/sessions/sessions_devtool.py +++ b/api/chalicelib/core/sessions/sessions_devtool.py @@ -1,5 +1,6 @@ from decouple import config +import schemas from chalicelib.utils.storage import StorageClient @@ -13,7 +14,7 @@ def __get_devtools_keys(project_id, session_id): ] -def get_urls(session_id, project_id, check_existence: bool = True): +def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True): results = [] for k in __get_devtools_keys(project_id=project_id, session_id=session_id): if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): diff --git a/api/chalicelib/core/sessions/sessions_replay.py b/api/chalicelib/core/sessions/sessions_replay.py index 0cdd1dad1..46b469737 100644 --- a/api/chalicelib/core/sessions/sessions_replay.py +++ b/api/chalicelib/core/sessions/sessions_replay.py @@ -4,6 +4,7 @@ from chalicelib.core import events, metadata, events_mobile, \ from chalicelib.core.sessions import sessions_mobs, sessions_devtool from chalicelib.utils import errors_helper from chalicelib.utils import pg_client, helper +from chalicelib.core.modules import MOB_KEY def __is_mobile_session(platform): @@ -42,6 +43,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat SELECT s.*, s.session_id::text AS session_id, + {MOB_KEY} (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} @@ -63,7 +65,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat else: data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - check_existence=False) + context=context, check_existence=False) data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) if user_testing.has_test_signals(session_id=session_id, project_id=project_id): data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id, diff --git a/api/chalicelib/core/sourcemaps/__init__.py b/api/chalicelib/core/sourcemaps/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/api/chalicelib/core/sourcemaps.py 
b/api/chalicelib/core/sourcemaps/sourcemaps.py similarity index 99% rename from api/chalicelib/core/sourcemaps.py rename to api/chalicelib/core/sourcemaps/sourcemaps.py index cca4d9ece..8a0b04f06 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps/sourcemaps.py @@ -3,7 +3,7 @@ from urllib.parse import urlparse import requests from decouple import config -from chalicelib.core import sourcemaps_parser +from chalicelib.core.sourcemaps import sourcemaps_parser from chalicelib.utils.storage import StorageClient, generators diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps/sourcemaps_parser.py similarity index 100% rename from api/chalicelib/core/sourcemaps_parser.py rename to api/chalicelib/core/sourcemaps/sourcemaps_parser.py diff --git a/api/env.default b/api/env.default index e54f9dfb4..947e3ad12 100644 --- a/api/env.default +++ b/api/env.default @@ -72,4 +72,6 @@ STAGE=default-foss TZ=UTC EXP_CH_DRIVER=true EXP_AUTOCOMPLETE=true -EXP_ALERTS=true \ No newline at end of file +EXP_ALERTS=true +EXP_ERRORS_SEARCH=true +EXP_METRICS=true \ No newline at end of file diff --git a/api/routers/core.py b/api/routers/core.py index 918dc9995..584aa4bd9 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -8,7 +8,8 @@ from chalicelib.core import sourcemaps, events, projects, alerts, issues, \ metadata, reset_password, \ log_tools, sessions, announcements, \ weekly_report, assist, mobile, tenants, boarding, \ - notifications, webhook, users, custom_metrics, saved_search, tags, autocomplete + notifications, webhook, users, saved_search, tags, autocomplete +from chalicelib.core.metrics import custom_metrics from chalicelib.core.issue_tracking import github, integrations_global, integrations_manager, \ jira_cloud from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \ diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 8f646d67c..d61dbdcd4 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -8,8 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re import schemas from chalicelib.core import scope -from chalicelib.core import errors, errors_viewed, errors_favorite, heatmaps, \ - assist, signup, feature_flags +from chalicelib.core import errors, assist, signup, feature_flags +from chalicelib.core.metrics import heatmaps +from chalicelib.core.errors import errors_favorite, errors_viewed from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \ sessions_assignments, unprocessed_sessions from chalicelib.core import tenants, users, projects, license diff --git a/api/routers/subs/insights.py b/api/routers/subs/insights.py index 2bd550dfe..66a5b1b18 100644 --- a/api/routers/subs/insights.py +++ b/api/routers/subs/insights.py @@ -1,7 +1,3 @@ -from fastapi import Body - -import schemas -from chalicelib.core import product_analytics from routers.base import get_routers public_app, app, app_apikey = get_routers() diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index e7580e353..05fecadf3 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -1,7 +1,7 @@ from typing import Union import schemas -from chalicelib.core import dashboards, custom_metrics +from chalicelib.core.metrics import custom_metrics, dashboards from fastapi import Body, Depends from or_dependencies import OR_context from routers.base import get_routers diff 
--git a/api/routers/subs/product_anaytics.py b/api/routers/subs/product_anaytics.py index 5f5de83c1..95851c253 100644 --- a/api/routers/subs/product_anaytics.py +++ b/api/routers/subs/product_anaytics.py @@ -1,8 +1,6 @@ -from typing import Union - import schemas -from chalicelib.core import product_anaytics2 -from fastapi import Body, Depends +from chalicelib.core.metrics import product_anaytics2 +from fastapi import Depends from or_dependencies import OR_context from routers.base import get_routers diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 2db51f61c..fa2f87b39 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -188,30 +188,38 @@ Pipfile.lock /chalicelib/core/assist.py /chalicelib/core/authorizers.py /chalicelib/core/autocomplete/* +/chalicelib/core/boarding.py /chalicelib/core/canvas.py /chalicelib/core/collaborations/* /chalicelib/core/countries.py -/chalicelib/core/metrics.py -/chalicelib/core/custom_metrics.py -/chalicelib/core/custom_metrics_predefined.py -/chalicelib/core/dashboards.py -/chalicelib/core/errors_favorite.py +/chalicelib/core/metrics/metrics.py +/chalicelib/core/metrics/custom_metrics.py +/chalicelib/core/metrics/custom_metrics_predefined.py +/chalicelib/core/metrics/dashboards.py +/chalicelib/core/metrics/funnels.py +/chalicelib/core/metrics/heatmaps.py +/chalicelib/core/metrics/heatmaps_ch.py +/chalicelib/core/metrics/metrics_ch.py +/chalicelib/core/events.py /chalicelib/core/events_mobile.py /chalicelib/core/feature_flags.py -/chalicelib/core/funnels.py -/chalicelib/core/issue_tracking/*.py +/chalicelib/core/issue_tracking/* /chalicelib/core/issues.py /chalicelib/core/jobs.py -/chalicelib/core/log_tools/*.py +/chalicelib/core/log_tools/* /chalicelib/core/metadata.py /chalicelib/core/mobile.py /chalicelib/core/saved_search.py /chalicelib/core/sessions/sessions.py /chalicelib/core/sessions/sessions_ch.py +/chalicelib/core/sessions/sessions_devtool.py +/chalicelib/core/sessions/sessions_favorite.py /chalicelib/core/sessions/sessions_assignments.py /chalicelib/core/sessions/sessions_metas.py /chalicelib/core/sessions/sessions_mobs.py +/chalicelib/core/sessions/sessions_replay.py /chalicelib/core/sessions/performance_event.py +/chalicelib/core/sessions/sessions_viewed.py /chalicelib/core/sessions/unprocessed_sessions.py /chalicelib/core/significance.py /chalicelib/core/socket_ios.py @@ -276,3 +284,8 @@ Pipfile.lock /chalicelib/core/alerts/alerts_processor_ch.py /chalicelib/core/alerts/alerts_listener.py /chalicelib/core/alerts/modules/helpers.py +/chalicelib/core/errors/modules/* +/chalicelib/core/errors/errors.py +/chalicelib/core/errors/errors_ch.py +/chalicelib/core/errors/errors_favorite.py +/chalicelib/core/errors/errors_viewed.py diff --git a/ee/api/chalicelib/core/__init__.py b/ee/api/chalicelib/core/__init__.py index 3af66a9c1..f18c3e969 100644 --- a/ee/api/chalicelib/core/__init__.py +++ b/ee/api/chalicelib/core/__init__.py @@ -3,31 +3,9 @@ import logging from decouple import config logger = logging.getLogger(__name__) -from . import custom_metrics as custom_metrics_legacy -from . import custom_metrics_ee as custom_metrics -from . import metrics_ch as metrics -from . import metrics as metrics_legacy + if config("EXP_AUTOCOMPLETE", cast=bool, default=False): logger.info(">>> Using experimental autocomplete") else: from . import autocomplete as autocomplete - -if config("EXP_ERRORS_SEARCH", cast=bool, default=False): - logger.info(">>> Using experimental error search") - from . import errors as errors_legacy - from . 
import errors_exp as errors - - if config("EXP_ERRORS_GET", cast=bool, default=False): - logger.info(">>> Using experimental error get") -else: - from . import errors as errors - -if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False): - logger.info(">>> Using experimental sessions search for metrics") - -if config("EXP_FUNNELS", cast=bool, default=False): - logger.info(">>> Using experimental funnels") - from . import significance_exp as significance -else: - from . import significance as significance diff --git a/ee/api/chalicelib/core/boarding.py b/ee/api/chalicelib/core/boarding.py deleted file mode 100644 index 8a2076b58..000000000 --- a/ee/api/chalicelib/core/boarding.py +++ /dev/null @@ -1,119 +0,0 @@ -from chalicelib.utils import pg_client -from chalicelib.core import log_tool_datadog, log_tool_stackdriver, log_tool_sentry - -from chalicelib.core import users -from chalicelib.core import projects - - -def get_state(tenant_id): - pids = projects.get_projects_ids(tenant_id=tenant_id) - with pg_client.PostgresClient() as cur: - recorded = False - meta = False - - if len(pids) > 0: - cur.execute( - cur.mogrify("""SELECT EXISTS(( SELECT 1 - FROM public.sessions AS s - WHERE s.project_id IN %(ids)s)) AS exists;""", - {"ids": tuple(pids)}) - ) - recorded = cur.fetchone()["exists"] - meta = False - if recorded: - cur.execute( - cur.mogrify("""SELECT EXISTS((SELECT 1 - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 1 - FROM public.sessions - WHERE sessions.project_id = p.project_id - AND sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON (TRUE) - WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL - AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL - OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL - OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL - OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL - OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL - OR p.metadata_10 IS NOT NULL ) - )) AS exists;""" - , {"tenant_id": tenant_id})) - - meta = cur.fetchone()["exists"] - - return [ - {"task": "Install OpenReplay", - "done": recorded, - "URL": "https://docs.openreplay.com/getting-started/quick-start"}, - {"task": "Identify Users", - "done": meta, - "URL": "https://docs.openreplay.com/data-privacy-security/metadata"}, - {"task": "Invite Team Members", - "done": len(users.get_members(tenant_id=tenant_id)) > 1, - "URL": "https://app.openreplay.com/client/manage-users"}, - {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, - "URL": "https://docs.openreplay.com/integrations"} - ] - - -def get_state_installing(tenant_id): - pids = projects.get_projects_ids(tenant_id=tenant_id) - with pg_client.PostgresClient() as cur: - recorded = False - - if len(pids) > 0: - cur.execute( - cur.mogrify("""SELECT EXISTS(( SELECT 1 - FROM public.sessions AS s - WHERE s.project_id IN %(ids)s)) AS exists;""", - {"ids": tuple(pids)}) - ) - recorded = cur.fetchone()["exists"] - - return {"task": "Install OpenReplay", - "done": recorded, - "URL": "https://docs.openreplay.com/getting-started/quick-start"} - - -def get_state_identify_users(tenant_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""SELECT EXISTS((SELECT 1 - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 1 - FROM public.sessions - WHERE sessions.project_id = p.project_id - AND 
sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON (TRUE) - WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL - AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL - OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL - OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL - OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL - OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL - OR p.metadata_10 IS NOT NULL ) - )) AS exists;""" - , {"tenant_id": tenant_id})) - - meta = cur.fetchone()["exists"] - - return {"task": "Identify Users", - "done": meta, - "URL": "https://docs.openreplay.com/data-privacy-security/metadata"} - - -def get_state_manage_users(tenant_id): - return {"task": "Invite Team Members", - "done": len(users.get_members(tenant_id=tenant_id)) > 1, - "URL": "https://app.openreplay.com/client/manage-users"} - - -def get_state_integrations(tenant_id): - return {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, - "URL": "https://docs.openreplay.com/integrations"} diff --git a/ee/api/chalicelib/core/custom_metrics_ee.py b/ee/api/chalicelib/core/custom_metrics_ee.py deleted file mode 100644 index dcfadfb0f..000000000 --- a/ee/api/chalicelib/core/custom_metrics_ee.py +++ /dev/null @@ -1,236 +0,0 @@ -import json -import logging - -from decouple import config -from fastapi import HTTPException, status -from .custom_metrics import * -import schemas -from chalicelib.core import funnels, issues, heatmaps, sessions_mobs, sessions_favorite, \ - product_analytics, custom_metrics_predefined -from chalicelib.utils import helper, pg_client -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.storage import extra - -# TODO: fix this import -from . import errors as errors -# if config("EXP_ERRORS_SEARCH", cast=bool, default=False): -# logging.info(">>> Using experimental error search") -# from . import errors_exp as errors -# else: -# from . 
import errors as errors - -if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False): - from chalicelib.core import sessions -else: - from chalicelib.core import sessions_legacy as sessions - -logger = logging.getLogger(__name__) - - -# TODO: refactor this to split -# timeseries / -# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs -# remove "table of" calls from this function -def __try_live(project_id, data: schemas.CardSchema): - results = [] - for i, s in enumerate(data.series): - results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, - view_type=data.view_type, metric_type=data.metric_type, - metric_of=data.metric_of, metric_value=data.metric_value)) - - return results - - -def __get_table_of_series(project_id, data: schemas.CardSchema): - results = [] - for i, s in enumerate(data.series): - results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, - metric_of=data.metric_of, metric_value=data.metric_value, - metric_format=data.metric_format)) - - return results - - -def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): - if len(data.series) == 0: - return { - "total": 0, - "errors": [] - } - return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id) - - -def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): - if len(data.series) == 0: - logger.debug("empty series") - return { - "total": 0, - "sessions": [] - } - return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id) - - -def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) - if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): - return None - results = [] - for s in data.series: - results.append({"seriesId": s.series_id, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results - - -def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema): - results = [] - if len(data.series) == 0: - return results - for s in data.series: - if len(data.filters) > 0: - s.filter.filters += data.filters - s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True)) - - results.append({"seriesId": None, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results - - -def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False): - with pg_client.PostgresClient() as cur: - session_data = None - if data.metric_type == schemas.MetricType.HEAT_MAP: - if data.session_id is not None: - session_data = {"sessionId": data.session_id} - else: - session_data = __get_heat_map_chart(project=project, user_id=user_id, - data=data, include_mobs=False) - if session_data is not None: - session_data = {"sessionId": session_data["sessionId"]} - - if session_data is not None: - # for EE only - keys = sessions_mobs. 
\ - __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"]) - keys += sessions_mobs. \ - __get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions - tag = config('RETENTION_L_VALUE', default='vault') - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") - logger.error(str(e)) - - _data = {"session_data": json.dumps(session_data) if session_data is not None else None} - for i, s in enumerate(data.series): - for k in s.model_dump().keys(): - _data[f"{k}_{i}"] = s.__getattribute__(k) - _data[f"index_{i}"] = i - _data[f"filter_{i}"] = s.filter.json() - series_len = len(data.series) - params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data, - "default_config": json.dumps(data.default_config.model_dump()), "card_info": None} - if data.metric_type == schemas.MetricType.PATH_ANALYSIS: - params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) - - query = """INSERT INTO metrics (project_id, user_id, name, is_public, - view_type, metric_type, metric_of, metric_value, - metric_format, default_config, thumbnail, data, - card_info) - VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, - %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, - %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, - %(card_info)s) - RETURNING metric_id""" - if len(data.series) > 0: - query = f"""WITH m AS ({query}) - INSERT INTO metric_series(metric_id, index, name, filter) - VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)" - for i in range(series_len)])} - RETURNING metric_id;""" - - query = cur.mogrify(query, params) - cur.execute(query) - r = cur.fetchone() - if dashboard: - return r["metric_id"] - return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)} - - -def delete_card(project_id, metric_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.metrics - SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) - WHERE project_id = %(project_id)s - AND metric_id = %(metric_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING data;""", - {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) - ) - # for EE only - row = cur.fetchone() - if row: - if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]): - keys = sessions_mobs. \ - __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"]) - keys += sessions_mobs. 
\ - __get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions - tag = config('RETENTION_D_VALUE', default='default') - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") - logger.error(str(e)) - return {"state": "success"} - - -def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, - data: schemas.CardSessionsSchema - # , range_value=None, start_date=None, end_date=None - ): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) - # if metric is None: - # return None - if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): - return None - for s in data.series: - s.filter.startTimestamp = data.startTimestamp - s.filter.endTimestamp = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page - issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) - issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", []) - issue = None - for i in issues_list: - if i.get("issueId", "") == issue_id: - issue = i - break - if issue is None: - issue = issues.get(project_id=project_id, issue_id=issue_id) - if issue is not None: - issue = {**issue, - "affectedSessions": 0, - "affectedUsers": 0, - "conversionImpact": 0, - "lostConversions": 0, - "unaffectedSessions": 0} - return {"seriesId": s.series_id, "seriesName": s.name, - "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, - issue=issue, data=s.filter) - if issue is not None else {"total": 0, "sessions": []}, - "issue": issue} diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py deleted file mode 100644 index 145db55d1..000000000 --- a/ee/api/chalicelib/core/errors.py +++ /dev/null @@ -1,609 +0,0 @@ -import json - -from decouple import config - -import schemas -from chalicelib.core import sourcemaps -from chalicelib.utils import errors_helper -from chalicelib.utils import pg_client, helper -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.metrics_helper import __get_step_size - -if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from chalicelib.core import sessions_legacy as sessions -else: - from chalicelib.core import sessions - - -def get(error_id, family=False): - if family: - return get_batch([error_id]) - with pg_client.PostgresClient() as cur: - # trying: return only 1 error, without event details - query = cur.mogrify( - # "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;", - "SELECT * FROM public.errors WHERE error_id = %(error_id)s LIMIT 1;", - {"error_id": error_id}) - cur.execute(query=query) - result = cur.fetchone() - if result is not None: - result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"]) - return helper.dict_to_camel_case(result) - - -def get_batch(error_ids): - if len(error_ids) == 0: - return [] - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """ - WITH RECURSIVE error_family AS ( - SELECT * - FROM public.errors - WHERE error_id IN %(error_ids)s - UNION - SELECT 
child_errors.* - FROM public.errors AS child_errors - INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id - ) - SELECT * - FROM error_family;""", - {"error_ids": tuple(error_ids)}) - cur.execute(query=query) - errors = cur.fetchall() - for e in errors: - e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) - return helper.list_to_camel_case(errors) - - -def __flatten_sort_key_count_version(data, merge_nested=False): - if data is None: - return [] - return sorted( - [ - { - "name": f'{o["name"]}@{v["version"]}', - "count": v["count"] - } for o in data for v in o["partition"] - ], - key=lambda o: o["count"], reverse=True) if merge_nested else \ - [ - { - "name": o["name"], - "count": o["count"], - } for o in data - ] - - -def __process_tags(row): - return [ - {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))}, - {"name": "browser.ver", - "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)}, - {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))}, - {"name": "OS.ver", - "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)}, - {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))}, - {"name": "device", - "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)}, - {"name": "country", "partitions": row.pop("country_partition")} - ] - - -def get_details(project_id, error_id, user_id, **data): - pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") - pg_sub_query24.append("error_id = %(error_id)s") - pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, - startTime_arg_name="startDate30", - endTime_arg_name="endDate30", project_key="sessions.project_id") - pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s") - pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s") - pg_sub_query30_session.append("error_id = %(error_id)s") - pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", - endTime_arg_name="endDate30", project_key="errors.project_id") - pg_sub_query30_err.append("sessions.project_id = %(project_id)s") - pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s") - pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s") - pg_sub_query30_err.append("error_id = %(error_id)s") - pg_sub_query30_err.append("source ='js_exception'") - pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30") - pg_sub_query30.append("error_id = %(error_id)s") - pg_basic_query = __get_basic_constraints(time_constraint=False) - pg_basic_query.append("error_id = %(error_id)s") - with pg_client.PostgresClient() as cur: - data["startDate24"] = TimeUTC.now(-1) - data["endDate24"] = TimeUTC.now() - data["startDate30"] = TimeUTC.now(-30) - data["endDate30"] = TimeUTC.now() - density24 = int(data.get("density24", 24)) - step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1) - density30 = int(data.get("density30", 30)) - step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1) - params = { - "startDate24": data['startDate24'], - 
"endDate24": data['endDate24'], - "startDate30": data['startDate30'], - "endDate30": data['endDate30'], - "project_id": project_id, - "userId": user_id, - "step_size24": step_size24, - "step_size30": step_size30, - "error_id": error_id} - - main_pg_query = f"""\ - SELECT error_id, - name, - message, - users, - sessions, - last_occurrence, - first_occurrence, - last_session_id, - browsers_partition, - os_partition, - device_partition, - country_partition, - chart24, - chart30, - custom_tags - FROM (SELECT error_id, - name, - message, - COUNT(DISTINCT user_id) AS users, - COUNT(DISTINCT session_id) AS sessions - FROM public.errors - INNER JOIN events.errors AS s_errors USING (error_id) - INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_err)} - GROUP BY error_id, name, message) AS details - INNER JOIN (SELECT MAX(timestamp) AS last_occurrence, - MIN(timestamp) AS first_occurrence - FROM events.errors - WHERE error_id = %(error_id)s) AS time_details ON (TRUE) - INNER JOIN (SELECT session_id AS last_session_id, - coalesce(custom_tags, '[]')::jsonb AS custom_tags - FROM events.errors - LEFT JOIN LATERAL ( - SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags - FROM errors_tags - WHERE errors_tags.error_id = %(error_id)s - AND errors_tags.session_id = errors.session_id - AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE) - WHERE error_id = %(error_id)s - ORDER BY errors.timestamp DESC - LIMIT 1) AS last_session_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition - FROM (SELECT * - FROM (SELECT user_browser AS name, - COUNT(session_id) AS count - FROM events.errors - INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_browser - ORDER BY count DESC) AS count_per_browser_query - INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition - FROM (SELECT user_browser_version AS version, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_browser = count_per_browser_query.name - GROUP BY user_browser_version - ORDER BY count DESC) AS version_details - ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition - FROM (SELECT * - FROM (SELECT user_os AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_os - ORDER BY count DESC) AS count_per_os_details - INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition - FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_os = count_per_os_details.name - GROUP BY user_os_version - ORDER BY count DESC) AS count_per_version_details - GROUP BY count_per_os_details.name ) AS os_version_details - ON (TRUE)) AS os_details) AS os_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition - FROM (SELECT * - FROM (SELECT user_device_type AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_device_type - ORDER BY count DESC) AS count_per_device_details - 
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition - FROM (SELECT CASE - WHEN user_device = '' OR user_device ISNULL - THEN 'unknown' - ELSE user_device END AS version, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_device_type = count_per_device_details.name - GROUP BY user_device - ORDER BY count DESC) AS count_per_device_v_details - GROUP BY count_per_device_details.name ) AS device_version_details - ON (TRUE)) AS device_details) AS device_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition - FROM (SELECT user_country AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_country - ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24 - FROM (SELECT generated_timestamp AS timestamp, - COUNT(session_id) AS count - FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp - LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors - INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query24)} - ) AS chart_details ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE) - INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30 - FROM (SELECT generated_timestamp AS timestamp, - COUNT(session_id) AS count - FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp - LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30)}) AS chart_details - ON (TRUE) - GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); - """ - - # print("--------------------") - # print(cur.mogrify(main_pg_query, params)) - # print("--------------------") - cur.execute(cur.mogrify(main_pg_query, params)) - row = cur.fetchone() - if row is None: - return {"errors": ["error not found"]} - row["tags"] = __process_tags(row) - - query = cur.mogrify( - f"""SELECT error_id, status, session_id, start_ts, - parent_error_id,session_id, user_anonymous_id, - user_id, user_uuid, user_browser, user_browser_version, - user_os, user_os_version, user_device, payload, - FALSE AS favorite, - True AS viewed - FROM public.errors AS pe - INNER JOIN events.errors AS ee USING (error_id) - INNER JOIN public.sessions USING (session_id) - WHERE pe.project_id = %(project_id)s - AND error_id = %(error_id)s - ORDER BY start_ts DESC - LIMIT 1;""", - {"project_id": project_id, "error_id": error_id, "user_id": user_id}) - cur.execute(query=query) - status = cur.fetchone() - - if status is not None: - row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack") - row["status"] = status.pop("status") - row["parent_error_id"] = status.pop("parent_error_id") - row["favorite"] = status.pop("favorite") - row["viewed"] = status.pop("viewed") - row["last_hydrated_session"] = status - else: - row["stack"] = [] - row["last_hydrated_session"] = None - row["status"] = "untracked" - row["parent_error_id"] = None - row["favorite"] = False - row["viewed"] = False - return {"data": helper.dict_to_camel_case(row)} - - -def __get_basic_constraints(platform=None, time_constraint=True, 
startTime_arg_name="startDate", - endTime_arg_name="endDate", chart=False, step_size_name="step_size", - project_key="project_id"): - if project_key is None: - ch_sub_query = [] - else: - ch_sub_query = [f"{project_key} =%(project_id)s"] - if time_constraint: - ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", - f"timestamp < %({endTime_arg_name})s"] - if chart: - ch_sub_query += [f"timestamp >= generated_timestamp", - f"timestamp < generated_timestamp + %({step_size_name})s"] - if platform == schemas.PlatformType.MOBILE: - ch_sub_query.append("user_device_type = 'mobile'") - elif platform == schemas.PlatformType.DESKTOP: - ch_sub_query.append("user_device_type = 'desktop'") - return ch_sub_query - - -def __get_sort_key(key): - return { - schemas.ErrorSort.OCCURRENCE: "max_datetime", - schemas.ErrorSort.USERS_COUNT: "users", - schemas.ErrorSort.SESSIONS_COUNT: "sessions" - }.get(key, 'max_datetime') - - -def search(data: schemas.SearchErrorsSchema, project_id, user_id): - empty_response = { - 'total': 0, - 'errors': [] - } - - platform = None - for f in data.filters: - if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0: - platform = f.value[0] - pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") - pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", - "pe.project_id=%(project_id)s"] - # To ignore Script error - pg_sub_query.append("pe.message!='Script error.'") - pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None) - if platform: - pg_sub_query_chart += ["start_ts>=%(startDate)s", "start_ts<%(endDate)s", "project_id=%(project_id)s"] - pg_sub_query_chart.append("errors.error_id =details.error_id") - statuses = [] - error_ids = None - if data.startTimestamp is None: - data.startTimestamp = TimeUTC.now(-30) - if data.endTimestamp is None: - data.endTimestamp = TimeUTC.now(1) - if len(data.events) > 0 or len(data.filters) > 0: - print("-- searching for sessions before errors") - statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=data.status) - if len(statuses) == 0: - return empty_response - error_ids = [e["errorId"] for e in statuses] - with pg_client.PostgresClient() as cur: - step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1) - sort = __get_sort_key('datetime') - if data.sort is not None: - sort = __get_sort_key(data.sort) - order = schemas.SortOrderType.DESC - if data.order is not None: - order = data.order - extra_join = "" - - params = { - "startDate": data.startTimestamp, - "endDate": data.endTimestamp, - "project_id": project_id, - "userId": user_id, - "step_size": step_size} - if data.status != schemas.ErrorStatus.ALL: - pg_sub_query.append("status = %(error_status)s") - params["error_status"] = data.status - if data.limit is not None and data.page is not None: - params["errors_offset"] = (data.page - 1) * data.limit - params["errors_limit"] = data.limit - else: - params["errors_offset"] = 0 - params["errors_limit"] = 200 - - if error_ids is not None: - params["error_ids"] = tuple(error_ids) - pg_sub_query.append("error_id IN %(error_ids)s") - # if data.bookmarked: - # pg_sub_query.append("ufe.user_id = %(userId)s") - # extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - if data.query is not None and len(data.query) > 0: - pg_sub_query.append("(pe.name ILIKE %(error_query)s OR 
pe.message ILIKE %(error_query)s)") - params["error_query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator.CONTAINS) - - main_pg_query = f"""SELECT full_count, - error_id, - name, - message, - users, - sessions, - last_occurrence, - first_occurrence, - chart - FROM (SELECT COUNT(details) OVER () AS full_count, details.* - FROM (SELECT error_id, - name, - message, - COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users, - COUNT(DISTINCT session_id) AS sessions, - MAX(timestamp) AS max_datetime, - MIN(timestamp) AS min_datetime - FROM events.errors - INNER JOIN public.errors AS pe USING (error_id) - INNER JOIN public.sessions USING (session_id) - {extra_join} - WHERE {" AND ".join(pg_sub_query)} - GROUP BY error_id, name, message - ORDER BY {sort} {order}) AS details - LIMIT %(errors_limit)s OFFSET %(errors_offset)s - ) AS details - INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, - MIN(timestamp) AS first_occurrence - FROM events.errors - WHERE errors.error_id = details.error_id) AS time_details ON (TRUE) - INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart - FROM (SELECT generated_timestamp AS timestamp, - COUNT(session_id) AS count - FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors - {"INNER JOIN public.sessions USING(session_id)" if platform else ""} - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS sessions ON (TRUE) - GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);""" - - # print("--------------------") - # print(cur.mogrify(main_pg_query, params)) - # print("--------------------") - - cur.execute(cur.mogrify(main_pg_query, params)) - rows = cur.fetchall() - total = 0 if len(rows) == 0 else rows[0]["full_count"] - - if total == 0: - rows = [] - else: - if len(statuses) == 0: - query = cur.mogrify( - """SELECT error_id, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE errors.error_id = ve.error_id - AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed - FROM public.errors - WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", - {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), - "user_id": user_id}) - cur.execute(query=query) - statuses = helper.list_to_camel_case(cur.fetchall()) - statuses = { - s["errorId"]: s for s in statuses - } - - for r in rows: - r.pop("full_count") - if r["error_id"] in statuses: - r["viewed"] = statuses[r["error_id"]]["viewed"] - else: - r["viewed"] = False - - return { - 'total': total, - 'errors': helper.list_to_camel_case(rows) - } - - -def __save_stacktrace(error_id, data): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """UPDATE public.errors - SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now()) - WHERE error_id = %(error_id)s;""", - {"error_id": error_id, "data": json.dumps(data)}) - cur.execute(query=query) - - -def get_trace(project_id, error_id): - error = get(error_id=error_id, family=False) - if error is None: - return {"errors": ["error not found"]} - if error.get("source", "") != "js_exception": - return {"errors": ["this source of errors doesn't have a sourcemap"]} - if error.get("payload") is None: - return {"errors": ["null payload"]} - if error.get("stacktrace") is not None: - return {"sourcemapUploaded": True, - "trace": error.get("stacktrace"), - "preparsed": True} - trace, all_exists = 
sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"]) - if all_exists: - __save_stacktrace(error_id=error_id, data=trace) - return {"sourcemapUploaded": all_exists, - "trace": trace, - "preparsed": False} - - -def get_sessions(start_date, end_date, project_id, user_id, error_id): - extra_constraints = ["s.project_id = %(project_id)s", - "s.start_ts >= %(startDate)s", - "s.start_ts <= %(endDate)s", - "e.error_id = %(error_id)s"] - if start_date is None: - start_date = TimeUTC.now(-7) - if end_date is None: - end_date = TimeUTC.now() - - params = { - "startDate": start_date, - "endDate": end_date, - "project_id": project_id, - "userId": user_id, - "error_id": error_id} - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - f"""SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_agent, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count, - s.issue_types, - COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite, - COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id) - WHERE {" AND ".join(extra_constraints)} - ORDER BY s.start_ts DESC;""", - params) - cur.execute(query=query) - sessions_list = [] - total = cur.rowcount - row = cur.fetchone() - while row is not None and len(sessions_list) < 100: - sessions_list.append(row) - row = cur.fetchone() - - return { - 'total': total, - 'sessions': helper.list_to_camel_case(sessions_list) - } - - -ACTION_STATE = { - "unsolve": 'unresolved', - "solve": 'resolved', - "ignore": 'ignored' -} - - -def change_state(project_id, user_id, error_id, action): - errors = get(error_id, family=True) - print(len(errors)) - status = ACTION_STATE.get(action) - if errors is None or len(errors) == 0: - return {"errors": ["error not found"]} - if errors[0]["status"] == status: - return {"errors": [f"error is already {status}"]} - - if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]: - return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]} - - params = { - "userId": user_id, - "error_ids": tuple([e["errorId"] for e in errors]), - "status": status} - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """UPDATE public.errors - SET status = %(status)s - WHERE error_id IN %(error_ids)s - RETURNING status""", - params) - cur.execute(query=query) - row = cur.fetchone() - if row is not None: - for e in errors: - e["status"] = row["status"] - return {"data": errors} diff --git a/ee/api/chalicelib/core/errors/__init__.py b/ee/api/chalicelib/core/errors/__init__.py new file mode 100644 index 000000000..621bf311f --- /dev/null +++ b/ee/api/chalicelib/core/errors/__init__.py @@ -0,0 +1,14 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +if config("EXP_ERRORS_SEARCH", cast=bool, default=False): + logger.info(">>> Using experimental error search") + from . import errors as errors_legacy + from . import errors_ch as errors +else: + from . import errors + +from . 
import errors_viewed_ee as errors_viewed diff --git a/ee/api/chalicelib/core/errors_viewed_exp.py b/ee/api/chalicelib/core/errors/errors_viewed_ee.py similarity index 70% rename from ee/api/chalicelib/core/errors_viewed_exp.py rename to ee/api/chalicelib/core/errors/errors_viewed_ee.py index 7a2a6ddc5..fb0ecf5c8 100644 --- a/ee/api/chalicelib/core/errors_viewed_exp.py +++ b/ee/api/chalicelib/core/errors/errors_viewed_ee.py @@ -1,13 +1,14 @@ import logging -from decouple import config - +from chalicelib.core.errors.errors_viewed import * from chalicelib.utils import ch_client, exp_ch_helper -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) +_add_viewed_error = add_viewed_error +logger = logging.getLogger(__name__) def add_viewed_error(project_id, user_id, error_id): + _add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id) with ch_client.ClickHouseClient() as cur: query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_errors_table()}(project_id,user_id, error_id) VALUES (%(project_id)s,%(userId)s,%(error_id)s);""" diff --git a/ee/api/chalicelib/core/errors_viewed.py b/ee/api/chalicelib/core/errors_viewed.py deleted file mode 100644 index f66e10d90..000000000 --- a/ee/api/chalicelib/core/errors_viewed.py +++ /dev/null @@ -1,39 +0,0 @@ -from chalicelib.utils import pg_client -from chalicelib.core import errors_viewed_exp - - -def add_viewed_error(project_id, user_id, error_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id) - VALUES (%(userId)s,%(error_id)s);""", - {"userId": user_id, "error_id": error_id}) - ) - errors_viewed_exp.add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id) - - -def viewed_error_exists(user_id, error_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """SELECT - errors.error_id AS hydrated, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE ve.error_id = %(error_id)s - AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - FROM public.errors - WHERE error_id = %(error_id)s""", - {"userId": user_id, "error_id": error_id}) - cur.execute( - query=query - ) - r = cur.fetchone() - if r: - return r.get("viewed") - return True - - -def viewed_error(project_id, user_id, error_id): - if viewed_error_exists(user_id=user_id, error_id=error_id): - return None - return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id) diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py deleted file mode 100644 index d397ca3bf..000000000 --- a/ee/api/chalicelib/core/events.py +++ /dev/null @@ -1,223 +0,0 @@ -from typing import Optional - -from decouple import config - -import schemas -from chalicelib.core import issues -from chalicelib.core.sessions import sessions_metas -from chalicelib.utils import pg_client, helper -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.event_filter_definition import SupportedFilter, Event - -if config("EXP_AUTOCOMPLETE", cast=bool, default=False): - from . import autocomplete_exp as autocomplete -else: - from . 
import autocomplete as autocomplete - - -def get_customs_by_session_id(session_id, project_id): - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("""\ - SELECT - c.*, - 'CUSTOM' AS type - FROM events_common.customs AS c - WHERE - c.session_id = %(session_id)s - ORDER BY c.timestamp;""", - {"project_id": project_id, "session_id": session_id}) - ) - rows = cur.fetchall() - return helper.dict_to_camel_case(rows) - - -def __merge_cells(rows, start, count, replacement): - rows[start] = replacement - rows = rows[:start + 1] + rows[start + count:] - return rows - - -def __get_grouped_clickrage(rows, session_id, project_id): - click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id) - if len(click_rage_issues) == 0: - return rows - - for c in click_rage_issues: - merge_count = c.get("payload") - if merge_count is not None: - merge_count = merge_count.get("Count", 3) - else: - merge_count = 3 - for i in range(len(rows)): - if rows[i]["timestamp"] == c["timestamp"]: - rows = __merge_cells(rows=rows, - start=i, - count=merge_count, - replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count}) - break - return rows - - -def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None): - with pg_client.PostgresClient() as cur: - rows = [] - if event_type is None or event_type == schemas.EventType.CLICK: - cur.execute(cur.mogrify("""\ - SELECT - c.*, - 'CLICK' AS type - FROM events.clicks AS c - WHERE - c.session_id = %(session_id)s - ORDER BY c.timestamp;""", - {"project_id": project_id, "session_id": session_id}) - ) - rows += cur.fetchall() - if group_clickrage: - rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) - if event_type is None or event_type == schemas.EventType.INPUT: - cur.execute(cur.mogrify(""" - SELECT - i.*, - 'INPUT' AS type - FROM events.inputs AS i - WHERE - i.session_id = %(session_id)s - ORDER BY i.timestamp;""", - {"project_id": project_id, "session_id": session_id}) - ) - rows += cur.fetchall() - if event_type is None or event_type == schemas.EventType.LOCATION: - cur.execute(cur.mogrify("""\ - SELECT - l.*, - l.path AS value, - l.path AS url, - 'LOCATION' AS type - FROM events.pages AS l - WHERE - l.session_id = %(session_id)s - ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) - rows += cur.fetchall() - rows = helper.list_to_camel_case(rows) - rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"])) - return rows - - -def _search_tags(project_id, value, key=None, source=None): - with pg_client.PostgresClient() as cur: - query = f""" - SELECT public.tags.name - 'TAG' AS type - FROM public.tags - WHERE public.tags.project_id = %(project_id)s - ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC - LIMIT 10 - """ - query = cur.mogrify(query, {'project_id': project_id, 'value': value}) - cur.execute(query) - results = helper.list_to_camel_case(cur.fetchall()) - return results - - -class EventType: - CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label") - INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label") - LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path") - CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name") - REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path") - GRAPHQL = 
Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name") - STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name") - TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id") - ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors", - column=None) # column=None because errors are searched by name or message - METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None) - # MOBILE - CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label") - INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label") - VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name") - SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label") - CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name") - REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path") - CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes", - column=None) # column=None because errors are searched by name or message - - -SUPPORTED_TYPES = { - EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK), - query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)), - EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT), - query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)), - EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION), - query=autocomplete.__generic_query( - typename=EventType.LOCATION.ui_type)), - EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM), - query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)), - EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST), - query=autocomplete.__generic_query( - typename=EventType.REQUEST.ui_type)), - EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL), - query=autocomplete.__generic_query( - typename=EventType.GRAPHQL.ui_type)), - EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION), - query=autocomplete.__generic_query( - typename=EventType.STATEACTION.ui_type)), - EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None), - EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors, - query=None), - EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata, - query=None), - # IOS - EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.CLICK_MOBILE.ui_type)), - EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.INPUT_MOBILE.ui_type)), - EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.VIEW_MOBILE.ui_type)), - EventType.CUSTOM_MOBILE.ui_type: 
SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.CUSTOM_MOBILE.ui_type)), - EventType.REQUEST_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.REQUEST_MOBILE.ui_type)), - EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile, - query=None), -} - - -def get_errors_by_session_id(session_id, project_id): - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(f"""\ - SELECT er.*,ur.*, er.timestamp - s.start_ts AS time - FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) - WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s - ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id})) - errors = cur.fetchall() - for e in errors: - e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) - return helper.list_to_camel_case(errors) - - -def search(text, event_type, project_id, source, key): - if not event_type: - return {"data": autocomplete.__get_autocomplete_table(text, project_id)} - - if event_type in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) - # for MOBILE events autocomplete - # if event_type + "_MOBILE" in SUPPORTED_TYPES.keys(): - # rows += SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key,source=source) - elif event_type + "_MOBILE" in SUPPORTED_TYPES.keys(): - rows = SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source) - elif event_type in sessions_metas.SUPPORTED_TYPES.keys(): - return sessions_metas.search(text, event_type, project_id) - elif event_type.endswith("_MOBILE") \ - and event_type[:-len("_MOBILE")] in sessions_metas.SUPPORTED_TYPES.keys(): - return sessions_metas.search(text, event_type, project_id) - else: - return {"errors": ["unsupported event"]} - - return {"data": rows} diff --git a/ee/api/chalicelib/core/integrations_global.py b/ee/api/chalicelib/core/integrations_global.py deleted file mode 100644 index 3f0d96751..000000000 --- a/ee/api/chalicelib/core/integrations_global.py +++ /dev/null @@ -1,67 +0,0 @@ -import schemas -from chalicelib.utils import pg_client - - -def get_global_integrations_status(tenant_id, user_id, project_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""\ - SELECT EXISTS((SELECT 1 - FROM public.oauth_authentication - WHERE user_id = %(user_id)s - AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value}, - EXISTS((SELECT 1 - FROM public.jira_cloud - WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value}, - EXISTS((SELECT 1 - FROM 
public.integrations - WHERE project_id=%(project_id)s - AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s - AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value}, - EXISTS((SELECT 1 - FROM public.webhooks - WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value}, - EXISTS((SELECT 1 - FROM public.webhooks - WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value}, - EXISTS((SELECT 1 - FROM public.integrations - WHERE project_id=%(project_id)s AND provider='dynatrace')) AS {schemas.IntegrationType.DYNATRACE.value};""", - {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id}) - ) - current_integrations = cur.fetchone() - result = [] - for k in current_integrations.keys(): - result.append({"name": k, "integrated": current_integrations[k]}) - return result diff --git a/ee/api/chalicelib/core/issue_tracking/modules/__init__.py b/ee/api/chalicelib/core/issue_tracking/modules/__init__.py deleted file mode 100644 index 266d4a821..000000000 --- a/ee/api/chalicelib/core/issue_tracking/modules/__init__.py +++ /dev/null @@ -1 +0,0 @@ -TENANT_CONDITION = "tenant_id=%(tenant_id)s" diff --git a/ee/api/chalicelib/core/metrics/__init__.py b/ee/api/chalicelib/core/metrics/__init__.py new file mode 100644 index 000000000..1846e5c73 --- /dev/null +++ b/ee/api/chalicelib/core/metrics/__init__.py @@ -0,0 +1,9 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) + +from chalicelib.core.metrics import heatmaps_ch as heatmaps +from chalicelib.core.metrics import metrics_ch as metrics +from chalicelib.core.metrics import custom_metrics_ee as custom_metrics diff --git a/ee/api/chalicelib/core/metrics/custom_metrics_ee.py b/ee/api/chalicelib/core/metrics/custom_metrics_ee.py new file mode 100644 index 000000000..a450fd736 --- /dev/null +++ b/ee/api/chalicelib/core/metrics/custom_metrics_ee.py @@ -0,0 +1,99 @@ +import json +import logging + +from decouple import config +from chalicelib.utils.storage import extra +from chalicelib.core.sessions import sessions_mobs, sessions_favorite +from .custom_metrics import * + + +def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False): + with pg_client.PostgresClient() as cur: + session_data = None + if data.metric_type == schemas.MetricType.HEAT_MAP: + if data.session_id is not None: + session_data = {"sessionId": data.session_id} + else: + session_data = __get_heat_map_chart(project=project, user_id=user_id, + data=data, include_mobs=False) + if session_data is not None: + session_data = {"sessionId": session_data["sessionId"]} + + if session_data is not None: + # for EE only + keys = sessions_mobs. \ + __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"]) + keys += sessions_mobs. 
\ + __get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions + tag = config('RETENTION_L_VALUE', default='vault') + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") + logger.error(str(e)) + + _data = {"session_data": json.dumps(session_data) if session_data is not None else None} + for i, s in enumerate(data.series): + for k in s.model_dump().keys(): + _data[f"{k}_{i}"] = s.__getattribute__(k) + _data[f"index_{i}"] = i + _data[f"filter_{i}"] = s.filter.json() + series_len = len(data.series) + params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data, + "default_config": json.dumps(data.default_config.model_dump()), "card_info": None} + if data.metric_type == schemas.MetricType.PATH_ANALYSIS: + params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) + + query = """INSERT INTO metrics (project_id, user_id, name, is_public, + view_type, metric_type, metric_of, metric_value, + metric_format, default_config, thumbnail, data, + card_info) + VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, + %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, + %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, + %(card_info)s) + RETURNING metric_id""" + if len(data.series) > 0: + query = f"""WITH m AS ({query}) + INSERT INTO metric_series(metric_id, index, name, filter) + VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)" + for i in range(series_len)])} + RETURNING metric_id;""" + + query = cur.mogrify(query, params) + cur.execute(query) + r = cur.fetchone() + if dashboard: + return r["metric_id"] + return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)} + + +def delete_card(project_id, metric_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.metrics + SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) + WHERE project_id = %(project_id)s + AND metric_id = %(metric_id)s + AND (user_id = %(user_id)s OR is_public) + RETURNING data;""", + {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) + ) + # for EE only + row = cur.fetchone() + if row: + if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]): + keys = sessions_mobs. \ + __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"]) + keys += sessions_mobs. 
\ + __get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions + tag = config('RETENTION_D_VALUE', default='default') + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") + logger.error(str(e)) + return {"state": "success"} diff --git a/ee/api/chalicelib/core/log_tools/modules/__init__.py b/ee/api/chalicelib/core/modules/__init__.py similarity index 51% rename from ee/api/chalicelib/core/log_tools/modules/__init__.py rename to ee/api/chalicelib/core/modules/__init__.py index a9d0d44d9..d99c6d8b6 100644 --- a/ee/api/chalicelib/core/log_tools/modules/__init__.py +++ b/ee/api/chalicelib/core/modules/__init__.py @@ -1 +1,2 @@ TENANT_CONDITION = "tenant_id = %(tenant_id)s" +MOB_KEY="encode(file_key,'hex') AS file_key," \ No newline at end of file diff --git a/ee/api/chalicelib/core/product_analytics.py b/ee/api/chalicelib/core/product_analytics.py index d027c1da4..aa2016647 100644 --- a/ee/api/chalicelib/core/product_analytics.py +++ b/ee/api/chalicelib/core/product_analytics.py @@ -1,8 +1,8 @@ from typing import List import schemas -from chalicelib.core.metrics_ch import __get_basic_constraints, __get_meta_constraint -from chalicelib.core.metrics_ch import __get_constraint_values, __complete_missing_steps +from chalicelib.core.metrics.metrics_ch import __get_basic_constraints, __get_meta_constraint, __get_constraint_values, \ + __complete_missing_steps from chalicelib.utils import ch_client, exp_ch_helper from chalicelib.utils import helper, dev from chalicelib.utils.TimeUTC import TimeUTC diff --git a/ee/api/chalicelib/core/sessions/__init__.py b/ee/api/chalicelib/core/sessions/__init__.py index 0d26b2876..63a0db2b5 100644 --- a/ee/api/chalicelib/core/sessions/__init__.py +++ b/ee/api/chalicelib/core/sessions/__init__.py @@ -10,3 +10,7 @@ if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): from . import sessions_ch as sessions else: from . 
import sessions + +from chalicelib.core.sessions import sessions_devtool_ee as sessions_devtool +from chalicelib.core.sessions import sessions_viewed_ee as sessions_viewed +from chalicelib.core.sessions import sessions_favorite_ee as sessions_favorite diff --git a/ee/api/chalicelib/core/sessions/sessions_devtool.py b/ee/api/chalicelib/core/sessions/sessions_devtool.py deleted file mode 100644 index 6958eda78..000000000 --- a/ee/api/chalicelib/core/sessions/sessions_devtool.py +++ /dev/null @@ -1,39 +0,0 @@ -from decouple import config -from fastapi.security import SecurityScopes - -import schemas -from chalicelib.core import permissions -from chalicelib.utils.storage import StorageClient - -SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS]) - - -def __get_devtools_keys(project_id, session_id): - params = { - "sessionId": session_id, - "projectId": project_id - } - return [ - config("DEVTOOLS_MOB_PATTERN", default="%(sessionId)sdevtools") % params - ] - - -def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True): - if not permissions.check(security_scopes=SCOPES, context=context): - return [] - results = [] - for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k): - continue - results.append(StorageClient.get_presigned_url_for_sharing( - bucket=config("sessions_bucket"), - expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), - key=k - )) - return results - - -def delete_mobs(project_id, session_ids): - for session_id in session_ids: - for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k) diff --git a/ee/api/chalicelib/core/sessions/sessions_devtool_ee.py b/ee/api/chalicelib/core/sessions/sessions_devtool_ee.py new file mode 100644 index 000000000..e62177236 --- /dev/null +++ b/ee/api/chalicelib/core/sessions/sessions_devtool_ee.py @@ -0,0 +1,13 @@ +from fastapi.security import SecurityScopes + +from chalicelib.core import permissions +from chalicelib.core.sessions.sessions_devtool import * + +_get_urls = get_urls +SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS]) + + +def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True): + if not permissions.check(security_scopes=SCOPES, context=context): + return [] + return _get_urls(session_id=session_id, project_id=project_id, context=context, check_existence=check_existence) diff --git a/ee/api/chalicelib/core/sessions/sessions_favorite.py b/ee/api/chalicelib/core/sessions/sessions_favorite.py deleted file mode 100644 index 8f8b0e3f1..000000000 --- a/ee/api/chalicelib/core/sessions/sessions_favorite.py +++ /dev/null @@ -1,97 +0,0 @@ -import schemas -from chalicelib.core import sessions_favorite_exp, sessions_mobs, sessions_devtool -from chalicelib.utils import pg_client -from chalicelib.utils.storage import extra -from decouple import config - - -def add_favorite_session(context: schemas.CurrentContext, project_id, session_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""\ - INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(session_id)s) - RETURNING session_id;""", - {"userId": context.user_id, "session_id": session_id}) - ) - row = cur.fetchone() - if row: - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, 
session_id=session_id) - return {"data": {"sessionId": session_id}} - return {"errors": ["something went wrong"]} - - -def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""\ - DELETE FROM public.user_favorite_sessions - WHERE user_id = %(userId)s - AND session_id = %(session_id)s - RETURNING session_id;""", - {"userId": context.user_id, "session_id": session_id}) - ) - row = cur.fetchone() - if row: - sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return {"data": {"sessionId": session_id}} - return {"errors": ["something went wrong"]} - - -def favorite_session(context: schemas.CurrentContext, project_id, session_id): - keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id) - keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions - keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id) - - if favorite_session_exists(user_id=context.user_id, session_id=session_id): - tag = config('RETENTION_D_VALUE', default='default') - - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - print(f"!!!Error while tagging: {k} to {tag} for removal") - print(str(e)) - - return remove_favorite_session(context=context, project_id=project_id, session_id=session_id) - - tag = config('RETENTION_L_VALUE', default='vault') - - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - print(f"!!!Error while tagging: {k} to {tag} for vault") - print(str(e)) - - return add_favorite_session(context=context, project_id=project_id, session_id=session_id) - - -def favorite_session_exists(session_id, user_id=None): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""SELECT session_id - FROM public.user_favorite_sessions - WHERE - session_id = %(session_id)s - {'AND user_id = %(userId)s' if user_id else ''};""", - {"userId": user_id, "session_id": session_id}) - ) - r = cur.fetchone() - return r is not None - - -def get_start_end_timestamp(project_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts - FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id) - WHERE - user_favorite_sessions.user_id = %(userId)s - AND project_id = %(project_id)s;""", - {"userId": user_id, "project_id": project_id}) - ) - r = cur.fetchone() - return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"]) diff --git a/ee/api/chalicelib/core/sessions/sessions_favorite_ee.py b/ee/api/chalicelib/core/sessions/sessions_favorite_ee.py new file mode 100644 index 000000000..5529a0d72 --- /dev/null +++ b/ee/api/chalicelib/core/sessions/sessions_favorite_ee.py @@ -0,0 +1,75 @@ +import logging + +from decouple import config + +from chalicelib.utils import ch_client, exp_ch_helper + +logger = logging.getLogger(__name__) +from chalicelib.core.sessions import sessions_mobs, sessions_devtool +from chalicelib.core.sessions.sessions_favorite import * +from chalicelib.utils.storage import extra + +_add_favorite_session = add_favorite_session +_remove_favorite_session = remove_favorite_session + + +def add_favorite_session(context: schemas.CurrentContext, project_id, session_id): + result = _add_favorite_session(context=context, project_id=project_id, 
session_id=session_id) + if "data" in result: + add_favorite_session_to_ch(project_id=project_id, user_id=context.user_id, + session_id=session_id) + return result + + +def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): + result = _remove_favorite_session(context=context, project_id=project_id, session_id=session_id) + if "data" in result: + remove_favorite_session_from_ch(project_id=project_id, user_id=context.user_id, + session_id=session_id) + return result + + +def favorite_session(context: schemas.CurrentContext, project_id, session_id): + keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id) + keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions + keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id) + + if favorite_session_exists(user_id=context.user_id, session_id=session_id): + tag = config('RETENTION_D_VALUE', default='default') + + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + print(f"!!!Error while tagging: {k} to {tag} for removal") + print(str(e)) + + return remove_favorite_session(context=context, project_id=project_id, session_id=session_id) + + tag = config('RETENTION_L_VALUE', default='vault') + + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + print(f"!!!Error while tagging: {k} to {tag} for vault") + print(str(e)) + + return add_favorite_session(context=context, project_id=project_id, session_id=session_id) + + +def add_favorite_session_to_ch(project_id, user_id, session_id, sign=1): + try: + with ch_client.ClickHouseClient() as cur: + query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign) + VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);""" + params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign} + cur.execute(query=query, params=params) + + except Exception as err: + logger.error("------- Exception while adding favorite session to CH") + logger.error(err) + + +def remove_favorite_session_from_ch(project_id, user_id, session_id): + add_favorite_session_to_ch(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1) diff --git a/ee/api/chalicelib/core/sessions/sessions_favorite_exp.py b/ee/api/chalicelib/core/sessions/sessions_favorite_exp.py deleted file mode 100644 index 6ee8654b0..000000000 --- a/ee/api/chalicelib/core/sessions/sessions_favorite_exp.py +++ /dev/null @@ -1,24 +0,0 @@ -import logging - -from decouple import config - -from chalicelib.utils import ch_client, exp_ch_helper - -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) - - -def add_favorite_session(project_id, user_id, session_id, sign=1): - try: - with ch_client.ClickHouseClient() as cur: - query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign) - VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);""" - params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign} - cur.execute(query=query, params=params) - - except Exception as err: - logging.error("------- Exception while adding favorite session to CH") - logging.error(err) - - -def remove_favorite_session(project_id, user_id, session_id): - add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1) diff --git 
a/ee/api/chalicelib/core/sessions/sessions_notes.py b/ee/api/chalicelib/core/sessions/sessions_notes.py index a3b2c9328..008cf1588 100644 --- a/ee/api/chalicelib/core/sessions/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions/sessions_notes.py @@ -4,8 +4,8 @@ from urllib.parse import urljoin from decouple import config import schemas -from chalicelib.core.collaboration_msteams import MSTeams -from chalicelib.core.collaboration_slack import Slack +from chalicelib.core.collaborations.collaboration_msteams import MSTeams +from chalicelib.core.collaborations.collaboration_slack import Slack from chalicelib.utils import pg_client, helper from chalicelib.utils import sql_helper as sh from chalicelib.utils.TimeUTC import TimeUTC diff --git a/ee/api/chalicelib/core/sessions/sessions_replay.py b/ee/api/chalicelib/core/sessions/sessions_replay.py deleted file mode 100644 index 0ba8bab39..000000000 --- a/ee/api/chalicelib/core/sessions/sessions_replay.py +++ /dev/null @@ -1,157 +0,0 @@ -import schemas -from chalicelib.core import events, metadata, events_mobile, \ - sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing -from chalicelib.utils import errors_helper -from chalicelib.utils import pg_client, helper - - -def __is_mobile_session(platform): - return platform in ('ios', 'android') - - -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_pre_replay(project_id, session_id): - return { - 'domURL': [sessions_mobs.get_first_url(project_id=project_id, session_id=session_id, check_existence=False)]} - - -# This function should not use Clickhouse because it doesn't have `file_key` -def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - encode(file_key,'hex') AS file_key, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if __is_mobile_session(data["platform"]): - data['mobsUrl'] = [] - data['videoURL'] = sessions_mobs.get_mobile_videos(session_id=session_id, project_id=project_id, - check_existence=False) - else: - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False) - # for 
EE - # context is required to check if the use have the right to access devtools - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context, check_existence=False) - data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id) - if user_testing.has_test_signals(session_id=session_id, project_id=project_id): - data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id, - project_id=project_id, - check_existence=False) - else: - data['utxVideo'] = [] - - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, - check_existence=False) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - -def get_events(project_id, session_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - f"""SELECT session_id, platform, start_ts, duration - FROM public.sessions AS s - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id} - ) - cur.execute(query=query) - - s_data = cur.fetchone() - if s_data is not None: - s_data = helper.dict_to_camel_case(s_data) - data = {} - if __is_mobile_session(s_data["platform"]): - data['events'] = events_mobile.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - elif e["type"].endswith("_MOBILE"): - e["type"] = e["type"][:-len("_MOBILE")] - data['crashes'] = events_mobile.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_mobile.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['userTesting'] = [] - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id) - - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['issues'] = reduce_issues(data['issues']) - return data - else: - return None - - -# To reduce the number of issues in the replay; -# will be removed once we agree on how to show issues -def reduce_issues(issues_list): - if issues_list is None: - return None - i = 0 - # remove same-type issues if the time between them is <2s - while i < len(issues_list) - 1: - for j in range(i + 1, len(issues_list)): - if issues_list[i]["type"] == issues_list[j]["type"]: - break - else: - i += 1 - break - - if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000: - issues_list.pop(j) - else: - i += 1 - - return issues_list diff --git 
a/ee/api/chalicelib/core/sessions/sessions_viewed.py b/ee/api/chalicelib/core/sessions/sessions_viewed.py deleted file mode 100644 index 59bb55c75..000000000 --- a/ee/api/chalicelib/core/sessions/sessions_viewed.py +++ /dev/null @@ -1,13 +0,0 @@ -from chalicelib.core import sessions_viewed_exp -from chalicelib.utils import pg_client - - -def view_session(project_id, user_id, session_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""INSERT INTO public.user_viewed_sessions (user_id, session_id) - VALUES (%(userId)s,%(sessionId)s) - ON CONFLICT DO NOTHING;""", - {"userId": user_id, "sessionId": session_id}) - ) - sessions_viewed_exp.view_session(project_id=project_id, user_id=user_id, session_id=session_id) diff --git a/ee/api/chalicelib/core/sessions/sessions_viewed_exp.py b/ee/api/chalicelib/core/sessions/sessions_viewed_ee.py similarity index 81% rename from ee/api/chalicelib/core/sessions/sessions_viewed_exp.py rename to ee/api/chalicelib/core/sessions/sessions_viewed_ee.py index 3b26612cb..cac0150bc 100644 --- a/ee/api/chalicelib/core/sessions/sessions_viewed_exp.py +++ b/ee/api/chalicelib/core/sessions/sessions_viewed_ee.py @@ -1,11 +1,15 @@ from chalicelib.utils import ch_client, exp_ch_helper import logging from decouple import config +from chalicelib.core.sessions.sessions_viewed import * + +_view_session = view_session logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) def view_session(project_id, user_id, session_id): + _view_session(project_id=project_id, user_id=user_id, session_id=session_id) try: with ch_client.ClickHouseClient() as cur: query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_sessions_table()}(project_id, user_id, session_id) diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index e4148863f..e4087bfa0 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -12,28 +12,35 @@ rm -rf ./chalicelib/core/authorizers.py rm -rf ./chalicelib/core/autocomplete rm -rf ./chalicelib/core/collaborations rm -rf ./chalicelib/core/countries.py -rm -rf ./chalicelib/core/metrics.py -rm -rf ./chalicelib/core/custom_metrics.py -rm -rf ./chalicelib/core/custom_metrics_predefined.py -rm -rf ./chalicelib/core/dashboards.py -rm -rf ./chalicelib/core/errors_favorite.py +rm -rf ./chalicelib/core/metrics/metrics.py +rm -rf ./chalicelib/core/metrics/custom_metrics.py +rm -rf ./chalicelib/core/metrics/custom_metrics_predefined.py +rm -rf ./chalicelib/core/metrics/funnels.py +rm -rf ./chalicelib/core/metrics/dashboards.py +rm -rf ./chalicelib/core/metrics/heatmaps.py +rm -rf ./chalicelib/core/metrics/heatmaps_ch.py +rm -rf ./chalicelib/core/metrics/metrics_ch.py +rm -rf ./chalicelib/core/events.py rm -rf ./chalicelib/core/events_mobile.py rm -rf ./chalicelib/core/feature_flags.py -rm -rf ./chalicelib/core/funnels.py -rm -rf ./chalicelib/core/issue_tracking/*.py +rm -rf ./chalicelib/core/issue_tracking rm -rf ./chalicelib/core/integrations_manager.py rm -rf ./chalicelib/core/issues.py rm -rf ./chalicelib/core/jobs.py -rm -rf ./chalicelib/core/log_tools/*.py +rm -rf ./chalicelib/core/log_tools rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/saved_search.py rm -rf ./chalicelib/core/sessions/sessions.py rm -rf ./chalicelib/core/sessions/sessions_ch.py +rm -rf ./chalicelib/core/sessions/sessions_devtool.py +rm -rf ./chalicelib/core/sessions/sessions_favorite.py rm -rf ./chalicelib/core/sessions/sessions_assignments.py rm -rf ./chalicelib/core/sessions/sessions_metas.py rm -rf 
./chalicelib/core/sessions/sessions_mobs.py +rm -rf ./chalicelib/core/sessions/sessions_replay.py rm -rf ./chalicelib/core/sessions/performance_event.py +rm -rf ./chalicelib/core/sessions/sessions_viewed.py rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/socket_ios.py @@ -44,6 +51,7 @@ rm -rf ./chalicelib/core/tags.py rm -rf ./chalicelib/saml rm -rf ./chalicelib/utils/__init__.py rm -rf ./chalicelib/utils/args_transformer.py +rm -rf ./chalicelib/core/boarding.py rm -rf ./chalicelib/core/canvas.py rm -rf ./chalicelib/utils/captcha.py rm -rf ./chalicelib/utils/dev.py @@ -96,3 +104,8 @@ rm -rf ./chalicelib/core/alerts/alerts_processor.py rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py rm -rf ./chalicelib/core/alerts/alerts_listener.py rm -rf ./chalicelib/core/alerts/modules/helpers.py +rm -rf /chalicelib/core/errors/modules +rm -rf /chalicelib/core/errors/errors.py +rm -rf /chalicelib/core/errors/errors_ch.py +rm -rf /chalicelib/core/errors/errors_favorite.py +rm -rf /chalicelib/core/errors/errors_viewed.py diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 58ba666b5..3e53e7c92 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -8,7 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re import schemas from chalicelib.core import scope -from chalicelib.core import assist, heatmaps, errors, errors_viewed, errors_favorite, signup, feature_flags +from chalicelib.core import assist, signup, feature_flags +from chalicelib.core.errors import errors, errors_viewed, errors_favorite +from chalicelib.core.metrics import heatmaps from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \ sessions_viewed, unprocessed_sessions from chalicelib.core import tenants, users, projects, license diff --git a/ee/api/routers/subs/metrics.py b/ee/api/routers/subs/metrics.py index ba59488a4..2cf14c885 100644 --- a/ee/api/routers/subs/metrics.py +++ b/ee/api/routers/subs/metrics.py @@ -1,7 +1,7 @@ from typing import Union import schemas -from chalicelib.core import dashboards, custom_metrics +from chalicelib.core.metrics import dashboards, custom_metrics from fastapi import Body, Depends from or_dependencies import OR_context, OR_scope from routers.base import get_routers From 77d4c890cfd0f55a07963014ad2bb3732ecc3c98 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 12 Dec 2024 17:12:29 +0100 Subject: [PATCH 02/10] Dev (#2867) * fix(chalice): fixed CH funnels query for new driver * fix(chalice): fixed CH funnels support for nonexistent sequence --- .../core/metrics/modules/significance/significance_ch.py | 9 +++++---- api/chalicelib/utils/ch_client_exp.py | 1 - 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/api/chalicelib/core/metrics/modules/significance/significance_ch.py b/api/chalicelib/core/metrics/modules/significance/significance_ch.py index 0dae4b59a..d01c49da0 100644 --- a/api/chalicelib/core/metrics/modules/significance/significance_ch.py +++ b/api/chalicelib/core/metrics/modules/significance/significance_ch.py @@ -7,6 +7,8 @@ import schemas from chalicelib.utils import ch_client from chalicelib.utils import exp_ch_helper from chalicelib.utils import helper +from chalicelib.utils import sql_helper as sh +from chalicelib.core import events logger = logging.getLogger(__name__) @@ -208,7 +210,7 @@ def get_simple_funnel(filter_d: 
schemas.CardSeriesFilterSchema, project: schemas sequences = [] projections = [] for i, s in enumerate(n_stages_query): - projections.append(f"SUM(T{i + 1}) AS stage{i + 1}") + projections.append(f"coalesce(SUM(T{i + 1}),0) AS stage{i + 1}") if i == 0: sequences.append(f"anyIf(1,{s}) AS T1") else: @@ -226,11 +228,10 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas FROM (SELECT {",".join(sequences)} FROM {MAIN_EVENTS_TABLE} AS e {extra_from} WHERE {" AND ".join(constraints)} - GROUP BY {group_by}) AS raw; - """ + GROUP BY {group_by}) AS raw;""" with ch_client.ClickHouseClient() as cur: - query = cur.format(n_stages_query, full_args) + query = cur.format(query=n_stages_query, parameters=full_args) logger.debug("---------------------------------------------------") logger.debug(query) logger.debug("---------------------------------------------------") diff --git a/api/chalicelib/utils/ch_client_exp.py b/api/chalicelib/utils/ch_client_exp.py index 8bdb4c20b..dc3c06041 100644 --- a/api/chalicelib/utils/ch_client_exp.py +++ b/api/chalicelib/utils/ch_client_exp.py @@ -35,7 +35,6 @@ if config("CH_COMPRESSION", cast=bool, default=True): def transform_result(original_function): @wraps(original_function) def wrapper(*args, **kwargs): - logger.info("Executing query on CH") result = original_function(*args, **kwargs) if isinstance(result, clickhouse_connect.driver.query.QueryResult): column_names = result.column_names From 383bbee2dc1ad69e148eb4a6912bdb9aaeed71d4 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 12 Dec 2024 17:56:41 +0100 Subject: [PATCH 03/10] Dev (#2868) * refactor(chalice): refactored product analytics --- api/chalicelib/core/metrics/__init__.py | 2 + .../core/metrics/product_analytics_ch.py | 1362 +++++++++++++++++ ee/api/.gitignore | 3 + ee/api/chalicelib/core/metrics/__init__.py | 1 + ee/api/clean-dev.sh | 3 + 5 files changed, 1371 insertions(+) create mode 100644 api/chalicelib/core/metrics/product_analytics_ch.py diff --git a/api/chalicelib/core/metrics/__init__.py b/api/chalicelib/core/metrics/__init__.py index 4f297cedb..49f94d989 100644 --- a/api/chalicelib/core/metrics/__init__.py +++ b/api/chalicelib/core/metrics/__init__.py @@ -8,6 +8,8 @@ if config("EXP_METRICS", cast=bool, default=False): logger.info(">>> Using experimental metrics") from chalicelib.core.metrics import heatmaps_ch as heatmaps from chalicelib.core.metrics import metrics_ch as metrics + from chalicelib.core.metrics import product_analytics_ch as product_analytics else: from chalicelib.core.metrics import heatmaps from chalicelib.core.metrics import metrics + from chalicelib.core.metrics import product_analytics diff --git a/api/chalicelib/core/metrics/product_analytics_ch.py b/api/chalicelib/core/metrics/product_analytics_ch.py new file mode 100644 index 000000000..15df2dda2 --- /dev/null +++ b/api/chalicelib/core/metrics/product_analytics_ch.py @@ -0,0 +1,1362 @@ +from typing import List + +import schemas +from chalicelib.core.metrics.metrics_ch import __get_basic_constraints, __get_meta_constraint +from chalicelib.core.metrics.metrics_ch import __get_constraint_values, __complete_missing_steps +from chalicelib.utils import ch_client, exp_ch_helper +from chalicelib.utils import helper, dev +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils import sql_helper as sh +from chalicelib.core import metadata +from time import time + +import logging +from chalicelib.core.metrics.product_analytics import __transform_journey + +logger = 
logging.getLogger(__name__) + +JOURNEY_TYPES = { + schemas.ProductAnalyticsSelectedEventType.LOCATION: {"eventType": "LOCATION", "column": "url_path"}, + schemas.ProductAnalyticsSelectedEventType.CLICK: {"eventType": "CLICK", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.INPUT: {"eventType": "INPUT", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.CUSTOM_EVENT: {"eventType": "CUSTOM", "column": "name"} +} + + +# Q6: use events as a sub_query to support filter of materialized columns when doing a join +# query: Q5, the result is correct, +# startPoints are computed before ranked_events to reduce the number of window functions over rows +# replaced time_to_target by time_from_previous +# compute avg_time_from_previous at the same level as sessions_count +# sort by top 5 according to sessions_count at the CTE level +# final part project data without grouping +# if start-point is selected, the selected event is ranked n°1 +def path_analysis(project_id: int, data: schemas.CardPathAnalysis): + sub_events = [] + start_points_conditions = [] + step_0_conditions = [] + if len(data.metric_value) == 0: + data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.LOCATION) + sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["column"], + "eventType": schemas.ProductAnalyticsSelectedEventType.LOCATION.value}) + else: + for v in data.metric_value: + if JOURNEY_TYPES.get(v): + sub_events.append({"column": JOURNEY_TYPES[v]["column"], + "eventType": JOURNEY_TYPES[v]["eventType"]}) + if len(sub_events) == 1: + main_column = sub_events[0]['column'] + else: + main_column = f"multiIf(%s,%s)" % ( + ','.join([f"event_type='{s['eventType']}',{s['column']}" for s in sub_events[:-1]]), + sub_events[-1]["column"]) + extra_values = {} + reverse = data.start_type == "end" + for i, sf in enumerate(data.start_point): + f_k = f"start_point_{i}" + op = sh.get_sql_operator(sf.operator) + sf.value = helper.values_for_operator(value=sf.value, op=sf.operator) + is_not = sh.is_negation_operator(sf.operator) + event_column = JOURNEY_TYPES[sf.type]['column'] + event_type = JOURNEY_TYPES[sf.type]['eventType'] + extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k), + f"start_event_type_{i}": event_type} + start_points_conditions.append(f"(event_type=%(start_event_type_{i})s AND " + + sh.multi_conditions(f'{event_column} {op} %({f_k})s', sf.value, is_not=is_not, + value_key=f_k) + + ")") + step_0_conditions.append(f"(event_type=%(start_event_type_{i})s AND " + + sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not, + value_key=f_k) + + ")") + if len(start_points_conditions) > 0: + start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")", + "events.project_id = toUInt16(%(project_id)s)", + "events.datetime >= toDateTime(%(startTimestamp)s / 1000)", + "events.datetime < toDateTime(%(endTimestamp)s / 1000)"] + step_0_conditions = ["(" + " OR ".join(step_0_conditions) + ")", + "pre_ranked_events.event_number_in_session = 1"] + + exclusions = {} + for i, ef in enumerate(data.excludes): + if len(ef.value) == 0: + continue + if ef.type in data.metric_value: + f_k = f"exclude_{i}" + ef.value = helper.values_for_operator(value=ef.value, op=ef.operator) + op = sh.get_sql_operator(ef.operator) + op = sh.reverse_sql_operator(op) + extra_values = {**extra_values, **sh.multi_values(ef.value, value_key=f_k)} + exclusions[ef.type] = [ + sh.multi_conditions(f'{JOURNEY_TYPES[ef.type]["column"]} {op} 
%({f_k})s', ef.value, is_not=True, + value_key=f_k)] + + sessions_conditions = [] + meta_keys = None + for i, f in enumerate(data.series[0].filter.filters): + op = sh.get_sql_operator(f.operator) + is_any = sh.isAny_opreator(f.operator) + is_not = sh.is_negation_operator(f.operator) + is_undefined = sh.isUndefined_operator(f.operator) + f_k = f"f_value_{i}" + extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)} + + if not is_any and len(f.value) == 0: + continue + + # ---- meta-filters + if f.type == schemas.FilterType.USER_BROWSER: + if is_any: + sessions_conditions.append('isNotNull(user_browser)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.USER_OS]: + if is_any: + sessions_conditions.append('isNotNull(user_os)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.USER_DEVICE]: + if is_any: + sessions_conditions.append('isNotNull(user_device)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.USER_COUNTRY]: + if is_any: + sessions_conditions.append('isNotNull(user_country)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.USER_CITY: + if is_any: + sessions_conditions.append('isNotNull(user_city)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.USER_STATE: + if is_any: + sessions_conditions.append('isNotNull(user_state)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.UTM_SOURCE]: + if is_any: + sessions_conditions.append('isNotNull(utm_source)') + elif is_undefined: + sessions_conditions.append('isNull(utm_source)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.UTM_MEDIUM]: + if is_any: + sessions_conditions.append('isNotNull(utm_medium)') + elif is_undefined: + sessions_conditions.append('isNull(utm_medium)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.UTM_CAMPAIGN]: + if is_any: + sessions_conditions.append('isNotNull(utm_campaign)') + elif is_undefined: + sessions_conditions.append('isNull(utm_campaign)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.DURATION: + if len(f.value) > 0 and f.value[0] is not None: + sessions_conditions.append("duration >= %(minDuration)s") + extra_values["minDuration"] = f.value[0] + if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: + sessions_conditions.append("duration <= %(maxDuration)s") + extra_values["maxDuration"] = f.value[1] + elif f.type == schemas.FilterType.REFERRER: + # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + if is_any: + 
sessions_conditions.append('isNotNull(base_referrer)') + else: + sessions_conditions.append( + sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + elif f.type == schemas.FilterType.METADATA: + # get metadata list only if you need it + if meta_keys is None: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if f.source in meta_keys.keys(): + if is_any: + sessions_conditions.append(f"isNotNull({metadata.index_to_colname(meta_keys[f.source])})") + elif is_undefined: + sessions_conditions.append(f"isNull({metadata.index_to_colname(meta_keys[f.source])})") + else: + sessions_conditions.append( + sh.multi_conditions( + f"{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)", + f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: + if is_any: + sessions_conditions.append('isNotNull(user_id)') + elif is_undefined: + sessions_conditions.append('isNull(user_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"user_id {op} toString(%({f_k})s)", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: + if is_any: + sessions_conditions.append('isNotNull(user_anonymous_id)') + elif is_undefined: + sessions_conditions.append('isNull(user_anonymous_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: + if is_any: + sessions_conditions.append('isNotNull(rev_id)') + elif is_undefined: + sessions_conditions.append('isNull(rev_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.PLATFORM: + # op = __ sh.get_sql_operator(f.operator) + sessions_conditions.append( + sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.ISSUE: + if is_any: + sessions_conditions.append("array_length(issue_types, 1) > 0") + else: + sessions_conditions.append( + sh.multi_conditions(f"has(issue_types,%({f_k})s)", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.EVENTS_COUNT: + sessions_conditions.append( + sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + + if reverse: + path_direction = "DESC" + else: + path_direction = "" + + ch_sub_query = __get_basic_constraints(table_name="events") + selected_event_type_sub_query = [] + for s in data.metric_value: + selected_event_type_sub_query.append(f"events.event_type = '{JOURNEY_TYPES[s]['eventType']}'") + if s in exclusions: + selected_event_type_sub_query[-1] += " AND (" + " AND ".join(exclusions[s]) + ")" + selected_event_type_sub_query = " OR ".join(selected_event_type_sub_query) + ch_sub_query.append(f"({selected_event_type_sub_query})") + + main_events_table = exp_ch_helper.get_main_events_table(data.startTimestamp) + " AS events" + main_sessions_table = exp_ch_helper.get_main_sessions_table(data.startTimestamp) + " AS sessions" + if len(sessions_conditions) > 0: + sessions_conditions.append(f"sessions.project_id = toUInt16(%(project_id)s)") + sessions_conditions.append(f"sessions.datetime >= toDateTime(%(startTimestamp)s / 
1000)") + sessions_conditions.append(f"sessions.datetime < toDateTime(%(endTimestamp)s / 1000)") + sessions_conditions.append("sessions.events_count>1") + sessions_conditions.append("sessions.duration>0") + + initial_sessions_cte = f"""sub_sessions AS (SELECT DISTINCT session_id + FROM {main_sessions_table} + WHERE {" AND ".join(sessions_conditions)}),""" + else: + initial_sessions_cte = "" + + if len(start_points_conditions) == 0: + step_0_subquery = """SELECT DISTINCT session_id + FROM (SELECT event_type, e_value + FROM pre_ranked_events + WHERE event_number_in_session = 1 + GROUP BY event_type, e_value + ORDER BY count(1) DESC + LIMIT 1) AS top_start_events + INNER JOIN pre_ranked_events + ON (top_start_events.event_type = pre_ranked_events.event_type AND + top_start_events.e_value = pre_ranked_events.e_value) + WHERE pre_ranked_events.event_number_in_session = 1""" + initial_event_cte = "" + else: + step_0_subquery = f"""SELECT DISTINCT session_id + FROM pre_ranked_events + WHERE {" AND ".join(step_0_conditions)}""" + initial_event_cte = f"""\ + initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp + FROM {main_events_table} {"INNER JOIN sub_sessions USING (session_id)" if len(sessions_conditions) > 0 else ""} + WHERE {" AND ".join(start_points_conditions)} + GROUP BY 1),""" + ch_sub_query.append("events.datetime>=initial_event.start_event_timestamp") + main_events_table += " INNER JOIN initial_event ON (events.session_id = initial_event.session_id)" + sessions_conditions = [] + + steps_query = ["""n1 AS (SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + AVG(time_from_previous) AS avg_time_from_previous, + COUNT(1) AS sessions_count + FROM ranked_events + WHERE event_number_in_session = 1 + AND isNotNull(next_value) + GROUP BY event_number_in_session, event_type, e_value, next_type, next_value + ORDER BY sessions_count DESC + LIMIT %(eventThresholdNumberInGroup)s)"""] + projection_query = ["""SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + sessions_count, + avg_time_from_previous + FROM n1"""] + for i in range(2, data.density + 1): + steps_query.append(f"""n{i} AS (SELECT * + FROM (SELECT re.event_number_in_session AS event_number_in_session, + re.event_type AS event_type, + re.e_value AS e_value, + re.next_type AS next_type, + re.next_value AS next_value, + AVG(re.time_from_previous) AS avg_time_from_previous, + COUNT(1) AS sessions_count + FROM n{i - 1} INNER JOIN ranked_events AS re + ON (n{i - 1}.next_value = re.e_value AND n{i - 1}.next_type = re.event_type) + WHERE re.event_number_in_session = {i} + GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value) AS sub_level + ORDER BY sessions_count DESC + LIMIT %(eventThresholdNumberInGroup)s)""") + projection_query.append(f"""SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + sessions_count, + avg_time_from_previous + FROM n{i}""") + + with ch_client.ClickHouseClient(database="experimental") as ch: + time_key = TimeUTC.now() + _now = time() + params = {"project_id": project_id, "startTimestamp": data.startTimestamp, + "endTimestamp": data.endTimestamp, "density": data.density, + "eventThresholdNumberInGroup": 4 if data.hide_excess else 8, + **extra_values} + + ch_query1 = f"""\ +CREATE TEMPORARY TABLE pre_ranked_events_{time_key} AS +WITH {initial_sessions_cte} + {initial_event_cte} + pre_ranked_events AS (SELECT * + FROM (SELECT session_id, + event_type, + 
datetime, + {main_column} AS e_value, + row_number() OVER (PARTITION BY session_id + ORDER BY datetime {path_direction}, + message_id {path_direction} ) AS event_number_in_session + FROM {main_events_table} {"INNER JOIN sub_sessions ON (sub_sessions.session_id = events.session_id)" if len(sessions_conditions) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + ) AS full_ranked_events + WHERE event_number_in_session <= %(density)s) +SELECT * +FROM pre_ranked_events;""" + logger.debug("---------Q1-----------") + ch.execute(query=ch_query1, params=params) + if time() - _now > 2: + logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") + logger.warning(ch.format(ch_query1, params)) + logger.warning("----------------------") + _now = time() + + ch_query2 = f"""\ +CREATE TEMPORARY TABLE ranked_events_{time_key} AS +WITH pre_ranked_events AS (SELECT * + FROM pre_ranked_events_{time_key}), + start_points AS ({step_0_subquery}), + ranked_events AS (SELECT pre_ranked_events.*, + leadInFrame(e_value) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction} + ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value, + leadInFrame(toNullable(event_type)) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction} + ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type, + abs(lagInFrame(toNullable(datetime)) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction} + ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) + - pre_ranked_events.datetime) AS time_from_previous + FROM start_points INNER JOIN pre_ranked_events USING (session_id)) +SELECT * +FROM ranked_events;""" + logger.debug("---------Q2-----------") + ch.execute(query=ch_query2, params=params) + if time() - _now > 2: + logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") + logger.warning(ch.format(ch_query2, params)) + logger.warning("----------------------") + _now = time() + + ch_query3 = f"""\ +WITH ranked_events AS (SELECT * + FROM ranked_events_{time_key}), + {",".join(steps_query)} +SELECT * +FROM ({" UNION ALL ".join(projection_query)}) AS chart_steps +ORDER BY event_number_in_session;""" + logger.debug("---------Q3-----------") + rows = ch.execute(query=ch_query3, params=params) + if time() - _now > 2: + logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") + logger.warning(ch.format(ch_query3, params)) + logger.warning("----------------------") + + return __transform_journey(rows=rows, reverse_path=reverse) + +# +# def __compute_weekly_percentage(rows): +# if rows is None or len(rows) == 0: +# return rows +# t = -1 +# for r in rows: +# if r["week"] == 0: +# t = r["usersCount"] +# r["percentage"] = r["usersCount"] / t +# return rows +# +# +# def __complete_retention(rows, start_date, end_date=None): +# if rows is None: +# return [] +# max_week = 10 +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# if i < len(rows) \ +# and i != rows[i]["week"]: +# rows.insert(i, neutral) +# elif i >= len(rows): +# rows.append(neutral) +# return rows +# +# +# def __complete_acquisition(rows, start_date, end_date=None): +# if rows is None: +# return [] +# max_week = 10 +# week = 0 +# delta_date = 0 +# while max_week > 0: +# start_date += TimeUTC.MS_WEEK +# if end_date is not None and 
start_date >= end_date: +# break +# delta = 0 +# if delta_date + week >= len(rows) \ +# or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# rows.insert(delta_date + week + i, neutral) +# delta = i +# else: +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# if delta_date + week + i < len(rows) \ +# and i != rows[delta_date + week + i]["week"]: +# rows.insert(delta_date + week + i, neutral) +# elif delta_date + week + i >= len(rows): +# rows.append(neutral) +# delta = i +# week += delta +# max_week -= 1 +# delta_date += 1 +# return rows +# +# +# def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, +# COUNT(all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT DISTINCT user_id +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# LIMIT 1)) +# ) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# ) AS all_connexions USING (user_id) +# GROUP BY connexion_week +# ORDER BY connexion_week;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not 
empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, +# week, +# users_count, +# connected_users +# FROM ( +# SELECT first_connexion_week, +# toInt8((connexion_week - first_connexion_week) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(20)(all_connexions.user_id) AS connected_users +# FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# LIMIT 1)) +# GROUP BY user_id) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions USING (user_id) +# WHERE first_connexion_week <= connexion_week +# GROUP BY first_connexion_week, week +# ORDER BY first_connexion_week, week +# ) AS full_data;""" +# +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# event_type = "PAGES" +# event_value = "/" +# extra_values = {} +# default = True +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY 
count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query% params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query += meta_condition +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT DISTINCT user_id +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) +# WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bsess.project_id = %(project_id)s +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# AND bsess.{event_column}=%(value)s +# LIMIT 1)) +# ) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions USING (user_id) +# GROUP BY connexion_week +# ORDER BY connexion_week;""" +# +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# +# event_type = "PAGES" +# event_value = "/" +# extra_values 
= {} +# default = True +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query% params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# ch_sub_query += meta_condition +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, +# week, +# users_count, +# connected_users +# FROM ( +# SELECT first_connexion_week, +# toInt8((connexion_week - first_connexion_week) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week +# FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# INNER JOIN {event_table} AS bsess USING (session_id) +# WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bsess.project_id = %(project_id)s +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# 
AND bsess.{event_column} = %(value)s +# LIMIT 1)) +# GROUP BY user_id) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week +# FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions USING (user_id) +# WHERE first_connexion_week <= connexion_week +# GROUP BY first_connexion_week, week +# ORDER BY first_connexion_week, week +# ) AS full_data;""" +# +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# +# event_table = JOURNEY_TYPES["CLICK"]["table"] +# event_column = JOURNEY_TYPES["CLICK"]["column"] +# extra_values = {} +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_table = JOURNEY_TYPES[f["value"]]["table"] +# event_column = JOURNEY_TYPES[f["value"]]["column"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# +# with ch_client.ClickHouseClient() as ch: +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query += meta_condition +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(meta_condition)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# all_user_count = ch.execute(ch_query, params) +# if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: +# return [] +# all_user_count = all_user_count[0]["count"] +# ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" 
AND ".join(ch_sub_query)} +# AND length({event_column})>2 +# GROUP BY value +# ORDER BY count DESC +# LIMIT 7;""" +# +# # print(ch_query % params) +# # print("---------------------") +# popularity = ch.execute(ch_query, params) +# params["values"] = [p["value"] for p in popularity] +# if len(params["values"]) == 0: +# return [] +# ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND {event_column} IN %(values)s +# GROUP BY value;""" +# +# # print(ch_query % params) +# # print("---------------------") +# frequencies = ch.execute(ch_query, params) +# total_usage = sum([f["count"] for f in frequencies]) +# frequencies = {f["value"]: f["count"] for f in frequencies} +# for p in popularity: +# p["popularity"] = p.pop("count") / all_user_count +# p["frequency"] = frequencies[p["value"]] / total_usage +# +# return popularity +# +# +# def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# # else: +# # print(f"no {event_table} most used value") +# # return {"target": 0, "adoption": 0, +# # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} +# +# extra_values["value"] = event_value +# +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# 
meta_condition.append("not empty(sessions_metadata.user_id)") +# ch_sub_query += meta_condition +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(meta_condition)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# all_user_count = ch.execute(ch_query, params) +# if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: +# return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": event_value}], } +# all_user_count = all_user_count[0]["count"] +# +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# adoption = ch.execute(ch_query, params) +# adoption = adoption[0]["count"] / all_user_count +# return {"target": all_user_count, "adoption": adoption, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return {"users": [], +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": 
""}]} +# +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# ch_sub_query.append("user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id +# ORDER BY count DESC +# LIMIT 10;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return {"users": helper.list_to_camel_case(rows), +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# AND length({event_column}) > 2 +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count +# FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count +# 
FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY day +# ORDER BY day) AS raw_results;""" +# params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, +# density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, +# neutral={"count": 0}), +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# event_table = JOURNEY_TYPES["CLICK"]["table"] +# event_column = JOURNEY_TYPES["CLICK"]["column"] +# extra_values = {} +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_table = JOURNEY_TYPES[f["value"]]["table"] +# event_column = JOURNEY_TYPES[f["value"]]["column"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY avg DESC +# LIMIT 7;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# +# return rows +# +# +# PERIOD_TO_FUNCTION = { +# "DAY": "toStartOfDay", +# "WEEK": "toStartOfWeek" +# } +# +# +# def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# meta_condition = __get_meta_constraint(args) +# period = "DAY" +# extra_values = {} +# for f in filters: +# if f.type == "PERIOD" and f["value"] in ["DAY", "WEEK"]: +# period = f["value"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# extra_values["user_id"] = f["value"] +# period_function = PERIOD_TO_FUNCTION[period] +# ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg +# FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, 
count(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY period) AS daily_users;""" +# params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, +# "project_id": project_id, +# "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( +# startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), +# **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# avg = ch.execute(ch_query, params) +# if len(avg) == 0 or avg[0]["avg"] == 0: +# return {"avg": 0, "chart": []} +# avg = avg[0]["avg"] +# # TODO: optimize this when DB structure changes, optimization from 3s to 1s +# ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count +# FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY period +# ORDER BY period) AS raw_results;""" +# # print(ch_query % params) +# # print("---------------------") +# rows = ch.execute(ch_query, params) +# return {"avg": avg, "chart": rows} +# +# +# def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): +# ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg +# FROM(SELECT COUNT(user_id) AS count +# FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id) AS users_connexions +# GROUP BY number_of_days +# ORDER BY number_of_days) AS results;""" +# params = {"project_id": project_id, +# "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# # print("---------------------") +# avg = ch.execute(ch_query, params) +# if len(avg) == 0 or avg[0]["avg"] == 0: +# return {"avg": 0, "partition": []} +# avg = avg[0]["avg"] +# ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count +# FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id) AS users_connexions +# GROUP BY number_of_days +# ORDER BY number_of_days;""" +# +# # print(ch_query % params) +# # print("---------------------") +# rows = ch.execute(ch_query, params) +# +# return {"avg": avg, "partition": helper.list_to_camel_case(rows)} +# +# +# def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# event_type = "PAGES" +# event_value = "/" +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# 
meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "list": [] +# } +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT user_id, +# toUnixTimestamp(last_time)*1000 AS last_time, +# interactions_count, +# toUnixTimestamp(first_seen) * 1000 AS first_seen, +# toUnixTimestamp(last_seen) * 1000 AS last_seen +# FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen +# FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) +# WHERE now() - last_time > 7 +# GROUP BY user_id, last_time, interactions_count +# ORDER BY interactions_count DESC, last_time DESC +# LIMIT 50) AS raw_results;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "list": helper.list_to_camel_case(rows) +# } +# +# +# def search(text, feature_type, project_id, platform=None): +# if not feature_type: +# resource_type = "ALL" +# data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) +# return data +# args = {} if platform is None else {"platform": platform} +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# params = 
{"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, +# "endTimestamp": TimeUTC.now(), +# "project_id": project_id, +# "value": text.lower(), +# "platform_0": platform} +# if feature_type == "ALL": +# with ch_client.ClickHouseClient() as ch: +# sub_queries = [] +# for e in JOURNEY_TYPES: +# sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" +# FROM {JOURNEY_TYPES[e]["table"]} AS feature +# WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 +# LIMIT 10)""") +# ch_query = "UNION ALL".join(sub_queries) +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# elif JOURNEY_TYPES.get(feature_type) is not None: +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" +# FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature +# WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 +# LIMIT 10;""" +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# else: +# return [] +# return [helper.dict_to_camel_case(row) for row in rows] diff --git a/ee/api/.gitignore b/ee/api/.gitignore index fa2f87b39..7177344c0 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -200,6 +200,9 @@ Pipfile.lock /chalicelib/core/metrics/heatmaps.py /chalicelib/core/metrics/heatmaps_ch.py /chalicelib/core/metrics/metrics_ch.py +/chalicelib/core/metrics/product_analytics.py +/chalicelib/core/metrics/product_analytics_ch.py +/chalicelib/core/metrics/product_anaytics2.py /chalicelib/core/events.py /chalicelib/core/events_mobile.py /chalicelib/core/feature_flags.py diff --git a/ee/api/chalicelib/core/metrics/__init__.py b/ee/api/chalicelib/core/metrics/__init__.py index 1846e5c73..a9b4ae927 100644 --- a/ee/api/chalicelib/core/metrics/__init__.py +++ b/ee/api/chalicelib/core/metrics/__init__.py @@ -7,3 +7,4 @@ logger = logging.getLogger(__name__) from chalicelib.core.metrics import heatmaps_ch as heatmaps from chalicelib.core.metrics import metrics_ch as metrics from chalicelib.core.metrics import custom_metrics_ee as custom_metrics +from chalicelib.core.metrics import product_analytics_ch as product_analytics diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index e4087bfa0..9c31242b0 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -20,6 +20,9 @@ rm -rf ./chalicelib/core/metrics/dashboards.py rm -rf ./chalicelib/core/metrics/heatmaps.py rm -rf ./chalicelib/core/metrics/heatmaps_ch.py rm -rf ./chalicelib/core/metrics/metrics_ch.py +rm -rf ./chalicelib/core/metrics/product_analytics.py +rm -rf ./chalicelib/core/metrics/product_analytics_ch.py +rm -rf ./chalicelib/core/metrics/product_anaytics2.py rm -rf ./chalicelib/core/events.py rm -rf ./chalicelib/core/events_mobile.py rm -rf ./chalicelib/core/feature_flags.py From 0eae03f29d8c7442774937e8726fb65046341402 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 12 Dec 2024 18:11:28 +0100 Subject: [PATCH 04/10] Dev (#2869) * fix(chalice): fixed user's journey --- api/chalicelib/core/metrics/product_analytics_ch.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/core/metrics/product_analytics_ch.py b/api/chalicelib/core/metrics/product_analytics_ch.py index 15df2dda2..a21c175b7 100644 --- a/api/chalicelib/core/metrics/product_analytics_ch.py +++ b/api/chalicelib/core/metrics/product_analytics_ch.py @@ -384,7 +384,7 @@ WITH {initial_sessions_cte} 
SELECT * FROM pre_ranked_events;""" logger.debug("---------Q1-----------") - ch.execute(query=ch_query1, params=params) + ch.execute(query=ch_query1, parameters=params) if time() - _now > 2: logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") logger.warning(ch.format(ch_query1, params)) @@ -411,7 +411,7 @@ WITH pre_ranked_events AS (SELECT * SELECT * FROM ranked_events;""" logger.debug("---------Q2-----------") - ch.execute(query=ch_query2, params=params) + ch.execute(query=ch_query2, parameters=params) if time() - _now > 2: logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") logger.warning(ch.format(ch_query2, params)) @@ -426,7 +426,7 @@ SELECT * FROM ({" UNION ALL ".join(projection_query)}) AS chart_steps ORDER BY event_number_in_session;""" logger.debug("---------Q3-----------") - rows = ch.execute(query=ch_query3, params=params) + rows = ch.execute(query=ch_query3, parameters=params) if time() - _now > 2: logger.warning(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<") logger.warning(ch.format(ch_query3, params)) From 48483be8f9893360d1b27829d9f0a48e056a9159 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 12 Dec 2024 18:31:54 +0100 Subject: [PATCH 05/10] Dev (#2870) * fix(chalice): fixed clickmap --- api/chalicelib/core/metrics/heatmaps_ch.py | 583 +++++++-------------- 1 file changed, 193 insertions(+), 390 deletions(-) diff --git a/api/chalicelib/core/metrics/heatmaps_ch.py b/api/chalicelib/core/metrics/heatmaps_ch.py index d4a6a72cb..e7e323410 100644 --- a/api/chalicelib/core/metrics/heatmaps_ch.py +++ b/api/chalicelib/core/metrics/heatmaps_ch.py @@ -70,11 +70,12 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema): # query_from += """ LEFT JOIN experimental.events AS issues_t ON (main_events.session_id=issues_t.session_id) # LEFT JOIN experimental.issues AS mis ON (issues_t.issue_id=mis.issue_id)""" with ch_client.ClickHouseClient() as cur: - query = cur.format(f"""SELECT main_events.normalized_x AS normalized_x, + query = cur.format(query=f"""SELECT main_events.normalized_x AS normalized_x, main_events.normalized_y AS normalized_y FROM {query_from} WHERE {" AND ".join(constraints)} - LIMIT 500;""", args) + LIMIT 500;""", + parameters=args) logger.debug("---------") logger.debug(query) logger.debug("---------") @@ -106,10 +107,11 @@ def get_x_y_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatM query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events" with ch_client.ClickHouseClient() as cur: - query = cur.format(f"""SELECT main_events.normalized_x AS normalized_x, + query = cur.format(query=f"""SELECT main_events.normalized_x AS normalized_x, main_events.normalized_y AS normalized_y FROM {query_from} - WHERE {" AND ".join(constraints)};""", args) + WHERE {" AND ".join(constraints)};""", + parameters=args) logger.debug("---------") logger.debug(query) logger.debug("---------") @@ -140,12 +142,13 @@ def get_selectors_by_url_and_session_id(project_id, session_id, data: schemas.Ge query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events" with ch_client.ClickHouseClient() as cur: - query = cur.format(f"""SELECT main_events.selector AS selector, + query = cur.format(query=f"""SELECT main_events.selector AS selector, COUNT(1) AS count FROM {query_from} WHERE {" AND ".join(constraints)} GROUP BY 1 - ORDER BY count DESC;""", args) + ORDER BY count DESC;""", + parameters=args) logger.debug("---------") logger.debug(query) 
logger.debug("---------") @@ -162,406 +165,206 @@ def get_selectors_by_url_and_session_id(project_id, session_id, data: schemas.Ge return helper.list_to_camel_case(rows) -if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - # this part is identical to FOSS - SESSION_PROJECTION_COLS = """s.project_id, - s.session_id::text AS session_id, - s.start_ts, - s.duration""" +# use CH +SESSION_PROJECTION_COLS = """s.project_id, +s.session_id AS session_id, +toUnixTimestamp(s.datetime)*1000 AS start_ts, +s.duration AS duration""" - def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int, - start_time: int, - end_time: int) -> str | None: - full_args = { - "sessionId": session_id, - "projectId": project_id, - "start_time": start_time, - "end_time": end_time, - } - sub_condition = ["session_id = %(sessionId)s"] - if location_condition and len(location_condition.value) > 0: - f_k = "LOC" - op = sh.get_sql_operator(location_condition.operator) - full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)} - sub_condition.append( - sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False, - value_key=f_k)) - with pg_client.PostgresClient() as cur: - main_query = cur.mogrify(f"""WITH paths AS (SELECT DISTINCT path - FROM events.clicks - WHERE {" AND ".join(sub_condition)}) - SELECT path, COUNT(1) AS count - FROM events.clicks - INNER JOIN public.sessions USING (session_id) - INNER JOIN paths USING (path) - WHERE sessions.project_id = %(projectId)s - AND clicks.timestamp >= %(start_time)s - AND clicks.timestamp <= %(end_time)s - AND start_ts >= %(start_time)s - AND start_ts <= %(end_time)s - AND duration IS NOT NULL - GROUP BY path - ORDER BY count DESC - LIMIT 1;""", full_args) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION -----------") - logger.warning(main_query.decode('UTF-8')) - logger.warning("--------- PAYLOAD -----------") - logger.warning(full_args) - logger.warning("--------------------") - raise err +def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int, + start_time: int, + end_time: int) -> str | None: + full_args = { + "sessionId": session_id, + "projectId": project_id, + "start_time": start_time, + "end_time": end_time, + } + sub_condition = ["session_id = %(sessionId)s", "event_type = 'CLICK'", "project_id = %(projectId)s"] + if location_condition and len(location_condition.value) > 0: + f_k = "LOC" + op = sh.get_sql_operator(location_condition.operator) + full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)} + sub_condition.append( + sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False, + value_key=f_k)) + with ch_client.ClickHouseClient() as cur: + main_query = cur.format(query=f"""WITH paths AS (SELECT DISTINCT url_path + FROM experimental.events + WHERE {" AND ".join(sub_condition)}) + SELECT url_path, COUNT(1) AS count + FROM experimental.events + INNER JOIN paths USING (url_path) + WHERE event_type = 'CLICK' + AND project_id = %(projectId)s + AND datetime >= toDateTime(%(start_time)s / 1000) + AND datetime <= toDateTime(%(end_time)s / 1000) + GROUP BY url_path + ORDER BY count DESC + LIMIT 1;""", + parameters=full_args) + logger.debug("--------------------") + 
logger.debug(main_query) + logger.debug("--------------------") + try: + url = cur.execute(main_query) + except Exception as err: + logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION CH-----------") + logger.warning(main_query.decode('UTF-8')) + logger.warning("--------- PAYLOAD -----------") + logger.warning(full_args) + logger.warning("--------------------") + raise err - url = cur.fetchone() - if url is None: - return None - return url["path"] + if url is None or len(url) == 0: + return None + return url[0]["url_path"] - def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id, - include_mobs: bool = True, exclude_sessions: list[str] = [], - _depth: int = 3): - no_platform = True - location_condition = None - no_click = True - for f in data.filters: - if f.type == schemas.FilterType.PLATFORM: - no_platform = False - break - for f in data.events: - if f.type == schemas.EventType.LOCATION: - if len(f.value) == 0: - f.operator = schemas.SearchEventOperator.IS_ANY - location_condition = f.model_copy() - elif f.type == schemas.EventType.CLICK: - no_click = False - if len(f.value) == 0: - f.operator = schemas.SearchEventOperator.IS_ANY - if location_condition and not no_click: - break +def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id, + include_mobs: bool = True, exclude_sessions: list[str] = [], + _depth: int = 3): + no_platform = True + location_condition = None + no_click = True + for f in data.filters: + if f.type == schemas.FilterType.PLATFORM: + no_platform = False + break + for f in data.events: + if f.type == schemas.EventType.LOCATION: + if len(f.value) == 0: + f.operator = schemas.SearchEventOperator.IS_ANY + location_condition = f.model_copy() + elif f.type == schemas.EventType.CLICK: + no_click = False + if len(f.value) == 0: + f.operator = schemas.SearchEventOperator.IS_ANY + if location_condition and not no_click: + break - if no_platform: - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM, - value=[schemas.PlatformType.DESKTOP], - operator=schemas.SearchEventOperator.IS)) - if not location_condition: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, - value=[], - operator=schemas.SearchEventOperator.IS_ANY)) - if no_click: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK, - value=[], - operator=schemas.SearchEventOperator.IS_ANY)) + if no_platform: + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM, + value=[schemas.PlatformType.DESKTOP], + operator=schemas.SearchEventOperator.IS)) + if not location_condition: + data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, + value=[], + operator=schemas.SearchEventOperator.IS_ANY)) + if no_click: + data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK, + value=[], + operator=schemas.SearchEventOperator.IS_ANY)) - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, - value=[0], - operator=schemas.MathOperator.GREATER)) + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, + value=[0], + operator=schemas.MathOperator.GREATER)) - full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=data.bookmarked, issue=None, - project_id=project_id, user_id=user_id) - full_args["exclude_sessions"] = tuple(exclude_sessions) - if 
len(exclude_sessions) > 0: - query_part += "\n AND session_id NOT IN %(exclude_sessions)s" - with pg_client.PostgresClient() as cur: - data.order = schemas.SortOrderType.DESC - data.sort = 'duration' - main_query = cur.mogrify(f"""SELECT * - FROM (SELECT {SESSION_PROJECTION_COLS} - {query_part} - --ignoring the sort made the query faster (from 6s to 100ms) - --ORDER BY {data.sort} {data.order.value} - LIMIT 20) AS raw - ORDER BY random() - LIMIT 1;""", full_args) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------") - logger.warning(main_query.decode('UTF-8')) - logger.warning("--------- PAYLOAD -----------") - logger.warning(data.model_dump_json()) - logger.warning("--------------------") - raise err + full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False, + favorite_only=data.bookmarked, issue=None, + project_id=project_id, user_id=user_id) + full_args["exclude_sessions"] = tuple(exclude_sessions) + if len(exclude_sessions) > 0: + query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)" + with ch_client.ClickHouseClient() as cur: + data.order = schemas.SortOrderType.DESC + data.sort = 'duration' + main_query = cur.format(query=f"""SELECT * + FROM (SELECT {SESSION_PROJECTION_COLS} + {query_part} + -- ORDER BY {data.sort} {data.order.value} + LIMIT 20) AS raw + ORDER BY rand() + LIMIT 1;""", + parameters=full_args) + logger.debug("--------------------") + logger.debug(main_query) + logger.debug("--------------------") + try: + session = cur.execute(main_query) + except Exception as err: + logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION CH -----------") + logger.warning(main_query) + logger.warning("--------- PAYLOAD -----------") + logger.warning(data.model_dump_json()) + logger.warning("--------------------") + raise err - session = cur.fetchone() - if session: - if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY: - session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"], - location_condition=location_condition, - start_time=data.startTimestamp, end_time=data.endTimestamp) - else: - session["path"] = location_condition.value[0] - - if include_mobs: - session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) - session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) - if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - return search_short_session(data=data, project_id=project_id, user_id=user_id, - include_mobs=include_mobs, - exclude_sessions=exclude_sessions + [session["session_id"]], - _depth=_depth - 1) - elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - logger.info("couldn't find an existing replay after 3 iterations for heatmap") - - session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id) + if len(session) > 0: + session = session[0] + if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY: + session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"], + location_condition=location_condition, + start_time=data.startTimestamp, end_time=data.endTimestamp) else: - logger.debug("No session found for 
heatmap") + session["path"] = location_condition.value[0] - return helper.dict_to_camel_case(session) - - - def get_selected_session(project_id, session_id): - with pg_client.PostgresClient() as cur: - main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS} - FROM public.sessions AS s - WHERE session_id=%(session_id)s;""", {"session_id": session_id}) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------") - logger.warning(main_query.decode('UTF-8')) - raise err - - session = cur.fetchone() - if session: + if include_mobs: session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) - if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - session["_issue"] = "mob file not found" - logger.info("can't find selected mob file for heatmap") - session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id) + if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: + return search_short_session(data=data, project_id=project_id, user_id=user_id, + include_mobs=include_mobs, + exclude_sessions=exclude_sessions + [session["session_id"]], + _depth=_depth - 1) + elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: + logger.info("couldn't find an existing replay after 3 iterations for heatmap") - return helper.dict_to_camel_case(session) + session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"], + event_type=schemas.EventType.LOCATION) + else: + return None + + return helper.dict_to_camel_case(session) - def get_page_events(session_id, project_id): - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("""\ - SELECT - message_id, - timestamp, - host, - path, - path AS value, - path AS url, - 'LOCATION' AS type - FROM events.pages - WHERE session_id = %(session_id)s - ORDER BY timestamp,message_id;""", {"session_id": session_id})) - rows = cur.fetchall() - rows = helper.list_to_camel_case(rows) - return rows +def get_selected_session(project_id, session_id): + with ch_client.ClickHouseClient() as cur: + main_query = cur.format(query=f"""SELECT {SESSION_PROJECTION_COLS} + FROM experimental.sessions AS s + WHERE session_id=%(session_id)s;""", + parameters={"session_id": session_id}) + logger.debug("--------------------") + logger.debug(main_query) + logger.debug("--------------------") + try: + session = cur.execute(main_query) + except Exception as err: + logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------") + logger.warning(main_query.decode('UTF-8')) + raise err + if len(session) > 0: + session = session[0] + else: + session = None -else: - # use CH - SESSION_PROJECTION_COLS = """s.project_id, - s.session_id AS session_id, - toUnixTimestamp(s.datetime)*1000 AS start_ts, - s.duration AS duration""" + if session: + session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) + session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) + if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: + session["_issue"] = "mob file not found" + logger.info("can't find selected mob file for heatmap") + session['events'] = 
get_page_events(session_id=session["session_id"], project_id=project_id) + + return helper.dict_to_camel_case(session) - def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int, - start_time: int, - end_time: int) -> str | None: - full_args = { - "sessionId": session_id, - "projectId": project_id, - "start_time": start_time, - "end_time": end_time, - } - sub_condition = ["session_id = %(sessionId)s", "event_type = 'CLICK'", "project_id = %(projectId)s"] - if location_condition and len(location_condition.value) > 0: - f_k = "LOC" - op = sh.get_sql_operator(location_condition.operator) - full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)} - sub_condition.append( - sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False, - value_key=f_k)) - with ch_client.ClickHouseClient() as cur: - main_query = cur.format(f"""WITH paths AS (SELECT DISTINCT url_path - FROM experimental.events - WHERE {" AND ".join(sub_condition)}) - SELECT url_path, COUNT(1) AS count - FROM experimental.events - INNER JOIN paths USING (url_path) - WHERE event_type = 'CLICK' - AND project_id = %(projectId)s - AND datetime >= toDateTime(%(start_time)s / 1000) - AND datetime <= toDateTime(%(end_time)s / 1000) - GROUP BY url_path - ORDER BY count DESC - LIMIT 1;""", full_args) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - url = cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION CH-----------") - logger.warning(main_query.decode('UTF-8')) - logger.warning("--------- PAYLOAD -----------") - logger.warning(full_args) - logger.warning("--------------------") - raise err - - if url is None or len(url) == 0: - return None - return url[0]["url_path"] - - - def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id, - include_mobs: bool = True, exclude_sessions: list[str] = [], - _depth: int = 3): - no_platform = True - location_condition = None - no_click = True - for f in data.filters: - if f.type == schemas.FilterType.PLATFORM: - no_platform = False - break - for f in data.events: - if f.type == schemas.EventType.LOCATION: - if len(f.value) == 0: - f.operator = schemas.SearchEventOperator.IS_ANY - location_condition = f.model_copy() - elif f.type == schemas.EventType.CLICK: - no_click = False - if len(f.value) == 0: - f.operator = schemas.SearchEventOperator.IS_ANY - if location_condition and not no_click: - break - - if no_platform: - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM, - value=[schemas.PlatformType.DESKTOP], - operator=schemas.SearchEventOperator.IS)) - if not location_condition: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, - value=[], - operator=schemas.SearchEventOperator.IS_ANY)) - if no_click: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK, - value=[], - operator=schemas.SearchEventOperator.IS_ANY)) - - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, - value=[0], - operator=schemas.MathOperator.GREATER)) - - full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False, - favorite_only=data.bookmarked, issue=None, - project_id=project_id, user_id=user_id) - full_args["exclude_sessions"] = tuple(exclude_sessions) - if len(exclude_sessions) > 0: 
- query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)" - with ch_client.ClickHouseClient() as cur: - data.order = schemas.SortOrderType.DESC - data.sort = 'duration' - main_query = cur.format(f"""SELECT * - FROM (SELECT {SESSION_PROJECTION_COLS} - {query_part} - -- ORDER BY {data.sort} {data.order.value} - LIMIT 20) AS raw - ORDER BY rand() - LIMIT 1;""", full_args) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - session = cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION CH -----------") - logger.warning(main_query) - logger.warning("--------- PAYLOAD -----------") - logger.warning(data.model_dump_json()) - logger.warning("--------------------") - raise err - - if len(session) > 0: - session = session[0] - if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY: - session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"], - location_condition=location_condition, - start_time=data.startTimestamp, end_time=data.endTimestamp) - else: - session["path"] = location_condition.value[0] - - if include_mobs: - session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) - session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) - if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - return search_short_session(data=data, project_id=project_id, user_id=user_id, - include_mobs=include_mobs, - exclude_sessions=exclude_sessions + [session["session_id"]], - _depth=_depth - 1) - elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - logger.info("couldn't find an existing replay after 3 iterations for heatmap") - - session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"], - event_type=schemas.EventType.LOCATION) - else: - return None - - return helper.dict_to_camel_case(session) - - - def get_selected_session(project_id, session_id): - with ch_client.ClickHouseClient() as cur: - main_query = cur.format(f"""SELECT {SESSION_PROJECTION_COLS} - FROM experimental.sessions AS s - WHERE session_id=%(session_id)s;""", {"session_id": session_id}) - logger.debug("--------------------") - logger.debug(main_query) - logger.debug("--------------------") - try: - session = cur.execute(main_query) - except Exception as err: - logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------") - logger.warning(main_query.decode('UTF-8')) - raise err - if len(session) > 0: - session = session[0] - else: - session = None - - if session: - session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id) - session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"]) - if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0: - session["_issue"] = "mob file not found" - logger.info("can't find selected mob file for heatmap") - session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id) - - return helper.dict_to_camel_case(session) - - - def get_page_events(session_id, project_id): - with ch_client.ClickHouseClient() as cur: - rows = cur.execute("""\ - SELECT - message_id, - toUnixTimestamp(datetime)*1000 AS timestamp, - url_host AS host, - url_path AS path, - url_path AS value, - url_path AS url, - 'LOCATION' AS type - 
FROM experimental.events - WHERE session_id = %(session_id)s - AND event_type='LOCATION' - AND project_id= %(project_id)s - ORDER BY datetime,message_id;""", {"session_id": session_id, "project_id": project_id}) - rows = helper.list_to_camel_case(rows) - return rows +def get_page_events(session_id, project_id): + with ch_client.ClickHouseClient() as cur: + rows = cur.execute("""\ + SELECT + message_id, + toUnixTimestamp(datetime)*1000 AS timestamp, + url_host AS host, + url_path AS path, + url_path AS value, + url_path AS url, + 'LOCATION' AS type + FROM experimental.events + WHERE session_id = %(session_id)s + AND event_type='LOCATION' + AND project_id= %(project_id)s + ORDER BY datetime,message_id;""", {"session_id": session_id, "project_id": project_id}) + rows = helper.list_to_camel_case(rows) + return rows From 69ef083abe7c6532919ddb7fed09ca41f65a92bc Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 13 Dec 2024 14:08:03 +0100 Subject: [PATCH 06/10] feat(pa): cards endpoints (#2871) * feat(analytics): dashboards * feat(analytics): cards api endpoints * feat(analytics): validator dependency --- backend/go.mod | 5 + backend/go.sum | 10 + backend/pkg/analytics/api/card-handlers.go | 268 ++++++++++++++++++ backend/pkg/analytics/api/card.go | 92 ++++++ .../pkg/analytics/api/dashboard-handlers.go | 18 +- backend/pkg/analytics/api/handlers.go | 9 +- backend/pkg/analytics/api/model.go | 2 +- 7 files changed, 393 insertions(+), 11 deletions(-) create mode 100644 backend/pkg/analytics/api/card-handlers.go create mode 100644 backend/pkg/analytics/api/card.go diff --git a/backend/go.mod b/backend/go.mod index c3123f334..c14121a16 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -55,8 +55,12 @@ require ( github.com/distribution/reference v0.6.0 // indirect github.com/elastic/elastic-transport-go/v8 v8.5.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.23.0 // indirect github.com/goccy/go-json v0.10.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect @@ -70,6 +74,7 @@ require ( github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect github.com/jackc/puddle v1.3.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/paulmach/orb v0.7.1 // indirect github.com/pierrec/lz4/v4 v4.1.15 // indirect diff --git a/backend/go.sum b/backend/go.sum index c088a9a3a..1a73eb0f8 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -171,6 +171,8 @@ github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ul github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/getsentry/sentry-go v0.29.0 
h1:YtWluuCFg9OfcqnaujpY918N/AhCCwarIDWOYSBAjCA= github.com/getsentry/sentry-go v0.29.0/go.mod h1:jhPesDAL0Q0W2+2YEuVOvdWmVtdsr1+jtBrlDEVWwLY= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= @@ -192,6 +194,12 @@ github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZ github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o= +github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -364,6 +372,8 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= diff --git a/backend/pkg/analytics/api/card-handlers.go b/backend/pkg/analytics/api/card-handlers.go new file mode 100644 index 000000000..41079c330 --- /dev/null +++ b/backend/pkg/analytics/api/card-handlers.go @@ -0,0 +1,268 @@ +package models + +import ( + "encoding/json" + "fmt" + "github.com/gorilla/mux" + "net/http" + "openreplay/backend/pkg/server/api" + "openreplay/backend/pkg/server/user" + "strconv" + "time" + + "github.com/go-playground/validator/v10" +) + +// getCardId returns the ID from the request +func getCardId(r *http.Request) (int64, error) { + vars := mux.Vars(r) + idStr := vars["id"] + if idStr == "" { + return 0, fmt.Errorf("invalid Card ID") + } + + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return 0, fmt.Errorf("invalid Card ID") + } + + return id, nil +} + +func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) + return + } + bodySize = len(bodyBytes) + + req := &CardCreateRequest{} + if err := json.Unmarshal(bodyBytes, req); err != nil { + 
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + validate := validator.New() + err = validate.Struct(req) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + // TODO save card to DB + + resp := &CardGetResponse{ + Card: Card{ + CardID: 1, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + DeletedAt: nil, + EditedAt: nil, + ProjectID: 1, + UserID: 1, + CardBase: CardBase{ + Name: req.Name, + IsPublic: req.IsPublic, + Thumbnail: req.Thumbnail, + MetricType: req.MetricType, + MetricOf: req.MetricOf, + Series: req.Series, + }, + }, + } + + currentUser := r.Context().Value("userData").(*user.User) + e.log.Info(r.Context(), "User ID: ", currentUser.ID) + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} + +// getCard +func (e *handlersImpl) getCard(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + id, err := getCardId(r) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + thumbnail := "https://example.com/image.png" + + // TODO get card from DB + + resp := &CardGetResponse{ + Card: Card{ + CardID: id, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + DeletedAt: nil, + EditedAt: nil, + ProjectID: 1, + UserID: 1, + CardBase: CardBase{ + Name: "My Card", + IsPublic: true, + Thumbnail: &thumbnail, + MetricType: "timeseries", + MetricOf: "session_count", + }, + }, + } + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} + +// get cards paginated +func (e *handlersImpl) getCards(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + // TODO get cards from DB + thumbnail := "https://example.com/image.png" + + resp := &GetCardsResponse{ + Cards: []Card{ + { + CardID: 1, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + DeletedAt: nil, + EditedAt: nil, + ProjectID: 1, + UserID: 1, + CardBase: CardBase{ + Name: "My Card", + IsPublic: true, + Thumbnail: &thumbnail, + MetricType: "timeseries", + MetricOf: "session_count", + }, + }, + }, + Total: 10, + } + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} + +func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + id, err := getCardId(r) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) + return + } + bodySize = len(bodyBytes) + + req := &CardUpdateRequest{} + if err := json.Unmarshal(bodyBytes, req); err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + validate := validator.New() + err = validate.Struct(req) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + // TODO update card in DB + + resp := &CardGetResponse{ + Card: Card{ + CardID: id, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + DeletedAt: nil, + EditedAt: 
nil, + ProjectID: 1, + UserID: 1, + CardBase: CardBase{ + Name: req.Name, + IsPublic: req.IsPublic, + Thumbnail: req.Thumbnail, + MetricType: req.MetricType, + MetricOf: req.MetricOf, + }, + }, + } + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} + +func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + _, err := getCardId(r) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + // TODO delete card from DB + + e.responser.ResponseWithJSON(e.log, r.Context(), w, nil, startTime, r.URL.Path, bodySize) +} + +func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) + return + } + bodySize = len(bodyBytes) + + req := &GetCardChartDataRequest{} + if err := json.Unmarshal(bodyBytes, req); err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + validate := validator.New() + err = validate.Struct(req) + + // TODO get card chart data from ClickHouse + jsonInput := ` + { + "data": [ + { + "timestamp": 1733934939000, + "Series A": 100, + "Series B": 200 + }, + { + "timestamp": 1733935939000, + "Series A": 150, + "Series B": 250 + } + ] + }` + + var resp GetCardChartDataResponse + err = json.Unmarshal([]byte(jsonInput), &resp) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) + return + } + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} diff --git a/backend/pkg/analytics/api/card.go b/backend/pkg/analytics/api/card.go new file mode 100644 index 000000000..271d8c080 --- /dev/null +++ b/backend/pkg/analytics/api/card.go @@ -0,0 +1,92 @@ +package models + +import ( + "time" +) + +// CardBase Common fields for the Card entity +type CardBase struct { + Name string `json:"name" validate:"required"` + IsPublic bool `json:"isPublic" validate:"omitempty"` + DefaultConfig map[string]any `json:"defaultConfig"` + Thumbnail *string `json:"thumbnail" validate:"omitempty,url"` + MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` + MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` + MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` + ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"` + MetricValue []string `json:"metricValue" validate:"omitempty"` + SessionID *int64 `json:"sessionId" validate:"omitempty"` + Series []CardSeries `json:"series" validate:"required,dive"` +} + +// Card Fields specific to database operations +type Card struct { + CardBase + ProjectID int64 `json:"projectId" validate:"required"` + UserID int64 `json:"userId" validate:"required"` + CardID int64 `json:"cardId"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt *time.Time `json:"deleted_at,omitempty"` + EditedAt *time.Time `json:"edited_at,omitempty"` +} + +type CardSeries struct { + SeriesID int64 `json:"seriesId" validate:"omitempty"` + MetricID int64 
`json:"metricId" validate:"omitempty"` + Name string `json:"name" validate:"required"` + CreatedAt time.Time `json:"createdAt" validate:"omitempty"` + DeletedAt *time.Time `json:"deletedAt" validate:"omitempty"` + Index int64 `json:"index" validate:"required"` + Filter SeriesFilter `json:"filter"` +} + +type SeriesFilter struct { + EventOrder string `json:"eventOrder" validate:"required,oneof=then or and"` + Filters []FilterItem `json:"filters"` +} + +type FilterItem struct { + Type string `json:"type" validate:"required"` + Operator string `json:"operator" validate:"required"` + Source string `json:"source" validate:"required"` + SourceOperator string `json:"sourceOperator" validate:"required"` + Value []string `json:"value" validate:"required,dive,required"` + IsEvent bool `json:"isEvent"` +} + +// CardCreateRequest Fields required for creating a card (from the frontend) +type CardCreateRequest struct { + CardBase +} + +type CardGetResponse struct { + Card +} + +type CardUpdateRequest struct { + CardBase +} + +type GetCardsResponse struct { + Cards []Card `json:"cards"` + Total int64 `json:"total"` +} + +type DataPoint struct { + Timestamp int64 `json:"timestamp"` + Series map[string]int64 `json:"series"` +} + +type GetCardChartDataRequest struct { + ProjectID int64 `json:"projectId" validate:"required"` + MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` + MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` + MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` + SessionID int64 `json:"sessionId" validate:"required"` + Series []CardSeries `json:"series"` +} + +type GetCardChartDataResponse struct { + Data []DataPoint `json:"data"` +} diff --git a/backend/pkg/analytics/api/dashboard-handlers.go b/backend/pkg/analytics/api/dashboard-handlers.go index ca8e22ba2..777180847 100644 --- a/backend/pkg/analytics/api/dashboard-handlers.go +++ b/backend/pkg/analytics/api/dashboard-handlers.go @@ -1,4 +1,4 @@ -package api +package models import ( "encoding/json" @@ -11,7 +11,7 @@ import ( "time" ) -func getId(r *http.Request) (int, error) { +func getDashboardId(r *http.Request) (int, error) { vars := mux.Vars(r) idStr := vars["id"] if idStr == "" { @@ -64,7 +64,7 @@ func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return @@ -90,7 +90,7 @@ func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 - id, err := getId(r) + id, err := getDashboardId(r) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -113,7 +113,7 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return @@ -149,7 +149,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, 
http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return @@ -163,7 +163,7 @@ func (e *handlersImpl) pinDashboard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return @@ -179,7 +179,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return @@ -195,7 +195,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re startTime := time.Now() bodySize := 0 - //id, err := getId(r) + //id, err := getDashboardId(r) //if err != nil { // e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) // return diff --git a/backend/pkg/analytics/api/handlers.go b/backend/pkg/analytics/api/handlers.go index 05ee6dbfc..e3ad6ac6d 100644 --- a/backend/pkg/analytics/api/handlers.go +++ b/backend/pkg/analytics/api/handlers.go @@ -1,4 +1,4 @@ -package api +package models import ( config "openreplay/backend/internal/config/analytics" @@ -25,6 +25,13 @@ func (e *handlersImpl) GetAll() []*api.Description { {"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"}, {"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"}, {"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"}, + {"/v1/analytics/{projectId}/cards", e.createCard, "POST"}, + {"/v1/analytics/{projectId}/cards", e.getCards, "GET"}, + {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"}, + {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"}, + {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"}, + {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, + {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"}, } } diff --git a/backend/pkg/analytics/api/model.go b/backend/pkg/analytics/api/model.go index 3342a4b81..a5c231159 100644 --- a/backend/pkg/analytics/api/model.go +++ b/backend/pkg/analytics/api/model.go @@ -1,4 +1,4 @@ -package api +package models type Dashboard struct { DashboardID int `json:"dashboard_id"` From 92a6379e2c645bdf8a56484a9ab500ba6c5a07b2 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 16 Dec 2024 10:39:38 +0100 Subject: [PATCH 07/10] ui: fix prismjs loading --- frontend/app/assets/index.html | 9 ++++++ .../app/components/ui/CodeBlock/CodeBlock.tsx | 28 +++++++++---------- frontend/babel.config.js | 16 ----------- 3 files changed, 23 insertions(+), 30 deletions(-) diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html index edb51ad24..4c037eec1 100644 --- a/frontend/app/assets/index.html +++ b/frontend/app/assets/index.html @@ -14,6 +14,15 @@ + + + + + + + diff --git a/frontend/app/components/ui/CodeBlock/CodeBlock.tsx b/frontend/app/components/ui/CodeBlock/CodeBlock.tsx index 176cd1cfb..ae5c7c6dd 100644 --- a/frontend/app/components/ui/CodeBlock/CodeBlock.tsx +++ b/frontend/app/components/ui/CodeBlock/CodeBlock.tsx @@ -1,20 +1,20 @@ -import React, { useEffect } from "react"; -import Prism from "prismjs"; +import React, { useEffect } from 'react'; -interface IProps { - code: string; - language: string; -} - -const 
CodeBlock = ({ code, language }: IProps) => { +export default function CodeBlock({ code }) { + const language = 'javascript' useEffect(() => { - Prism.highlightAll(false); - }, []); + setTimeout(() => { + if (window.Prism) { + Prism.highlightAll(); + } + }, 0) + }, [code, language]); + return (
-      <code className={`language-${language}`}>{code}</code>
+      <code className={`language-${language}`}>
+        {code}
+      </code>
     
); -}; - -export default CodeBlock; \ No newline at end of file +} diff --git a/frontend/babel.config.js b/frontend/babel.config.js index 16518fa23..aaf309964 100644 --- a/frontend/babel.config.js +++ b/frontend/babel.config.js @@ -7,21 +7,5 @@ module.exports = { plugins: [ 'babel-plugin-react-require', ['@babel/plugin-proposal-decorators', { legacy: true }], - [ - 'prismjs', - { - languages: [ - 'javascript', - 'css', - 'bash', - 'typescript', - 'jsx', - 'kotlin', - 'swift', - ], - theme: 'default', - css: true, - }, - ], ], }; From bc2259aef3f7392c9a88cf75db65a8d7306ad620 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 16 Dec 2024 10:52:06 +0100 Subject: [PATCH 08/10] ui: fix prismjs loading --- frontend/app/initialize.tsx | 2 +- frontend/app/styles/{global.scss => global.css} | 0 frontend/tailwind.config.js | 5 +++++ frontend/webpack.config.ts | 3 ++- 4 files changed, 8 insertions(+), 2 deletions(-) rename frontend/app/styles/{global.scss => global.css} (100%) diff --git a/frontend/app/initialize.tsx b/frontend/app/initialize.tsx index eee5fba02..f56dc05c0 100644 --- a/frontend/app/initialize.tsx +++ b/frontend/app/initialize.tsx @@ -1,5 +1,5 @@ import './styles/index.css'; -import './styles/global.scss' +import './styles/global.css' import React from 'react'; import { createRoot } from 'react-dom/client'; import './init'; diff --git a/frontend/app/styles/global.scss b/frontend/app/styles/global.css similarity index 100% rename from frontend/app/styles/global.scss rename to frontend/app/styles/global.css diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js index 50f471bb9..ac900cbd6 100644 --- a/frontend/tailwind.config.js +++ b/frontend/tailwind.config.js @@ -1,6 +1,11 @@ const colors = require('./app/theme/colors'); const defaultColors = require('tailwindcss/colors'); +const deprecatedDefaults = ['lightBlue', 'warmGray', 'trueGray', 'coolGray', 'blueGray'] +deprecatedDefaults.forEach(color => { + delete defaultColors[color] +}) + module.exports = { content: [ './app/**/*.tsx', diff --git a/frontend/webpack.config.ts b/frontend/webpack.config.ts index a3c2a1ddf..b54b11c7b 100644 --- a/frontend/webpack.config.ts +++ b/frontend/webpack.config.ts @@ -122,6 +122,7 @@ const config: Configuration = { }, }, plugins: [ + new webpack.ProgressPlugin(), (isDevelopment ? false : new CompressionPlugin({ test: /\.(js|css|html|svg)$/, algorithm: 'brotliCompress', @@ -141,7 +142,7 @@ const config: Configuration = { ], }), new MiniCssExtractPlugin({ ignoreOrder: true }), - ], +], devtool: isDevelopment ? 
"inline-source-map" : false, performance: { hints: false, From efa0a2878bfc72c0044e0aacd5810f31ebe5ebbb Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 16 Dec 2024 11:05:01 +0100 Subject: [PATCH 09/10] ui: fix default language --- frontend/app/components/ui/CodeBlock/CodeBlock.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/frontend/app/components/ui/CodeBlock/CodeBlock.tsx b/frontend/app/components/ui/CodeBlock/CodeBlock.tsx index ae5c7c6dd..fff236d8b 100644 --- a/frontend/app/components/ui/CodeBlock/CodeBlock.tsx +++ b/frontend/app/components/ui/CodeBlock/CodeBlock.tsx @@ -1,7 +1,6 @@ import React, { useEffect } from 'react'; -export default function CodeBlock({ code }) { - const language = 'javascript' +export default function CodeBlock({ code, language = 'javascript' }) { useEffect(() => { setTimeout(() => { if (window.Prism) { From 77673b15f8dd688aba2ccf3b26cbfa7c2d207d2b Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 16 Dec 2024 11:17:08 +0100 Subject: [PATCH 10/10] ui: bundle prism locally --- frontend/app/assets/index.html | 16 +- frontend/app/assets/prism/prism-bash.min.js | 1 + .../app/assets/prism/prism-javascript.min.js | 1 + frontend/app/assets/prism/prism-jsx.min.js | 1 + frontend/app/assets/prism/prism-kotlin.min.js | 1 + frontend/app/assets/prism/prism-swift.min.js | 1 + .../app/assets/prism/prism-typescript.min.js | 1 + frontend/app/assets/prism/prism.css | 140 ++++++++++++++++++ frontend/app/assets/prism/prism.min.js | 16 ++ 9 files changed, 170 insertions(+), 8 deletions(-) create mode 100644 frontend/app/assets/prism/prism-bash.min.js create mode 100644 frontend/app/assets/prism/prism-javascript.min.js create mode 100644 frontend/app/assets/prism/prism-jsx.min.js create mode 100644 frontend/app/assets/prism/prism-kotlin.min.js create mode 100644 frontend/app/assets/prism/prism-swift.min.js create mode 100644 frontend/app/assets/prism/prism-typescript.min.js create mode 100644 frontend/app/assets/prism/prism.css create mode 100644 frontend/app/assets/prism/prism.min.js diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html index 4c037eec1..e386ecfbf 100644 --- a/frontend/app/assets/index.html +++ b/frontend/app/assets/index.html @@ -15,14 +15,14 @@ - - - - - - + + + + + + + + diff --git a/frontend/app/assets/prism/prism-bash.min.js b/frontend/app/assets/prism/prism-bash.min.js new file mode 100644 index 000000000..f1659f1e3 --- /dev/null +++ b/frontend/app/assets/prism/prism-bash.min.js @@ -0,0 +1 @@ +!function(e){var 
t="\\b(?:BASH|BASHOPTS|BASH_ALIASES|BASH_ARGC|BASH_ARGV|BASH_CMDS|BASH_COMPLETION_COMPAT_DIR|BASH_LINENO|BASH_REMATCH|BASH_SOURCE|BASH_VERSINFO|BASH_VERSION|COLORTERM|COLUMNS|COMP_WORDBREAKS|DBUS_SESSION_BUS_ADDRESS|DEFAULTS_PATH|DESKTOP_SESSION|DIRSTACK|DISPLAY|EUID|GDMSESSION|GDM_LANG|GNOME_KEYRING_CONTROL|GNOME_KEYRING_PID|GPG_AGENT_INFO|GROUPS|HISTCONTROL|HISTFILE|HISTFILESIZE|HISTSIZE|HOME|HOSTNAME|HOSTTYPE|IFS|INSTANCE|JOB|LANG|LANGUAGE|LC_ADDRESS|LC_ALL|LC_IDENTIFICATION|LC_MEASUREMENT|LC_MONETARY|LC_NAME|LC_NUMERIC|LC_PAPER|LC_TELEPHONE|LC_TIME|LESSCLOSE|LESSOPEN|LINES|LOGNAME|LS_COLORS|MACHTYPE|MAILCHECK|MANDATORY_PATH|NO_AT_BRIDGE|OLDPWD|OPTERR|OPTIND|ORBIT_SOCKETDIR|OSTYPE|PAPERSIZE|PATH|PIPESTATUS|PPID|PS1|PS2|PS3|PS4|PWD|RANDOM|REPLY|SECONDS|SELINUX_INIT|SESSION|SESSIONTYPE|SESSION_MANAGER|SHELL|SHELLOPTS|SHLVL|SSH_AUTH_SOCK|TERM|UID|UPSTART_EVENTS|UPSTART_INSTANCE|UPSTART_JOB|UPSTART_SESSION|USER|WINDOWID|XAUTHORITY|XDG_CONFIG_DIRS|XDG_CURRENT_DESKTOP|XDG_DATA_DIRS|XDG_GREETER_DATA_DIR|XDG_MENU_PREFIX|XDG_RUNTIME_DIR|XDG_SEAT|XDG_SEAT_PATH|XDG_SESSION_DESKTOP|XDG_SESSION_ID|XDG_SESSION_PATH|XDG_SESSION_TYPE|XDG_VTNR|XMODIFIERS)\\b",a={pattern:/(^(["']?)\w+\2)[ \t]+\S.*/,lookbehind:!0,alias:"punctuation",inside:null},n={bash:a,environment:{pattern:RegExp("\\$"+t),alias:"constant"},variable:[{pattern:/\$?\(\([\s\S]+?\)\)/,greedy:!0,inside:{variable:[{pattern:/(^\$\(\([\s\S]+)\)\)/,lookbehind:!0},/^\$\(\(/],number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee]-?\d+)?/,operator:/--|\+\+|\*\*=?|<<=?|>>=?|&&|\|\||[=!+\-*/%<>^&|]=?|[?~:]/,punctuation:/\(\(?|\)\)?|,|;/}},{pattern:/\$\((?:\([^)]+\)|[^()])+\)|`[^`]+`/,greedy:!0,inside:{variable:/^\$\(|^`|\)$|`$/}},{pattern:/\$\{[^}]+\}/,greedy:!0,inside:{operator:/:[-=?+]?|[!\/]|##?|%%?|\^\^?|,,?/,punctuation:/[\[\]]/,environment:{pattern:RegExp("(\\{)"+t),lookbehind:!0,alias:"constant"}}},/\$(?:\w+|[#?*!@$])/],entity:/\\(?:[abceEfnrtv\\"]|O?[0-7]{1,3}|U[0-9a-fA-F]{8}|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{1,2})/};e.languages.bash={shebang:{pattern:/^#!\s*\/.*/,alias:"important"},comment:{pattern:/(^|[^"{\\$])#.*/,lookbehind:!0},"function-name":[{pattern:/(\bfunction\s+)[\w-]+(?=(?:\s*\(?:\s*\))?\s*\{)/,lookbehind:!0,alias:"function"},{pattern:/\b[\w-]+(?=\s*\(\s*\)\s*\{)/,alias:"function"}],"for-or-select":{pattern:/(\b(?:for|select)\s+)\w+(?=\s+in\s)/,alias:"variable",lookbehind:!0},"assign-left":{pattern:/(^|[\s;|&]|[<>]\()\w+(?:\.\w+)*(?=\+?=)/,inside:{environment:{pattern:RegExp("(^|[\\s;|&]|[<>]\\()"+t),lookbehind:!0,alias:"constant"}},alias:"variable",lookbehind:!0},parameter:{pattern:/(^|\s)-{1,2}(?:\w+:[+-]?)?\w+(?:\.\w+)*(?=[=\s]|$)/,alias:"variable",lookbehind:!0},string:[{pattern:/((?:^|[^<])<<-?\s*)(\w+)\s[\s\S]*?(?:\r?\n|\r)\2/,lookbehind:!0,greedy:!0,inside:n},{pattern:/((?:^|[^<])<<-?\s*)(["'])(\w+)\2\s[\s\S]*?(?:\r?\n|\r)\3/,lookbehind:!0,greedy:!0,inside:{bash:a}},{pattern:/(^|[^\\](?:\\\\)*)"(?:\\[\s\S]|\$\([^)]+\)|\$(?!\()|`[^`]+`|[^"\\`$])*"/,lookbehind:!0,greedy:!0,inside:n},{pattern:/(^|[^$\\])'[^']*'/,lookbehind:!0,greedy:!0},{pattern:/\$'(?:[^'\\]|\\[\s\S])*'/,greedy:!0,inside:{entity:n.entity}}],environment:{pattern:RegExp("\\$?"+t),alias:"constant"},variable:n.variable,function:{pattern:/(^|[\s;|&]|[<>]\()(?:add|apropos|apt|apt-cache|apt-get|aptitude|aspell|automysqlbackup|awk|basename|bash|bc|bconsole|bg|bzip2|cal|cargo|cat|cfdisk|chgrp|chkconfig|chmod|chown|chroot|cksum|clear|cmp|column|comm|composer|cp|cron|crontab|csplit|curl|cut|date|dc|dd|ddrescue|debootstrap|df|diff|diff3|dig|dir|dircolors|dirname|dirs|dme
sg|docker|docker-compose|du|egrep|eject|env|ethtool|expand|expect|expr|fdformat|fdisk|fg|fgrep|file|find|fmt|fold|format|free|fsck|ftp|fuser|gawk|git|gparted|grep|groupadd|groupdel|groupmod|groups|grub-mkconfig|gzip|halt|head|hg|history|host|hostname|htop|iconv|id|ifconfig|ifdown|ifup|import|install|ip|java|jobs|join|kill|killall|less|link|ln|locate|logname|logrotate|look|lpc|lpr|lprint|lprintd|lprintq|lprm|ls|lsof|lynx|make|man|mc|mdadm|mkconfig|mkdir|mke2fs|mkfifo|mkfs|mkisofs|mknod|mkswap|mmv|more|most|mount|mtools|mtr|mutt|mv|nano|nc|netstat|nice|nl|node|nohup|notify-send|npm|nslookup|op|open|parted|passwd|paste|pathchk|ping|pkill|pnpm|podman|podman-compose|popd|pr|printcap|printenv|ps|pushd|pv|quota|quotacheck|quotactl|ram|rar|rcp|reboot|remsync|rename|renice|rev|rm|rmdir|rpm|rsync|scp|screen|sdiff|sed|sendmail|seq|service|sftp|sh|shellcheck|shuf|shutdown|sleep|slocate|sort|split|ssh|stat|strace|su|sudo|sum|suspend|swapon|sync|sysctl|tac|tail|tar|tee|time|timeout|top|touch|tr|traceroute|tsort|tty|umount|uname|unexpand|uniq|units|unrar|unshar|unzip|update-grub|uptime|useradd|userdel|usermod|users|uudecode|uuencode|v|vcpkg|vdir|vi|vim|virsh|vmstat|wait|watch|wc|wget|whereis|which|who|whoami|write|xargs|xdg-open|yarn|yes|zenity|zip|zsh|zypper)(?=$|[)\s;|&])/,lookbehind:!0},keyword:{pattern:/(^|[\s;|&]|[<>]\()(?:case|do|done|elif|else|esac|fi|for|function|if|in|select|then|until|while)(?=$|[)\s;|&])/,lookbehind:!0},builtin:{pattern:/(^|[\s;|&]|[<>]\()(?:\.|:|alias|bind|break|builtin|caller|cd|command|continue|declare|echo|enable|eval|exec|exit|export|getopts|hash|help|let|local|logout|mapfile|printf|pwd|read|readarray|readonly|return|set|shift|shopt|source|test|times|trap|type|typeset|ulimit|umask|unalias|unset)(?=$|[)\s;|&])/,lookbehind:!0,alias:"class-name"},boolean:{pattern:/(^|[\s;|&]|[<>]\()(?:false|true)(?=$|[)\s;|&])/,lookbehind:!0},"file-descriptor":{pattern:/\B&\d\b/,alias:"important"},operator:{pattern:/\d?<>|>\||\+=|=[=~]?|!=?|<<[<-]?|[&\d]?>>|\d[<>]&?|[<>][&=]?|&[>&]?|\|[&|]?/,inside:{"file-descriptor":{pattern:/^\d/,alias:"important"}}},punctuation:/\$?\(\(?|\)\)?|\.\.|[{}[\];\\]/,number:{pattern:/(^|\s)(?:[1-9]\d*|0)(?:[.,]\d+)?\b/,lookbehind:!0}},a.inside=e.languages.bash;for(var 
s=["comment","function-name","for-or-select","assign-left","parameter","string","environment","function","keyword","builtin","boolean","file-descriptor","operator","punctuation","number"],o=n.variable[1].inside,i=0;i|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),Prism.languages.javascript["class-name"][0].pattern=/(\b(?:class|extends|implements|instanceof|interface|new)\s+)[\w.\\]+/,Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:RegExp("((?:^|[^$\\w\\xA0-\\uFFFF.\"'\\])\\s]|\\b(?:return|yield))\\s*)/(?:(?:\\[(?:[^\\]\\\\\r\n]|\\\\.)*\\]|\\\\.|[^/\\\\\\[\r\n])+/[dgimyus]{0,7}|(?:\\[(?:[^[\\]\\\\\r\n]|\\\\.|\\[(?:[^[\\]\\\\\r\n]|\\\\.|\\[(?:[^[\\]\\\\\r\n]|\\\\.)*\\])*\\])*\\]|\\\\.|[^/\\\\\\[\r\n])+/[dgimyus]{0,7}v[dgimyus]{0,7})(?=(?:\\s|/\\*(?:[^*]|\\*(?!/))*\\*/)*(?:$|[\r\n,.;:})\\]]|//))"),lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:Prism.languages.regex},"regex-delimiter":/^\/|\/$/,"regex-flags":/^[a-z]+$/}},"function-variable":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)?\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\))/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$a-z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*=>)/i,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*=>)/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*\{)/,lookbehind:!0,inside:Prism.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),Prism.languages.insertBefore("javascript","string",{hashbang:{pattern:/^#!.*/,greedy:!0,alias:"comment"},"template-string":{pattern:/`(?:\\[\s\S]|\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}|(?!\$\{)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\$\{|\}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}},"string-property":{pattern:/((?:^|[,{])[ \t]*)(["'])(?:\\(?:\r\n|[\s\S])|(?!\2)[^\\\r\n])*\2(?=\s*:)/m,lookbehind:!0,greedy:!0,alias:"property"}}),Prism.languages.insertBefore("javascript","operator",{"literal-property":{pattern:/((?:^|[,{])[ 
\t]*)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*:)/m,lookbehind:!0,alias:"property"}}),Prism.languages.markup&&(Prism.languages.markup.tag.addInlined("script","javascript"),Prism.languages.markup.tag.addAttribute("on(?:abort|blur|change|click|composition(?:end|start|update)|dblclick|error|focus(?:in|out)?|key(?:down|up)|load|mouse(?:down|enter|leave|move|out|over|up)|reset|resize|scroll|select|slotchange|submit|unload|wheel)","javascript")),Prism.languages.js=Prism.languages.javascript; \ No newline at end of file diff --git a/frontend/app/assets/prism/prism-jsx.min.js b/frontend/app/assets/prism/prism-jsx.min.js new file mode 100644 index 000000000..8aa6362fa --- /dev/null +++ b/frontend/app/assets/prism/prism-jsx.min.js @@ -0,0 +1 @@ +!function(t){var n=t.util.clone(t.languages.javascript),e="(?:\\{*\\.{3}(?:[^{}]|)*\\})";function a(t,n){return t=t.replace(//g,(function(){return"(?:\\s|//.*(?!.)|/\\*(?:[^*]|\\*(?!/))\\*/)"})).replace(//g,(function(){return"(?:\\{(?:\\{(?:\\{[^{}]*\\}|[^{}])*\\}|[^{}])*\\})"})).replace(//g,(function(){return e})),RegExp(t,n)}e=a(e).source,t.languages.jsx=t.languages.extend("markup",n),t.languages.jsx.tag.pattern=a("+(?:[\\w.:$-]+(?:=(?:\"(?:\\\\[^]|[^\\\\\"])*\"|'(?:\\\\[^]|[^\\\\'])*'|[^\\s{'\"/>=]+|))?|))**/?)?>"),t.languages.jsx.tag.inside.tag.pattern=/^<\/?[^\s>\/]*/,t.languages.jsx.tag.inside["attr-value"].pattern=/=(?!\{)(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s'">]+)/,t.languages.jsx.tag.inside.tag.inside["class-name"]=/^[A-Z]\w*(?:\.[A-Z]\w*)*$/,t.languages.jsx.tag.inside.comment=n.comment,t.languages.insertBefore("inside","attr-name",{spread:{pattern:a(""),inside:t.languages.jsx}},t.languages.jsx.tag),t.languages.insertBefore("inside","special-attr",{script:{pattern:a("="),alias:"language-javascript",inside:{"script-punctuation":{pattern:/^=(?=\{)/,alias:"punctuation"},rest:t.languages.jsx}}},t.languages.jsx.tag);var s=function(t){return t?"string"==typeof t?t:"string"==typeof t.content?t.content:t.content.map(s).join(""):""},g=function(n){for(var e=[],a=0;a0&&e[e.length-1].tagName===s(o.content[0].content[1])&&e.pop():"/>"===o.content[o.content.length-1].content||e.push({tagName:s(o.content[0].content[1]),openedBraces:0}):e.length>0&&"punctuation"===o.type&&"{"===o.content?e[e.length-1].openedBraces++:e.length>0&&e[e.length-1].openedBraces>0&&"punctuation"===o.type&&"}"===o.content?e[e.length-1].openedBraces--:i=!0),(i||"string"==typeof o)&&e.length>0&&0===e[e.length-1].openedBraces){var r=s(o);a0&&("string"==typeof n[a-1]||"plain-text"===n[a-1].type)&&(r=s(n[a-1])+r,n.splice(a-1,1),a--),n[a]=new t.Token("plain-text",r,null,r)}o.content&&"string"!=typeof o.content&&g(o.content)}};t.hooks.add("after-tokenize",(function(t){"jsx"!==t.language&&"tsx"!==t.language||g(t.tokens)}))}(Prism); \ No newline at end of file diff --git a/frontend/app/assets/prism/prism-kotlin.min.js b/frontend/app/assets/prism/prism-kotlin.min.js new file mode 100644 index 000000000..78a905718 --- /dev/null +++ b/frontend/app/assets/prism/prism-kotlin.min.js @@ -0,0 +1 @@ 
+!function(n){n.languages.kotlin=n.languages.extend("clike",{keyword:{pattern:/(^|[^.])\b(?:abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|to|try|typealias|val|var|vararg|when|where|while)\b/,lookbehind:!0},function:[{pattern:/(?:`[^\r\n`]+`|\b\w+)(?=\s*\()/,greedy:!0},{pattern:/(\.)(?:`[^\r\n`]+`|\w+)(?=\s*\{)/,lookbehind:!0,greedy:!0}],number:/\b(?:0[xX][\da-fA-F]+(?:_[\da-fA-F]+)*|0[bB][01]+(?:_[01]+)*|\d+(?:_\d+)*(?:\.\d+(?:_\d+)*)?(?:[eE][+-]?\d+(?:_\d+)*)?[fFL]?)\b/,operator:/\+[+=]?|-[-=>]?|==?=?|!(?:!|==?)?|[\/*%<>]=?|[?:]:?|\.\.|&&|\|\||\b(?:and|inv|or|shl|shr|ushr|xor)\b/}),delete n.languages.kotlin["class-name"];var e={"interpolation-punctuation":{pattern:/^\$\{?|\}$/,alias:"punctuation"},expression:{pattern:/[\s\S]+/,inside:n.languages.kotlin}};n.languages.insertBefore("kotlin","string",{"string-literal":[{pattern:/"""(?:[^$]|\$(?:(?!\{)|\{[^{}]*\}))*?"""/,alias:"multiline",inside:{interpolation:{pattern:/\$(?:[a-z_]\w*|\{[^{}]*\})/i,inside:e},string:/[\s\S]+/}},{pattern:/"(?:[^"\\\r\n$]|\\.|\$(?:(?!\{)|\{[^{}]*\}))*"/,alias:"singleline",inside:{interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$(?:[a-z_]\w*|\{[^{}]*\})/i,lookbehind:!0,inside:e},string:/[\s\S]+/}}],char:{pattern:/'(?:[^'\\\r\n]|\\(?:.|u[a-fA-F0-9]{0,4}))'/,greedy:!0}}),delete n.languages.kotlin.string,n.languages.insertBefore("kotlin","keyword",{annotation:{pattern:/\B@(?:\w+:)?(?:[A-Z]\w*|\[[^\]]+\])/,alias:"builtin"}}),n.languages.insertBefore("kotlin","function",{label:{pattern:/\b\w+@|@\w+\b/,alias:"symbol"}}),n.languages.kt=n.languages.kotlin,n.languages.kts=n.languages.kotlin}(Prism); \ No newline at end of file diff --git a/frontend/app/assets/prism/prism-swift.min.js b/frontend/app/assets/prism/prism-swift.min.js new file mode 100644 index 000000000..b4f87f463 --- /dev/null +++ b/frontend/app/assets/prism/prism-swift.min.js @@ -0,0 +1 @@ +Prism.languages.swift={comment:{pattern:/(^|[^\\:])(?:\/\/.*|\/\*(?:[^/*]|\/(?!\*)|\*(?!\/)|\/\*(?:[^*]|\*(?!\/))*\*\/)*\*\/)/,lookbehind:!0,greedy:!0},"string-literal":[{pattern:RegExp('(^|[^"#])(?:"(?:\\\\(?:\\((?:[^()]|\\([^()]*\\))*\\)|\r\n|[^(])|[^\\\\\r\n"])*"|"""(?:\\\\(?:\\((?:[^()]|\\([^()]*\\))*\\)|[^(])|[^\\\\"]|"(?!""))*""")(?!["#])'),lookbehind:!0,greedy:!0,inside:{interpolation:{pattern:/(\\\()(?:[^()]|\([^()]*\))*(?=\))/,lookbehind:!0,inside:null},"interpolation-punctuation":{pattern:/^\)|\\\($/,alias:"punctuation"},punctuation:/\\(?=[\r\n])/,string:/[\s\S]+/}},{pattern:RegExp('(^|[^"#])(#+)(?:"(?:\\\\(?:#+\\((?:[^()]|\\([^()]*\\))*\\)|\r\n|[^#])|[^\\\\\r\n])*?"|"""(?:\\\\(?:#+\\((?:[^()]|\\([^()]*\\))*\\)|[^#])|[^\\\\])*?""")\\2'),lookbehind:!0,greedy:!0,inside:{interpolation:{pattern:/(\\#+\()(?:[^()]|\([^()]*\))*(?=\))/,lookbehind:!0,inside:null},"interpolation-punctuation":{pattern:/^\)|\\#+\($/,alias:"punctuation"},string:/[\s\S]+/}}],directive:{pattern:RegExp("#(?:(?:elseif|if)\\b(?:[ \t]*(?:![ \t]*)?(?:\\b\\w+\\b(?:[ \t]*\\((?:[^()]|\\([^()]*\\))*\\))?|\\((?:[^()]|\\([^()]*\\))*\\))(?:[ 
\t]*(?:&&|\\|\\|))?)+|(?:else|endif)\\b)"),alias:"property",inside:{"directive-name":/^#\w+/,boolean:/\b(?:false|true)\b/,number:/\b\d+(?:\.\d+)*\b/,operator:/!|&&|\|\||[<>]=?/,punctuation:/[(),]/}},literal:{pattern:/#(?:colorLiteral|column|dsohandle|file(?:ID|Literal|Path)?|function|imageLiteral|line)\b/,alias:"constant"},"other-directive":{pattern:/#\w+\b/,alias:"property"},attribute:{pattern:/@\w+/,alias:"atrule"},"function-definition":{pattern:/(\bfunc\s+)\w+/,lookbehind:!0,alias:"function"},label:{pattern:/\b(break|continue)\s+\w+|\b[a-zA-Z_]\w*(?=\s*:\s*(?:for|repeat|while)\b)/,lookbehind:!0,alias:"important"},keyword:/\b(?:Any|Protocol|Self|Type|actor|as|assignment|associatedtype|associativity|async|await|break|case|catch|class|continue|convenience|default|defer|deinit|didSet|do|dynamic|else|enum|extension|fallthrough|fileprivate|final|for|func|get|guard|higherThan|if|import|in|indirect|infix|init|inout|internal|is|isolated|lazy|left|let|lowerThan|mutating|none|nonisolated|nonmutating|open|operator|optional|override|postfix|precedencegroup|prefix|private|protocol|public|repeat|required|rethrows|return|right|safe|self|set|some|static|struct|subscript|super|switch|throw|throws|try|typealias|unowned|unsafe|var|weak|where|while|willSet)\b/,boolean:/\b(?:false|true)\b/,nil:{pattern:/\bnil\b/,alias:"constant"},"short-argument":/\$\d+\b/,omit:{pattern:/\b_\b/,alias:"keyword"},number:/\b(?:[\d_]+(?:\.[\de_]+)?|0x[a-f0-9_]+(?:\.[a-f0-9p_]+)?|0b[01_]+|0o[0-7_]+)\b/i,"class-name":/\b[A-Z](?:[A-Z_\d]*[a-z]\w*)?\b/,function:/\b[a-z_]\w*(?=\s*\()/i,constant:/\b(?:[A-Z_]{2,}|k[A-Z][A-Za-z_]+)\b/,operator:/[-+*/%=!<>&|^~?]+|\.[.\-+*/%=!<>&|^~?]+/,punctuation:/[{}[\]();,.:\\]/},Prism.languages.swift["string-literal"].forEach((function(e){e.inside.interpolation.inside=Prism.languages.swift})); \ No newline at end of file diff --git a/frontend/app/assets/prism/prism-typescript.min.js b/frontend/app/assets/prism/prism-typescript.min.js new file mode 100644 index 000000000..b512c1617 --- /dev/null +++ b/frontend/app/assets/prism/prism-typescript.min.js @@ -0,0 +1 @@ +!function(e){e.languages.typescript=e.languages.extend("javascript",{"class-name":{pattern:/(\b(?:class|extends|implements|instanceof|interface|new|type)\s+)(?!keyof\b)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?:\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>)?/,lookbehind:!0,greedy:!0,inside:null},builtin:/\b(?:Array|Function|Promise|any|boolean|console|never|number|string|symbol|unknown)\b/}),e.languages.typescript.keyword.push(/\b(?:abstract|declare|is|keyof|readonly|require)\b/,/\b(?:asserts|infer|interface|module|namespace|type)\b(?=\s*(?:[{_$a-zA-Z\xA0-\uFFFF]|$))/,/\btype\b(?=\s*(?:[\{*]|$))/),delete e.languages.typescript.parameter,delete e.languages.typescript["literal-property"];var s=e.languages.extend("typescript",{});delete s["class-name"],e.languages.typescript["class-name"].inside=s,e.languages.insertBefore("typescript","function",{decorator:{pattern:/@[$\w\xA0-\uFFFF]+/,inside:{at:{pattern:/^@/,alias:"operator"},function:/^[\s\S]+/}},"generic-function":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>(?=\s*\()/,greedy:!0,inside:{function:/^#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:s}}}}),e.languages.ts=e.languages.typescript}(Prism); \ No newline at end of file diff --git a/frontend/app/assets/prism/prism.css b/frontend/app/assets/prism/prism.css new file mode 100644 index 000000000..5b8ed2d13 --- 
--- /dev/null
+++ b/frontend/app/assets/prism/prism.css
@@ -0,0 +1,140 @@
+/**
+ * prism.js default theme for JavaScript, CSS and HTML
+ * Based on dabblet (http://dabblet.com)
+ * @author Lea Verou
+ */
+
+code[class*="language-"],
+pre[class*="language-"] {
+	color: black;
+	background: none;
+	text-shadow: 0 1px white;
+	font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
+	font-size: 1em;
+	text-align: left;
+	white-space: pre;
+	word-spacing: normal;
+	word-break: normal;
+	word-wrap: normal;
+	line-height: 1.5;
+
+	-moz-tab-size: 4;
+	-o-tab-size: 4;
+	tab-size: 4;
+
+	-webkit-hyphens: none;
+	-moz-hyphens: none;
+	-ms-hyphens: none;
+	hyphens: none;
+}
+
+pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection,
+code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection {
+	text-shadow: none;
+	background: #b3d4fc;
+}
+
+pre[class*="language-"]::selection, pre[class*="language-"] ::selection,
+code[class*="language-"]::selection, code[class*="language-"] ::selection {
+	text-shadow: none;
+	background: #b3d4fc;
+}
+
+@media print {
+	code[class*="language-"],
+	pre[class*="language-"] {
+		text-shadow: none;
+	}
+}
+
+/* Code blocks */
+pre[class*="language-"] {
+	padding: 1em;
+	margin: .5em 0;
+	overflow: auto;
+}
+
+:not(pre) > code[class*="language-"],
+pre[class*="language-"] {
+	background: #f5f2f0;
+}
+
+/* Inline code */
+:not(pre) > code[class*="language-"] {
+	padding: .1em;
+	border-radius: .3em;
+	white-space: normal;
+}
+
+.token.comment,
+.token.prolog,
+.token.doctype,
+.token.cdata {
+	color: slategray;
+}
+
+.token.punctuation {
+	color: #999;
+}
+
+.token.namespace {
+	opacity: .7;
+}
+
+.token.property,
+.token.tag,
+.token.boolean,
+.token.number,
+.token.constant,
+.token.symbol,
+.token.deleted {
+	color: #905;
+}
+
+.token.selector,
+.token.attr-name,
+.token.string,
+.token.char,
+.token.builtin,
+.token.inserted {
+	color: #690;
+}
+
+.token.operator,
+.token.entity,
+.token.url,
+.language-css .token.string,
+.style .token.string {
+	color: #9a6e3a;
+	/* This background color was intended by the author of this theme. */
+	background: hsla(0, 0%, 100%, .5);
+}
+
+.token.atrule,
+.token.attr-value,
+.token.keyword {
+	color: #07a;
+}
+
+.token.function,
+.token.class-name {
+	color: #DD4A68;
+}
+
+.token.regex,
+.token.important,
+.token.variable {
+	color: #e90;
+}
+
+.token.important,
+.token.bold {
+	font-weight: bold;
+}
+.token.italic {
+	font-style: italic;
+}
+
+.token.entity {
+	cursor: help;
+}
diff --git a/frontend/app/assets/prism/prism.min.js b/frontend/app/assets/prism/prism.min.js
new file mode 100644
index 000000000..ebe7afbf1
--- /dev/null
+++ b/frontend/app/assets/prism/prism.min.js
@@ -0,0 +1,16 @@
+/**
+ * Minified by jsDelivr using Terser v5.19.2.
+ * Original file: /npm/prismjs@1.29.0/prism.js
+ *
+ * Do NOT use SRI with dynamically generated files! More information: https://www.jsdelivr.com/using-sri-with-dynamic-files
+ */
+[minified Prism core runtime (tokenizer, hooks, highlight API)]
+/**
+ * Prism: Lightweight, robust, elegant syntax highlighting
+ *
+ * @license MIT
+ * @author Lea Verou
+ * @namespace
+ * @public
+ */
+[minified module export shim, bundled markup/CSS/clike/JavaScript grammars, and the file-highlight plugin]
+//# sourceMappingURL=/sm/1f55244a569fc0911044b5e4725a97872b6d85b0662a3429a809bb62f713c381.map
\ No newline at end of file